Schema

column                                      type             range
------------------------------------------ ---------------  ----------
hexsha                                      stringlengths    40–40
size                                        int64            5–2.06M
ext                                         stringclasses    10 values
lang                                        stringclasses    1 value
max_stars_repo_path                         stringlengths    3–248
max_stars_repo_name                         stringlengths    5–125
max_stars_repo_head_hexsha                  stringlengths    40–78
max_stars_repo_licenses                     sequencelengths  1–10
max_stars_count                             int64            1–191k
max_stars_repo_stars_event_min_datetime     stringlengths    24–24
max_stars_repo_stars_event_max_datetime     stringlengths    24–24
max_issues_repo_path                        stringlengths    3–248
max_issues_repo_name                        stringlengths    5–125
max_issues_repo_head_hexsha                 stringlengths    40–78
max_issues_repo_licenses                    sequencelengths  1–10
max_issues_count                            int64            1–67k
max_issues_repo_issues_event_min_datetime   stringlengths    24–24
max_issues_repo_issues_event_max_datetime   stringlengths    24–24
max_forks_repo_path                         stringlengths    3–248
max_forks_repo_name                         stringlengths    5–125
max_forks_repo_head_hexsha                  stringlengths    40–78
max_forks_repo_licenses                     sequencelengths  1–10
max_forks_count                             int64            1–105k
max_forks_repo_forks_event_min_datetime     stringlengths    24–24
max_forks_repo_forks_event_max_datetime     stringlengths    24–24
content                                     stringlengths    5–2.06M
avg_line_length                             float64          1–1.02M
max_line_length                             int64            3–1.03M
alphanum_fraction                           float64          0–1
count_classes                               int64            0–1.6M
score_classes                               float64          0–1
count_generators                            int64            0–651k
score_generators                            float64          0–1
count_decorators                            int64            0–990k
score_decorators                            float64          0–1
count_async_functions                       int64            0–235k
score_async_functions                       float64          0–1
count_documentation                         int64            0–1.04M
score_documentation                         float64          0–1
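Each record pairs one source file's content with per-repository metadata and simple per-feature counts and scores. A minimal sketch of how such records might be filtered, assuming they have been exported to a local Parquet file (the file name data.parquet and both thresholds are illustrative, not part of this dump):

import pandas as pd

# Hypothetical local export of the records described by the schema above.
df = pd.read_parquet("data.parquet")

# Keep well-documented, mostly-alphanumeric files; the cutoffs are
# arbitrary examples of using the score_documentation and
# alphanum_fraction columns.
mask = (df["score_documentation"] > 0.5) & (df["alphanum_fraction"] > 0.6)
print(df.loc[mask, ["max_stars_repo_name", "max_stars_repo_path", "size"]])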
Record 1
  hexsha: d9e182705452fe461a2142c0afa4786d47f19c46
  size: 2,131
  ext: py
  lang: Python
  max_stars_repo_path: dags/treinos_igti/treino03.py
  max_stars_repo_name: rafaelols/airflow
  max_stars_repo_head_hexsha: 8e4af5fb576a9568af476c0607819649b724adea
  max_stars_repo_licenses: [ "Apache-2.0" ]
  max_stars_count: null
  max_stars_repo_stars_event_min_datetime: null
  max_stars_repo_stars_event_max_datetime: null
  max_issues_repo_path: dags/treinos_igti/treino03.py
  max_issues_repo_name: rafaelols/airflow
  max_issues_repo_head_hexsha: 8e4af5fb576a9568af476c0607819649b724adea
  max_issues_repo_licenses: [ "Apache-2.0" ]
  max_issues_count: null
  max_issues_repo_issues_event_min_datetime: null
  max_issues_repo_issues_event_max_datetime: null
  max_forks_repo_path: dags/treinos_igti/treino03.py
  max_forks_repo_name: rafaelols/airflow
  max_forks_repo_head_hexsha: 8e4af5fb576a9568af476c0607819649b724adea
  max_forks_repo_licenses: [ "Apache-2.0" ]
  max_forks_count: null
  max_forks_repo_forks_event_min_datetime: null
  max_forks_repo_forks_event_max_datetime: null
  content:
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.operators.python_operator import PythonOperator, BranchPythonOperator
from datetime import datetime, timedelta
import pandas as pd
import random

# Default args definition
default_args = {
    'owner': 'Rafael',
    'depends_on_past': False,
    'start_date': datetime(2020, 11, 29, 18, 20),
    'email': ['[email protected]', '[email protected]'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=1)
}

# Dag definition
dag = DAG(
    'treino-03',
    description="Extrai dados do Titanic e calcula idade media para homens ou mulheres",
    default_args=default_args,
    schedule_interval='*/20 * * * *'
)

get_data = BashOperator(
    task_id='get-data',
    bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o /usr/local/airflow/data/train.csv',
    dag=dag
)


def sorteia_h_m():
    return random.choice(['male', 'female'])


escolhe_h_m = PythonOperator(
    task_id='escolhe-h-m',
    python_callable=sorteia_h_m,
    dag=dag
)


def MouF(**context):
    value = context['task_instance'].xcom_pull(task_ids='escolhe-h-m')
    if value == 'male':
        return 'branch_homem'
    else:
        return 'branch_mulher'


male_female = BranchPythonOperator(
    task_id='condicional',
    python_callable=MouF,
    provide_context=True,
    dag=dag
)


def mean_homem():
    df = pd.read_csv('/usr/local/airflow/data/train.csv')
    med = df.loc[df.Sex == 'male'].Age.mean()
    print(f'Media de idade dos homens no Titanic: {med}')


branch_homem = PythonOperator(
    task_id='branch_homem',
    python_callable=mean_homem,
    dag=dag
)


def mean_mulher():
    df = pd.read_csv('/usr/local/airflow/data/train.csv')
    med = df.loc[df.Sex == 'female'].Age.mean()
    print(f'Media de idade das mulheres no Titanic: {med}')


branch_mulher = PythonOperator(
    task_id='branch_mulher',
    python_callable=mean_mulher,
    dag=dag
)

get_data >> escolhe_h_m >> male_female >> [branch_homem, branch_mulher]
  avg_line_length: 25.987805
  max_line_length: 147
  alphanum_fraction: 0.697325
  count_classes: 0
  score_classes: 0
  count_generators: 0
  score_generators: 0
  count_decorators: 0
  score_decorators: 0
  count_async_functions: 0
  score_async_functions: 0
  count_documentation: 737
  score_documentation: 0.345847
Record 2
  hexsha: d9e359c85a06bdc44937457ee401aaa8bebc7f50
  size: 2,560
  ext: py
  lang: Python
  max_stars_repo_path: tclCommands/TclCommandListSys.py
  max_stars_repo_name: DannyPol/flatcam
  max_stars_repo_head_hexsha: 25a8634d0658e98b7fae31a095f8bef40c1b3067
  max_stars_repo_licenses: [ "MIT" ]
  max_stars_count: 1
  max_stars_repo_stars_event_min_datetime: 2022-02-11T06:19:34.000Z
  max_stars_repo_stars_event_max_datetime: 2022-02-11T06:19:34.000Z
  max_issues_repo_path: tclCommands/TclCommandListSys.py
  max_issues_repo_name: MRemy2/FlatCam
  max_issues_repo_head_hexsha: d4f941335ca8a8d5351aab23b396f99da06a9029
  max_issues_repo_licenses: [ "MIT" ]
  max_issues_count: null
  max_issues_repo_issues_event_min_datetime: null
  max_issues_repo_issues_event_max_datetime: null
  max_forks_repo_path: tclCommands/TclCommandListSys.py
  max_forks_repo_name: MRemy2/FlatCam
  max_forks_repo_head_hexsha: d4f941335ca8a8d5351aab23b396f99da06a9029
  max_forks_repo_licenses: [ "MIT" ]
  max_forks_count: null
  max_forks_repo_forks_event_min_datetime: null
  max_forks_repo_forks_event_max_datetime: null
  content:
# ##########################################################
# FlatCAM: 2D Post-processing for Manufacturing             #
# File Author: Marius Adrian Stanciu (c)                    #
# Date: 8/17/2019                                           #
# MIT Licence                                               #
# ##########################################################

from tclCommands.TclCommand import *


class TclCommandListSys(TclCommand):
    """
    Tcl shell command to get the list of system variables

    example:
        list_sys
    """

    # List of all command aliases, to be able use old names for backward compatibility (add_poly, add_polygon)
    aliases = ['list_sys', 'listsys']

    description = '%s %s' % ("--", "Outputs in Tcl Shell the list with the names of system variables.")

    # Dictionary of types from Tcl command, needs to be ordered
    arg_names = collections.OrderedDict([
        ('selection', str),
    ])

    # Dictionary of types from Tcl command, needs to be ordered,
    # this is for options like -optionname value
    option_types = collections.OrderedDict([])

    # array of mandatory options for current Tcl command: required = {'name', 'outname'}
    required = []

    # structured help for current command, args needs to be ordered
    help = {
        'main': "Returns the list of the names of system variables.\n"
                "Without an argument it will list all the system parameters. "
                "As an argument use first letter or first letters from the name "
                "of the system variable.\n"
                "In that case it will list only the system variables that starts with that string.\n"
                "Main categories start with: gerber or excellon or geometry or cncjob or global.\n"
                "Note: Use 'get_sys system variable' to get the value and 'set_sys system variable value' to set it.\n",
        'args': collections.OrderedDict([]),
        'examples': ['list_sys', 'list_sys ser', 'list_sys gerber', 'list_sys cncj']
    }

    def execute(self, args, unnamed_args):
        """
        :param args:
        :param unnamed_args:
        :return:
        """

        if 'selection' in args:
            argument = args['selection']
            return str([k for k in self.app.defaults.keys() if str(k).startswith(str(argument))])
        else:
            ret_val = list(self.app.defaults.keys())
            return str(ret_val)
            # return str([*self.app.defaults])
  avg_line_length: 37.647059
  max_line_length: 120
  alphanum_fraction: 0.55
  count_classes: 2,153
  score_classes: 0.841016
  count_generators: 0
  score_generators: 0
  count_decorators: 0
  score_decorators: 0
  count_async_functions: 0
  score_async_functions: 0
  count_documentation: 1,682
  score_documentation: 0.657031
Record 3
  hexsha: d9e3f1d0e6ad9650ceb745dc1536525917eaef63
  size: 2,694
  ext: py
  lang: Python
  max_stars_repo_path: ogs5py/fileclasses/mcp/core.py
  max_stars_repo_name: MuellerSeb/ogs5py
  max_stars_repo_head_hexsha: 752e7bd2298fbd476406d168f6b7d1a85863dccd
  max_stars_repo_licenses: [ "MIT" ]
  max_stars_count: 3
  max_stars_repo_stars_event_min_datetime: 2018-05-27T15:39:07.000Z
  max_stars_repo_stars_event_max_datetime: 2018-10-29T17:02:11.000Z
  max_issues_repo_path: ogs5py/fileclasses/mcp/core.py
  max_issues_repo_name: MuellerSeb/ogs5py
  max_issues_repo_head_hexsha: 752e7bd2298fbd476406d168f6b7d1a85863dccd
  max_issues_repo_licenses: [ "MIT" ]
  max_issues_count: 1
  max_issues_repo_issues_event_min_datetime: 2018-11-12T11:32:12.000Z
  max_issues_repo_issues_event_max_datetime: 2018-11-12T13:07:48.000Z
  max_forks_repo_path: ogs5py/fileclasses/mcp/core.py
  max_forks_repo_name: MuellerSeb/ogs5py
  max_forks_repo_head_hexsha: 752e7bd2298fbd476406d168f6b7d1a85863dccd
  max_forks_repo_licenses: [ "MIT" ]
  max_forks_count: null
  max_forks_repo_forks_event_min_datetime: null
  max_forks_repo_forks_event_max_datetime: null
  content:
# -*- coding: utf-8 -*-
"""Class for the ogs COMPONENT_PROPERTIES file."""
from ogs5py.fileclasses.base import BlockFile


class MCP(BlockFile):
    """
    Class for the ogs COMPONENT_PROPERTIES file.

    Parameters
    ----------
    task_root : str, optional
        Path to the destination model folder.
        Default: cwd+"ogs5model"
    task_id : str, optional
        Name for the ogs task.
        Default: "model"

    Notes
    -----
    Main-Keywords (#):
        - COMPONENT_PROPERTIES

    Sub-Keywords ($) per Main-Keyword:
        - COMPONENT_PROPERTIES

            - ACENTRIC_FACTOR
            - A_ZERO
            - BUBBLE_VELOCITY
            - CRITICAL_PRESSURE
            - CRITICAL_TEMPERATURE
            - DECAY
            - DIFFUSION
            - FLUID_ID
            - FLUID_PHASE
            - FORMULA
            - ISOTHERM
            - MAXIMUM_AQUEOUS_SOLUBILITY
            - MINERAL_DENSITY
            - MOBILE
            - MOLAR_DENSITY
            - MOLAR_VOLUME
            - MOLAR_WEIGHT
            - MOL_MASS
            - NAME
            - OutputMassOfComponentInModel
            - TRANSPORT_PHASE
            - VALENCE
            - VOLUME_DIFFUSION

    Standard block:
        None

    Keyword documentation:
        https://ogs5-keywords.netlify.com/ogs/wiki/public/doc-auto/by_ext/mcp

    Reading routines:
        https://github.com/ufz/ogs5/blob/master/FEM/rfmat_cp.cpp#L269

    See Also
    --------
    add_block
    """

    MKEYS = ["COMPONENT_PROPERTIES"]
    # sorted
    SKEYS = [
        [
            "NAME",
            "FORMULA",
            "MOBILE",
            "TRANSPORT_PHASE",
            "FLUID_PHASE",
            "MOL_MASS",
            "CRITICAL_PRESSURE",
            "CRITICAL_TEMPERATURE",
            "ACENTRIC_FACTOR",
            "FLUID_ID",
            "MOLAR_VOLUME",
            "VOLUME_DIFFUSION",
            "MINERAL_DENSITY",
            "DIFFUSION",
            "DECAY",
            "ISOTHERM",
            "BUBBLE_VELOCITY",
            "MOLAR_DENSITY",
            "MOLAR_WEIGHT",
            "MAXIMUM_AQUEOUS_SOLUBILITY",
            "OutputMassOfComponentInModel",
            "VALENCE",
            "A_ZERO",
            "CRITICAL_VOLUME",  # really?
            "CRITICAL_DENSITY",  # really?
            "COMP_CAPACITY",  # really?
            "COMP_CONDUCTIVITY",  # really?
            "SOLUTE",  # really?
            "MOLECULAR_WEIGHT",  # really?
        ]
    ]

    STD = {}

    def __init__(self, **OGS_Config):
        super().__init__(**OGS_Config)
        self.file_ext = ".mcp"
  avg_line_length: 25.415094
  max_line_length: 78
  alphanum_fraction: 0.485523
  count_classes: 2,564
  score_classes: 0.951745
  count_generators: 0
  score_generators: 0
  count_decorators: 0
  score_decorators: 0
  count_async_functions: 0
  score_async_functions: 0
  count_documentation: 1,980
  score_documentation: 0.734967
Record 4
  hexsha: d9e4389915d24f650fdb65abd21f0125dba8ab5c
  size: 175,651
  ext: py
  lang: Python
  max_stars_repo_path: keystone/tests/unit/test_v3_assignment.py
  max_stars_repo_name: crowdy/keystone
  max_stars_repo_head_hexsha: 1e7ecca881a51144d61ae8026e1a77d6669997e2
  max_stars_repo_licenses: [ "Apache-2.0" ]
  max_stars_count: null
  max_stars_repo_stars_event_min_datetime: null
  max_stars_repo_stars_event_max_datetime: null
  max_issues_repo_path: keystone/tests/unit/test_v3_assignment.py
  max_issues_repo_name: crowdy/keystone
  max_issues_repo_head_hexsha: 1e7ecca881a51144d61ae8026e1a77d6669997e2
  max_issues_repo_licenses: [ "Apache-2.0" ]
  max_issues_count: 4
  max_issues_repo_issues_event_min_datetime: 2021-10-05T12:01:08.000Z
  max_issues_repo_issues_event_max_datetime: 2021-10-10T07:06:33.000Z
  max_forks_repo_path: keystone/tests/unit/test_v3_assignment.py
  max_forks_repo_name: crowdy/keystone
  max_forks_repo_head_hexsha: 1e7ecca881a51144d61ae8026e1a77d6669997e2
  max_forks_repo_licenses: [ "Apache-2.0" ]
  max_forks_count: null
  max_forks_repo_forks_event_min_datetime: null
  max_forks_repo_forks_event_max_datetime: null
  content:
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime import random import uuid import freezegun import http.client from testtools import matchers from keystone.common import provider_api import keystone.conf from keystone import exception from keystone.resource.backends import base as resource_base from keystone.tests import unit from keystone.tests.unit import test_v3 CONF = keystone.conf.CONF PROVIDERS = provider_api.ProviderAPIs class SystemRoleAssignmentMixin(object): def _create_new_role(self): """Create a role available for use anywhere and return the ID.""" ref = unit.new_role_ref() response = self.post('/roles', body={'role': ref}) # We only really need the role ID, so omit the rest of the response and # return the ID of the role we just created. return response.json_body['role']['id'] def _create_group(self): body = { 'group': { 'domain_id': self.domain_id, 'name': uuid.uuid4().hex } } response = self.post('/groups/', body=body) return response.json_body['group'] def _create_user(self): body = { 'user': { 'domain_id': self.domain_id, 'name': uuid.uuid4().hex } } response = self.post('/users/', body=body) return response.json_body['user'] class AssignmentTestCase(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, SystemRoleAssignmentMixin): """Test roles and role assignments.""" def setUp(self): super(AssignmentTestCase, self).setUp() self.group = unit.new_group_ref(domain_id=self.domain_id) self.group = PROVIDERS.identity_api.create_group(self.group) self.group_id = self.group['id'] # Role CRUD tests def test_create_role(self): """Call ``POST /roles``.""" ref = unit.new_role_ref() r = self.post( '/roles', body={'role': ref}) return self.assertValidRoleResponse(r, ref) def test_create_role_bad_request(self): """Call ``POST /roles``.""" self.post('/roles', body={'role': {}}, expected_status=http.client.BAD_REQUEST) def test_list_head_roles(self): """Call ``GET & HEAD /roles``.""" resource_url = '/roles' r = self.get(resource_url) self.assertValidRoleListResponse(r, ref=self.role, resource_url=resource_url) self.head(resource_url, expected_status=http.client.OK) def test_get_head_role(self): """Call ``GET & HEAD /roles/{role_id}``.""" resource_url = '/roles/%(role_id)s' % { 'role_id': self.role_id} r = self.get(resource_url) self.assertValidRoleResponse(r, self.role) self.head(resource_url, expected_status=http.client.OK) def test_update_role(self): """Call ``PATCH /roles/{role_id}``.""" ref = unit.new_role_ref() del ref['id'] r = self.patch('/roles/%(role_id)s' % { 'role_id': self.role_id}, body={'role': ref}) self.assertValidRoleResponse(r, ref) def test_delete_role(self): """Call ``DELETE /roles/{role_id}``.""" self.delete('/roles/%(role_id)s' % { 'role_id': self.role_id}) # Role Grants tests def test_crud_user_project_role_grants(self): role = unit.new_role_ref() PROVIDERS.role_api.create_role(role['id'], role) collection_url = ( '/projects/%(project_id)s/users/%(user_id)s/roles' % { 'project_id': self.project['id'], 'user_id': self.user['id']}) member_url = 
'%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, 'role_id': role['id']} # There is a role assignment for self.user on self.project r = self.get(collection_url) self.assertValidRoleListResponse(r, ref=self.role, expected_length=1) self.put(member_url) self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = self.get(collection_url) self.assertValidRoleListResponse(r, ref=role, resource_url=collection_url, expected_length=2) self.head(collection_url, expected_status=http.client.OK) self.delete(member_url) r = self.get(collection_url) self.assertValidRoleListResponse(r, ref=self.role, expected_length=1) self.assertIn(collection_url, r.result['links']['self']) self.head(collection_url, expected_status=http.client.OK) def test_crud_user_project_role_grants_no_user(self): """Grant role on a project to a user that doesn't exist. When grant a role on a project to a user that doesn't exist, the server returns Not Found for the user. """ user_id = uuid.uuid4().hex collection_url = ( '/projects/%(project_id)s/users/%(user_id)s/roles' % { 'project_id': self.project['id'], 'user_id': user_id}) member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, 'role_id': self.role_id} self.put(member_url, expected_status=http.client.NOT_FOUND) self.head(member_url, expected_status=http.client.NOT_FOUND) self.get(member_url, expected_status=http.client.NOT_FOUND) def test_crud_user_domain_role_grants(self): time = datetime.datetime.utcnow() with freezegun.freeze_time(time) as frozen_datetime: collection_url = ( '/domains/%(domain_id)s/users/%(user_id)s/roles' % { 'domain_id': self.domain_id, 'user_id': self.user['id']}) member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, 'role_id': self.role_id} self.put(member_url) self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = self.get(collection_url) self.assertValidRoleListResponse(r, ref=self.role, resource_url=collection_url) self.head(collection_url, expected_status=http.client.OK) self.delete(member_url) # NOTE(lbragstad): Make sure we wait a second before we ask for the # roles. This ensures the token we use isn't considered revoked # because it was issued within the same second as a revocation # event. frozen_datetime.tick(delta=datetime.timedelta(seconds=1)) r = self.get(collection_url) self.assertValidRoleListResponse(r, expected_length=0, resource_url=collection_url) self.head(collection_url, expected_status=http.client.OK) def test_crud_user_domain_role_grants_no_user(self): """Grant role on a domain to a user that doesn't exist. When grant a role on a domain to a user that doesn't exist, the server returns 404 Not Found for the user. 
""" user_id = uuid.uuid4().hex collection_url = ( '/domains/%(domain_id)s/users/%(user_id)s/roles' % { 'domain_id': self.domain_id, 'user_id': user_id}) member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, 'role_id': self.role_id} self.put(member_url, expected_status=http.client.NOT_FOUND) self.head(member_url, expected_status=http.client.NOT_FOUND) self.get(member_url, expected_status=http.client.NOT_FOUND) def test_crud_group_project_role_grants(self): time = datetime.datetime.utcnow() with freezegun.freeze_time(time) as frozen_datetime: collection_url = ( '/projects/%(project_id)s/groups/%(group_id)s/roles' % { 'project_id': self.project_id, 'group_id': self.group_id}) member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, 'role_id': self.role_id} self.put(member_url) self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = self.get(collection_url) self.assertValidRoleListResponse(r, ref=self.role, resource_url=collection_url) self.head(collection_url, expected_status=http.client.OK) self.delete(member_url) # NOTE(lbragstad): Make sure we wait a second before we ask for the # roles. This ensures the token we use isn't considered revoked # because it was issued within the same second as a revocation # event. frozen_datetime.tick(delta=datetime.timedelta(seconds=1)) r = self.get(collection_url) self.assertValidRoleListResponse(r, expected_length=0, resource_url=collection_url) self.head(collection_url, expected_status=http.client.OK) def test_crud_group_project_role_grants_no_group(self): """Grant role on a project to a group that doesn't exist. When grant a role on a project to a group that doesn't exist, the server returns 404 Not Found for the group. """ group_id = uuid.uuid4().hex collection_url = ( '/projects/%(project_id)s/groups/%(group_id)s/roles' % { 'project_id': self.project_id, 'group_id': group_id}) member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, 'role_id': self.role_id} self.put(member_url, expected_status=http.client.NOT_FOUND) self.head(member_url, expected_status=http.client.NOT_FOUND) self.get(member_url, expected_status=http.client.NOT_FOUND) def test_crud_group_domain_role_grants(self): time = datetime.datetime.utcnow() with freezegun.freeze_time(time) as frozen_datetime: collection_url = ( '/domains/%(domain_id)s/groups/%(group_id)s/roles' % { 'domain_id': self.domain_id, 'group_id': self.group_id}) member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, 'role_id': self.role_id} self.put(member_url) self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = self.get(collection_url) self.assertValidRoleListResponse(r, ref=self.role, resource_url=collection_url) self.head(collection_url, expected_status=http.client.OK) self.delete(member_url) # NOTE(lbragstad): Make sure we wait a second before we ask for the # roles. This ensures the token we use isn't considered revoked # because it was issued within the same second as a revocation # event. frozen_datetime.tick(delta=datetime.timedelta(seconds=1)) r = self.get(collection_url) self.assertValidRoleListResponse(r, expected_length=0, resource_url=collection_url) self.head(collection_url, expected_status=http.client.OK) def test_crud_group_domain_role_grants_no_group(self): """Grant role on a domain to a group that doesn't exist. When grant a role on a domain to a group that doesn't exist, the server returns 404 Not Found for the group. 
""" group_id = uuid.uuid4().hex collection_url = ( '/domains/%(domain_id)s/groups/%(group_id)s/roles' % { 'domain_id': self.domain_id, 'group_id': group_id}) member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, 'role_id': self.role_id} self.put(member_url, expected_status=http.client.NOT_FOUND) self.head(member_url, expected_status=http.client.NOT_FOUND) self.get(member_url, expected_status=http.client.NOT_FOUND) def _create_new_user_and_assign_role_on_project(self): """Create a new user and assign user a role on a project.""" # Create a new user new_user = unit.new_user_ref(domain_id=self.domain_id) user_ref = PROVIDERS.identity_api.create_user(new_user) # Assign the user a role on the project collection_url = ( '/projects/%(project_id)s/users/%(user_id)s/roles' % { 'project_id': self.project_id, 'user_id': user_ref['id']}) member_url = ('%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, 'role_id': self.role_id}) self.put(member_url) # Check the user has the role assigned self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) return member_url, user_ref def test_delete_user_before_removing_role_assignment_succeeds(self): """Call ``DELETE`` on the user before the role assignment.""" member_url, user = self._create_new_user_and_assign_role_on_project() # Delete the user from identity backend PROVIDERS.identity_api.driver.delete_user(user['id']) # Clean up the role assignment self.delete(member_url) # Make sure the role is gone self.head(member_url, expected_status=http.client.NOT_FOUND) def test_delete_group_before_removing_role_assignment_succeeds(self): # Disable the cache so that we perform a fresh check of the identity # backend when attempting to remove the role assignment. self.config_fixture.config(group='cache', enabled=False) # Create a new group group = unit.new_group_ref(domain_id=self.domain_id) group_ref = PROVIDERS.identity_api.create_group(group) # Assign the user a role on the project collection_url = ( '/projects/%(project_id)s/groups/%(group_id)s/roles' % { 'project_id': self.project_id, 'group_id': group_ref['id']}) member_url = ('%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, 'role_id': self.role_id}) self.put(member_url) # Check the user has the role assigned self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) # Simulate removing the group via LDAP by directly removing it from the # identity backend. PROVIDERS.identity_api.driver.delete_group(group_ref['id']) # Ensure we can clean up the role assignment even though the group # doesn't exist self.delete(member_url) def test_delete_user_before_removing_system_assignments_succeeds(self): system_role = self._create_new_role() user = self._create_user() path = ( '/system/users/%(user_id)s/roles/%(role_id)s' % {'user_id': user['id'], 'role_id': system_role} ) self.put(path) response = self.get('/role_assignments') number_of_assignments = len(response.json_body['role_assignments']) path = '/users/%(user_id)s' % {'user_id': user['id']} self.delete(path) # The user with the system role assignment is a new user and only has # one role on the system. We should expect one less role assignment in # the list. 
response = self.get('/role_assignments') self.assertValidRoleAssignmentListResponse( response, expected_length=number_of_assignments - 1 ) def test_delete_user_and_check_role_assignment_fails(self): """Call ``DELETE`` on the user and check the role assignment.""" member_url, user = self._create_new_user_and_assign_role_on_project() # Delete the user from identity backend PROVIDERS.identity_api.delete_user(user['id']) # We should get a 404 Not Found when looking for the user in the # identity backend because we're not performing a delete operation on # the role. self.head(member_url, expected_status=http.client.NOT_FOUND) def test_token_revoked_once_group_role_grant_revoked(self): """Test token invalid when direct & indirect role on user is revoked. When a role granted to a group is revoked for a given scope, and user direct role is revoked, then tokens created by user will be invalid. """ time = datetime.datetime.utcnow() with freezegun.freeze_time(time) as frozen_datetime: # creates grant from group on project. PROVIDERS.assignment_api.create_grant( role_id=self.role['id'], project_id=self.project['id'], group_id=self.group['id'] ) # adds user to the group. PROVIDERS.identity_api.add_user_to_group( user_id=self.user['id'], group_id=self.group['id'] ) # creates a token for the user auth_body = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], project_id=self.project['id']) token_resp = self.post('/auth/tokens', body=auth_body) token = token_resp.headers.get('x-subject-token') # validates the returned token; it should be valid. self.head('/auth/tokens', headers={'x-subject-token': token}, expected_status=http.client.OK) frozen_datetime.tick(delta=datetime.timedelta(seconds=1)) # revokes the grant from group on project. PROVIDERS.assignment_api.delete_grant( role_id=self.role['id'], project_id=self.project['id'], group_id=self.group['id']) # revokes the direct role form user on project PROVIDERS.assignment_api.delete_grant( role_id=self.role['id'], project_id=self.project['id'], user_id=self.user['id'] ) frozen_datetime.tick(delta=datetime.timedelta(seconds=1)) # validates the same token again; it should not longer be valid. self.head('/auth/tokens', token=token, expected_status=http.client.UNAUTHORIZED) def test_delete_group_before_removing_system_assignments_succeeds(self): system_role = self._create_new_role() group = self._create_group() path = ( '/system/groups/%(group_id)s/roles/%(role_id)s' % {'group_id': group['id'], 'role_id': system_role} ) self.put(path) response = self.get('/role_assignments') number_of_assignments = len(response.json_body['role_assignments']) path = '/groups/%(group_id)s' % {'group_id': group['id']} self.delete(path) # The group with the system role assignment is a new group and only has # one role on the system. We should expect one less role assignment in # the list. 
response = self.get('/role_assignments') self.assertValidRoleAssignmentListResponse( response, expected_length=number_of_assignments - 1 ) @unit.skip_if_cache_disabled('assignment') def test_delete_grant_from_user_and_project_invalidate_cache(self): # create a new project new_project = unit.new_project_ref(domain_id=self.domain_id) PROVIDERS.resource_api.create_project(new_project['id'], new_project) collection_url = ( '/projects/%(project_id)s/users/%(user_id)s/roles' % { 'project_id': new_project['id'], 'user_id': self.user['id']}) member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, 'role_id': self.role_id} # create the user a grant on the new project self.put(member_url) # check the grant that was just created self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) resp = self.get(collection_url) self.assertValidRoleListResponse(resp, ref=self.role, resource_url=collection_url) # delete the grant self.delete(member_url) # get the collection and ensure there are no roles on the project resp = self.get(collection_url) self.assertListEqual(resp.json_body['roles'], []) @unit.skip_if_cache_disabled('assignment') def test_delete_grant_from_user_and_domain_invalidates_cache(self): # create a new domain new_domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(new_domain['id'], new_domain) collection_url = ( '/domains/%(domain_id)s/users/%(user_id)s/roles' % { 'domain_id': new_domain['id'], 'user_id': self.user['id']}) member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, 'role_id': self.role_id} # create the user a grant on the new domain self.put(member_url) # check the grant that was just created self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) resp = self.get(collection_url) self.assertValidRoleListResponse(resp, ref=self.role, resource_url=collection_url) # delete the grant self.delete(member_url) # get the collection and ensure there are no roles on the domain resp = self.get(collection_url) self.assertListEqual(resp.json_body['roles'], []) @unit.skip_if_cache_disabled('assignment') def test_delete_grant_from_group_and_project_invalidates_cache(self): # create a new project new_project = unit.new_project_ref(domain_id=self.domain_id) PROVIDERS.resource_api.create_project(new_project['id'], new_project) collection_url = ( '/projects/%(project_id)s/groups/%(group_id)s/roles' % { 'project_id': new_project['id'], 'group_id': self.group['id']}) member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, 'role_id': self.role_id} # create the group a grant on the new project self.put(member_url) # check the grant that was just created self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) resp = self.get(collection_url) self.assertValidRoleListResponse(resp, ref=self.role, resource_url=collection_url) # delete the grant self.delete(member_url) # get the collection and ensure there are no roles on the project resp = self.get(collection_url) self.assertListEqual(resp.json_body['roles'], []) @unit.skip_if_cache_disabled('assignment') def test_delete_grant_from_group_and_domain_invalidates_cache(self): # create a new domain new_domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(new_domain['id'], new_domain) collection_url = ( '/domains/%(domain_id)s/groups/%(group_id)s/roles' % { 'domain_id': new_domain['id'], 'group_id': self.group['id']}) member_url = '%(collection_url)s/%(role_id)s' % { 
'collection_url': collection_url, 'role_id': self.role_id} # create the group a grant on the new domain self.put(member_url) # check the grant that was just created self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) resp = self.get(collection_url) self.assertValidRoleListResponse(resp, ref=self.role, resource_url=collection_url) # delete the grant self.delete(member_url) # get the collection and ensure there are no roles on the domain resp = self.get(collection_url) self.assertListEqual(resp.json_body['roles'], []) # Role Assignments tests def test_get_head_role_assignments(self): """Call ``GET & HEAD /role_assignments``. The sample data set up already has a user, group and project that is part of self.domain. We use these plus a new user we create as our data set, making sure we ignore any role assignments that are already in existence. Since we don't yet support a first class entity for role assignments, we are only testing the LIST API. To create and delete the role assignments we use the old grant APIs. Test Plan: - Create extra user for tests - Get a list of all existing role assignments - Add a new assignment for each of the four combinations, i.e. group+domain, user+domain, group+project, user+project, using the same role each time - Get a new list of all role assignments, checking these four new ones have been added - Then delete the four we added - Get a new list of all role assignments, checking the four have been removed """ time = datetime.datetime.utcnow() with freezegun.freeze_time(time) as frozen_datetime: # Since the default fixtures already assign some roles to the # user it creates, we also need a new user that will not have any # existing assignments user1 = unit.new_user_ref(domain_id=self.domain['id']) user1 = PROVIDERS.identity_api.create_user(user1) role = unit.new_role_ref() PROVIDERS.role_api.create_role(role['id'], role) collection_url = '/role_assignments' r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( r, resource_url=collection_url) self.head(collection_url, expected_status=http.client.OK) existing_assignments = len(r.result.get('role_assignments')) # Now add one of each of the four types of assignment, making sure # that we get them all back. 
gd_entity = self.build_role_assignment_entity( domain_id=self.domain_id, group_id=self.group_id, role_id=role['id']) self.put(gd_entity['links']['assignment']) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 1, resource_url=collection_url) self.assertRoleAssignmentInListResponse(r, gd_entity) self.head(collection_url, expected_status=http.client.OK) ud_entity = self.build_role_assignment_entity( domain_id=self.domain_id, user_id=user1['id'], role_id=role['id']) self.put(ud_entity['links']['assignment']) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 2, resource_url=collection_url) self.assertRoleAssignmentInListResponse(r, ud_entity) self.head(collection_url, expected_status=http.client.OK) gp_entity = self.build_role_assignment_entity( project_id=self.project_id, group_id=self.group_id, role_id=role['id']) self.put(gp_entity['links']['assignment']) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 3, resource_url=collection_url) self.assertRoleAssignmentInListResponse(r, gp_entity) self.head(collection_url, expected_status=http.client.OK) up_entity = self.build_role_assignment_entity( project_id=self.project_id, user_id=user1['id'], role_id=role['id']) self.put(up_entity['links']['assignment']) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 4, resource_url=collection_url) self.assertRoleAssignmentInListResponse(r, up_entity) self.head(collection_url, expected_status=http.client.OK) # Now delete the four we added and make sure they are removed # from the collection. self.delete(gd_entity['links']['assignment']) self.delete(ud_entity['links']['assignment']) self.delete(gp_entity['links']['assignment']) self.delete(up_entity['links']['assignment']) frozen_datetime.tick(delta=datetime.timedelta(seconds=1)) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments, resource_url=collection_url) self.assertRoleAssignmentNotInListResponse(r, gd_entity) self.assertRoleAssignmentNotInListResponse(r, ud_entity) self.assertRoleAssignmentNotInListResponse(r, gp_entity) self.assertRoleAssignmentNotInListResponse(r, up_entity) self.head(collection_url, expected_status=http.client.OK) def test_get_effective_role_assignments(self): """Call ``GET /role_assignments?effective``. Test Plan: - Create two extra user for tests - Add these users to a group - Add a role assignment for the group on a domain - Get a list of all role assignments, checking one has been added - Then get a list of all effective role assignments - the group assignment should have turned into assignments on the domain for each of the group members. 
""" user1 = unit.create_user(PROVIDERS.identity_api, domain_id=self.domain['id']) user2 = unit.create_user(PROVIDERS.identity_api, domain_id=self.domain['id']) PROVIDERS.identity_api.add_user_to_group(user1['id'], self.group['id']) PROVIDERS.identity_api.add_user_to_group(user2['id'], self.group['id']) collection_url = '/role_assignments' r = self.get(collection_url) self.assertValidRoleAssignmentListResponse(r, resource_url=collection_url) existing_assignments = len(r.result.get('role_assignments')) gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id, group_id=self.group_id, role_id=self.role_id) self.put(gd_entity['links']['assignment']) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 1, resource_url=collection_url) self.assertRoleAssignmentInListResponse(r, gd_entity) # Now re-read the collection asking for effective roles - this # should mean the group assignment is translated into the two # member user assignments collection_url = '/role_assignments?effective' r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 2, resource_url=collection_url) ud_entity = self.build_role_assignment_entity( link=gd_entity['links']['assignment'], domain_id=self.domain_id, user_id=user1['id'], role_id=self.role_id) self.assertRoleAssignmentInListResponse(r, ud_entity) ud_entity = self.build_role_assignment_entity( link=gd_entity['links']['assignment'], domain_id=self.domain_id, user_id=user2['id'], role_id=self.role_id) self.assertRoleAssignmentInListResponse(r, ud_entity) def test_check_effective_values_for_role_assignments(self): """Call ``GET & HEAD /role_assignments?effective=value``. Check the various ways of specifying the 'effective' query parameter. If the 'effective' query parameter is included then this should always be treated as meaning 'True' unless it is specified as: {url}?effective=0 This is by design to match the agreed way of handling policy checking on query/filter parameters. Test Plan: - Create two extra user for tests - Add these users to a group - Add a role assignment for the group on a domain - Get a list of all role assignments, checking one has been added - Then issue various request with different ways of defining the 'effective' query parameter. 
As we have tested the correctness of the data coming back when we get effective roles in other tests, here we just use the count of entities to know if we are getting effective roles or not """ user1 = unit.create_user(PROVIDERS.identity_api, domain_id=self.domain['id']) user2 = unit.create_user(PROVIDERS.identity_api, domain_id=self.domain['id']) PROVIDERS.identity_api.add_user_to_group(user1['id'], self.group['id']) PROVIDERS.identity_api.add_user_to_group(user2['id'], self.group['id']) collection_url = '/role_assignments' r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) self.assertValidRoleAssignmentListResponse(r, resource_url=collection_url) existing_assignments = len(r.result.get('role_assignments')) gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id, group_id=self.group_id, role_id=self.role_id) self.put(gd_entity['links']['assignment']) r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 1, resource_url=collection_url) self.assertRoleAssignmentInListResponse(r, gd_entity) # Now re-read the collection asking for effective roles, # using the most common way of defining "effective'. This # should mean the group assignment is translated into the two # member user assignments collection_url = '/role_assignments?effective' r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 2, resource_url=collection_url) # Now set 'effective' to false explicitly - should get # back the regular roles collection_url = '/role_assignments?effective=0' r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 1, resource_url=collection_url) # Now try setting 'effective' to 'False' explicitly- this is # NOT supported as a way of setting a query or filter # parameter to false by design. Hence we should get back # effective roles. collection_url = '/role_assignments?effective=False' r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 2, resource_url=collection_url) # Now set 'effective' to True explicitly collection_url = '/role_assignments?effective=True' r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 2, resource_url=collection_url) def test_filtered_role_assignments(self): """Call ``GET /role_assignments?filters``. 
Test Plan: - Create extra users, group, role and project for tests - Make the following assignments: Give group1, role1 on project1 and domain Give user1, role2 on project1 and domain Make User1 a member of Group1 - Test a series of single filter list calls, checking that the correct results are obtained - Test a multi-filtered list call - Test listing all effective roles for a given user - Test the equivalent of the list of roles in a project scoped token (all effective roles for a user on a project) """ # Since the default fixtures already assign some roles to the # user it creates, we also need a new user that will not have any # existing assignments user1 = unit.create_user(PROVIDERS.identity_api, domain_id=self.domain['id']) user2 = unit.create_user(PROVIDERS.identity_api, domain_id=self.domain['id']) group1 = unit.new_group_ref(domain_id=self.domain['id']) group1 = PROVIDERS.identity_api.create_group(group1) PROVIDERS.identity_api.add_user_to_group(user1['id'], group1['id']) PROVIDERS.identity_api.add_user_to_group(user2['id'], group1['id']) project1 = unit.new_project_ref(domain_id=self.domain['id']) PROVIDERS.resource_api.create_project(project1['id'], project1) self.role1 = unit.new_role_ref() PROVIDERS.role_api.create_role(self.role1['id'], self.role1) self.role2 = unit.new_role_ref() PROVIDERS.role_api.create_role(self.role2['id'], self.role2) # Now add one of each of the six types of assignment gd_entity = self.build_role_assignment_entity( domain_id=self.domain_id, group_id=group1['id'], role_id=self.role1['id']) self.put(gd_entity['links']['assignment']) ud_entity = self.build_role_assignment_entity(domain_id=self.domain_id, user_id=user1['id'], role_id=self.role2['id']) self.put(ud_entity['links']['assignment']) gp_entity = self.build_role_assignment_entity( project_id=project1['id'], group_id=group1['id'], role_id=self.role1['id']) self.put(gp_entity['links']['assignment']) up_entity = self.build_role_assignment_entity( project_id=project1['id'], user_id=user1['id'], role_id=self.role2['id']) self.put(up_entity['links']['assignment']) gs_entity = self.build_role_assignment_entity( system='all', group_id=group1['id'], role_id=self.role1['id']) self.put(gs_entity['links']['assignment']) us_entity = self.build_role_assignment_entity( system='all', user_id=user1['id'], role_id=self.role2['id']) self.put(us_entity['links']['assignment']) us2_entity = self.build_role_assignment_entity( system='all', user_id=user2['id'], role_id=self.role2['id']) self.put(us2_entity['links']['assignment']) # Now list by various filters to make sure we get back the right ones collection_url = ('/role_assignments?scope.project.id=%s' % project1['id']) r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) self.assertValidRoleAssignmentListResponse(r, expected_length=2, resource_url=collection_url) self.assertRoleAssignmentInListResponse(r, up_entity) self.assertRoleAssignmentInListResponse(r, gp_entity) collection_url = ('/role_assignments?scope.domain.id=%s' % self.domain['id']) r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) self.assertValidRoleAssignmentListResponse(r, expected_length=2, resource_url=collection_url) self.assertRoleAssignmentInListResponse(r, ud_entity) self.assertRoleAssignmentInListResponse(r, gd_entity) collection_url = '/role_assignments?user.id=%s' % user1['id'] r = self.get(collection_url, expected_status=http.client.OK) 
self.head(collection_url, expected_status=http.client.OK) self.assertValidRoleAssignmentListResponse(r, expected_length=3, resource_url=collection_url) self.assertRoleAssignmentInListResponse(r, up_entity) self.assertRoleAssignmentInListResponse(r, ud_entity) collection_url = '/role_assignments?group.id=%s' % group1['id'] r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) self.assertValidRoleAssignmentListResponse(r, expected_length=3, resource_url=collection_url) self.assertRoleAssignmentInListResponse(r, gd_entity) self.assertRoleAssignmentInListResponse(r, gp_entity) collection_url = '/role_assignments?role.id=%s' % self.role1['id'] r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) self.assertValidRoleAssignmentListResponse(r, expected_length=3, resource_url=collection_url) self.assertRoleAssignmentInListResponse(r, gd_entity) self.assertRoleAssignmentInListResponse(r, gp_entity) self.assertRoleAssignmentInListResponse(r, gs_entity) collection_url = '/role_assignments?role.id=%s' % self.role2['id'] r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) self.assertValidRoleAssignmentListResponse(r, expected_length=4, resource_url=collection_url) self.assertRoleAssignmentInListResponse(r, ud_entity) self.assertRoleAssignmentInListResponse(r, up_entity) self.assertRoleAssignmentInListResponse(r, us_entity) # Let's try combining two filers together.... collection_url = ( '/role_assignments?user.id=%(user_id)s' '&scope.project.id=%(project_id)s' % { 'user_id': user1['id'], 'project_id': project1['id']}) r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) self.assertValidRoleAssignmentListResponse(r, expected_length=1, resource_url=collection_url) self.assertRoleAssignmentInListResponse(r, up_entity) # Now for a harder one - filter for user with effective # roles - this should return role assignment that were directly # assigned as well as by virtue of group membership collection_url = ('/role_assignments?effective&user.id=%s' % user1['id']) r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) self.assertValidRoleAssignmentListResponse(r, expected_length=4, resource_url=collection_url) # Should have the two direct roles... self.assertRoleAssignmentInListResponse(r, up_entity) self.assertRoleAssignmentInListResponse(r, ud_entity) # ...and the two via group membership... gp1_link = self.build_role_assignment_link( project_id=project1['id'], group_id=group1['id'], role_id=self.role1['id']) gd1_link = self.build_role_assignment_link(domain_id=self.domain_id, group_id=group1['id'], role_id=self.role1['id']) up1_entity = self.build_role_assignment_entity( link=gp1_link, project_id=project1['id'], user_id=user1['id'], role_id=self.role1['id']) ud1_entity = self.build_role_assignment_entity( link=gd1_link, domain_id=self.domain_id, user_id=user1['id'], role_id=self.role1['id']) self.assertRoleAssignmentInListResponse(r, up1_entity) self.assertRoleAssignmentInListResponse(r, ud1_entity) # ...and for the grand-daddy of them all, simulate the request # that would generate the list of effective roles in a project # scoped token. 
collection_url = ( '/role_assignments?effective&user.id=%(user_id)s' '&scope.project.id=%(project_id)s' % { 'user_id': user1['id'], 'project_id': project1['id']}) r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) self.assertValidRoleAssignmentListResponse(r, expected_length=2, resource_url=collection_url) # Should have one direct role and one from group membership... self.assertRoleAssignmentInListResponse(r, up_entity) self.assertRoleAssignmentInListResponse(r, up1_entity) def test_list_system_role_assignments(self): # create a bunch of roles user_system_role_id = self._create_new_role() user_domain_role_id = self._create_new_role() user_project_role_id = self._create_new_role() group_system_role_id = self._create_new_role() group_domain_role_id = self._create_new_role() group_project_role_id = self._create_new_role() # create a user and grant the user a role on the system, domain, and # project user = self._create_user() url = '/system/users/%s/roles/%s' % (user['id'], user_system_role_id) self.put(url) url = '/domains/%s/users/%s/roles/%s' % ( self.domain_id, user['id'], user_domain_role_id ) self.put(url) url = '/projects/%s/users/%s/roles/%s' % ( self.project_id, user['id'], user_project_role_id ) self.put(url) # create a group and grant the group a role on the system, domain, and # project group = self._create_group() url = '/system/groups/%s/roles/%s' % ( group['id'], group_system_role_id ) self.put(url) url = '/domains/%s/groups/%s/roles/%s' % ( self.domain_id, group['id'], group_domain_role_id ) self.put(url) url = '/projects/%s/groups/%s/roles/%s' % ( self.project_id, group['id'], group_project_role_id ) self.put(url) # /v3/role_assignments?scope.system=all should return two assignments response = self.get('/role_assignments?scope.system=all') self.assertValidRoleAssignmentListResponse(response, expected_length=2) for assignment in response.json_body['role_assignments']: self.assertTrue(assignment['scope']['system']['all']) if assignment.get('user'): self.assertEqual(user_system_role_id, assignment['role']['id']) if assignment.get('group'): self.assertEqual( group_system_role_id, assignment['role']['id'] ) # /v3/role_assignments?scope_system=all&user.id=$USER_ID should return # one role assignment url = '/role_assignments?scope.system=all&user.id=%s' % user['id'] response = self.get(url) self.assertValidRoleAssignmentListResponse(response, expected_length=1) self.assertEqual( user_system_role_id, response.json_body['role_assignments'][0]['role']['id'] ) # /v3/role_assignments?scope_system=all&group.id=$GROUP_ID should # return one role assignment url = '/role_assignments?scope.system=all&group.id=%s' % group['id'] response = self.get(url) self.assertValidRoleAssignmentListResponse(response, expected_length=1) self.assertEqual( group_system_role_id, response.json_body['role_assignments'][0]['role']['id'] ) # /v3/role_assignments?user.id=$USER_ID should return 3 assignments # and system should be in that list of assignments url = '/role_assignments?user.id=%s' % user['id'] response = self.get(url) self.assertValidRoleAssignmentListResponse(response, expected_length=3) for assignment in response.json_body['role_assignments']: if 'system' in assignment['scope']: self.assertEqual( user_system_role_id, assignment['role']['id'] ) if 'domain' in assignment['scope']: self.assertEqual( user_domain_role_id, assignment['role']['id'] ) if 'project' in assignment['scope']: self.assertEqual( user_project_role_id, 
assignment['role']['id'] ) # /v3/role_assignments?group.id=$GROUP_ID should return 3 assignments # and system should be in that list of assignments url = '/role_assignments?group.id=%s' % group['id'] response = self.get(url) self.assertValidRoleAssignmentListResponse(response, expected_length=3) for assignment in response.json_body['role_assignments']: if 'system' in assignment['scope']: self.assertEqual( group_system_role_id, assignment['role']['id'] ) if 'domain' in assignment['scope']: self.assertEqual( group_domain_role_id, assignment['role']['id'] ) if 'project' in assignment['scope']: self.assertEqual( group_project_role_id, assignment['role']['id'] ) class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin): """Base class for testing /v3/role_assignments API behavior.""" MAX_HIERARCHY_BREADTH = 3 MAX_HIERARCHY_DEPTH = CONF.max_project_tree_depth - 1 def load_sample_data(self): """Create sample data to be used on tests. Created data are i) a role and ii) a domain containing: a project hierarchy and 3 users within 3 groups. """ def create_project_hierarchy(parent_id, depth): """Create a random project hierarchy.""" if depth == 0: return breadth = random.randint(1, self.MAX_HIERARCHY_BREADTH) subprojects = [] for i in range(breadth): subprojects.append(unit.new_project_ref( domain_id=self.domain_id, parent_id=parent_id)) PROVIDERS.resource_api.create_project( subprojects[-1]['id'], subprojects[-1] ) new_parent = subprojects[random.randint(0, breadth - 1)] create_project_hierarchy(new_parent['id'], depth - 1) super(RoleAssignmentBaseTestCase, self).load_sample_data() # Create a domain self.domain = unit.new_domain_ref() self.domain_id = self.domain['id'] PROVIDERS.resource_api.create_domain(self.domain_id, self.domain) # Create a project hierarchy self.project = unit.new_project_ref(domain_id=self.domain_id) self.project_id = self.project['id'] PROVIDERS.resource_api.create_project(self.project_id, self.project) # Create a random project hierarchy create_project_hierarchy(self.project_id, random.randint(1, self.MAX_HIERARCHY_DEPTH)) # Create 3 users self.user_ids = [] for i in range(3): user = unit.new_user_ref(domain_id=self.domain_id) user = PROVIDERS.identity_api.create_user(user) self.user_ids.append(user['id']) # Create 3 groups self.group_ids = [] for i in range(3): group = unit.new_group_ref(domain_id=self.domain_id) group = PROVIDERS.identity_api.create_group(group) self.group_ids.append(group['id']) # Put 2 members on each group PROVIDERS.identity_api.add_user_to_group( user_id=self.user_ids[i], group_id=group['id'] ) PROVIDERS.identity_api.add_user_to_group( user_id=self.user_ids[i % 2], group_id=group['id'] ) PROVIDERS.assignment_api.create_grant( user_id=self.user_id, project_id=self.project_id, role_id=self.role_id ) # Create a role self.role = unit.new_role_ref() self.role_id = self.role['id'] PROVIDERS.role_api.create_role(self.role_id, self.role) # Set default user and group to be used on tests self.default_user_id = self.user_ids[0] self.default_group_id = self.group_ids[0] def get_role_assignments(self, expected_status=http.client.OK, **filters): """Return the result from querying role assignment API + queried URL. Calls GET /v3/role_assignments?<params> and returns its result, where <params> is the HTTP query parameters form of effective option plus filters, if provided. Queried URL is returned as well. :returns: a tuple containing the list role assignments API response and queried URL. 
""" query_url = self._get_role_assignments_query_url(**filters) response = self.get(query_url, expected_status=expected_status) return (response, query_url) def _get_role_assignments_query_url(self, **filters): """Return non-effective role assignments query URL from given filters. :param filters: query parameters are created with the provided filters on role assignments attributes. Valid filters are: role_id, domain_id, project_id, group_id, user_id and inherited_to_projects. :returns: role assignments query URL. """ return self.build_role_assignment_query_url(**filters) class RoleAssignmentFailureTestCase(RoleAssignmentBaseTestCase): """Class for testing invalid query params on /v3/role_assignments API. Querying domain and project, or user and group results in a HTTP 400 Bad Request, since a role assignment must contain only a single pair of (actor, target). In addition, since filtering on role assignments applies only to the final result, effective mode cannot be combined with i) group or ii) domain and inherited, because it would always result in an empty list. """ def test_get_role_assignments_by_domain_and_project(self): self.get_role_assignments(domain_id=self.domain_id, project_id=self.project_id, expected_status=http.client.BAD_REQUEST) def test_get_role_assignments_by_user_and_group(self): self.get_role_assignments(user_id=self.default_user_id, group_id=self.default_group_id, expected_status=http.client.BAD_REQUEST) def test_get_role_assignments_by_effective_and_inherited(self): self.get_role_assignments(domain_id=self.domain_id, effective=True, inherited_to_projects=True, expected_status=http.client.BAD_REQUEST) def test_get_role_assignments_by_effective_and_group(self): self.get_role_assignments(effective=True, group_id=self.default_group_id, expected_status=http.client.BAD_REQUEST) class RoleAssignmentDirectTestCase(RoleAssignmentBaseTestCase): """Class for testing direct assignments on /v3/role_assignments API. Direct assignments on a domain or project have effect on them directly, instead of on their project hierarchy, i.e they are non-inherited. In addition, group direct assignments are not expanded to group's users. Tests on this class make assertions on the representation and API filtering of direct assignments. """ def _test_get_role_assignments(self, **filters): """Generic filtering test method. According to the provided filters, this method: - creates a new role assignment; - asserts that list role assignments API reponds correctly; - deletes the created role assignment. :param filters: filters to be considered when listing role assignments. Valid filters are: role_id, domain_id, project_id, group_id, user_id and inherited_to_projects. 
""" # Fills default assignment with provided filters test_assignment = self._set_default_assignment_attributes(**filters) # Create new role assignment for this test PROVIDERS.assignment_api.create_grant(**test_assignment) # Get expected role assignments expected_assignments = self._list_expected_role_assignments( **test_assignment) # Get role assignments from API response, query_url = self.get_role_assignments(**test_assignment) self.assertValidRoleAssignmentListResponse(response, resource_url=query_url) self.assertEqual(len(expected_assignments), len(response.result.get('role_assignments'))) # Assert that expected role assignments were returned by the API call for assignment in expected_assignments: self.assertRoleAssignmentInListResponse(response, assignment) # Delete created role assignment PROVIDERS.assignment_api.delete_grant(**test_assignment) def _set_default_assignment_attributes(self, **attribs): """Insert default values for missing attributes of role assignment. If no actor, target or role are provided, they will default to values from sample data. :param attribs: info from a role assignment entity. Valid attributes are: role_id, domain_id, project_id, group_id, user_id and inherited_to_projects. """ if not any(target in attribs for target in ('domain_id', 'projects_id')): attribs['project_id'] = self.project_id if not any(actor in attribs for actor in ('user_id', 'group_id')): attribs['user_id'] = self.default_user_id if 'role_id' not in attribs: attribs['role_id'] = self.role_id return attribs def _list_expected_role_assignments(self, **filters): """Given the filters, it returns expected direct role assignments. :param filters: filters that will be considered when listing role assignments. Valid filters are: role_id, domain_id, project_id, group_id, user_id and inherited_to_projects. :returns: the list of the expected role assignments. """ return [self.build_role_assignment_entity(**filters)] # Test cases below call the generic test method, providing different filter # combinations. Filters are provided as specified in the method name, after # 'by'. For example, test_get_role_assignments_by_project_user_and_role # calls the generic test method with project_id, user_id and role_id. 
    def test_get_role_assignments_by_domain(self, **filters):
        self._test_get_role_assignments(domain_id=self.domain_id, **filters)

    def test_get_role_assignments_by_project(self, **filters):
        self._test_get_role_assignments(project_id=self.project_id, **filters)

    def test_get_role_assignments_by_user(self, **filters):
        self._test_get_role_assignments(user_id=self.default_user_id,
                                        **filters)

    def test_get_role_assignments_by_group(self, **filters):
        self._test_get_role_assignments(group_id=self.default_group_id,
                                        **filters)

    def test_get_role_assignments_by_role(self, **filters):
        self._test_get_role_assignments(role_id=self.role_id, **filters)

    def test_get_role_assignments_by_domain_and_user(self, **filters):
        self.test_get_role_assignments_by_domain(user_id=self.default_user_id,
                                                 **filters)

    def test_get_role_assignments_by_domain_and_group(self, **filters):
        self.test_get_role_assignments_by_domain(
            group_id=self.default_group_id, **filters)

    def test_get_role_assignments_by_project_and_user(self, **filters):
        self.test_get_role_assignments_by_project(
            user_id=self.default_user_id, **filters)

    def test_get_role_assignments_by_project_and_group(self, **filters):
        self.test_get_role_assignments_by_project(
            group_id=self.default_group_id, **filters)

    def test_get_role_assignments_by_domain_user_and_role(self, **filters):
        self.test_get_role_assignments_by_domain_and_user(
            role_id=self.role_id, **filters)

    def test_get_role_assignments_by_domain_group_and_role(self, **filters):
        self.test_get_role_assignments_by_domain_and_group(
            role_id=self.role_id, **filters)

    def test_get_role_assignments_by_project_user_and_role(self, **filters):
        self.test_get_role_assignments_by_project_and_user(
            role_id=self.role_id, **filters)

    def test_get_role_assignments_by_project_group_and_role(self, **filters):
        self.test_get_role_assignments_by_project_and_group(
            role_id=self.role_id, **filters)


class RoleAssignmentInheritedTestCase(RoleAssignmentDirectTestCase):
    """Class for testing inherited assignments on /v3/role_assignments API.

    Inherited assignments on a domain or project have no effect on them
    directly, but on the projects under them instead.

    Tests on this class do not make assertions on the effect of inherited
    assignments, but on their representation and API filtering.

    """

    def _test_get_role_assignments(self, **filters):
        """Add inherited_to_projects filter to expected entity in tests."""
        super(RoleAssignmentInheritedTestCase,
              self)._test_get_role_assignments(inherited_to_projects=True,
                                               **filters)


class RoleAssignmentEffectiveTestCase(RoleAssignmentInheritedTestCase):
    """Class for testing inheritance effects on /v3/role_assignments API.

    Inherited assignments on a domain or project have no effect on them
    directly, but on the projects under them instead.

    Tests on this class make assertions on the effect of inherited
    assignments and API filtering.

    """

    def _get_role_assignments_query_url(self, **filters):
        """Return effective role assignments query URL from given filters.

        For test methods in this class, effective will always be true. As in
        effective mode inherited_to_projects, group_id, domain_id and
        project_id will always be disregarded, they are dropped from the
        provided filters.

        :param filters: query parameters are created with the provided
                        filters. Valid filters are: role_id, domain_id,
                        project_id, group_id, user_id and
                        inherited_to_projects.

        :returns: role assignments query URL.
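
        For illustration only (hypothetical IDs): filters such as
        role_id='r1', domain_id='d1' and inherited_to_projects=True are
        expected to collapse to a query URL along the lines of
        ``/role_assignments?effective&role.id=r1``, since everything but the
        role and user filters is dropped in effective mode.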
""" query_filters = filters.copy() query_filters.pop('inherited_to_projects') query_filters.pop('group_id', None) query_filters.pop('domain_id', None) query_filters.pop('project_id', None) return self.build_role_assignment_query_url(effective=True, **query_filters) def _list_expected_role_assignments(self, **filters): """Given the filters, it returns expected direct role assignments. :param filters: filters that will be considered when listing role assignments. Valid filters are: role_id, domain_id, project_id, group_id, user_id and inherited_to_projects. :returns: the list of the expected role assignments. """ # Get assignment link, to be put on 'links': {'assignment': link} assignment_link = self.build_role_assignment_link(**filters) # Expand group membership user_ids = [None] if filters.get('group_id'): user_ids = [user['id'] for user in PROVIDERS.identity_api.list_users_in_group( filters['group_id'])] else: user_ids = [self.default_user_id] # Expand role inheritance project_ids = [None] if filters.get('domain_id'): project_ids = [project['id'] for project in PROVIDERS.resource_api.list_projects_in_domain( filters.pop('domain_id'))] else: project_ids = [project['id'] for project in PROVIDERS.resource_api.list_projects_in_subtree( self.project_id)] # Compute expected role assignments assignments = [] for project_id in project_ids: filters['project_id'] = project_id for user_id in user_ids: filters['user_id'] = user_id assignments.append(self.build_role_assignment_entity( link=assignment_link, **filters)) return assignments class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin): """Test inheritance crud and its effects.""" def test_get_token_from_inherited_user_domain_role_grants(self): # Create a new user to ensure that no grant is loaded from sample data user = unit.create_user( PROVIDERS.identity_api, domain_id=self.domain_id ) # Define domain and project authentication data domain_auth_data = self.build_authentication_request( user_id=user['id'], password=user['password'], domain_id=self.domain_id) project_auth_data = self.build_authentication_request( user_id=user['id'], password=user['password'], project_id=self.project_id) # Check the user cannot get a domain nor a project token self.v3_create_token(domain_auth_data, expected_status=http.client.UNAUTHORIZED) self.v3_create_token(project_auth_data, expected_status=http.client.UNAUTHORIZED) # Grant non-inherited role for user on domain non_inher_ud_link = self.build_role_assignment_link( domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id) self.put(non_inher_ud_link) # Check the user can get only a domain token self.v3_create_token(domain_auth_data) self.v3_create_token(project_auth_data, expected_status=http.client.UNAUTHORIZED) # Create inherited role inherited_role = unit.new_role_ref(name='inherited') PROVIDERS.role_api.create_role(inherited_role['id'], inherited_role) # Grant inherited role for user on domain inher_ud_link = self.build_role_assignment_link( domain_id=self.domain_id, user_id=user['id'], role_id=inherited_role['id'], inherited_to_projects=True) self.put(inher_ud_link) # Check the user can get both a domain and a project token self.v3_create_token(domain_auth_data) self.v3_create_token(project_auth_data) # Delete inherited grant self.delete(inher_ud_link) # Check the user can only get a domain token self.v3_create_token(domain_auth_data) self.v3_create_token(project_auth_data, expected_status=http.client.UNAUTHORIZED) # Delete non-inherited grant 
        self.delete(non_inher_ud_link)

        # Check the user cannot get a domain token anymore
        self.v3_create_token(domain_auth_data,
                             expected_status=http.client.UNAUTHORIZED)

    def test_get_token_from_inherited_group_domain_role_grants(self):
        # Create a new group and put a new user in it to
        # ensure that no grant is loaded from sample data
        user = unit.create_user(
            PROVIDERS.identity_api, domain_id=self.domain_id
        )
        group = unit.new_group_ref(domain_id=self.domain['id'])
        group = PROVIDERS.identity_api.create_group(group)
        PROVIDERS.identity_api.add_user_to_group(user['id'], group['id'])

        # Define domain and project authentication data
        domain_auth_data = self.build_authentication_request(
            user_id=user['id'],
            password=user['password'],
            domain_id=self.domain_id)
        project_auth_data = self.build_authentication_request(
            user_id=user['id'],
            password=user['password'],
            project_id=self.project_id)

        # Check the user cannot get a domain nor a project token
        self.v3_create_token(domain_auth_data,
                             expected_status=http.client.UNAUTHORIZED)
        self.v3_create_token(project_auth_data,
                             expected_status=http.client.UNAUTHORIZED)

        # Grant non-inherited role for group on domain
        non_inher_gd_link = self.build_role_assignment_link(
            domain_id=self.domain_id, group_id=group['id'],
            role_id=self.role_id)
        self.put(non_inher_gd_link)

        # Check the user can get only a domain token
        self.v3_create_token(domain_auth_data)
        self.v3_create_token(project_auth_data,
                             expected_status=http.client.UNAUTHORIZED)

        # Create inherited role
        inherited_role = unit.new_role_ref(name='inherited')
        PROVIDERS.role_api.create_role(inherited_role['id'], inherited_role)

        # Grant inherited role for group on domain
        inher_gd_link = self.build_role_assignment_link(
            domain_id=self.domain_id, group_id=group['id'],
            role_id=inherited_role['id'], inherited_to_projects=True)
        self.put(inher_gd_link)

        # Check the user can get both a domain and a project token
        self.v3_create_token(domain_auth_data)
        self.v3_create_token(project_auth_data)

        # Delete inherited grant
        self.delete(inher_gd_link)

        # Check the user can only get a domain token
        self.v3_create_token(domain_auth_data)
        self.v3_create_token(project_auth_data,
                             expected_status=http.client.UNAUTHORIZED)

        # Delete non-inherited grant
        self.delete(non_inher_gd_link)

        # Check the user cannot get a domain token anymore
        self.v3_create_token(domain_auth_data,
                             expected_status=http.client.UNAUTHORIZED)

    def _test_crud_inherited_and_direct_assignment_on_target(self, target_url):
        time = datetime.datetime.utcnow()
        with freezegun.freeze_time(time) as frozen_datetime:
            # Create a new role to avoid assignments loaded from sample data
            role = unit.new_role_ref()
            PROVIDERS.role_api.create_role(role['id'], role)

            # Define URLs
            direct_url = '%s/users/%s/roles/%s' % (
                target_url, self.user_id, role['id'])
            inherited_url = ('/OS-INHERIT/%s/inherited_to_projects' %
                             direct_url.lstrip('/'))

            # Create the direct assignment
            self.put(direct_url)
            # Check the direct assignment exists, but the inherited one does
            # not
            self.head(direct_url)
            self.head(inherited_url, expected_status=http.client.NOT_FOUND)

            # Now add the inherited assignment
            self.put(inherited_url)
            # Check both the direct and inherited assignment exist
            self.head(direct_url)
            self.head(inherited_url)

            # Delete the inherited assignment
            self.delete(inherited_url)

            frozen_datetime.tick(delta=datetime.timedelta(seconds=1))

            # Check the direct assignment exists, but the inherited one does
            # not
            self.head(direct_url)
            self.head(inherited_url, expected_status=http.client.NOT_FOUND)

            # Now delete the direct assignment
            self.delete(direct_url)

            # Check that none of them exist
            self.head(direct_url, expected_status=http.client.NOT_FOUND)
            self.head(inherited_url, expected_status=http.client.NOT_FOUND)

    def test_crud_inherited_and_direct_assignment_on_domains(self):
        self._test_crud_inherited_and_direct_assignment_on_target(
            '/domains/%s' % self.domain_id)

    def test_crud_inherited_and_direct_assignment_on_projects(self):
        self._test_crud_inherited_and_direct_assignment_on_target(
            '/projects/%s' % self.project_id)

    def test_crud_user_inherited_domain_role_grants(self):
        role_list = []
        for _ in range(2):
            role = unit.new_role_ref()
            PROVIDERS.role_api.create_role(role['id'], role)
            role_list.append(role)

        # Create a non-inherited role as a spoiler
        PROVIDERS.assignment_api.create_grant(
            role_list[1]['id'], user_id=self.user['id'],
            domain_id=self.domain_id)

        base_collection_url = (
            '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
                'domain_id': self.domain_id,
                'user_id': self.user['id']})
        member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
            'collection_url': base_collection_url,
            'role_id': role_list[0]['id']}
        collection_url = base_collection_url + '/inherited_to_projects'

        self.put(member_url)

        # Check we can read it back
        self.head(member_url)
        self.get(member_url, expected_status=http.client.NO_CONTENT)
        r = self.get(collection_url)
        self.assertValidRoleListResponse(r, ref=role_list[0],
                                         resource_url=collection_url)

        # Now delete and check it's gone
        self.delete(member_url)
        r = self.get(collection_url)
        self.assertValidRoleListResponse(r, expected_length=0,
                                         resource_url=collection_url)

    def test_list_role_assignments_for_inherited_domain_grants(self):
        """Call ``GET /role_assignments with inherited domain grants``.

        Test Plan:

        - Create 4 roles
        - Create a domain with a user and two projects
        - Assign two direct roles to project1
        - Assign a spoiler role to project2
        - Issue the URL to add an inherited role to the domain
        - Issue the URL to check it is indeed on the domain
        - Issue the URL to check effective roles on project1 - this should
          return 3 roles.
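
        For illustration only (hypothetical IDs), the two listings below use
        query URLs along the lines of
        ``/role_assignments?user.id=u1&scope.domain.id=d1`` and
        ``/role_assignments?effective&user.id=u1&scope.project.id=p1``.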
""" role_list = [] for _ in range(4): role = unit.new_role_ref() PROVIDERS.role_api.create_role(role['id'], role) role_list.append(role) domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(domain['id'], domain) user1 = unit.create_user( PROVIDERS.identity_api, domain_id=domain['id'] ) project1 = unit.new_project_ref(domain_id=domain['id']) PROVIDERS.resource_api.create_project(project1['id'], project1) project2 = unit.new_project_ref(domain_id=domain['id']) PROVIDERS.resource_api.create_project(project2['id'], project2) # Add some roles to the project PROVIDERS.assignment_api.add_role_to_user_and_project( user1['id'], project1['id'], role_list[0]['id']) PROVIDERS.assignment_api.add_role_to_user_and_project( user1['id'], project1['id'], role_list[1]['id']) # ..and one on a different project as a spoiler PROVIDERS.assignment_api.add_role_to_user_and_project( user1['id'], project2['id'], role_list[2]['id']) # Now create our inherited role on the domain base_collection_url = ( '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % { 'domain_id': domain['id'], 'user_id': user1['id']}) member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % { 'collection_url': base_collection_url, 'role_id': role_list[3]['id']} collection_url = base_collection_url + '/inherited_to_projects' self.put(member_url) self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = self.get(collection_url) self.assertValidRoleListResponse(r, ref=role_list[3], resource_url=collection_url) # Now use the list domain role assignments api to check if this # is included collection_url = ( '/role_assignments?user.id=%(user_id)s' '&scope.domain.id=%(domain_id)s' % { 'user_id': user1['id'], 'domain_id': domain['id']}) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse(r, expected_length=1, resource_url=collection_url) ud_entity = self.build_role_assignment_entity( domain_id=domain['id'], user_id=user1['id'], role_id=role_list[3]['id'], inherited_to_projects=True) self.assertRoleAssignmentInListResponse(r, ud_entity) # Now ask for effective list role assignments - the role should # turn into a project role, along with the two direct roles that are # on the project collection_url = ( '/role_assignments?effective&user.id=%(user_id)s' '&scope.project.id=%(project_id)s' % { 'user_id': user1['id'], 'project_id': project1['id']}) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse(r, expected_length=3, resource_url=collection_url) # An effective role for an inherited role will be a project # entity, with a domain link to the inherited assignment ud_url = self.build_role_assignment_link( domain_id=domain['id'], user_id=user1['id'], role_id=role_list[3]['id'], inherited_to_projects=True) up_entity = self.build_role_assignment_entity( link=ud_url, project_id=project1['id'], user_id=user1['id'], role_id=role_list[3]['id'], inherited_to_projects=True) self.assertRoleAssignmentInListResponse(r, up_entity) def _test_list_role_assignments_include_names(self, role1): """Call ``GET /role_assignments with include names``. 
        Test Plan:

        - Create a domain with a group and a user
        - Create a project with a group and a user

        """
        user1 = unit.create_user(
            PROVIDERS.identity_api, domain_id=self.domain_id
        )
        group = unit.new_group_ref(domain_id=self.domain_id)
        group = PROVIDERS.identity_api.create_group(group)
        project1 = unit.new_project_ref(domain_id=self.domain_id)
        PROVIDERS.resource_api.create_project(project1['id'], project1)

        expected_entity1 = self.build_role_assignment_entity_include_names(
            role_ref=role1,
            project_ref=project1,
            user_ref=user1)
        self.put(expected_entity1['links']['assignment'])
        expected_entity2 = self.build_role_assignment_entity_include_names(
            role_ref=role1,
            domain_ref=self.domain,
            group_ref=group)
        self.put(expected_entity2['links']['assignment'])
        expected_entity3 = self.build_role_assignment_entity_include_names(
            role_ref=role1,
            domain_ref=self.domain,
            user_ref=user1)
        self.put(expected_entity3['links']['assignment'])
        expected_entity4 = self.build_role_assignment_entity_include_names(
            role_ref=role1,
            project_ref=project1,
            group_ref=group)
        self.put(expected_entity4['links']['assignment'])

        collection_url_domain = (
            '/role_assignments?include_names&scope.domain.id=%(domain_id)s' % {
                'domain_id': self.domain_id})
        rs_domain = self.get(collection_url_domain)
        collection_url_project = (
            '/role_assignments?include_names&'
            'scope.project.id=%(project_id)s' % {
                'project_id': project1['id']})
        rs_project = self.get(collection_url_project)
        collection_url_group = (
            '/role_assignments?include_names&group.id=%(group_id)s' % {
                'group_id': group['id']})
        rs_group = self.get(collection_url_group)
        collection_url_user = (
            '/role_assignments?include_names&user.id=%(user_id)s' % {
                'user_id': user1['id']})
        rs_user = self.get(collection_url_user)
        collection_url_role = (
            '/role_assignments?include_names&role.id=%(role_id)s' % {
                'role_id': role1['id']})
        rs_role = self.get(collection_url_role)

        # Make sure all entities were created successfully
        self.assertEqual(http.client.OK, rs_domain.status_int)
        self.assertEqual(http.client.OK, rs_project.status_int)
        self.assertEqual(http.client.OK, rs_group.status_int)
        self.assertEqual(http.client.OK, rs_user.status_int)

        # Make sure we can get back the correct number of entities
        self.assertValidRoleAssignmentListResponse(
            rs_domain,
            expected_length=2,
            resource_url=collection_url_domain)
        self.assertValidRoleAssignmentListResponse(
            rs_project,
            expected_length=2,
            resource_url=collection_url_project)
        self.assertValidRoleAssignmentListResponse(
            rs_group,
            expected_length=2,
            resource_url=collection_url_group)
        self.assertValidRoleAssignmentListResponse(
            rs_user,
            expected_length=2,
            resource_url=collection_url_user)
        self.assertValidRoleAssignmentListResponse(
            rs_role,
            expected_length=4,
            resource_url=collection_url_role)

        # Verify all types of entities have the correct format
        self.assertRoleAssignmentInListResponse(rs_domain, expected_entity2)
        self.assertRoleAssignmentInListResponse(rs_project, expected_entity1)
        self.assertRoleAssignmentInListResponse(rs_group, expected_entity4)
        self.assertRoleAssignmentInListResponse(rs_user, expected_entity3)
        self.assertRoleAssignmentInListResponse(rs_role, expected_entity1)

    def test_list_role_assignments_include_names_global_role(self):
        role = unit.new_role_ref()
        PROVIDERS.role_api.create_role(role['id'], role)
        self._test_list_role_assignments_include_names(role)

    def test_list_role_assignments_include_names_domain_role(self):
        role = unit.new_role_ref(domain_id=self.domain['id'])
        PROVIDERS.role_api.create_role(role['id'], role)
        self._test_list_role_assignments_include_names(role)

    def test_remove_assignment_for_project_acting_as_domain(self):
        """Test goal: remove assignment for project acting as domain.

        Ensure that when we have two role assignments for the project acting
        as domain, one dealing with it as a domain and the other as a
        project, we are still able to remove those assignments later.

        Test plan:
        - Create a role and a domain with a user;
        - Grant a role for this user in this domain;
        - Grant a role for this user in the same entity as a project;
        - Ensure that both assignments were created and are valid;
        - Remove the domain assignment for the user and show that the
          project assignment for the user is still valid.

        """
        role = unit.new_role_ref()
        PROVIDERS.role_api.create_role(role['id'], role)
        domain = unit.new_domain_ref()
        PROVIDERS.resource_api.create_domain(domain['id'], domain)
        user = unit.create_user(PROVIDERS.identity_api, domain_id=domain['id'])

        assignment_domain = self.build_role_assignment_entity(
            role_id=role['id'], domain_id=domain['id'], user_id=user['id'],
            inherited_to_projects=False)
        assignment_project = self.build_role_assignment_entity(
            role_id=role['id'], project_id=domain['id'], user_id=user['id'],
            inherited_to_projects=False)

        self.put(assignment_domain['links']['assignment'])
        self.put(assignment_project['links']['assignment'])

        collection_url = '/role_assignments?user.id=%(user_id)s' % (
            {'user_id': user['id']})
        result = self.get(collection_url)

        # We have two role assignments, one for the domain scope and one for
        # the project scope
        self.assertValidRoleAssignmentListResponse(
            result, expected_length=2, resource_url=collection_url)
        self.assertRoleAssignmentInListResponse(result, assignment_domain)

        domain_url = '/domains/%s/users/%s/roles/%s' % (
            domain['id'], user['id'], role['id'])
        self.delete(domain_url)

        collection_url = '/role_assignments?user.id=%(user_id)s' % (
            {'user_id': user['id']})
        result = self.get(collection_url)

        # Now we only have one assignment for the project scope since the
        # domain scope was removed.
        self.assertValidRoleAssignmentListResponse(
            result, expected_length=1, resource_url=collection_url)
        self.assertRoleAssignmentInListResponse(result, assignment_project)

    def test_list_inherited_role_assignments_include_names(self):
        """Call ``GET /role_assignments?include_names``.

        Test goal: ensure calling list role assignments including names
        honors the inherited role assignments flag.

        Test plan:
        - Create a role and a domain with a user;
        - Create an inherited role assignment;
        - List role assignments for that user;
        - List role assignments for that user including names.
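
        For illustration only (hypothetical IDs), the two listings below use
        query URLs along the lines of ``/role_assignments?user.id=u1`` and
        ``/role_assignments?include_names&user.id=u1``.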
""" role = unit.new_role_ref() PROVIDERS.role_api.create_role(role['id'], role) domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(domain['id'], domain) user = unit.create_user(PROVIDERS.identity_api, domain_id=domain['id']) # Create and store expected assignment refs assignment = self.build_role_assignment_entity( role_id=role['id'], domain_id=domain['id'], user_id=user['id'], inherited_to_projects=True) assignment_names = self.build_role_assignment_entity_include_names( role_ref=role, domain_ref=domain, user_ref=user, inherited_assignment=True) # Ensure expected assignment refs are inherited and have the same URL self.assertEqual('projects', assignment['scope']['OS-INHERIT:inherited_to']) self.assertEqual('projects', assignment_names['scope']['OS-INHERIT:inherited_to']) self.assertEqual(assignment['links']['assignment'], assignment_names['links']['assignment']) self.put(assignment['links']['assignment']) collection_url = '/role_assignments?user.id=%(user_id)s' % ( {'user_id': user['id']}) result = self.get(collection_url) self.assertValidRoleAssignmentListResponse( result, expected_length=1, resource_url=collection_url) self.assertRoleAssignmentInListResponse(result, assignment) collection_url = ('/role_assignments?include_names&' 'user.id=%(user_id)s' % {'user_id': user['id']}) result = self.get(collection_url) self.assertValidRoleAssignmentListResponse( result, expected_length=1, resource_url=collection_url) self.assertRoleAssignmentInListResponse(result, assignment_names) def test_list_role_assignments_for_disabled_inheritance_extension(self): """Call ``GET /role_assignments with inherited domain grants``. Test Plan: - Issue the URL to add inherited role to the domain - Issue the URL to check effective roles on project include the inherited role - Disable the extension - Re-check the effective roles, proving the inherited role no longer shows up. 
""" role_list = [] for _ in range(4): role = unit.new_role_ref() PROVIDERS.role_api.create_role(role['id'], role) role_list.append(role) domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(domain['id'], domain) user1 = unit.create_user( PROVIDERS.identity_api, domain_id=domain['id'] ) project1 = unit.new_project_ref(domain_id=domain['id']) PROVIDERS.resource_api.create_project(project1['id'], project1) project2 = unit.new_project_ref(domain_id=domain['id']) PROVIDERS.resource_api.create_project(project2['id'], project2) # Add some roles to the project PROVIDERS.assignment_api.add_role_to_user_and_project( user1['id'], project1['id'], role_list[0]['id']) PROVIDERS.assignment_api.add_role_to_user_and_project( user1['id'], project1['id'], role_list[1]['id']) # ..and one on a different project as a spoiler PROVIDERS.assignment_api.add_role_to_user_and_project( user1['id'], project2['id'], role_list[2]['id']) # Now create our inherited role on the domain base_collection_url = ( '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % { 'domain_id': domain['id'], 'user_id': user1['id']}) member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % { 'collection_url': base_collection_url, 'role_id': role_list[3]['id']} collection_url = base_collection_url + '/inherited_to_projects' self.put(member_url) self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = self.get(collection_url) self.assertValidRoleListResponse(r, ref=role_list[3], resource_url=collection_url) # Get effective list role assignments - the role should # turn into a project role, along with the two direct roles that are # on the project collection_url = ( '/role_assignments?effective&user.id=%(user_id)s' '&scope.project.id=%(project_id)s' % { 'user_id': user1['id'], 'project_id': project1['id']}) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse(r, expected_length=3, resource_url=collection_url) ud_url = self.build_role_assignment_link( domain_id=domain['id'], user_id=user1['id'], role_id=role_list[3]['id'], inherited_to_projects=True) up_entity = self.build_role_assignment_entity( link=ud_url, project_id=project1['id'], user_id=user1['id'], role_id=role_list[3]['id'], inherited_to_projects=True) self.assertRoleAssignmentInListResponse(r, up_entity) def test_list_role_assignments_for_inherited_group_domain_grants(self): """Call ``GET /role_assignments with inherited group domain grants``. Test Plan: - Create 4 roles - Create a domain with a user and two projects - Assign two direct roles to project1 - Assign a spoiler role to project2 - Issue the URL to add inherited role to the domain - Issue the URL to check it is indeed on the domain - Issue the URL to check effective roles on project1 - this should return 3 roles. 
""" role_list = [] for _ in range(4): role = unit.new_role_ref() PROVIDERS.role_api.create_role(role['id'], role) role_list.append(role) domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(domain['id'], domain) user1 = unit.create_user( PROVIDERS.identity_api, domain_id=domain['id'] ) user2 = unit.create_user( PROVIDERS.identity_api, domain_id=domain['id'] ) group1 = unit.new_group_ref(domain_id=domain['id']) group1 = PROVIDERS.identity_api.create_group(group1) PROVIDERS.identity_api.add_user_to_group( user1['id'], group1['id'] ) PROVIDERS.identity_api.add_user_to_group( user2['id'], group1['id'] ) project1 = unit.new_project_ref(domain_id=domain['id']) PROVIDERS.resource_api.create_project(project1['id'], project1) project2 = unit.new_project_ref(domain_id=domain['id']) PROVIDERS.resource_api.create_project(project2['id'], project2) # Add some roles to the project PROVIDERS.assignment_api.add_role_to_user_and_project( user1['id'], project1['id'], role_list[0]['id']) PROVIDERS.assignment_api.add_role_to_user_and_project( user1['id'], project1['id'], role_list[1]['id']) # ..and one on a different project as a spoiler PROVIDERS.assignment_api.add_role_to_user_and_project( user1['id'], project2['id'], role_list[2]['id']) # Now create our inherited role on the domain base_collection_url = ( '/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % { 'domain_id': domain['id'], 'group_id': group1['id']}) member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % { 'collection_url': base_collection_url, 'role_id': role_list[3]['id']} collection_url = base_collection_url + '/inherited_to_projects' self.put(member_url) self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = self.get(collection_url) self.assertValidRoleListResponse(r, ref=role_list[3], resource_url=collection_url) # Now use the list domain role assignments api to check if this # is included collection_url = ( '/role_assignments?group.id=%(group_id)s' '&scope.domain.id=%(domain_id)s' % { 'group_id': group1['id'], 'domain_id': domain['id']}) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse(r, expected_length=1, resource_url=collection_url) gd_entity = self.build_role_assignment_entity( domain_id=domain['id'], group_id=group1['id'], role_id=role_list[3]['id'], inherited_to_projects=True) self.assertRoleAssignmentInListResponse(r, gd_entity) # Now ask for effective list role assignments - the role should # turn into a user project role, along with the two direct roles # that are on the project collection_url = ( '/role_assignments?effective&user.id=%(user_id)s' '&scope.project.id=%(project_id)s' % { 'user_id': user1['id'], 'project_id': project1['id']}) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse(r, expected_length=3, resource_url=collection_url) # An effective role for an inherited role will be a project # entity, with a domain link to the inherited assignment up_entity = self.build_role_assignment_entity( link=gd_entity['links']['assignment'], project_id=project1['id'], user_id=user1['id'], role_id=role_list[3]['id'], inherited_to_projects=True) self.assertRoleAssignmentInListResponse(r, up_entity) def test_filtered_role_assignments_for_inherited_grants(self): """Call ``GET /role_assignments?scope.OS-INHERIT:inherited_to``. 
        Test Plan:

        - Create 5 roles
        - Create a domain with a user, group and two projects
        - Assign three direct spoiler roles to projects
        - Issue the URL to add an inherited user role to the domain
        - Issue the URL to add an inherited group role to the domain
        - Issue the URL to filter by inherited roles - this should return
          just the 2 inherited roles.

        """
        role_list = []
        for _ in range(5):
            role = unit.new_role_ref()
            PROVIDERS.role_api.create_role(role['id'], role)
            role_list.append(role)

        domain = unit.new_domain_ref()
        PROVIDERS.resource_api.create_domain(domain['id'], domain)
        user1 = unit.create_user(
            PROVIDERS.identity_api, domain_id=domain['id']
        )
        group1 = unit.new_group_ref(domain_id=domain['id'])
        group1 = PROVIDERS.identity_api.create_group(group1)
        project1 = unit.new_project_ref(domain_id=domain['id'])
        PROVIDERS.resource_api.create_project(project1['id'], project1)
        project2 = unit.new_project_ref(domain_id=domain['id'])
        PROVIDERS.resource_api.create_project(project2['id'], project2)
        # Add some spoiler roles to the projects
        PROVIDERS.assignment_api.add_role_to_user_and_project(
            user1['id'], project1['id'], role_list[0]['id'])
        PROVIDERS.assignment_api.add_role_to_user_and_project(
            user1['id'], project2['id'], role_list[1]['id'])
        # Create a non-inherited role as a spoiler
        PROVIDERS.assignment_api.create_grant(
            role_list[2]['id'], user_id=user1['id'], domain_id=domain['id'])

        # Now create two inherited roles on the domain, one for a user
        # and one for a group
        base_collection_url = (
            '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
                'domain_id': domain['id'],
                'user_id': user1['id']})
        member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
            'collection_url': base_collection_url,
            'role_id': role_list[3]['id']}
        collection_url = base_collection_url + '/inherited_to_projects'

        self.put(member_url)
        self.head(member_url)
        self.get(member_url, expected_status=http.client.NO_CONTENT)
        r = self.get(collection_url)
        self.assertValidRoleListResponse(r, ref=role_list[3],
                                         resource_url=collection_url)

        base_collection_url = (
            '/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
                'domain_id': domain['id'],
                'group_id': group1['id']})
        member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
            'collection_url': base_collection_url,
            'role_id': role_list[4]['id']}
        collection_url = base_collection_url + '/inherited_to_projects'

        self.put(member_url)
        self.head(member_url)
        self.get(member_url, expected_status=http.client.NO_CONTENT)
        r = self.get(collection_url)
        self.assertValidRoleListResponse(r, ref=role_list[4],
                                         resource_url=collection_url)

        # Now use the list role assignments api to get a list of inherited
        # roles on the domain - should get back the two roles
        collection_url = (
            '/role_assignments?scope.OS-INHERIT:inherited_to=projects')
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   expected_length=2,
                                                   resource_url=collection_url)
        ud_entity = self.build_role_assignment_entity(
            domain_id=domain['id'], user_id=user1['id'],
            role_id=role_list[3]['id'], inherited_to_projects=True)
        gd_entity = self.build_role_assignment_entity(
            domain_id=domain['id'], group_id=group1['id'],
            role_id=role_list[4]['id'], inherited_to_projects=True)
        self.assertRoleAssignmentInListResponse(r, ud_entity)
        self.assertRoleAssignmentInListResponse(r, gd_entity)

    def _setup_hierarchical_projects_scenario(self):
        """Create basic hierarchical projects scenario.

        This basic scenario contains a root with one leaf project and
        two roles with the following names: non-inherited and inherited.
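
        Illustrative layout (role names are fixed, project IDs are
        generated):

            root project
                +-- leaf project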
""" # Create project hierarchy root = unit.new_project_ref(domain_id=self.domain['id']) leaf = unit.new_project_ref(domain_id=self.domain['id'], parent_id=root['id']) PROVIDERS.resource_api.create_project(root['id'], root) PROVIDERS.resource_api.create_project(leaf['id'], leaf) # Create 'non-inherited' and 'inherited' roles non_inherited_role = unit.new_role_ref(name='non-inherited') PROVIDERS.role_api.create_role( non_inherited_role['id'], non_inherited_role ) inherited_role = unit.new_role_ref(name='inherited') PROVIDERS.role_api.create_role(inherited_role['id'], inherited_role) return (root['id'], leaf['id'], non_inherited_role['id'], inherited_role['id']) def test_get_token_from_inherited_user_project_role_grants(self): # Create default scenario root_id, leaf_id, non_inherited_role_id, inherited_role_id = ( self._setup_hierarchical_projects_scenario()) # Define root and leaf projects authentication data root_project_auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], project_id=root_id) leaf_project_auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], project_id=leaf_id) # Check the user cannot get a token on root nor leaf project self.v3_create_token(root_project_auth_data, expected_status=http.client.UNAUTHORIZED) self.v3_create_token(leaf_project_auth_data, expected_status=http.client.UNAUTHORIZED) # Grant non-inherited role for user on leaf project non_inher_up_link = self.build_role_assignment_link( project_id=leaf_id, user_id=self.user['id'], role_id=non_inherited_role_id) self.put(non_inher_up_link) # Check the user can only get a token on leaf project self.v3_create_token(root_project_auth_data, expected_status=http.client.UNAUTHORIZED) self.v3_create_token(leaf_project_auth_data) # Grant inherited role for user on root project inher_up_link = self.build_role_assignment_link( project_id=root_id, user_id=self.user['id'], role_id=inherited_role_id, inherited_to_projects=True) self.put(inher_up_link) # Check the user still can get a token only on leaf project self.v3_create_token(root_project_auth_data, expected_status=http.client.UNAUTHORIZED) self.v3_create_token(leaf_project_auth_data) # Delete non-inherited grant self.delete(non_inher_up_link) # Check the inherited role still applies for leaf project self.v3_create_token(root_project_auth_data, expected_status=http.client.UNAUTHORIZED) self.v3_create_token(leaf_project_auth_data) # Delete inherited grant self.delete(inher_up_link) # Check the user cannot get a token on leaf project anymore self.v3_create_token(leaf_project_auth_data, expected_status=http.client.UNAUTHORIZED) def test_get_token_from_inherited_group_project_role_grants(self): # Create default scenario root_id, leaf_id, non_inherited_role_id, inherited_role_id = ( self._setup_hierarchical_projects_scenario()) # Create group and add user to it group = unit.new_group_ref(domain_id=self.domain['id']) group = PROVIDERS.identity_api.create_group(group) PROVIDERS.identity_api.add_user_to_group(self.user['id'], group['id']) # Define root and leaf projects authentication data root_project_auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], project_id=root_id) leaf_project_auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], project_id=leaf_id) # Check the user cannot get a token on root nor leaf project self.v3_create_token(root_project_auth_data, 
                             expected_status=http.client.UNAUTHORIZED)
        self.v3_create_token(leaf_project_auth_data,
                             expected_status=http.client.UNAUTHORIZED)

        # Grant non-inherited role for group on leaf project
        non_inher_gp_link = self.build_role_assignment_link(
            project_id=leaf_id, group_id=group['id'],
            role_id=non_inherited_role_id)
        self.put(non_inher_gp_link)

        # Check the user can only get a token on leaf project
        self.v3_create_token(root_project_auth_data,
                             expected_status=http.client.UNAUTHORIZED)
        self.v3_create_token(leaf_project_auth_data)

        # Grant inherited role for group on root project
        inher_gp_link = self.build_role_assignment_link(
            project_id=root_id, group_id=group['id'],
            role_id=inherited_role_id, inherited_to_projects=True)
        self.put(inher_gp_link)

        # Check the user still can get a token only on leaf project
        self.v3_create_token(root_project_auth_data,
                             expected_status=http.client.UNAUTHORIZED)
        self.v3_create_token(leaf_project_auth_data)

        # Delete non-inherited grant
        self.delete(non_inher_gp_link)

        # Check the inherited role still applies for leaf project
        self.v3_create_token(leaf_project_auth_data)

        # Delete inherited grant
        self.delete(inher_gp_link)

        # Check the user cannot get a token on leaf project anymore
        self.v3_create_token(leaf_project_auth_data,
                             expected_status=http.client.UNAUTHORIZED)

    def test_get_role_assignments_for_project_hierarchy(self):
        """Call ``GET /role_assignments``.

        Test Plan:

        - Create 2 roles
        - Create a hierarchy of projects with one root and one leaf project
        - Issue the URL to add a non-inherited user role to the root project
        - Issue the URL to add an inherited user role to the root project
        - Issue the URL to get all role assignments - this should return just
          2 roles (non-inherited and inherited) in the root project.

        """
        # Create default scenario
        root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
            self._setup_hierarchical_projects_scenario())

        # Grant non-inherited role
        non_inher_up_entity = self.build_role_assignment_entity(
            project_id=root_id, user_id=self.user['id'],
            role_id=non_inherited_role_id)
        self.put(non_inher_up_entity['links']['assignment'])

        # Grant inherited role
        inher_up_entity = self.build_role_assignment_entity(
            project_id=root_id, user_id=self.user['id'],
            role_id=inherited_role_id, inherited_to_projects=True)
        self.put(inher_up_entity['links']['assignment'])

        # Get role assignments
        collection_url = '/role_assignments'
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   resource_url=collection_url)

        # Assert that the user has non-inherited role on root project
        self.assertRoleAssignmentInListResponse(r, non_inher_up_entity)

        # Assert that the user has inherited role on root project
        self.assertRoleAssignmentInListResponse(r, inher_up_entity)

        # Assert that the user does not have non-inherited role on leaf
        # project
        non_inher_up_entity = self.build_role_assignment_entity(
            project_id=leaf_id, user_id=self.user['id'],
            role_id=non_inherited_role_id)
        self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)

        # Assert that the user does not have inherited role on leaf project
        inher_up_entity['scope']['project']['id'] = leaf_id
        self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)

    def test_get_effective_role_assignments_for_project_hierarchy(self):
        """Call ``GET /role_assignments?effective``.
        Test Plan:

        - Create 2 roles
        - Create a hierarchy of projects with one root and one leaf project
        - Issue the URL to add a non-inherited user role to the root project
        - Issue the URL to add an inherited user role to the root project
        - Issue the URL to get effective role assignments - this should return
          1 role (non-inherited) on the root project and 1 role (inherited) on
          the leaf project.

        """
        # Create default scenario
        root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
            self._setup_hierarchical_projects_scenario())

        # Grant non-inherited role
        non_inher_up_entity = self.build_role_assignment_entity(
            project_id=root_id, user_id=self.user['id'],
            role_id=non_inherited_role_id)
        self.put(non_inher_up_entity['links']['assignment'])

        # Grant inherited role
        inher_up_entity = self.build_role_assignment_entity(
            project_id=root_id, user_id=self.user['id'],
            role_id=inherited_role_id, inherited_to_projects=True)
        self.put(inher_up_entity['links']['assignment'])

        # Get effective role assignments
        collection_url = '/role_assignments?effective'
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
                                                   resource_url=collection_url)

        # Assert that the user has non-inherited role on root project
        self.assertRoleAssignmentInListResponse(r, non_inher_up_entity)

        # Assert that the user does not have inherited role on root project
        self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)

        # Assert that the user does not have non-inherited role on leaf
        # project
        non_inher_up_entity = self.build_role_assignment_entity(
            project_id=leaf_id, user_id=self.user['id'],
            role_id=non_inherited_role_id)
        self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)

        # Assert that the user has inherited role on leaf project
        inher_up_entity['scope']['project']['id'] = leaf_id
        self.assertRoleAssignmentInListResponse(r, inher_up_entity)

    def test_project_id_specified_if_include_subtree_specified(self):
        """When using include_subtree, you must specify a project ID."""
        r = self.get('/role_assignments?include_subtree=True',
                     expected_status=http.client.BAD_REQUEST)
        error_msg = ("scope.project.id must be specified if include_subtree "
                     "is also specified")
        self.assertEqual(error_msg, r.result['error']['message'])
        r = self.get('/role_assignments?scope.project.id&'
                     'include_subtree=True',
                     expected_status=http.client.BAD_REQUEST)
        self.assertEqual(error_msg, r.result['error']['message'])

    def test_get_role_assignments_for_project_tree(self):
        """Call ``GET /role_assignments?scope.project.id=X&include_subtree``.

        Test Plan:

        - Create 2 roles and a hierarchy of projects with one root and one
          leaf
        - Issue the URL to add a non-inherited user role to the root project
          and the leaf project
        - Issue the URL to get role assignments for the root project but not
          the subtree - this should return just the root assignment
        - Issue the URL to get role assignments for the root project and its
          subtree - this should return both assignments
        - Check that explicitly setting include_subtree to False is
          equivalent to not including it at all in the query.
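
        For illustration only (hypothetical IDs), the three listings below
        use query URLs along the lines of:

            /role_assignments?scope.project.id=p1
            /role_assignments?scope.project.id=p1&include_subtree=True
            /role_assignments?scope.project.id=p1&include_subtree=0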
""" # Create default scenario root_id, leaf_id, non_inherited_role_id, unused_role_id = ( self._setup_hierarchical_projects_scenario()) # Grant non-inherited role to root and leaf projects non_inher_entity_root = self.build_role_assignment_entity( project_id=root_id, user_id=self.user['id'], role_id=non_inherited_role_id) self.put(non_inher_entity_root['links']['assignment']) non_inher_entity_leaf = self.build_role_assignment_entity( project_id=leaf_id, user_id=self.user['id'], role_id=non_inherited_role_id) self.put(non_inher_entity_leaf['links']['assignment']) # Without the subtree, we should get the one assignment on the # root project collection_url = ( '/role_assignments?scope.project.id=%(project)s' % { 'project': root_id}) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( r, resource_url=collection_url) self.assertThat(r.result['role_assignments'], matchers.HasLength(1)) self.assertRoleAssignmentInListResponse(r, non_inher_entity_root) # With the subtree, we should get both assignments collection_url = ( '/role_assignments?scope.project.id=%(project)s' '&include_subtree=True' % { 'project': root_id}) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( r, resource_url=collection_url) self.assertThat(r.result['role_assignments'], matchers.HasLength(2)) self.assertRoleAssignmentInListResponse(r, non_inher_entity_root) self.assertRoleAssignmentInListResponse(r, non_inher_entity_leaf) # With subtree=0, we should also only get the one assignment on the # root project collection_url = ( '/role_assignments?scope.project.id=%(project)s' '&include_subtree=0' % { 'project': root_id}) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( r, resource_url=collection_url) self.assertThat(r.result['role_assignments'], matchers.HasLength(1)) self.assertRoleAssignmentInListResponse(r, non_inher_entity_root) def test_get_effective_role_assignments_for_project_tree(self): """Get role_assignment ?project_id=X&include_subtree=True&effective``. 
        Test Plan:

        - Create 2 roles and a hierarchy of projects with one root and 4
          levels of child projects
        - Issue the URL to add a non-inherited user role to the root project
          and a level 1 project
        - Issue the URL to add an inherited user role on the level 2 project
        - Issue the URL to get effective role assignments for the level 1
          project and its subtree - this should return a role (non-inherited)
          on the level 1 project and roles (inherited) on each of the level
          2, 3 and 4 projects

        """
        # Create default scenario
        root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
            self._setup_hierarchical_projects_scenario())

        # Add some extra projects to the project hierarchy
        level2 = unit.new_project_ref(domain_id=self.domain['id'],
                                      parent_id=leaf_id)
        level3 = unit.new_project_ref(domain_id=self.domain['id'],
                                      parent_id=level2['id'])
        level4 = unit.new_project_ref(domain_id=self.domain['id'],
                                      parent_id=level3['id'])
        PROVIDERS.resource_api.create_project(level2['id'], level2)
        PROVIDERS.resource_api.create_project(level3['id'], level3)
        PROVIDERS.resource_api.create_project(level4['id'], level4)

        # Grant non-inherited role to root (as a spoiler) and to
        # the level 1 (leaf) project
        non_inher_entity_root = self.build_role_assignment_entity(
            project_id=root_id, user_id=self.user['id'],
            role_id=non_inherited_role_id)
        self.put(non_inher_entity_root['links']['assignment'])
        non_inher_entity_leaf = self.build_role_assignment_entity(
            project_id=leaf_id, user_id=self.user['id'],
            role_id=non_inherited_role_id)
        self.put(non_inher_entity_leaf['links']['assignment'])

        # Grant inherited role to level 2
        inher_entity = self.build_role_assignment_entity(
            project_id=level2['id'], user_id=self.user['id'],
            role_id=inherited_role_id, inherited_to_projects=True)
        self.put(inher_entity['links']['assignment'])

        # Get effective role assignments
        collection_url = (
            '/role_assignments?scope.project.id=%(project)s'
            '&include_subtree=True&effective' % {
                'project': leaf_id})
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(
            r, resource_url=collection_url)

        # There should be three assignments returned in total
        self.assertThat(r.result['role_assignments'], matchers.HasLength(3))

        # Assert that the user does not have the non-inherited role on the
        # root project
        self.assertRoleAssignmentNotInListResponse(r, non_inher_entity_root)

        # Assert that the user does have the non-inherited role on the leaf
        # project
        self.assertRoleAssignmentInListResponse(r, non_inher_entity_leaf)

        # Assert that the user has the inherited role on levels 3 and 4
        inher_entity['scope']['project']['id'] = level3['id']
        self.assertRoleAssignmentInListResponse(r, inher_entity)
        inher_entity['scope']['project']['id'] = level4['id']
        self.assertRoleAssignmentInListResponse(r, inher_entity)

    def test_get_inherited_role_assignments_for_project_hierarchy(self):
        """Call ``GET /role_assignments?scope.OS-INHERIT:inherited_to``.

        Test Plan:

        - Create 2 roles
        - Create a hierarchy of projects with one root and one leaf project
        - Issue the URL to add a non-inherited user role to the root project
        - Issue the URL to add an inherited user role to the root project
        - Issue the URL to filter inherited-to-projects role assignments -
          this should return 1 role (inherited) on the root project.
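
        For illustration only, the filter below uses the query URL
        ``/role_assignments?scope.OS-INHERIT:inherited_to=projects``, as
        built in the test body.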
""" # Create default scenario root_id, leaf_id, non_inherited_role_id, inherited_role_id = ( self._setup_hierarchical_projects_scenario()) # Grant non-inherited role non_inher_up_entity = self.build_role_assignment_entity( project_id=root_id, user_id=self.user['id'], role_id=non_inherited_role_id) self.put(non_inher_up_entity['links']['assignment']) # Grant inherited role inher_up_entity = self.build_role_assignment_entity( project_id=root_id, user_id=self.user['id'], role_id=inherited_role_id, inherited_to_projects=True) self.put(inher_up_entity['links']['assignment']) # Get inherited role assignments collection_url = ('/role_assignments' '?scope.OS-INHERIT:inherited_to=projects') r = self.get(collection_url) self.assertValidRoleAssignmentListResponse(r, resource_url=collection_url) # Assert that the user does not have non-inherited role on root project self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity) # Assert that the user has inherited role on root project self.assertRoleAssignmentInListResponse(r, inher_up_entity) # Assert that the user does not have non-inherited role on leaf project non_inher_up_entity = self.build_role_assignment_entity( project_id=leaf_id, user_id=self.user['id'], role_id=non_inherited_role_id) self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity) # Assert that the user does not have inherited role on leaf project inher_up_entity['scope']['project']['id'] = leaf_id self.assertRoleAssignmentNotInListResponse(r, inher_up_entity) class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, unit.TestCase): def _create_role(self): """Call ``POST /roles``.""" ref = unit.new_role_ref() r = self.post('/roles', body={'role': ref}) return self.assertValidRoleResponse(r, ref) def test_list_implied_roles_none(self): self.prior = self._create_role() url = '/roles/%s/implies' % (self.prior['id']) response = self.get(url).json["role_inference"] self.head(url, expected_status=http.client.OK) self.assertEqual(self.prior['id'], response['prior_role']['id']) self.assertEqual(0, len(response['implies'])) def _create_implied_role(self, prior, implied): self.put('/roles/%s/implies/%s' % (prior['id'], implied['id']), expected_status=http.client.CREATED) def _delete_implied_role(self, prior, implied): self.delete('/roles/%s/implies/%s' % (prior['id'], implied['id'])) def _setup_prior_two_implied(self): self.prior = self._create_role() self.implied1 = self._create_role() self._create_implied_role(self.prior, self.implied1) self.implied2 = self._create_role() self._create_implied_role(self.prior, self.implied2) def _assert_expected_implied_role_response( self, expected_prior_id, expected_implied_ids): r = self.get('/roles/%s/implies' % expected_prior_id) response = r.json role_inference = response['role_inference'] self.assertEqual(expected_prior_id, role_inference['prior_role']['id']) prior_link = '/v3/roles/' + expected_prior_id + '/implies' self.assertThat(response['links']['self'], matchers.EndsWith(prior_link)) actual_implied_ids = [implied['id'] for implied in role_inference['implies']] self.assertItemsEqual(expected_implied_ids, actual_implied_ids) self.assertIsNotNone(role_inference['prior_role']['links']['self']) for implied in role_inference['implies']: self.assertIsNotNone(implied['links']['self']) def _assert_expected_role_inference_rule_response( self, expected_prior_id, expected_implied_id): url = '/roles/%s/implies/%s' % (expected_prior_id, expected_implied_id) response = self.get(url).json 
self.assertThat(response['links']['self'], matchers.EndsWith('/v3%s' % url)) role_inference = response['role_inference'] prior_role = role_inference['prior_role'] self.assertEqual(expected_prior_id, prior_role['id']) self.assertIsNotNone(prior_role['name']) self.assertThat(prior_role['links']['self'], matchers.EndsWith('/v3/roles/%s' % expected_prior_id)) implied_role = role_inference['implies'] self.assertEqual(expected_implied_id, implied_role['id']) self.assertIsNotNone(implied_role['name']) self.assertThat(implied_role['links']['self'], matchers.EndsWith( '/v3/roles/%s' % expected_implied_id)) def _assert_two_roles_implied(self): self._assert_expected_implied_role_response( self.prior['id'], [self.implied1['id'], self.implied2['id']]) self._assert_expected_role_inference_rule_response( self.prior['id'], self.implied1['id']) self._assert_expected_role_inference_rule_response( self.prior['id'], self.implied2['id']) def _assert_one_role_implied(self): self._assert_expected_implied_role_response( self.prior['id'], [self.implied1['id']]) self.get('/roles/%s/implies/%s' % (self.prior['id'], self.implied2['id']), expected_status=http.client.NOT_FOUND) def _assert_two_rules_defined(self): r = self.get('/role_inferences/') rules = r.result['role_inferences'] self.assertEqual(self.prior['id'], rules[0]['prior_role']['id']) self.assertEqual(2, len(rules[0]['implies'])) implied_ids = [implied['id'] for implied in rules[0]['implies']] implied_names = [implied['name'] for implied in rules[0]['implies']] self.assertIn(self.implied1['id'], implied_ids) self.assertIn(self.implied2['id'], implied_ids) self.assertIn(self.implied1['name'], implied_names) self.assertIn(self.implied2['name'], implied_names) def _assert_one_rule_defined(self): r = self.get('/role_inferences/') rules = r.result['role_inferences'] self.assertEqual(self.prior['id'], rules[0]['prior_role']['id']) self.assertEqual(self.implied1['id'], rules[0]['implies'][0]['id']) self.assertEqual(self.implied1['name'], rules[0]['implies'][0]['name']) self.assertEqual(1, len(rules[0]['implies'])) def test_list_all_rules(self): self._setup_prior_two_implied() self._assert_two_rules_defined() self._delete_implied_role(self.prior, self.implied2) self._assert_one_rule_defined() def test_CRD_implied_roles(self): self._setup_prior_two_implied() self._assert_two_roles_implied() self._delete_implied_role(self.prior, self.implied2) self._assert_one_role_implied() def _create_three_roles(self): self.role_list = [] for _ in range(3): role = unit.new_role_ref() PROVIDERS.role_api.create_role(role['id'], role) self.role_list.append(role) def _create_test_domain_user_project(self): domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(domain['id'], domain) user = unit.create_user(PROVIDERS.identity_api, domain_id=domain['id']) project = unit.new_project_ref(domain_id=domain['id']) PROVIDERS.resource_api.create_project(project['id'], project) return domain, user, project def _assign_top_role_to_user_on_project(self, user, project): PROVIDERS.assignment_api.add_role_to_user_and_project( user['id'], project['id'], self.role_list[0]['id']) def _build_effective_role_assignments_url(self, user): return '/role_assignments?effective&user.id=%(user_id)s' % { 'user_id': user['id']} def _assert_all_roles_in_assignment(self, response, user): # Now use the list role assignments api to check that all three roles # appear in the collection self.assertValidRoleAssignmentListResponse( response, expected_length=len(self.role_list), 
            resource_url=self._build_effective_role_assignments_url(user))

    def _assert_initial_assignment_in_effective(self, response, user, project):
        # The initial assignment should be there (the link url will be
        # generated and checked automatically since it matches the assignment)
        entity = self.build_role_assignment_entity(
            project_id=project['id'],
            user_id=user['id'],
            role_id=self.role_list[0]['id'])
        self.assertRoleAssignmentInListResponse(response, entity)

    def _assert_effective_role_for_implied_has_prior_in_links(
            self, response, user, project, prior_index, implied_index):
        # An effective role for an implied role will have the prior role
        # assignment in the links
        prior_link = '/prior_roles/%(prior)s/implies/%(implied)s' % {
            'prior': self.role_list[prior_index]['id'],
            'implied': self.role_list[implied_index]['id']}
        link = self.build_role_assignment_link(
            project_id=project['id'],
            user_id=user['id'],
            role_id=self.role_list[prior_index]['id'])
        entity = self.build_role_assignment_entity(
            link=link,
            project_id=project['id'],
            user_id=user['id'],
            role_id=self.role_list[implied_index]['id'],
            prior_link=prior_link)
        self.assertRoleAssignmentInListResponse(response, entity)

    def test_list_role_assignments_with_implied_roles(self):
        """Call ``GET /role_assignments`` with implied role grant.

        Test Plan:

        - Create a domain with a user and a project
        - Create 3 roles
        - Role 0 implies role 1 and role 1 implies role 2
        - Assign the top role to the project
        - Issue the URL to check effective roles on project - this
          should return all 3 roles.
        - Check the links of the 3 roles indicate the prior role where
          appropriate

        """
        (domain, user, project) = self._create_test_domain_user_project()
        self._create_three_roles()
        self._create_implied_role(self.role_list[0], self.role_list[1])
        self._create_implied_role(self.role_list[1], self.role_list[2])
        self._assign_top_role_to_user_on_project(user, project)

        response = self.get(self._build_effective_role_assignments_url(user))
        r = response

        self._assert_all_roles_in_assignment(r, user)
        self._assert_initial_assignment_in_effective(response, user, project)
        self._assert_effective_role_for_implied_has_prior_in_links(
            response, user, project, 0, 1)
        self._assert_effective_role_for_implied_has_prior_in_links(
            response, user, project, 1, 2)

    def _create_named_role(self, name):
        role = unit.new_role_ref()
        role['name'] = name
        PROVIDERS.role_api.create_role(role['id'], role)
        return role

    def test_root_role_as_implied_role_forbidden(self):
        """Test that a root role is forbidden from being an implied role.

        Create 2 roles that are prohibited from being an implied role.
        Create 1 additional role which should be accepted as an implied
        role. Assure the prohibited role names cannot be set as implied
        roles. Assure the accepted role name, which is not a member of the
        prohibited implied role list, can be successfully set as an implied
        role.
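
        For illustration only (hypothetical names): with
        prohibited_implied_role set to ['root1', 'root2'], a
        PUT /roles/{prior_id}/implies/{root1_id} request is expected to
        return 403 Forbidden, while a role named 'implied1' is accepted
        with 201 Created, as exercised in the test body below.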
""" prohibited_name1 = 'root1' prohibited_name2 = 'root2' accepted_name1 = 'implied1' prohibited_names = [prohibited_name1, prohibited_name2] self.config_fixture.config(group='assignment', prohibited_implied_role=prohibited_names) prior_role = self._create_role() prohibited_role1 = self._create_named_role(prohibited_name1) url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format( prior_role_id=prior_role['id'], implied_role_id=prohibited_role1['id']) self.put(url, expected_status=http.client.FORBIDDEN) prohibited_role2 = self._create_named_role(prohibited_name2) url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format( prior_role_id=prior_role['id'], implied_role_id=prohibited_role2['id']) self.put(url, expected_status=http.client.FORBIDDEN) accepted_role1 = self._create_named_role(accepted_name1) url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format( prior_role_id=prior_role['id'], implied_role_id=accepted_role1['id']) self.put(url, expected_status=http.client.CREATED) def test_trusts_from_implied_role(self): self._create_three_roles() self._create_implied_role(self.role_list[0], self.role_list[1]) self._create_implied_role(self.role_list[1], self.role_list[2]) self._assign_top_role_to_user_on_project(self.user, self.project) # Create a trustee and assign the prior role to her trustee = unit.create_user( PROVIDERS.identity_api, domain_id=self.domain_id ) ref = unit.new_trust_ref( trustor_user_id=self.user['id'], trustee_user_id=trustee['id'], project_id=self.project['id'], role_ids=[self.role_list[0]['id']]) r = self.post('/OS-TRUST/trusts', body={'trust': ref}) trust = r.result['trust'] # Only the role that was specified is in the trust, NOT implied roles self.assertEqual(self.role_list[0]['id'], trust['roles'][0]['id']) self.assertThat(trust['roles'], matchers.HasLength(1)) # Authenticate as the trustee auth_data = self.build_authentication_request( user_id=trustee['id'], password=trustee['password'], trust_id=trust['id']) r = self.v3_create_token(auth_data) token = r.result['token'] self.assertThat(token['roles'], matchers.HasLength(len(self.role_list))) for role in token['roles']: self.assertIn(role, self.role_list) for role in self.role_list: self.assertIn(role, token['roles']) def test_trusts_from_domain_specific_implied_role(self): self._create_three_roles() # Overwrite the first role with a domain specific role role = unit.new_role_ref(domain_id=self.domain_id) self.role_list[0] = PROVIDERS.role_api.create_role(role['id'], role) self._create_implied_role(self.role_list[0], self.role_list[1]) self._create_implied_role(self.role_list[1], self.role_list[2]) self._assign_top_role_to_user_on_project(self.user, self.project) # Create a trustee and assign the prior role to her trustee = unit.create_user( PROVIDERS.identity_api, domain_id=self.domain_id ) ref = unit.new_trust_ref( trustor_user_id=self.user['id'], trustee_user_id=trustee['id'], project_id=self.project['id'], role_ids=[self.role_list[0]['id']]) r = self.post('/OS-TRUST/trusts', body={'trust': ref}) trust = r.result['trust'] # Only the role that was specified is in the trust, NOT implied roles self.assertEqual(self.role_list[0]['id'], trust['roles'][0]['id']) self.assertThat(trust['roles'], matchers.HasLength(1)) # Authenticate as the trustee auth_data = self.build_authentication_request( user_id=trustee['id'], password=trustee['password'], trust_id=trust['id']) r = self.v3_create_token(auth_data) token = r.result['token'] # The token should have the roles implies by the domain specific role, # but 
not the domain specific role itself. self.assertThat(token['roles'], matchers.HasLength(len(self.role_list) - 1)) for role in token['roles']: self.assertIn(role, self.role_list) for role in [self.role_list[1], self.role_list[2]]: self.assertIn(role, token['roles']) self.assertNotIn(self.role_list[0], token['roles']) def test_global_role_cannot_imply_domain_specific_role(self): domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(domain['id'], domain) domain_role_ref = unit.new_role_ref(domain_id=domain['id']) domain_role = PROVIDERS.role_api.create_role( domain_role_ref['id'], domain_role_ref ) global_role_ref = unit.new_role_ref() global_role = PROVIDERS.role_api.create_role( global_role_ref['id'], global_role_ref ) self.put('/roles/%s/implies/%s' % (global_role['id'], domain_role['id']), expected_status=http.client.FORBIDDEN) class DomainSpecificRoleTests(test_v3.RestfulTestCase, unit.TestCase): def setUp(self): def create_role(domain_id=None): """Call ``POST /roles``.""" ref = unit.new_role_ref(domain_id=domain_id) r = self.post( '/roles', body={'role': ref}) return self.assertValidRoleResponse(r, ref) super(DomainSpecificRoleTests, self).setUp() self.domainA = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(self.domainA['id'], self.domainA) self.domainB = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(self.domainB['id'], self.domainB) self.global_role1 = create_role() self.global_role2 = create_role() # Since there maybe other global roles already created, let's count # them, so we can ensure we can check subsequent list responses # are correct r = self.get('/roles') self.existing_global_roles = len(r.result['roles']) # And now create some domain specific roles self.domainA_role1 = create_role(domain_id=self.domainA['id']) self.domainA_role2 = create_role(domain_id=self.domainA['id']) self.domainB_role = create_role(domain_id=self.domainB['id']) def test_get_and_list_domain_specific_roles(self): # Check we can get a domain specific role r = self.get('/roles/%s' % self.domainA_role1['id']) self.assertValidRoleResponse(r, self.domainA_role1) # If we list without specifying a domain, we should only get global # roles back. 
r = self.get('/roles') self.assertValidRoleListResponse( r, expected_length=self.existing_global_roles) self.assertRoleInListResponse(r, self.global_role1) self.assertRoleInListResponse(r, self.global_role2) self.assertRoleNotInListResponse(r, self.domainA_role1) self.assertRoleNotInListResponse(r, self.domainA_role2) self.assertRoleNotInListResponse(r, self.domainB_role) # Now list those in domainA, making sure that's all we get back r = self.get('/roles?domain_id=%s' % self.domainA['id']) self.assertValidRoleListResponse(r, expected_length=2) self.assertRoleInListResponse(r, self.domainA_role1) self.assertRoleInListResponse(r, self.domainA_role2) def test_update_domain_specific_roles(self): self.domainA_role1['name'] = uuid.uuid4().hex self.patch('/roles/%(role_id)s' % { 'role_id': self.domainA_role1['id']}, body={'role': self.domainA_role1}) r = self.get('/roles/%s' % self.domainA_role1['id']) self.assertValidRoleResponse(r, self.domainA_role1) def test_delete_domain_specific_roles(self): # Check delete only removes that one domain role self.delete('/roles/%(role_id)s' % { 'role_id': self.domainA_role1['id']}) self.get('/roles/%s' % self.domainA_role1['id'], expected_status=http.client.NOT_FOUND) # Now re-list those in domainA, making sure there's only one left r = self.get('/roles?domain_id=%s' % self.domainA['id']) self.assertValidRoleListResponse(r, expected_length=1) self.assertRoleInListResponse(r, self.domainA_role2) def test_same_domain_assignment(self): user = unit.create_user(PROVIDERS.identity_api, domain_id=self.domainA['id']) projectA = unit.new_project_ref(domain_id=self.domainA['id']) PROVIDERS.resource_api.create_project(projectA['id'], projectA) PROVIDERS.assignment_api.create_grant( self.domainA_role1['id'], user_id=user['id'], project_id=projectA['id'] ) def test_cross_domain_assignment_valid(self): user = unit.create_user(PROVIDERS.identity_api, domain_id=self.domainB['id']) projectA = unit.new_project_ref(domain_id=self.domainA['id']) PROVIDERS.resource_api.create_project(projectA['id'], projectA) # Positive: a role on domainA can be assigned to a user from domainB # but only for use on a project from domainA PROVIDERS.assignment_api.create_grant( self.domainA_role1['id'], user_id=user['id'], project_id=projectA['id'] ) def test_cross_domain_assignment_invalid(self): user = unit.create_user(PROVIDERS.identity_api, domain_id=self.domainB['id']) projectB = unit.new_project_ref(domain_id=self.domainB['id']) PROVIDERS.resource_api.create_project(projectB['id'], projectB) # Negative: a role on domainA can be assigned to a user from domainB # only for a project from domainA self.assertRaises(exception.DomainSpecificRoleMismatch, PROVIDERS.assignment_api.create_grant, self.domainA_role1['id'], user_id=user['id'], project_id=projectB['id']) def test_cross_domain_implied_roles_authentication(self): # Create a user in domainB user = unit.create_user(PROVIDERS.identity_api, domain_id=self.domainB['id']) # Create project in domainA projectA = unit.new_project_ref(domain_id=self.domainA['id']) PROVIDERS.resource_api.create_project(projectA['id'], projectA) # Now we create an implied rule from a role in domainA to a # role in domainB self.put('/roles/%s/implies/%s' % (self.domainA_role1['id'], self.domainB_role['id']), expected_status=http.client.CREATED) # A role in domainA can be assigned to a user from domainB # only for a project from domainA PROVIDERS.assignment_api.create_grant( self.domainA_role1['id'], user_id=user['id'], project_id=projectA['id'] ) # The role assignments 
should return an empty list since domain roles # can only be used to imply another roles assignments = PROVIDERS.assignment_api.list_role_assignments( user_id=user['id'], effective=True) self.assertEqual([], assignments) # This also means we can't authenticate using the existing assignment auth_body = self.build_authentication_request( user_id=user['id'], password=user['password'], project_id=projectA['id']) self.post('/auth/tokens', body=auth_body, expected_status=http.client.UNAUTHORIZED) class ListUserProjectsTestCase(test_v3.RestfulTestCase): """Test for /users/<user>/projects.""" def load_sample_data(self): # do not load base class's data, keep it focused on the tests self.auths = [] self.domains = [] self.projects = [] self.roles = [] self.users = [] root_domain = unit.new_domain_ref( id=resource_base.NULL_DOMAIN_ID, name=resource_base.NULL_DOMAIN_ID ) self.resource_api.create_domain(resource_base.NULL_DOMAIN_ID, root_domain) # Create 3 sets of domain, roles, projects, and users to demonstrate # the right user's data is loaded and only projects they can access # are returned. for _ in range(3): domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(domain['id'], domain) user = unit.create_user( PROVIDERS.identity_api, domain_id=domain['id'] ) role = unit.new_role_ref() PROVIDERS.role_api.create_role(role['id'], role) PROVIDERS.assignment_api.create_grant( role['id'], user_id=user['id'], domain_id=domain['id'] ) project = unit.new_project_ref(domain_id=domain['id']) PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( role['id'], user_id=user['id'], project_id=project['id'] ) auth = self.build_authentication_request( user_id=user['id'], password=user['password'], domain_id=domain['id']) self.auths.append(auth) self.domains.append(domain) self.projects.append(project) self.roles.append(role) self.users.append(user) def test_list_head_all(self): for i in range(len(self.users)): user = self.users[i] auth = self.auths[i] url = '/users/%s/projects' % user['id'] result = self.get(url, auth=auth) projects_result = result.json['projects'] self.assertEqual(1, len(projects_result)) self.assertEqual(self.projects[i]['id'], projects_result[0]['id']) self.head(url, auth=auth, expected_status=http.client.OK) def test_list_enabled(self): for i in range(len(self.users)): user = self.users[i] auth = self.auths[i] # There are no disabled projects url = '/users/%s/projects?enabled=True' % user['id'] result = self.get(url, auth=auth) projects_result = result.json['projects'] self.assertEqual(1, len(projects_result)) self.assertEqual(self.projects[i]['id'], projects_result[0]['id']) def test_list_disabled(self): for i in range(len(self.users)): user = self.users[i] auth = self.auths[i] project = self.projects[i] # There are no disabled projects url = '/users/%s/projects?enabled=False' % user['id'] result = self.get(url, auth=auth) self.assertEqual(0, len(result.json['projects'])) # disable this one and check again project['enabled'] = False PROVIDERS.resource_api.update_project(project['id'], project) result = self.get(url, auth=auth) projects_result = result.json['projects'] self.assertEqual(1, len(projects_result)) self.assertEqual(self.projects[i]['id'], projects_result[0]['id']) def test_list_by_domain_id(self): for i in range(len(self.users)): user = self.users[i] domain = self.domains[i] auth = self.auths[i] # Try looking for projects with a non-existent domain_id url = '/users/%s/projects?domain_id=%s' % (user['id'], uuid.uuid4().hex) 
result = self.get(url, auth=auth) self.assertEqual(0, len(result.json['projects'])) # Now try a valid one url = '/users/%s/projects?domain_id=%s' % (user['id'], domain['id']) result = self.get(url, auth=auth) projects_result = result.json['projects'] self.assertEqual(1, len(projects_result)) self.assertEqual(self.projects[i]['id'], projects_result[0]['id']) # FIXME(lbragstad): These tests contain system-level API calls, which means # they will log a warning message if they are called with a project-scoped # token, regardless of the role assignment on the project. We need to fix # them by using a proper system-scoped admin token to make the call instead # of a project scoped token. class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, SystemRoleAssignmentMixin): def test_assign_system_role_to_user(self): system_role_id = self._create_new_role() # assign the user a role on the system member_url = ( '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], 'role_id': system_role_id } ) self.put(member_url) # validate the role assignment self.head(member_url) # list system roles collection_url = ( '/system/users/%(user_id)s/roles' % {'user_id': self.user['id']} ) roles = self.get(collection_url).json_body['roles'] self.assertEqual(len(roles), 1) self.assertEqual(roles[0]['id'], system_role_id) self.head(collection_url, expected_status=http.client.OK) response = self.get( '/role_assignments?scope.system=all&user.id=%(user_id)s' % { 'user_id': self.user['id'] } ) self.assertValidRoleAssignmentListResponse(response) def test_list_role_assignments_for_user_returns_all_assignments(self): system_role_id = self._create_new_role() # assign the user a role on the system member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], 'role_id': system_role_id } self.put(member_url) # the response should contain one role assignment for the system role # and one for a role that was setup during setUp(). 
response = self.get( '/role_assignments?user.id=%(user_id)s' % { 'user_id': self.user['id'] } ) self.assertValidRoleAssignmentListResponse(response, expected_length=2) def test_list_system_roles_for_user_returns_none_without_assignment(self): # list system roles for user collection_url = '/system/users/%(user_id)s/roles' % { 'user_id': self.user['id'] } response = self.get(collection_url) # assert that the user doesn't have any system role assignments, which # is denoted by an empty list self.assertEqual(response.json_body['roles'], []) response = self.get( '/role_assignments?scope.system=all&user.id=%(user_id)s' % { 'user_id': self.user['id'] } ) self.assertEqual(len(response.json_body['role_assignments']), 0) self.assertValidRoleAssignmentListResponse(response) def test_list_system_roles_for_user_does_not_return_project_roles(self): system_role_id = self._create_new_role() # assign the user a role on the system member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], 'role_id': system_role_id } self.put(member_url) # list project role assignments and save the role id of that # assignment, this assignment was created during setUp response = self.get( '/projects/%(project_id)s/users/%(user_id)s/roles' % { 'project_id': self.project['id'], 'user_id': self.user['id'] } ) self.assertEqual(len(response.json_body['roles']), 1) project_role_id = response.json_body['roles'][0]['id'] # list system role assignments collection_url = '/system/users/%(user_id)s/roles' % { 'user_id': self.user['id'] } response = self.get(collection_url) # assert the project role assignment is not in the system role # assignments for role in response.json_body['roles']: self.assertNotEqual(role['id'], project_role_id) # make sure the role_assignment API filters correctly based on system # scope response = self.get( '/role_assignments?scope.system=all&user.id=%(user_id)s' % { 'user_id': self.user['id'] } ) self.assertEqual(len(response.json_body['role_assignments']), 1) system_assignment = response.json_body['role_assignments'][0] self.assertEqual(system_assignment['role']['id'], system_role_id) self.assertTrue(system_assignment['scope']['system']['all']) # make sure the role_assignment API doesn't include the system role # assignment when we filter based on project path = ( '/role_assignments?scope.project.id=%(project_id)s&' 'user.id=%(user_id)s' ) % {'project_id': self.project['id'], 'user_id': self.user['id']} response = self.get(path) self.assertEqual(len(response.json_body['role_assignments']), 1) project_assignment = response.json_body['role_assignments'][0] self.assertEqual(project_assignment['role']['id'], project_role_id) def test_list_system_roles_for_user_does_not_return_domain_roles(self): system_role_id = self._create_new_role() domain_role_id = self._create_new_role() # assign a role to the user on a domain domain_member_url = ( '/domains/%(domain_id)s/users/%(user_id)s/roles/%(role_id)s' % { 'domain_id': self.user['domain_id'], 'user_id': self.user['id'], 'role_id': domain_role_id } ) self.put(domain_member_url) # assign the user a role on the system member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], 'role_id': system_role_id } self.put(member_url) # list domain role assignments response = self.get( '/domains/%(domain_id)s/users/%(user_id)s/roles' % { 'domain_id': self.user['domain_id'], 'user_id': self.user['id'] } ) self.assertEqual(len(response.json_body['roles']), 1) # list system role assignments collection_url = 
'/system/users/%(user_id)s/roles' % { 'user_id': self.user['id'] } response = self.get(collection_url) # assert the domain role assignment is not in the system role # assignments for role in response.json_body['roles']: self.assertNotEqual(role['id'], domain_role_id) # make sure the role_assignment API filters correctly based on system # scope response = self.get( '/role_assignments?scope.system=all&user.id=%(user_id)s' % { 'user_id': self.user['id'] } ) self.assertEqual(len(response.json_body['role_assignments']), 1) system_assignment = response.json_body['role_assignments'][0] self.assertEqual(system_assignment['role']['id'], system_role_id) self.assertTrue(system_assignment['scope']['system']['all']) # make sure the role_assignment API doesn't include the system role # assignment when we filter based on domain path = ( '/role_assignments?scope.domain.id=%(domain_id)s&' 'user.id=%(user_id)s' ) % {'domain_id': self.user['domain_id'], 'user_id': self.user['id']} response = self.get(path) self.assertEqual(len(response.json_body['role_assignments']), 1) domain_assignment = response.json_body['role_assignments'][0] self.assertEqual(domain_assignment['role']['id'], domain_role_id) def test_check_user_has_system_role_when_assignment_exists(self): system_role_id = self._create_new_role() # assign the user a role on the system member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], 'role_id': system_role_id } self.put(member_url) # check the user has the system role assignment self.head(member_url) def test_check_user_does_not_have_system_role_without_assignment(self): system_role_id = self._create_new_role() # check the user doesn't have the system role assignment member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], 'role_id': system_role_id } self.head(member_url, expected_status=http.client.NOT_FOUND) response = self.get( '/role_assignments?scope.system=all&user.id=%(user_id)s' % { 'user_id': self.user['id'] } ) self.assertEqual(len(response.json_body['role_assignments']), 0) self.assertValidRoleAssignmentListResponse(response) def test_unassign_system_role_from_user(self): system_role_id = self._create_new_role() # assign the user a role on the system member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], 'role_id': system_role_id } self.put(member_url) # ensure the user has the role assignment self.head(member_url) response = self.get( '/role_assignments?scope.system=all&user.id=%(user_id)s' % { 'user_id': self.user['id'] } ) self.assertEqual(len(response.json_body['role_assignments']), 1) self.assertValidRoleAssignmentListResponse(response) # remove the system role assignment from the user self.delete(member_url) # ensure the user doesn't have any system role assignments collection_url = '/system/users/%(user_id)s/roles' % { 'user_id': self.user['id'] } response = self.get(collection_url) self.assertEqual(len(response.json_body['roles']), 0) response = self.get( '/role_assignments?scope.system=all&user.id=%(user_id)s' % { 'user_id': self.user['id'] } ) self.assertValidRoleAssignmentListResponse(response, expected_length=0) def test_query_for_system_scope_and_domain_scope_fails(self): # When asking for assignments and providing query parameters, we # shouldn't be able to ask for two different types of scope. This is # also true for project + domain scope. 
path = ( '/role_assignments?scope.system=all' '&scope.domain.id=%(domain_id)s' ) % {'domain_id': self.domain_id} self.get(path, expected_status=http.client.BAD_REQUEST) def test_query_for_system_scope_and_project_scope_fails(self): # When asking for assignments and providing query parameters, we # shouldn't be able to ask for two different types of scope. This is # also true for project + domain scope. path = ( '/role_assignments?scope.system=all' '&scope.project.id=%(project_id)s' ) % {'project_id': self.project_id} self.get(path, expected_status=http.client.BAD_REQUEST) def test_query_for_role_id_does_not_return_system_user_roles(self): system_role_id = self._create_new_role() # assign the user a role on the system member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], 'role_id': system_role_id } self.put(member_url) # Make sure we only get one role assignment back since the system role # assignment shouldn't be returned. path = ( '/role_assignments?role.id=%(role_id)s&user.id=%(user_id)s' ) % {'role_id': self.role_id, 'user_id': self.user['id']} response = self.get(path) self.assertValidRoleAssignmentListResponse(response, expected_length=1) # FIXME(lbragstad): These tests contain system-level API calls, which means # they will log a warning message if they are called with a project-scoped # token, regardless of the role assignment on the project. We need to fix # them by using a proper system-scoped admin token to make the call instead # of a project scoped token. class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, SystemRoleAssignmentMixin): def test_assign_system_role_to_group(self): system_role_id = self._create_new_role() group = self._create_group() # assign the role to the group globally member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group['id'], 'role_id': system_role_id } self.put(member_url) # validate the role assignment self.head(member_url) # list global roles collection_url = '/system/groups/%(group_id)s/roles' % { 'group_id': group['id'] } roles = self.get(collection_url).json_body['roles'] self.assertEqual(len(roles), 1) self.assertEqual(roles[0]['id'], system_role_id) self.head(collection_url, expected_status=http.client.OK) response = self.get( '/role_assignments?scope.system=all&group.id=%(group_id)s' % { 'group_id': group['id'] } ) self.assertValidRoleAssignmentListResponse(response, expected_length=1) self.assertEqual( response.json_body['role_assignments'][0]['role']['id'], system_role_id ) def test_assign_system_role_to_non_existant_group_fails(self): system_role_id = self._create_new_role() group_id = uuid.uuid4().hex # assign the role to the group globally member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group_id, 'role_id': system_role_id } self.put(member_url, expected_status=http.client.NOT_FOUND) def test_list_role_assignments_for_group_returns_all_assignments(self): system_role_id = self._create_new_role() group = self._create_group() # assign the role to the group globally and on a single project member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group['id'], 'role_id': system_role_id } self.put(member_url) member_url = ( '/projects/%(project_id)s/groups/%(group_id)s/' 'roles/%(role_id)s' ) % { 'project_id': self.project_id, 'group_id': group['id'], 'role_id': system_role_id } self.put(member_url) # make sure both assignments exist in the response, there should be two response = self.get( 
'/role_assignments?group.id=%(group_id)s' % { 'group_id': group['id'] } ) self.assertValidRoleAssignmentListResponse(response, expected_length=2) def test_list_system_roles_for_group_returns_none_without_assignment(self): group = self._create_group() # list global roles for group collection_url = '/system/groups/%(group_id)s/roles' % { 'group_id': group['id'] } response = self.get(collection_url) # assert that the group doesn't have any system role assignments, which # is denoted by an empty list self.assertEqual(response.json_body['roles'], []) response = self.get( '/role_assignments?scope.system=all&group.id=%(group_id)s' % { 'group_id': group['id'] } ) self.assertValidRoleAssignmentListResponse(response, expected_length=0) def test_list_system_roles_for_group_does_not_return_project_roles(self): system_role_id = self._create_new_role() project_role_id = self._create_new_role() group = self._create_group() # assign the group a role on the system and a role on a project member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group['id'], 'role_id': system_role_id } self.put(member_url) member_url = ( '/projects/%(project_id)s/groups/%(group_id)s/' 'roles/%(role_id)s' ) % { 'project_id': self.project_id, 'group_id': group['id'], 'role_id': project_role_id } self.put(member_url) # list system role assignments collection_url = '/system/groups/%(group_id)s/roles' % { 'group_id': group['id'] } response = self.get(collection_url) # assert the project role assignment is not in the system role # assignments for role in response.json_body['roles']: self.assertNotEqual(role['id'], project_role_id) response = self.get( '/role_assignments?scope.system=all&group.id=%(group_id)s' % { 'group_id': group['id'] } ) self.assertValidRoleAssignmentListResponse(response, expected_length=1) def test_list_system_roles_for_group_does_not_return_domain_roles(self): system_role_id = self._create_new_role() domain_role_id = self._create_new_role() group = self._create_group() # assign a role to the group on a domain domain_member_url = ( '/domains/%(domain_id)s/groups/%(group_id)s/' 'roles/%(role_id)s' % { 'domain_id': group['domain_id'], 'group_id': group['id'], 'role_id': domain_role_id } ) self.put(domain_member_url) # assign the group a role on the system member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group['id'], 'role_id': system_role_id } self.put(member_url) # list domain role assignments response = self.get( '/domains/%(domain_id)s/groups/%(group_id)s/roles' % { 'domain_id': group['domain_id'], 'group_id': group['id'] } ) self.assertEqual(len(response.json_body['roles']), 1) # list system role assignments collection_url = '/system/groups/%(group_id)s/roles' % { 'group_id': group['id'] } response = self.get(collection_url) # assert the domain role assignment is not in the system role # assignments for role in response.json_body['roles']: self.assertNotEqual(role['id'], domain_role_id) response = self.get( '/role_assignments?scope.system=all&group.id=%(group_id)s' % { 'group_id': group['id'] } ) self.assertValidRoleAssignmentListResponse(response, expected_length=1) def test_check_group_has_system_role_when_assignment_exists(self): system_role_id = self._create_new_role() group = self._create_group() # assign the group a role on the system member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group['id'], 'role_id': system_role_id } self.put(member_url) # check the group has the system role assignment self.head(member_url) response = 
self.get( '/role_assignments?scope.system=all&group.id=%(group_id)s' % { 'group_id': group['id'] } ) self.assertValidRoleAssignmentListResponse(response, expected_length=1) self.assertEqual( response.json_body['role_assignments'][0]['role']['id'], system_role_id ) def test_check_group_does_not_have_system_role_without_assignment(self): system_role_id = self._create_new_role() group = self._create_group() # check the group doesn't have the system role assignment member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group['id'], 'role_id': system_role_id } self.head(member_url, expected_status=http.client.NOT_FOUND) response = self.get( '/role_assignments?scope.system=all&group.id=%(group_id)s' % { 'group_id': group['id'] } ) self.assertValidRoleAssignmentListResponse(response, expected_length=0) def test_unassign_system_role_from_group(self): system_role_id = self._create_new_role() group = self._create_group() # assign the group a role on the system member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group['id'], 'role_id': system_role_id } self.put(member_url) # ensure the group has the role assignment self.head(member_url) response = self.get( '/role_assignments?scope.system=all&group.id=%(group_id)s' % { 'group_id': group['id'] } ) self.assertEqual(len(response.json_body['role_assignments']), 1) self.assertValidRoleAssignmentListResponse(response) # remove the system role assignment from the group self.delete(member_url) # ensure the group doesn't have any system role assignments collection_url = '/system/groups/%(group_id)s/roles' % { 'group_id': group['id'] } response = self.get(collection_url) self.assertEqual(len(response.json_body['roles']), 0) response = self.get( '/role_assignments?scope.system=all&group.id=%(group_id)s' % { 'group_id': group['id'] } ) self.assertValidRoleAssignmentListResponse(response, expected_length=0) def test_query_for_role_id_does_not_return_system_group_roles(self): system_role_id = self._create_new_role() group = self._create_group() # assign the group a role on the system member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group['id'], 'role_id': system_role_id } self.put(member_url) # assign the group a role on a project member_url = ( '/projects/%(project_id)s/groups/%(group_id)s/roles/%(role_id)s' % {'project_id': self.project_id, 'group_id': group['id'], 'role_id': self.role_id} ) self.put(member_url) # Make sure we only get one role assignment back since the system role # assignment shouldn't be returned. path = ( '/role_assignments?role.id=%(role_id)s&group.id=%(group_id)s' ) % {'role_id': self.role_id, 'group_id': group['id']} response = self.get(path) self.assertValidRoleAssignmentListResponse(response, expected_length=1)
43.434965
79
0.628166
174,001
0.990606
0
0
4,973
0.028312
0
0
52,826
0.300744
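The keystone tests in the record above drive two HTTP surfaces: the implied-roles API (PUT /v3/roles/{prior_role_id}/implies/{implied_role_id}, GET /v3/role_inferences) and system-scoped assignments (PUT /v3/system/users/{user_id}/roles/{role_id}). A minimal sketch of calling those same endpoints outside the test harness follows; KEYSTONE_URL and ADMIN_TOKEN are illustrative assumptions, not values taken from this record.

# Sketch only: endpoint paths come from the tests above; the URL and token
# below are assumed placeholders for illustration.
import requests

KEYSTONE_URL = 'http://localhost:5000/v3'   # assumed keystone endpoint
ADMIN_TOKEN = 'replace-with-a-real-token'   # assumed admin credential
HEADERS = {'X-Auth-Token': ADMIN_TOKEN}

def create_implied_role(prior_id, implied_id):
    # PUT /v3/roles/{prior}/implies/{implied} creates the inference rule;
    # keystone answers 201 Created, which the tests above assert.
    url = '%s/roles/%s/implies/%s' % (KEYSTONE_URL, prior_id, implied_id)
    requests.put(url, headers=HEADERS).raise_for_status()

def list_role_inferences():
    # GET /v3/role_inferences lists every prior -> implied rule.
    resp = requests.get('%s/role_inferences' % KEYSTONE_URL, headers=HEADERS)
    resp.raise_for_status()
    return resp.json()['role_inferences']

def assign_system_role(user_id, role_id):
    # PUT /v3/system/users/{user}/roles/{role} grants a system-scoped role.
    url = '%s/system/users/%s/roles/%s' % (KEYSTONE_URL, user_id, role_id)
    requests.put(url, headers=HEADERS).raise_for_status()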
d9e48585d735333916bf5e8b10a68c72e4541093
248,866
py
Python
pysnmp-with-texts/XXX-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
8
2019-05-09T17:04:00.000Z
2021-06-09T06:50:51.000Z
pysnmp-with-texts/XXX-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
4
2019-05-31T16:42:59.000Z
2020-01-31T21:57:17.000Z
pysnmp-with-texts/XXX-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
10
2019-04-30T05:51:36.000Z
2022-02-16T03:33:41.000Z
# # PySNMP MIB module XXX-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/XXX-MIB # Produced by pysmi-0.3.4 at Wed May 1 15:44:42 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") SingleValueConstraint, ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection") ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup") Gauge32, ModuleIdentity, iso, Integer32, enterprises, ObjectIdentity, Unsigned32, Counter64, IpAddress, Bits, Counter32, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "ModuleIdentity", "iso", "Integer32", "enterprises", "ObjectIdentity", "Unsigned32", "Counter64", "IpAddress", "Bits", "Counter32", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "NotificationType") DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention") company = ModuleIdentity((1, 3, 6, 1, 4, 1, 6688)) company.setRevisions(('2009-03-05 00:00',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: company.setRevisionsDescriptions(('1G MC supported',)) if mibBuilder.loadTexts: company.setLastUpdated('200903050000Z') if mibBuilder.loadTexts: company.setOrganization('FiberRoad') if mibBuilder.loadTexts: company.setContactInfo('www.fiberroad.com.cn') if mibBuilder.loadTexts: company.setDescription('Media Converter NMS SNMP mib file') ipProduct = ObjectIdentity((1, 3, 6, 1, 4, 1, 6688, 1)) if mibBuilder.loadTexts: ipProduct.setStatus('current') if mibBuilder.loadTexts: ipProduct.setDescription('IP product line') height2HU = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1)) systemMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1)) alarmMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2)) shelfNum = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4))).setMaxAccess("readonly") if mibBuilder.loadTexts: shelfNum.setStatus('current') if mibBuilder.loadTexts: shelfNum.setDescription('The number of shelf in current system') shelfTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2), ) if mibBuilder.loadTexts: shelfTable.setStatus('current') if mibBuilder.loadTexts: shelfTable.setDescription('Shelf table') shelfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1), ).setIndexNames((0, "XXX-MIB", "shelfName")) if mibBuilder.loadTexts: shelfEntry.setStatus('current') if mibBuilder.loadTexts: shelfEntry.setDescription('Shelf entry definition') shelfName = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("master", 1), ("slave1", 2), ("slave2", 3), ("slave3", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: shelfName.setStatus('current') if mibBuilder.loadTexts: shelfName.setDescription('Shelf name') psuA = MibTableColumn((1, 3, 6, 1, 4, 
1, 6688, 1, 1, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("nc", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: psuA.setStatus('current') if mibBuilder.loadTexts: psuA.setDescription('The status fan A of current shelf') psuB = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("nc", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: psuB.setStatus('current') if mibBuilder.loadTexts: psuB.setDescription('The status psu B of current shelf') volA = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("normal", 1), ("abnormal", 2), ("nc", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: volA.setStatus('current') if mibBuilder.loadTexts: volA.setDescription('The voltage status of psuA of current shelf') volB = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("normal", 1), ("abnormal", 2), ("nc", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: volB.setStatus('current') if mibBuilder.loadTexts: volB.setDescription('The voltage status of psuB of current shelf') fan = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("nc", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: fan.setStatus('current') if mibBuilder.loadTexts: fan.setDescription('The status fan A of current shelf') temperature = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 7), Integer32()).setUnits(' oC').setMaxAccess("readonly") if mibBuilder.loadTexts: temperature.setStatus('current') if mibBuilder.loadTexts: temperature.setDescription('The temperature status of current shelf') coCardNum = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16))).setMaxAccess("readonly") if mibBuilder.loadTexts: coCardNum.setStatus('current') if mibBuilder.loadTexts: coCardNum.setDescription('The number of center card inserting of current shelf') rmtCardNum = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16))).setMaxAccess("readonly") if mibBuilder.loadTexts: rmtCardNum.setStatus('current') if mibBuilder.loadTexts: rmtCardNum.setDescription('The number of remote card inserting of current shelf') slotObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3)) slotTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1), ) if mibBuilder.loadTexts: slotTable.setStatus('current') if mibBuilder.loadTexts: slotTable.setDescription('Sparse table containing one entry for each slot in exist chassis in the system, indexed by shelfIdx and slotIdx.') slotEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1), ).setIndexNames((0, "XXX-MIB", "shelfIdx"), (0, "XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: slotEntry.setStatus('current') if mibBuilder.loadTexts: slotEntry.setDescription("in this table ,user can find the converter module's type inserted in the system's slot.then you can get the detail information about the specified type in the cardObjects 
table") shelfIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("master", 1), ("slave1", 2), ("slave2", 3), ("slave3", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: shelfIdx.setStatus('current') if mibBuilder.loadTexts: shelfIdx.setDescription('Chassis index - 1 = master management module, 2-4 = slave management module') slotIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17))).clone(namedValues=NamedValues(("slot01", 1), ("slot02", 2), ("slot03", 3), ("slot04", 4), ("slot05", 5), ("slot06", 6), ("slot07", 7), ("slot08", 8), ("slot09", 9), ("slot10", 10), ("slot11", 11), ("slot12", 12), ("slot13", 13), ("slot14", 14), ("slot15", 15), ("slot16", 16), ("slot17", 17)))).setMaxAccess("readonly") if mibBuilder.loadTexts: slotIdx.setStatus('current') if mibBuilder.loadTexts: slotIdx.setDescription("chassis's slot, which is an index in this table") coCardType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 100, 101, 102))).clone(namedValues=NamedValues(("no-card", 0), ("ip113s", 1), ("ip113f", 2), ("mc-1g-e2o", 3), ("mc-1g-o2o", 4), ("mc-4-25g-oeo", 5), ("mc-ip175d", 6), ("mc-10g-oeo", 7), ("mc-10g-oee", 8), ("mc-FAN", 9), ("mc-10g-oeo-1r", 10), ("mc-2-5g", 11), ("mc-40g-oeo", 12), ("mc-2-5g-t", 13), ("mc-2-5g-f", 14), ("mc-2-5g-mux-t", 15), ("mc-2-5g-mux-f", 16), ("mc-1g-e2o-backup", 17), ("mc-e1-1sfp", 18), ("mc-e1-2sfp", 19), ("mc-100m-sfp", 20), ("mc-1g-o2o-backup", 21), ("mc-cwdm-4", 22), ("mc-cwdm-8", 23), ("mc-10g-oeo-2r", 24), ("mc-qca8334", 25), ("mc-e1t1", 26), ("fr600f-mm", 100), ("fr600f-ms", 101), ("not-support", 102)))).setMaxAccess("readonly") if mibBuilder.loadTexts: coCardType.setStatus('current') if mibBuilder.loadTexts: coCardType.setDescription("local card's type inserted in the chassis") coCardDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 4), DisplayString()).setMaxAccess("readwrite") if mibBuilder.loadTexts: coCardDesc.setStatus('current') if mibBuilder.loadTexts: coCardDesc.setDescription("local card's description") rmtCardType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 100, 101, 102))).clone(namedValues=NamedValues(("no-card", 0), ("ip113sr", 1), ("ip113f", 2), ("mc-1g-e2o", 3), ("mc-1g-o2o", 4), ("mc-4-25g-oeor", 5), ("mc-ip175dr", 6), ("mc-10g-oeor", 7), ("mc-10g-oeer", 8), ("mc-FANr", 9), ("mc-10g-oeo-1rr", 10), ("mc-2-5gr", 11), ("mc-40g-oeor", 12), ("mc-2-5g-tr", 13), ("mc-2-5g-fr", 14), ("mc-2-5g-mux-tr", 15), ("mc-2-5g-mux-fr", 16), ("mc-1g-e2o-backupr", 17), ("mc-e1-1sfpr", 18), ("mc-e1-2sfpr", 19), ("mc-100m-sfpr", 20), ("mc-1g-o2o-backupr", 21), ("mc-cwdmr-4", 22), ("mc-cwdmr-8", 23), ("mc-10g-oeo-2rr", 24), ("mc-qca8334r", 25), ("mc-e1t1r", 26), ("fr600f-mm", 100), ("fr600f-ms", 101), ("not-support", 102)))).setMaxAccess("readonly") if mibBuilder.loadTexts: rmtCardType.setStatus('current') if mibBuilder.loadTexts: rmtCardType.setDescription("remote card's type connected with the local 
converter") rmtCardDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 6), DisplayString()).setMaxAccess("readwrite") if mibBuilder.loadTexts: rmtCardDesc.setStatus('current') if mibBuilder.loadTexts: rmtCardDesc.setDescription("remote card's description") cardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4)) nmuObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1)) nmuConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1)) nmuType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(100, 101, 102))).clone(namedValues=NamedValues(("fr600f-mm", 100), ("fr600f-ms", 101), ("not-support", 102)))).setMaxAccess("readonly") if mibBuilder.loadTexts: nmuType.setStatus('current') if mibBuilder.loadTexts: nmuType.setDescription('The type of NMU (network management unit)') ipaddr = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 2), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipaddr.setStatus('current') if mibBuilder.loadTexts: ipaddr.setDescription('The ethernet IP address of NMU (network management unit)') subnet = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 3), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: subnet.setStatus('current') if mibBuilder.loadTexts: subnet.setDescription('The ethernet mask address of NMU (network management unit)') gateway = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 4), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: gateway.setStatus('current') if mibBuilder.loadTexts: gateway.setDescription('The ethernet gateway address of NMU (network management unit)') sysContact = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 5), DisplayString()).setMaxAccess("readwrite") if mibBuilder.loadTexts: sysContact.setStatus('current') if mibBuilder.loadTexts: sysContact.setDescription('Mirror of the system.sysContact.0') sysName = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 6), DisplayString()).setMaxAccess("readwrite") if mibBuilder.loadTexts: sysName.setStatus('current') if mibBuilder.loadTexts: sysName.setDescription('Mirror of the system.sysName.0') sysLocation = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 7), DisplayString()).setMaxAccess("readwrite") if mibBuilder.loadTexts: sysLocation.setStatus('current') if mibBuilder.loadTexts: sysLocation.setDescription('Mirror of the system.sysLocation.0') trapHost1 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 8), IpAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: trapHost1.setStatus('current') if mibBuilder.loadTexts: trapHost1.setDescription("The first host's IP address used to receive trap messages, when set to 0 it simply deletes this entry. 
This applies to the trap host 2~4 below as well.") trapHost2 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 9), IpAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: trapHost2.setStatus('current') if mibBuilder.loadTexts: trapHost2.setDescription("The second host's IP address used to receive trap messages") trapHost3 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 10), IpAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: trapHost3.setStatus('current') if mibBuilder.loadTexts: trapHost3.setDescription("The third host's IP address used to receive trap messages") trapHost4 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 11), IpAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: trapHost4.setStatus('current') if mibBuilder.loadTexts: trapHost4.setDescription("The fourth host's IP address used to receive trap messages") mcCmObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2)) mcCmTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1), ) if mibBuilder.loadTexts: mcCmTable.setStatus('current') if mibBuilder.loadTexts: mcCmTable.setDescription('MC Configuration table') mcCmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx")) if mibBuilder.loadTexts: mcCmEntry.setStatus('current') if mibBuilder.loadTexts: mcCmEntry.setDescription('MC Configuration entry definition') mcShelfIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("master", 1), ("slave1", 2), ("slave2", 3), ("slave3", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcShelfIdx.setStatus('current') if mibBuilder.loadTexts: mcShelfIdx.setDescription('Shelf index') mcCardIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16))).clone(namedValues=NamedValues(("card01", 1), ("card02", 2), ("card03", 3), ("card04", 4), ("card05", 5), ("card06", 6), ("card07", 7), ("card08", 8), ("card09", 9), ("card10", 10), ("card11", 11), ("card12", 12), ("card13", 13), ("card14", 14), ("card15", 15), ("card16", 16)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcCardIdx.setStatus('current') if mibBuilder.loadTexts: mcCardIdx.setDescription('Card index') mcType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26))).clone(namedValues=NamedValues(("no-card", 0), ("ip113s", 1), ("ip113f", 2), ("mc-1g-e2o", 3), ("mc-1g-o2o", 4), ("mc-4-25g-oeo", 5), ("mc-ip175d", 6), ("mc-10g-oeo", 7), ("mc-10g-oee", 8), ("mc-FAN", 9), ("mc-10g-oeo-1r", 10), ("mc-2-5g", 11), ("mc-40g-oeo", 12), ("mc-2-5g-t", 13), ("mc-2-5g-f", 14), ("mc-2-5g-mux-t", 15), ("mc-2-5g-mux-f", 16), ("mc-1g-e2o-backup", 17), ("mc-e1-1sfp", 18), ("mc-e1-2sfp", 19), ("mc-100m-sfp", 20), ("mc-1g-o2o-backup", 21), ("mc-cwdm-4", 22), ("mc-cwdm-8", 23), ("mc-10g-oeo-2r", 24), ("mc-qca8334", 25), ("mc-e1t1", 26)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcType.setStatus('current') if mibBuilder.loadTexts: mcType.setDescription("Center card's type") mcTransceiverMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 4), 
Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("bidi", 1), ("duplex-fiber", 2), ("sfp", 3), ("not-support", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcTransceiverMode.setStatus('current') if mibBuilder.loadTexts: mcTransceiverMode.setDescription("Center card's optical transceiver mode. 100M card support bidi/duplex-fiber; 1G card support bidi/duplex-fiber/sfp. Once sfp is given, the following mcTransceiverDist should be ignored.") mcTransceiverDist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 120))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcTransceiverDist.setStatus('current') if mibBuilder.loadTexts: mcTransceiverDist.setDescription("Center card's optical transceiver distance, 1 means 550m for duplex-fiber mode in case of 1G card, otherwise it represents the real distance (unit of km).") mcPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("locked", 1), ("unlocked", 2), ("not-support", 3)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcPortState.setStatus('current') if mibBuilder.loadTexts: mcPortState.setDescription("Center card's port status, locked or unlocked") mcTransmitMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("cut-through", 1), ("store-forward", 2), ("not-support", 3)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcTransmitMode.setStatus('current') if mibBuilder.loadTexts: mcTransmitMode.setDescription("Center card's transmmit mode") mcCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcCurWorkMode.setStatus('mandatory') if mibBuilder.loadTexts: mcCurWorkMode.setDescription("Center card's current work mode") mcCfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcCfgWorkMode.setStatus('mandatory') if mibBuilder.loadTexts: mcCfgWorkMode.setDescription("Center card's configurable work mode") mcLFPCfg = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcLFPCfg.setStatus('current') if mibBuilder.loadTexts: mcLFPCfg.setDescription('Remote fault detect function, valid only on center MC card') mcUpStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 11), Gauge32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcUpStream.setStatus('current') if mibBuilder.loadTexts: mcUpStream.setDescription("Center card's up stream of MC") mcDownStream = 
MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 12), Gauge32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcDownStream.setStatus('current') if mibBuilder.loadTexts: mcDownStream.setDescription("Center card's down stream of MC") mcTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcTxlink.setStatus('current') if mibBuilder.loadTexts: mcTxlink.setDescription("Center card's electrical port's link status") mcFxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcFxlink.setStatus('current') if mibBuilder.loadTexts: mcFxlink.setDescription("Center card's optical port's link status") mcHWLFP = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcHWLFP.setStatus('current') if mibBuilder.loadTexts: mcHWLFP.setDescription("Center card's HW LFP, not applicable for 1G card") mcHWTransmitMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("cut-through", 1), ("store-forward", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcHWTransmitMode.setStatus('current') if mibBuilder.loadTexts: mcHWTransmitMode.setDescription("Center card's HW transmit mode, not applicable for 1G card") mcHWWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcHWWorkMode.setStatus('current') if mibBuilder.loadTexts: mcHWWorkMode.setDescription("Center card's HW work mode, not applicable for 1G card") mcHWRmtCtrlMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcHWRmtCtrlMode.setStatus('current') if mibBuilder.loadTexts: mcHWRmtCtrlMode.setDescription("Center card's HW remote control mode (only valid for local card). 
the disable mode indicates that all SET operations must be prohibited") mcNtwSfpExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("inserted", 1), ("removed", 2), ("na", 3), ("not-support", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcNtwSfpExist.setStatus('current') if mibBuilder.loadTexts: mcNtwSfpExist.setDescription("Center 1G card's Network SFP indication") mcAccSfpExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("inserted", 1), ("removed", 2), ("na", 3), ("not-support", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcAccSfpExist.setStatus('current') if mibBuilder.loadTexts: mcAccSfpExist.setDescription("Center 1G card's Access SFP indication, applicable only for O2O type") mcUtility = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("idle", 1), ("reset", 2), ("default", 3), ("set2hw", 4), ("not-support", 5)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcUtility.setStatus('current') if mibBuilder.loadTexts: mcUtility.setDescription('reset, default to factory, set to HW word, etc...') mcRmtDetect = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("no-remote", 0), ("yes", 1), ("not-support", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcRmtDetect.setStatus('current') if mibBuilder.loadTexts: mcRmtDetect.setDescription('An identifier to indicate if there is a remote MC currently connecting to system or not') mcRmtType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26))).clone(namedValues=NamedValues(("no-card", 0), ("ip113sr", 1), ("ip113f", 2), ("mc-1g-e2or", 3), ("mc-1g-o2or", 4), ("mc-4-25g-oeor", 5), ("mc-ip175dr", 6), ("mc-10g-oeor", 7), ("mc-10g-oeer", 8), ("mc-FANr", 9), ("mc-10g-oeo-1rr", 10), ("mc-2-5gr", 11), ("mc-40g-oeor", 12), ("mc-2-5g-tr", 13), ("mc-2-5g-fr", 14), ("mc-2-5g-mux-tr", 15), ("mc-2-5g-mux-fr", 16), ("mc-1g-e2o-backupr", 17), ("mc-e1-1sfpr", 18), ("mc-e1-2sfpr", 19), ("mc-100m-sfpr", 20), ("mc-1g-o2o-backupr", 21), ("mc-cwdmr-4", 22), ("mc-cwdmr-8", 23), ("mc-10g-oeo-2rr", 24), ("mc-qca8334r", 25), ("mc-e1t1r", 26)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcRmtType.setStatus('current') if mibBuilder.loadTexts: mcRmtType.setDescription("Remote card's type") mcRmtTransmitMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("cut-through", 1), ("store-forward", 2), ("not-support", 3)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcRmtTransmitMode.setStatus('current') if mibBuilder.loadTexts: mcRmtTransmitMode.setDescription("Remote card's transmmit mode") mcRmtCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4, 5, 6, 
mcRmtCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmtCurWorkMode.setDescription("Remote card's current work mode")
mcRmtCfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtCfgWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmtCfgWorkMode.setDescription("Remote card's configurable work mode")
mcRmtLFP = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 27), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtLFP.setStatus('current')
if mibBuilder.loadTexts: mcRmtLFP.setDescription("Remote card's LFP lamp state")
mcRmtTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 28), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtTxlink.setStatus('current')
if mibBuilder.loadTexts: mcRmtTxlink.setDescription("Remote card's electrical port status")
mcRmtHWLFP = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 29), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtHWLFP.setStatus('current')
if mibBuilder.loadTexts: mcRmtHWLFP.setDescription("Remote card's HW LFP, not applicable for 1G card")
mcRmtHWTransmitMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 30), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("cut-through", 1), ("store-forward", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtHWTransmitMode.setStatus('current')
if mibBuilder.loadTexts: mcRmtHWTransmitMode.setDescription("Remote card's HW transmit mode, not applicable for 1G card")
mcRmtHWWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 31), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtHWWorkMode.setStatus('current')
if mibBuilder.loadTexts: mcRmtHWWorkMode.setDescription("Remote card's HW work mode, not applicable for 1G card")
mcRmtLoopback = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 32), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtLoopback.setStatus('current')
if mibBuilder.loadTexts: mcRmtLoopback.setDescription("Remote card's HW Loopback state")
mcRmtPwrDown = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 33), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("powerdown", 1), ("normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtPwrDown.setStatus('current')
if mibBuilder.loadTexts: mcRmtPwrDown.setDescription("Remote card's power down state")
mcRmtAccSfpExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 34), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtAccSfpExist.setStatus('current')
if mibBuilder.loadTexts: mcRmtAccSfpExist.setDescription("Remote 1G card's Access SFP indication, applicable only for O2O type")
mcRmtUtility = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 35), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("no-card", 0), ("idle", 1), ("reset", 2), ("default", 3), ("set2hw", 4), ("not-support", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtUtility.setStatus('current')
if mibBuilder.loadTexts: mcRmtUtility.setDescription("Remote card's reset, default to factory, set to HW word, etc...")
mcCm1gSpecificObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2))
mcCm1gIpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 1))
mcCm1gIpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 1, 1), )
if mibBuilder.loadTexts: mcCm1gIpTable.setStatus('current')
if mibBuilder.loadTexts: mcCm1gIpTable.setDescription('MC 1G Ip address table')
mcCm1gIpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcLoOrRmtFg"))
if mibBuilder.loadTexts: mcCm1gIpEntry.setStatus('current')
if mibBuilder.loadTexts: mcCm1gIpEntry.setDescription('MC 1G Ip address entry definition')
mcLoOrRmtFg = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("local", 1), ("remote", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcLoOrRmtFg.setStatus('current')
if mibBuilder.loadTexts: mcLoOrRmtFg.setDescription('location index, local or remote')
mcIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 1, 1, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIpAddr.setStatus('current')
if mibBuilder.loadTexts: mcIpAddr.setDescription('The Ip address of the node')
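# --- Illustrative helper (not part of the generated MIB). mcCm1gIpEntry is
# indexed by (mcShelfIdx, mcCardIdx, mcLoOrRmtFg), so the local and remote IP
# addresses of one card are two instances of mcIpAddr. A hedged sketch that
# assumes a reachable agent and that this module is resolvable by the client.
def _example_read_card_ips(host='192.0.2.1', shelf=1, card=1):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, getCmd)
    for loc in (1, 2):  # local(1), remote(2) per mcLoOrRmtFg
        errInd, errStat, errIdx, varBinds = next(getCmd(
            SnmpEngine(), CommunityData('public'),
            UdpTransportTarget((host, 161)), ContextData(),
            ObjectType(ObjectIdentity('XXX-MIB', 'mcIpAddr', shelf, card, loc))))
        if not (errInd or errStat):
            print(loc, varBinds[0][1].prettyPrint())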
mcCm1gSfpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2))
mcCm1gSfpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1), )
if mibBuilder.loadTexts: mcCm1gSfpTable.setStatus('current')
if mibBuilder.loadTexts: mcCm1gSfpTable.setDescription('MC 1G SFP table')
mcCm1gSfpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcLoOrRmtFg"))
if mibBuilder.loadTexts: mcCm1gSfpEntry.setStatus('current')
if mibBuilder.loadTexts: mcCm1gSfpEntry.setDescription('MC 1G SFP entry definition')
getSfpCmd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: getSfpCmd.setStatus('current')
if mibBuilder.loadTexts: getSfpCmd.setDescription('This command will get the updated sfp information. Please send this command prior to getting the following params, otherwise the history sfp information will be sent back.')
sfpCompliance = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpCompliance.setStatus('current')
if mibBuilder.loadTexts: sfpCompliance.setDescription('SFP compliance (one byte); if 0 then the attributes of sfpTemperature/sfpTranPower/sfpRecvPower should be ignored')
sfpConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpConnector.setStatus('current')
if mibBuilder.loadTexts: sfpConnector.setDescription('SFP connector type (one byte) 0x01: SC 0x07: LC 0x22: RJ45 others: unsupported')
sfpTransCode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpTransCode.setStatus('current')
if mibBuilder.loadTexts: sfpTransCode.setDescription('SFP transceiver code (one byte) bit0: SingleMode bit1: Copper Module bit2: MultiMode bit3: MultiMode others: unsupported')
sfpSmLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpSmLength.setStatus('current')
if mibBuilder.loadTexts: sfpSmLength.setDescription('SFP link length for SingleMode, units of km (one byte); applicable only when sfpTransCode is SingleMode')
sfpMmLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpMmLength.setStatus('current')
if mibBuilder.loadTexts: sfpMmLength.setDescription('SFP link length for MultiMode, units of 10m (one byte); applicable only when sfpTransCode is MultiMode')
sfpCopperLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpCopperLength.setStatus('current')
if mibBuilder.loadTexts: sfpCopperLength.setDescription('SFP link length for Copper, units of m (one byte); applicable only when sfpConnector is RJ45')
sfpBrSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpBrSpeed.setStatus('current')
if mibBuilder.loadTexts: sfpBrSpeed.setDescription('SFP nominal signalling rate, units of 100Mbit/s (one byte)')
sfpWavelength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpWavelength.setStatus('current')
if mibBuilder.loadTexts: sfpWavelength.setDescription('SFP laser wavelength (one word)')
sfpTemperature = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpTemperature.setStatus('current')
if mibBuilder.loadTexts: sfpTemperature.setDescription('SFP temperature (one type, signed)')
sfpTranPower = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpTranPower.setStatus('current')
if mibBuilder.loadTexts: sfpTranPower.setDescription('SFP tx power (one type, signed)')
sfpRecvPower = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpRecvPower.setStatus('current')
if mibBuilder.loadTexts: sfpRecvPower.setDescription('SFP rx power (one type, signed)')
sfpVoltage = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpVoltage.setStatus('current')
if mibBuilder.loadTexts: sfpVoltage.setDescription('SFP voltage, units of 0.1mV (one word)')
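# --- Illustrative helper (not part of the generated MIB). Per getSfpCmd's
# description, the agent serves cached diagnostics: write getSfpCmd first so
# the table refreshes, then read the sfp* columns, otherwise stale ("history")
# values come back. Host, community and indices are example assumptions.
def _example_refresh_and_read_sfp(host='192.0.2.1', shelf=1, card=1, loc=1):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, getCmd, setCmd)
    engine, auth = SnmpEngine(), CommunityData('private')
    target, ctx = UdpTransportTarget((host, 161)), ContextData()
    # Step 1: trigger a refresh; local(1) or remote(2) selects which side.
    next(setCmd(engine, auth, target, ctx,
                ObjectType(ObjectIdentity('XXX-MIB', 'getSfpCmd', shelf, card, loc),
                           Integer32(loc))))
    # Step 2: read back freshly sampled diagnostics.
    errInd, errStat, errIdx, varBinds = next(getCmd(
        engine, auth, target, ctx,
        ObjectType(ObjectIdentity('XXX-MIB', 'sfpTemperature', shelf, card, loc)),
        ObjectType(ObjectIdentity('XXX-MIB', 'sfpTranPower', shelf, card, loc)),
        ObjectType(ObjectIdentity('XXX-MIB', 'sfpRecvPower', shelf, card, loc))))
    return None if errInd or errStat else [int(vb[1]) for vb in varBinds]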
mcCm1gAccSfpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3))
mcCm1gAccSfpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1), )
if mibBuilder.loadTexts: mcCm1gAccSfpTable.setStatus('current')
if mibBuilder.loadTexts: mcCm1gAccSfpTable.setDescription('MC 1G Access SFP table')
mcCm1gAccSfpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcLoOrRmtFg"))
if mibBuilder.loadTexts: mcCm1gAccSfpEntry.setStatus('current')
if mibBuilder.loadTexts: mcCm1gAccSfpEntry.setDescription('MC 1G Access SFP entry definition')
getAccSfpCmd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: getAccSfpCmd.setStatus('current')
if mibBuilder.loadTexts: getAccSfpCmd.setDescription('This command will get the updated sfp information. Please send this command prior to getting the following params, otherwise the history sfp information will be sent back.')
accsfpCompliance = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpCompliance.setStatus('current')
if mibBuilder.loadTexts: accsfpCompliance.setDescription('SFP compliance (one byte); if 0 then the attributes of sfpTemperature/sfpTranPower/sfpRecvPower should be ignored')
accsfpConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpConnector.setStatus('current')
if mibBuilder.loadTexts: accsfpConnector.setDescription('SFP connector type (one byte) 0x01: SC 0x07: LC 0x22: RJ45 others: unsupported')
accsfpTransCode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpTransCode.setStatus('current')
if mibBuilder.loadTexts: accsfpTransCode.setDescription('SFP transceiver code (one byte) bit0: SingleMode bit2: MultiMode bit3: MultiMode others: unsupported')
accsfpSmLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpSmLength.setStatus('current')
if mibBuilder.loadTexts: accsfpSmLength.setDescription('SFP link length for SingleMode, units of km (one byte); applicable only when sfpTransCode is SingleMode')
accsfpMmLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpMmLength.setStatus('current')
if mibBuilder.loadTexts: accsfpMmLength.setDescription('SFP link length for MultiMode, units of 10m (one byte); applicable only when sfpTransCode is MultiMode')
accsfpCopperLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpCopperLength.setStatus('current')
if mibBuilder.loadTexts: accsfpCopperLength.setDescription('SFP link length for Copper, units of m (one byte); applicable only when sfpConnector is RJ45')
accsfpBrSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpBrSpeed.setStatus('current')
if mibBuilder.loadTexts: accsfpBrSpeed.setDescription('SFP nominal signalling rate, units of 100Mbit/s (one byte)')
accsfpWavelength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpWavelength.setStatus('current')
if mibBuilder.loadTexts: accsfpWavelength.setDescription('SFP laser wavelength (one word)')
accsfpTemperature = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpTemperature.setStatus('current')
if mibBuilder.loadTexts: accsfpTemperature.setDescription('SFP temperature (one type, signed)')
accsfpTranPower = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpTranPower.setStatus('current')
if mibBuilder.loadTexts: accsfpTranPower.setDescription('SFP tx power (one type, signed)')
accsfpRecvPower = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpRecvPower.setStatus('current')
if mibBuilder.loadTexts: accsfpRecvPower.setDescription('SFP rx power (one type, signed)')
accsfpVoltage = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpVoltage.setStatus('current')
if mibBuilder.loadTexts: accsfpVoltage.setDescription('SFP voltage, units of 0.1mV (one word)')
mcIP175DObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3))
mcIP175DCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 1))
mcIP175DCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 1, 1), )
if mibBuilder.loadTexts: mcIP175DCardTable.setStatus('current')
if mibBuilder.loadTexts: mcIP175DCardTable.setDescription('MC IP175D Configuration table')
mcIP175DCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcIP175DCardEntry.setStatus('current')
if mibBuilder.loadTexts: mcIP175DCardEntry.setDescription('MC Configuration entry definition')
mcIP175DVlanMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("Normal", 1), ("mode1", 2), ("mode2", 3), ("not-support", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIP175DVlanMode.setStatus('current')
if mibBuilder.loadTexts: mcIP175DVlanMode.setDescription("Center card's vlan mode")
mcIP175DPortObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2))
mcIP175DPortTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1), )
if mibBuilder.loadTexts: mcIP175DPortTable.setStatus('current')
if mibBuilder.loadTexts: mcIP175DPortTable.setDescription('MC IP175D Configuration table')
mcIP175DPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcIP175DPortIdx"))
if mibBuilder.loadTexts: mcIP175DPortEntry.setStatus('current')
if mibBuilder.loadTexts: mcIP175DPortEntry.setDescription('MC Configuration entry definition')
mcIP175DPortIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("port1", 1), ("port2", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcIP175DPortIdx.setStatus('current')
if mibBuilder.loadTexts: mcIP175DPortIdx.setDescription('Port index')
mcIP175DCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcIP175DCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcIP175DCurWorkMode.setDescription("Center card's port current work mode")
mcIP175DCfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIP175DCfgWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcIP175DCfgWorkMode.setDescription("Center card's port configurable work mode")
mcIP175DUpStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 4), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(64, 100000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIP175DUpStream.setStatus('current')
if mibBuilder.loadTexts: mcIP175DUpStream.setDescription("Center card's port up stream of MC")
mcIP175DDownStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 5), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(64, 100000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIP175DDownStream.setStatus('current')
if mibBuilder.loadTexts: mcIP175DDownStream.setDescription("Center card's port down stream of MC")
mcIP175DTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcIP175DTxlink.setStatus('current')
if mibBuilder.loadTexts: mcIP175DTxlink.setDescription("Center card's port 1 electrical port's link status")
mcIP175DRmtCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcIP175DRmtCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcIP175DRmtCurWorkMode.setDescription("Remote card's port 1 current work mode")
mcIP175DRmtCfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIP175DRmtCfgWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcIP175DRmtCfgWorkMode.setDescription("Remote card's port 1 configurable work mode")
mcIP175DRmtTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcIP175DRmtTxlink.setStatus('current')
if mibBuilder.loadTexts: mcIP175DRmtTxlink.setDescription("Remote card's port electrical port status")
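# --- Illustrative helper (not part of the generated MIB). mcIP175DUpStream and
# mcIP175DDownStream are read-write Gauge32 values constrained to 64..100000;
# the MIB does not state the unit (kbit/s is a common convention, but that is
# an assumption here). A hedged sketch of a per-port rate-limit write:
def _example_set_ip175d_streams(host='192.0.2.1', shelf=1, card=1, port=1,
                                up=2048, down=2048):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Gauge32, setCmd)
    assert 64 <= up <= 100000 and 64 <= down <= 100000  # MIB range constraint
    errInd, errStat, errIdx, varBinds = next(setCmd(
        SnmpEngine(), CommunityData('private'),
        UdpTransportTarget((host, 161)), ContextData(),
        ObjectType(ObjectIdentity('XXX-MIB', 'mcIP175DUpStream', shelf, card, port),
                   Gauge32(up)),
        ObjectType(ObjectIdentity('XXX-MIB', 'mcIP175DDownStream', shelf, card, port),
                   Gauge32(down))))
    return not (errInd or errStat)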
mc4_25G_OEOObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4)).setLabel("mc4-25G-OEOObjects")
mc4_25G_OEOCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1)).setLabel("mc4-25G-OEOCardObjects")
mc4_25G_OEOCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1), ).setLabel("mc4-25G-OEOCardTable")
if mibBuilder.loadTexts: mc4_25G_OEOCardTable.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOCardTable.setDescription('MC 4.25G OEO Configuration table')
mc4_25G_OEOCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1), ).setLabel("mc4-25G-OEOCardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc4_25G_OEOCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOCardEntry.setDescription('MC Configuration entry definition')
mc4_25G_OEOCurSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mc4-25G-OEOCurSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEOCurSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc4_25G_OEOCurSpdMode.setDescription("Center card's current speed mode")
mc4_25G_OEOCfgSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mc4-25G-OEOCfgSpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc4_25G_OEOCfgSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc4_25G_OEOCfgSpdMode.setDescription("Center card's config speed mode")
mc4_25G_OEOLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc4-25G-OEOLoopback").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc4_25G_OEOLoopback.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOLoopback.setDescription("card's Loopback state")
mc4_25G_OEOWorkMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("repeater", 1), ("retimer", 2), ("not-support", 3)))).setLabel("mc4-25G-OEOWorkMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc4_25G_OEOWorkMode.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOWorkMode.setDescription("card's Work Mode")
mc4_25G_OEONtwPD = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc4-25G-OEONtwPD").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEONtwPD.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEONtwPD.setDescription("Center card's network side PD status")
mc4_25G_OEOAccPD = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc4-25G-OEOAccPD").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEOAccPD.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOAccPD.setDescription("Center card's access side PD status")
mc4_25G_OEOHWSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mc4-25G-OEOHWSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEOHWSpdMode.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOHWSpdMode.setDescription("Center card's HW speed mode")
mc4_25G_OEOHWLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc4-25G-OEOHWLoopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEOHWLoopback.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOHWLoopback.setDescription("card's HW Loopback state")
mc4_25G_OEOHWWorkMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("repeater", 1), ("retimer", 2), ("not-support", 3)))).setLabel("mc4-25G-OEOHWWorkMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEOHWWorkMode.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOHWWorkMode.setDescription("card's HW Work Mode")
mc4_25G_OEO_Test_Lock = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Lock", 1), ("Unlock", 2)))).setLabel("mc4-25G-OEO-Test-Lock").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Lock.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Lock.setDescription('test result lock or unlock')
mc4_25G_OEO_Test_Error_Counter = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 11), Integer32()).setLabel("mc4-25G-OEO-Test-Error-Counter").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Error_Counter.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Error_Counter.setDescription('test result error counter')
mc4_25G_OEO_Test_Continue_Time = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 12), Integer32()).setLabel("mc4-25G-OEO-Test-Continue-Time").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Continue_Time.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Continue_Time.setDescription('test continue time, unit is second')
mc4_25G_OEO_Test_Result = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Pass", 1), ("Error", 2)))).setLabel("mc4-25G-OEO-Test-Result").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Result.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Result.setDescription('test result')
mc4_25G_OEO_Start_Test = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Start", 1), ("Stop", 2)))).setLabel("mc4-25G-OEO-Start-Test").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc4_25G_OEO_Start_Test.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Start_Test.setDescription('start test and stop test')
mc4_25G_OEO_Get_Test_Rst = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("Get", 1)))).setLabel("mc4-25G-OEO-Get-Test-Rst").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc4_25G_OEO_Get_Test_Rst.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Get_Test_Rst.setDescription('get test result')
mcRmt4_25G_OEOCurSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mcRmt4-25G-OEOCurSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt4_25G_OEOCurSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmt4_25G_OEOCurSpdMode.setDescription("Remote card's current speed mode")
mcRmt4_25G_OEOCfgSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mcRmt4-25G-OEOCfgSpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmt4_25G_OEOCfgSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmt4_25G_OEOCfgSpdMode.setDescription("Remote card's config speed mode")
mcRmt4_25G_OEOLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mcRmt4-25G-OEOLoopback").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmt4_25G_OEOLoopback.setStatus('current')
if mibBuilder.loadTexts: mcRmt4_25G_OEOLoopback.setDescription("card's Loopback state")
mcRmt4_25G_OEOWorkMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("repeater", 1), ("retimer", 2), ("not-support", 3)))).setLabel("mcRmt4-25G-OEOWorkMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmt4_25G_OEOWorkMode.setStatus('current')
if mibBuilder.loadTexts: mcRmt4_25G_OEOWorkMode.setDescription("card's Work Mode")
mcRmt4_25G_OEOHWSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mcRmt4-25G-OEOHWSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWSpdMode.setStatus('current')
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWSpdMode.setDescription("Remote card's HW speed mode")
mcRmt4_25G_OEOHWLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mcRmt4-25G-OEOHWLoopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWLoopback.setStatus('current')
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWLoopback.setDescription("card's HW Loopback state")
mcRmt4_25G_OEOHWWorkMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("repeater", 1), ("retimer", 2), ("not-support", 3)))).setLabel("mcRmt4-25G-OEOHWWorkMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWWorkMode.setStatus('current')
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWWorkMode.setDescription("card's HW Work Mode")
mc10G_OEOObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5)).setLabel("mc10G-OEOObjects")
mc10G_OEOCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1)).setLabel("mc10G-OEOCardObjects")
mc10G_OEOCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1), ).setLabel("mc10G-OEOCardTable")
if mibBuilder.loadTexts: mc10G_OEOCardTable.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOCardTable.setDescription('MC 10G OEO Configuration table')
mc10G_OEOCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1), ).setLabel("mc10G-OEOCardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc10G_OEOCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOCardEntry.setDescription('MC Configuration entry definition')
mc10G_OEOCurSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mc10G-OEOCurSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEOCurSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEOCurSpdMode.setDescription("Center card's current speed mode 10G LAN(10.3125G) and 10G WAN(9.95328G)")
mc10G_OEOCfgSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mc10G-OEOCfgSpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEOCfgSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEOCfgSpdMode.setDescription("Center card's config speed mode 10G LAN(10.3125G) and 10G WAN(9.95328G)")
mc10G_OEOLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEOLoopback").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEOLoopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOLoopback.setDescription("card's Loopback state")
mc10G_OEOSFP1 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEOSFP1").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEOSFP1.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOSFP1.setDescription("Center card's SFP1 link status")
mc10G_OEOSFP2 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEOSFP2").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEOSFP2.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOSFP2.setDescription("Center card's SFP2 link status")
mc10G_OEOHWSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mc10G-OEOHWSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEOHWSpdMode.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOHWSpdMode.setDescription("Center card's HW speed mode")
mc10G_OEOHWLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEOHWLoopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEOHWLoopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOHWLoopback.setDescription("card's HW Loopback state")
mc10G_OEO_Test_Lock = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Lock", 1), ("Unlock", 2)))).setLabel("mc10G-OEO-Test-Lock").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_Test_Lock.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Test_Lock.setDescription('test result lock or unlock')
mc10G_OEO_Test_Error_Counter = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 9), Integer32()).setLabel("mc10G-OEO-Test-Error-Counter").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_Test_Error_Counter.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Test_Error_Counter.setDescription('test result error counter')
mc10G_OEO_Test_Continue_Time = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 10), Integer32()).setLabel("mc10G-OEO-Test-Continue-Time").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_Test_Continue_Time.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Test_Continue_Time.setDescription('test continue time, unit is second')
mc10G_OEO_Test_Result = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Pass", 1), ("Error", 2)))).setLabel("mc10G-OEO-Test-Result").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_Test_Result.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Test_Result.setDescription('test result')
mc10G_OEO_Start_Test = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Start", 1), ("Stop", 2)))).setLabel("mc10G-OEO-Start-Test").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEO_Start_Test.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Start_Test.setDescription('start test and stop test')
mc10G_OEO_Get_Test_Rst = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("Get", 1)))).setLabel("mc10G-OEO-Get-Test-Rst").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEO_Get_Test_Rst.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Get_Test_Rst.setDescription('get test result')
mcRmt10G_OEOCurSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mcRmt10G-OEOCurSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEOCurSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmt10G_OEOCurSpdMode.setDescription("Remote card's current speed mode 10G LAN(10.3125G) and 10G WAN(9.95328G)")
mcRmt10G_OEOCfgSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mcRmt10G-OEOCfgSpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmt10G_OEOCfgSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmt10G_OEOCfgSpdMode.setDescription("Remote card's config speed mode 10G LAN(10.3125G) and 10G WAN(9.95328G)")
mcRmt10G_OEOLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mcRmt10G-OEOLoopback").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmt10G_OEOLoopback.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEOLoopback.setDescription("card's Loopback state")
mcRmt10G_OEOHWSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mcRmt10G-OEOHWSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEOHWSpdMode.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEOHWSpdMode.setDescription("Remote card's HW speed mode 10G LAN(10.3125G) and 10G WAN(9.95328G)")
mcRmt10G_OEOHWLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mcRmt10G-OEOHWLoopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEOHWLoopback.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEOHWLoopback.setDescription("card's HW Loopback state")
mcRmt10G_OEOSFP1 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mcRmt10G-OEOSFP1").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEOSFP1.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEOSFP1.setDescription("card's SFP1 link status")
mc10G_OEO_accType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mc10G-OEO-accType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_accType.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_accType.setDescription('')
mc10G_OEO_ntwType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mc10G-OEO-ntwType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_ntwType.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_ntwType.setDescription('')
mcRmt10G_OEO_accType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mcRmt10G-OEO-accType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEO_accType.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEO_accType.setDescription('')
mcRmt10G_OEO_ntwType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mcRmt10G-OEO-ntwType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEO_ntwType.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEO_ntwType.setDescription('')
mc10G_OEEObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6)).setLabel("mc10G-OEEObjects")
mc10G_OEECardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1)).setLabel("mc10G-OEECardObjects")
mc10G_OEECardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1), ).setLabel("mc10G-OEECardTable")
if mibBuilder.loadTexts: mc10G_OEECardTable.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEECardTable.setDescription('MC 10G OEE Configuration table')
mc10G_OEECardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1), ).setLabel("mc10G-OEECardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc10G_OEECardEntry.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEECardEntry.setDescription('MC Configuration entry definition')
mc10G_OEETxlink = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEETxlink").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEETxlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEETxlink.setDescription("Center card's electrical port's link status")
mc10G_OEEFxlink = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEEFxlink").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEEFxlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEEFxlink.setDescription("Center card's optical port's link status")
mc10G_OEECurSpd = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("no-card", 0), ("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("m10G-Master", 7), ("m10G-Slave", 8), ("not-support", 9)))).setLabel("mc10G-OEECurSpd").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEECurSpd.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEECurSpd.setDescription("Local card's current speed")
mc10G_OEELoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEELoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEELoopMode.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEELoopMode.setDescription("card's Loopback state")
mc10G_OEESpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 7, 8))).clone(namedValues=NamedValues(("auto", 1), ("m10G-Master", 7), ("m10G-Slave", 8)))).setLabel("mc10G-OEESpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEESpdMode.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEESpdMode.setDescription("card's speed mode")
mc10G_OEEHWLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEEHWLoopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEEHWLoopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEEHWLoopback.setDescription("card's HW Loopback state")
mc10G_OEE_ntwType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mc10G-OEE-ntwType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEE_ntwType.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEE_ntwType.setDescription('')
mc10G_OEE_checkResult = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 8), Integer32()).setLabel("mc10G-OEE-checkResult").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEE_checkResult.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEE_checkResult.setDescription('test result')
mcFanObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 7))
mcFanCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 7, 1))
mcFanCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 7, 1, 1), )
if mibBuilder.loadTexts: mcFanCardTable.setStatus('current')
if mibBuilder.loadTexts: mcFanCardTable.setDescription('MC fan card table')
mcFanCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 7, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcFanCardEntry.setStatus('current')
if mibBuilder.loadTexts: mcFanCardEntry.setDescription('MC Configuration entry definition')
mcFanStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 7, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Normal", 1), ("Abnormal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcFanStatus.setStatus('mandatory')
if mibBuilder.loadTexts: mcFanStatus.setDescription("Center card's fan status")
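# --- Illustrative helper (not part of the generated MIB). A minimal poll of
# mcFanStatus over a shelf's card slots: Normal(1), Abnormal(2), not-support(3).
# Host, community and the slot range are assumptions for the example.
def _example_poll_fans(host='192.0.2.1', shelf=1, cards=range(1, 17)):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, getCmd)
    alarms = []
    for card in cards:
        errInd, errStat, errIdx, varBinds = next(getCmd(
            SnmpEngine(), CommunityData('public'),
            UdpTransportTarget((host, 161)), ContextData(),
            ObjectType(ObjectIdentity('XXX-MIB', 'mcFanStatus', shelf, card))))
        if not (errInd or errStat) and int(varBinds[0][1]) == 2:  # Abnormal(2)
            alarms.append(card)
    return alarms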
mc40G_OEOObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8)).setLabel("mc40G-OEOObjects")
mc40G_OEOCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1)).setLabel("mc40G-OEOCardObjects")
mc40G_OEOCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1), ).setLabel("mc40G-OEOCardTable")
if mibBuilder.loadTexts: mc40G_OEOCardTable.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOCardTable.setDescription('MC 40G OEO Configuration table')
mc40G_OEOCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1), ).setLabel("mc40G-OEOCardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc40G_OEOCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOCardEntry.setDescription('MC Configuration entry definition')
mc40G_OEOQsfp1Lane1_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp1Lane1-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane1_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane1_link.setDescription("Center card's Qsfp1 Lane1 link status")
mc40G_OEOQsfp1Lane2_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp1Lane2-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane2_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane2_link.setDescription("Center card's Qsfp1 Lane2 link status")
mc40G_OEOQsfp1Lane3_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp1Lane3-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane3_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane3_link.setDescription("Center card's Qsfp1 Lane3 link status")
mc40G_OEOQsfp1Lane4_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp1Lane4-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane4_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane4_link.setDescription("Center card's Qsfp1 Lane4 link status")
mc40G_OEOQsfp2Lane1_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp2Lane1-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane1_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane1_link.setDescription("Center card's Qsfp2 Lane1 link status")
mc40G_OEOQsfp2Lane2_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp2Lane2-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane2_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane2_link.setDescription("Center card's Qsfp2 Lane2 link status")
mc40G_OEOQsfp2Lane3_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp2Lane3-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane3_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane3_link.setDescription("Center card's Qsfp2 Lane3 link status")
mc40G_OEOQsfp2Lane4_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp2Lane4-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane4_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane4_link.setDescription("Center card's Qsfp2 Lane4 link status")
mc40G_OEOLane1LoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("line-side-enable", 1), ("host-side-enable", 2), ("disable", 3), ("not-support", 4)))).setLabel("mc40G-OEOLane1LoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOLane1LoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOLane1LoopMode.setDescription("card's Lane1 Loopback state")
mc40G_OEOLane2LoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("line-side-enable", 1), ("host-side-enable", 2), ("disable", 3), ("not-support", 4)))).setLabel("mc40G-OEOLane2LoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOLane2LoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOLane2LoopMode.setDescription("card's Lane2 Loopback state")
mc40G_OEOLane3LoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("line-side-enable", 1), ("host-side-enable", 2), ("disable", 3), ("not-support", 4)))).setLabel("mc40G-OEOLane3LoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOLane3LoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOLane3LoopMode.setDescription("card's Lane3 Loopback state")
mc40G_OEOLane4LoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("line-side-enable", 1), ("host-side-enable", 2), ("disable", 3), ("not-support", 4)))).setLabel("mc40G-OEOLane4LoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOLane4LoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOLane4LoopMode.setDescription("card's Lane4 Loopback state")
mc40G_OEOLoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("all", 1), ("line-side-enable", 2), ("host-side-enable", 3), ("disable", 4), ("not-support", 5)))).setLabel("mc40G-OEOLoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOLoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOLoopMode.setDescription("card's Loopback state")
Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("no-card", 0), ("mc40GSpeed-1", 1), ("mc40GSpeed-2", 2), ("mc40GSpeed-3", 3), ("mc40GSpeed-4", 4), ("mc40GSpeed-5", 5), ("mc40GSpeed-6", 6), ("mc40GSpeed-7", 7), ("mc40GSpeed-8", 8), ("mc40GSpeed-9", 9), ("mc40GSpeed-10", 10), ("mc40GSpeed-11", 11), ("mc40GSpeed-12", 12), ("mc40GSpeed-13", 13), ("not-support", 14)))).setLabel("mc40G-OEOSpeedMode").setMaxAccess("readwrite") if mibBuilder.loadTexts: mc40G_OEOSpeedMode.setStatus('mandatory') if mibBuilder.loadTexts: mc40G_OEOSpeedMode.setDescription('speed1: 1X40G: 10G LAN(10312.5Mbps) speed2: 1X40G: OTU3(10754.60325Mbps) speed3: 1X40G: OTU3e2(11145.83875Mbps) speed4: 4X10G: 10G LAN(10312.5Mbps) speed5: 4X10G: CPRI(9830.4 Mbps) speed6: 4X10G: OC-192/STM-64(9953.28Mbps) speed7: 4X10G: OC-192/STM-64(10664.228571427Mbps) speed8: 4X10G: OC-192/STM-64(10709.225316455Mbps) speed9: 4X10G: 10G Ethernet(11049.107142857Mbps) speed10: 4X10G: 10GFibreChannel(10518.750Mbps) speed11: 4X10G: 10GFibreChannel(11270.089285714Mbps) speed12: 4X10G: 10GFibreChannel(11317.642405063Mbps) speed13: 4X10G: 10GInfiniband(10000.00Mbps)') mc40G_OEOHWLoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("line-side-enable", 1), ("host-side-enable", 2), ("disable", 3), ("not-support", 4)))).setLabel("mc40G-OEOHWLoopMode").setMaxAccess("readonly") if mibBuilder.loadTexts: mc40G_OEOHWLoopMode.setStatus('current') if mibBuilder.loadTexts: mc40G_OEOHWLoopMode.setDescription("card's HW Loopback state") mc40G_OEOHWSpeedMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=NamedValues(("no-card", 0), ("mc40GSpeed-1", 1), ("mc40GSpeed-2", 2), ("mc40GSpeed-3", 3), ("mc40GSpeed-4", 4), ("mc40GSpeed-5", 5), ("mc40GSpeed-6", 6), ("mc40GSpeed-7", 7), ("mc40GSpeed-8", 8), ("mc40GSpeed-9", 9), ("mc40GSpeed-10", 10), ("mc40GSpeed-11", 11), ("mc40GSpeed-12", 12), ("not-support", 13)))).setLabel("mc40G-OEOHWSpeedMode").setMaxAccess("readonly") if mibBuilder.loadTexts: mc40G_OEOHWSpeedMode.setStatus('mandatory') if mibBuilder.loadTexts: mc40G_OEOHWSpeedMode.setDescription('speed1: 1X40G: 10G LAN(10312.5Mbps) speed2: 1X40G: OTU3(10754.60325Mbps) speed3: 1X40G: OTU3e2(11145.83875Mbps) speed4: 4X10G: 10G LAN(10312.5Mbps) speed5: 4X10G: CPRI(9830.4 Mbps) speed6: 4X10G: OC-192/STM-64(9953.28Mbps) speed7: 4X10G: OC-192/STM-64(10664.228571427Mbps) speed8: 4X10G: OC-192/STM-64(10709.225316455Mbps) speed9: 4X10G: 10G Ethernet(11049.107142857Mbps) speed10: 4X10G: 10GFibreChannel(10518.750Mbps) speed11: 4X10G: 10GFibreChannel(11270.089285714Mbps) speed12: 4X10G: 10GFibreChannel(11317.642405063Mbps)') mcQsfpSpecificObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9)) mcNtwQSfpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1)) mcNtwQSfpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1), ) if mibBuilder.loadTexts: mcNtwQSfpTable.setStatus('current') if mibBuilder.loadTexts: mcNtwQSfpTable.setDescription('MC Ntw QSFP table') mcNtwQSfpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx")) if mibBuilder.loadTexts: 
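# --- Illustrative helper (editor's sketch, not part of the generated MIB) ---
# The mc40G-OEOSpeedMode DESCRIPTION packs the code-to-rate mapping into one
# string; the dict below restates it in machine-usable form so a manager
# application can render the integer value.  Rates are copied verbatim from
# the DESCRIPTION above; None marks the no-card(0) and not-support(14) codes.
MC40G_OEO_SPEED_MBPS = {
    0: None,             # no-card
    1: 10312.5,          # 1X40G: 10G LAN
    2: 10754.60325,      # 1X40G: OTU3
    3: 11145.83875,      # 1X40G: OTU3e2
    4: 10312.5,          # 4X10G: 10G LAN
    5: 9830.4,           # 4X10G: CPRI
    6: 9953.28,          # 4X10G: OC-192/STM-64
    7: 10664.228571427,  # 4X10G: OC-192/STM-64
    8: 10709.225316455,  # 4X10G: OC-192/STM-64
    9: 11049.107142857,  # 4X10G: 10G Ethernet
    10: 10518.750,       # 4X10G: 10G Fibre Channel
    11: 11270.089285714, # 4X10G: 10G Fibre Channel
    12: 11317.642405063, # 4X10G: 10G Fibre Channel
    13: 10000.00,        # 4X10G: 10G Infiniband
    14: None,            # not-support
}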
mcQsfpSpecificObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9))
mcNtwQSfpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1))
mcNtwQSfpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1), )
if mibBuilder.loadTexts: mcNtwQSfpTable.setStatus('current')
if mibBuilder.loadTexts: mcNtwQSfpTable.setDescription('MC Ntw QSFP table')
mcNtwQSfpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcNtwQSfpEntry.setStatus('current')
if mibBuilder.loadTexts: mcNtwQSfpEntry.setDescription('MC Ntw QSFP entry definition')
getNtwQSfpCmd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: getNtwQSfpCmd.setStatus('current')
if mibBuilder.loadTexts: getNtwQSfpCmd.setDescription('This command will get the updated sfp information. Please send this command prior to getting the following params, otherwise the history sfp information will be sent back.')
qsfpNtwConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwConnector.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwConnector.setDescription('SFP connector type (one byte) 0x07: LC 0x0B: Optical Pigtail 0x0C: MPO 0x21: Copper Pigtail others: unsupported')
qsfpNtwTemperature = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwTemperature.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwTemperature.setDescription('SFP temperature (one type, signed)')
qsfpNtwTxPower1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwTxPower1.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwTxPower1.setDescription('SFP tx power (one type, signed)')
qsfpNtwTxPower2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwTxPower2.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwTxPower2.setDescription('SFP tx power (one type, signed)')
qsfpNtwTxPower3 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwTxPower3.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwTxPower3.setDescription('SFP tx power (one type, signed)')
qsfpNtwTxPower4 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwTxPower4.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwTxPower4.setDescription('SFP tx power (one type, signed)')
qsfpNtwRxPower1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwRxPower1.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwRxPower1.setDescription('SFP rx power (one type, signed)')
qsfpNtwRxPower2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwRxPower2.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwRxPower2.setDescription('SFP rx power (one type, signed)')
qsfpNtwRxPower3 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwRxPower3.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwRxPower3.setDescription('SFP rx power (one type, signed)')
qsfpNtwRxPower4 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwRxPower4.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwRxPower4.setDescription('SFP rx power (one type, signed)')
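# --- Illustrative helper (editor's sketch, not part of the generated MIB) ---
# Per the getNtwQSfpCmd DESCRIPTION, a manager must first write the refresh
# command and then read the DDM columns, otherwise stale values come back.
# A minimal sketch with pysnmp's synchronous hlapi; the agent address, the
# community string, and the shelf/card index suffix (1, 1) are placeholders.
def _example_read_ntw_qsfp_rx_power():
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              getCmd, setCmd, Integer32)
    target = (SnmpEngine(), CommunityData('private'),
              UdpTransportTarget(('192.0.2.1', 161)), ContextData())
    base = (1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1)
    # 1) trigger a refresh: getNtwQSfpCmd.<shelf>.<card> := local(1)
    next(setCmd(*target, ObjectType(ObjectIdentity(base + (1, 1, 1)), Integer32(1))))
    # 2) read qsfpNtwRxPower1.<shelf>.<card>
    errInd, errStat, errIdx, varBinds = next(
        getCmd(*target, ObjectType(ObjectIdentity(base + (8, 1, 1)))))
    if not errInd and not errStat:
        return int(varBinds[0][1])  # signed raw value per the DESCRIPTION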
mcAccQSfpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2))
mcAccQSfpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1), )
if mibBuilder.loadTexts: mcAccQSfpTable.setStatus('current')
if mibBuilder.loadTexts: mcAccQSfpTable.setDescription('MC Acc QSFP table')
mcAccQSfpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcAccQSfpEntry.setStatus('current')
if mibBuilder.loadTexts: mcAccQSfpEntry.setDescription('MC Acc QSFP entry definition')
getAccQSfpCmd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: getAccQSfpCmd.setStatus('current')
if mibBuilder.loadTexts: getAccQSfpCmd.setDescription('This command will get the updated sfp information. Please send this command prior to getting the following params, otherwise the history sfp information will be sent back.')
qsfpAccConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccConnector.setStatus('current')
if mibBuilder.loadTexts: qsfpAccConnector.setDescription('SFP connector type (one byte) 0x07: LC 0x0B: Optical Pigtail 0x0C: MPO 0x21: Copper Pigtail others: unsupported')
qsfpAccTemperature = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccTemperature.setStatus('current')
if mibBuilder.loadTexts: qsfpAccTemperature.setDescription('SFP temperature (one type, signed)')
qsfpAccTxPower1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccTxPower1.setStatus('current')
if mibBuilder.loadTexts: qsfpAccTxPower1.setDescription('SFP tx power (one type, signed)')
qsfpAccTxPower2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccTxPower2.setStatus('current')
if mibBuilder.loadTexts: qsfpAccTxPower2.setDescription('SFP tx power (one type, signed)')
qsfpAccTxPower3 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccTxPower3.setStatus('current')
if mibBuilder.loadTexts: qsfpAccTxPower3.setDescription('SFP tx power (one type, signed)')
qsfpAccTxPower4 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccTxPower4.setStatus('current')
if mibBuilder.loadTexts: qsfpAccTxPower4.setDescription('SFP tx power (one type, signed)')
qsfpAccRxPower1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccRxPower1.setStatus('current')
if mibBuilder.loadTexts: qsfpAccRxPower1.setDescription('SFP rx power (one type, signed)')
qsfpAccRxPower2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccRxPower2.setStatus('current')
if mibBuilder.loadTexts: qsfpAccRxPower2.setDescription('SFP rx power (one type, signed)')
qsfpAccRxPower3 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccRxPower3.setStatus('current')
if mibBuilder.loadTexts: qsfpAccRxPower3.setDescription('SFP rx power (one type, signed)')
qsfpAccRxPower4 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccRxPower4.setStatus('current')
if mibBuilder.loadTexts: qsfpAccRxPower4.setDescription('SFP rx power (one type, signed)')
mc2_5GMCObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10)).setLabel("mc2-5GMCObjects")
mc2_5GMCSFP3Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1)).setLabel("mc2-5GMCSFP3Objects")
mc2_5Cm1gSfpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1), ).setLabel("mc2-5Cm1gSfpTable")
if mibBuilder.loadTexts: mc2_5Cm1gSfpTable.setStatus('current')
if mibBuilder.loadTexts: mc2_5Cm1gSfpTable.setDescription('MC 1G SFP table')
mc2_5Cm1gSfpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1), ).setLabel("mc2-5Cm1gSfpEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcLoOrRmtFg"))
if mibBuilder.loadTexts: mc2_5Cm1gSfpEntry.setStatus('current')
if mibBuilder.loadTexts: mc2_5Cm1gSfpEntry.setDescription('MC 1G SFP entry definition')
mc2_5g_getSfpCmd = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setLabel("mc2-5g-getSfpCmd").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc2_5g_getSfpCmd.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_getSfpCmd.setDescription('This command will get the updated sfp information. Please send this command prior to getting the following params, otherwise the history sfp information will be sent back.')
mc2_5g_sfpCompliance = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 2), Integer32()).setLabel("mc2-5g-sfpCompliance").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpCompliance.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpCompliance.setDescription('SFP compliance (one byte) if 0 then the attributes of sfpTemperature/sfpTranPower/sfpRecvPower should be ignored')
mc2_5g_sfpConnector = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 3), Integer32()).setLabel("mc2-5g-sfpConnector").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpConnector.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpConnector.setDescription('SFP connector type (one byte) 0x01: SC 0x07: LC 0x22: RJ45 others: unsupported')
mc2_5g_sfpTransCode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 4), Integer32()).setLabel("mc2-5g-sfpTransCode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpTransCode.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpTransCode.setDescription('SFP transceiver code (one byte) bit0: SingleMode bit2: MultiMode bit3: MultiMode others: unsupported')
mc2_5g_sfpSmLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 5), Integer32()).setLabel("mc2-5g-sfpSmLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpSmLength.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpSmLength.setDescription('SFP link length for SingleMode, units of km. (one byte) applicable only when sfpTransCode is SingleMode')
mc2_5g_sfpMmLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 6), Integer32()).setLabel("mc2-5g-sfpMmLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpMmLength.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpMmLength.setDescription('SFP link length for MultiMode, units of 10m (one byte) applicable only when sfpTransCode is MultiMode')
mc2_5g_sfpCopperLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 7), Integer32()).setLabel("mc2-5g-sfpCopperLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpCopperLength.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpCopperLength.setDescription('SFP link length for Copper, units of m (one byte) applicable only when sfpConnector is RJ45')
mc2_5g_sfpBrSpeed = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 8), Integer32()).setLabel("mc2-5g-sfpBrSpeed").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpBrSpeed.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpBrSpeed.setDescription('SFP nominal signalling rate, units of 100Mbit/s (one byte)')
mc2_5g_sfpWavelength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 9), Integer32()).setLabel("mc2-5g-sfpWavelength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpWavelength.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpWavelength.setDescription('SFP laser wavelength (one word)')
mc2_5g_sfpTemperature = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 10), Integer32()).setLabel("mc2-5g-sfpTemperature").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpTemperature.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpTemperature.setDescription('SFP temperature (one type, signed)')
mc2_5g_sfpTranPower = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 11), Integer32()).setLabel("mc2-5g-sfpTranPower").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpTranPower.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpTranPower.setDescription('SFP tx power (one type, signed)')
mc2_5g_sfpRecvPower = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 12), Integer32()).setLabel("mc2-5g-sfpRecvPower").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpRecvPower.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpRecvPower.setDescription('SFP rx power (one type, signed)')
mc2_5g_sfpVoltage = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 13), Integer32()).setLabel("mc2-5g-sfpVoltage").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpVoltage.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpVoltage.setDescription('SFP voltage, units of 0.1mV (one word)')
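# --- Illustrative helper (editor's sketch, not part of the generated MIB) ---
# The 1G SFP columns above report raw register values whose units live only
# in the DESCRIPTION strings.  This sketch converts a few of them to
# engineering units; the scale factors are copied from those descriptions.
def _example_scale_sfp_readings(br_speed_raw, voltage_raw, sm_length_raw):
    return {
        'nominal_rate_mbps': br_speed_raw * 100,  # units of 100 Mbit/s
        'supply_voltage_mv': voltage_raw * 0.1,   # units of 0.1 mV
        'sm_reach_km': sm_length_raw,             # already in km
    }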
mc2_5GMCCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2)).setLabel("mc2-5GMCCardObjects")
mc2_5GMCCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1), ).setLabel("mc2-5GMCCardTable")
if mibBuilder.loadTexts: mc2_5GMCCardTable.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCCardTable.setDescription('MC 2-5GMC Configuration table')
mc2_5GMCCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1, 1), ).setLabel("mc2-5GMCCardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc2_5GMCCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCCardEntry.setDescription('MC Configuration entry definition')
mc2_5GMCSfp3Exist = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("inserted", 1), ("removed", 2), ("na", 3), ("not-support", 4)))).setLabel("mc2-5GMCSfp3Exist").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5GMCSfp3Exist.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCSfp3Exist.setDescription("Center 1G card's SFP3 indication")
mc2_5GMCPort1link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc2-5GMCPort1link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5GMCPort1link.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCPort1link.setDescription("Center card's electrical port1's link status")
mc2_5GMCPort2link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc2-5GMCPort2link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5GMCPort2link.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCPort2link.setDescription("Center card's electrical port2's link status")
mc2_5GMCPort3link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc2-5GMCPort3link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5GMCPort3link.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCPort3link.setDescription("Center card's electrical port3's link status")
mcE1Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11))
mcE1CardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1))
mcE1CardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1), )
if mibBuilder.loadTexts: mcE1CardTable.setStatus('current')
if mibBuilder.loadTexts: mcE1CardTable.setDescription('MC E1 + Eth Configuration table')
mcE1CardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcE1CardEntry.setStatus('current')
if mibBuilder.loadTexts: mcE1CardEntry.setDescription('MC Configuration entry definition')
mcE1Txlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Txlink.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1Txlink.setDescription("Center card's electrical port's link status")
mcE1TxCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1TxCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1TxCurWorkMode.setDescription("Center card's current work mode")
mcE1SFP1Link = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1SFP1Link.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1SFP1Link.setDescription("Center card's SFP1 port's link status")
mcE1Port1LOS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port1LOS.setStatus('current')
if mibBuilder.loadTexts: mcE1Port1LOS.setDescription("card's E1 Port1 LOS state")
mcE1Port1AIS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port1AIS.setStatus('current')
if mibBuilder.loadTexts: mcE1Port1AIS.setDescription("card's E1 Port1 AIS state")
mcE1Port1CV = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port1CV.setStatus('current')
if mibBuilder.loadTexts: mcE1Port1CV.setDescription("card's E1 Port1 CV state")
mcE1Port2LOS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port2LOS.setStatus('current')
if mibBuilder.loadTexts: mcE1Port2LOS.setDescription("card's E1 Port2 LOS state")
mcE1Port2AIS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port2AIS.setStatus('current')
if mibBuilder.loadTexts: mcE1Port2AIS.setDescription("card's E1 Port2 AIS state")
mcE1Port2CV = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port2CV.setStatus('current')
if mibBuilder.loadTexts: mcE1Port2CV.setDescription("card's E1 Port2 CV state")
mcE1Port1Loop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("external", 1), ("internal", 2), ("disabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1Port1Loop.setStatus('current')
if mibBuilder.loadTexts: mcE1Port1Loop.setDescription("card's Port1 Loopback state")
mcE1Port2Loop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("external", 1), ("internal", 2), ("disabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1Port2Loop.setStatus('current')
if mibBuilder.loadTexts: mcE1Port2Loop.setDescription("card's Port2 Loopback state")
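# --- Illustrative helper (editor's sketch, not part of the generated MIB) ---
# mcE1Port1Loop/mcE1Port2Loop are read-write enums (external=1, internal=2,
# disabled=3).  A hedged sketch of driving Port1 with pysnmp; the address,
# community string, and shelf/card index suffix (1, 1) are placeholders.
def _example_set_e1_port1_loopback(mode=2):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              setCmd, Integer32)
    oid = (1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 10, 1, 1)
    errInd, errStat, errIdx, varBinds = next(setCmd(
        SnmpEngine(), CommunityData('private'),
        UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
        ObjectType(ObjectIdentity(oid), Integer32(mode))))
    return errInd is None and int(errStat) == 0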
mcRmtE1Txlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Txlink.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmtE1Txlink.setDescription("Remote card's electrical port's link status")
mcRmtE1TxCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1TxCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmtE1TxCurWorkMode.setDescription("Remote card's current work mode")
mcRmtE1SFP1Link = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1SFP1Link.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmtE1SFP1Link.setDescription("Remote card's SFP1 port's link status")
mcRmtE1Port1LOS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port1LOS.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port1LOS.setDescription("Remote card's E1 Port1 LOS state")
mcRmtE1Port1AIS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port1AIS.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port1AIS.setDescription("Remote card's E1 Port1 AIS state")
mcRmtE1Port1CV = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port1CV.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port1CV.setDescription("Remote card's E1 Port1 CV state")
mcRmtE1Port2LOS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port2LOS.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port2LOS.setDescription("Remote card's E1 Port2 LOS state")
mcRmtE1Port2AIS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port2AIS.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port2AIS.setDescription("Remote card's E1 Port2 AIS state")
mcRmtE1Port2CV = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port2CV.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port2CV.setDescription("Remote card's E1 Port2 CV state")
mcRmtE1Port1Loop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("external", 1), ("internal", 2), ("disabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtE1Port1Loop.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port1Loop.setDescription("Remote card's Port1 Loopback state")
mcRmtE1Port2Loop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("external", 1), ("internal", 2), ("disabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtE1Port2Loop.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port2Loop.setDescription("Remote card's Port2 Loopback state")
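# --- Illustrative helper (editor's sketch, not part of the generated MIB) ---
# The local and remote E1 alarm columns share one encoding (alarm=1,
# e1normal=2, not-support=3).  This map just restates the column layout
# defined above so a poller can iterate it without hard-coding OIDs inline.
E1_ALARM_COLUMNS = {
    'local':  {'port1': {'LOS': 4,  'AIS': 5,  'CV': 6},
               'port2': {'LOS': 7,  'AIS': 8,  'CV': 9}},
    'remote': {'port1': {'LOS': 15, 'AIS': 16, 'CV': 17},
               'port2': {'LOS': 18, 'AIS': 19, 'CV': 20}},
}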
mc1GE2OObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12))
mc1GE2OCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1))
mc1GE2OCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1), )
if mibBuilder.loadTexts: mc1GE2OCardTable.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OCardTable.setDescription('MC E2O Fiber backup Configuration table')
mc1GE2OCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc1GE2OCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OCardEntry.setDescription('MC Configuration entry definition')
mc1GE2OPort1SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OPort1SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2OPort1SFPlink.setDescription("Center card's port1 SFP's link status")
mc1GE2OPort2SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OPort2SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2OPort2SFPlink.setDescription("Center card's port2 SFP's link status")
mc1GE2OTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OTxlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2OTxlink.setDescription("Center card's electrical port's link status")
mc1GE2OPortPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc1GE2OPortPri.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OPortPri.setDescription("Center card's Port Pri state")
mc1GE2OPort1SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OPort1SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OPort1SFPExist.setDescription('E2O Port1 SFP indication')
mc1GE2OPort2SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OPort2SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OPort2SFPExist.setDescription('E2O Port2 SFP indication')
mc1GE2OPortHWPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OPortHWPri.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OPortHWPri.setDescription("Center card's Port Hardware Pri state")
mc1GE2ORmtPort1SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtPort1SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2ORmtPort1SFPlink.setDescription("Remote card's port1 SFP's link status")
mc1GE2ORmtPort2SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtPort2SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2ORmtPort2SFPlink.setDescription("Remote card's port2 SFP's link status")
mc1GE2ORmtTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtTxlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2ORmtTxlink.setDescription("Remote card's electrical port's link status")
mc1GE2ORmtPort1SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtPort1SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GE2ORmtPort1SFPExist.setDescription('E2O Port1 SFP indication')
mc1GE2ORmtPort2SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtPort2SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GE2ORmtPort2SFPExist.setDescription('E2O Port2 SFP indication')
mc1GE2ORmtPortHWPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtPortHWPri.setStatus('current')
if mibBuilder.loadTexts: mc1GE2ORmtPortHWPri.setDescription("Remote card's Port Hardware Pri state")
mc1GO2OObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13))
mc1GO2OCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1))
mc1GO2OCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1), )
if mibBuilder.loadTexts: mc1GO2OCardTable.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OCardTable.setDescription('MC O2O Fiber backup Configuration table')
mc1GO2OCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc1GO2OCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OCardEntry.setDescription('MC Configuration entry definition')
mc1GO2OPort1SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort1SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2OPort1SFPlink.setDescription("Center card's port1 SFP's link status")
mc1GO2OPort2SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort2SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2OPort2SFPlink.setDescription("Center card's port2 SFP's link status")
mc1GO2OPort3SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort3SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2OPort3SFPlink.setDescription("Center card's port3 SFP's link status")
mc1GO2OPortPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc1GO2OPortPri.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPortPri.setDescription("Center card's Port Pri state")
mc1GO2OPort1SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort1SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPort1SFPExist.setDescription('O2O Port1 SFP indication')
mc1GO2OPort2SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort2SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPort2SFPExist.setDescription('O2O Port2 SFP indication')
mc1GO2OPort3SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("inserted", 1), ("removed", 2), ("na", 3), ("not-support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort3SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPort3SFPExist.setDescription('O2O Port3 SFP indication')
mc1GO2OPortHWPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPortHWPri.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPortHWPri.setDescription("Local card's Port Hardware Pri state")
mc1GO2OPort3HWSpd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("M100", 1), ("M1000", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort3HWSpd.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPort3HWSpd.setDescription("Local card's Port3 Hardware Speed state")
mc1GO2ORmtPort1SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort1SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2ORmtPort1SFPlink.setDescription("Remote card's port1 SFP's link status")
mc1GO2ORmtPort2SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort2SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2ORmtPort2SFPlink.setDescription("Remote card's port2 SFP's link status")
mc1GO2ORmtPort3SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort3SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2ORmtPort3SFPlink.setDescription("Remote card's port3 SFP's link status")
mc1GO2ORmtPort1SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort1SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2ORmtPort1SFPExist.setDescription('O2O Port1 SFP indication')
mc1GO2ORmtPort2SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort2SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2ORmtPort2SFPExist.setDescription('O2O Port2 SFP indication')
mc1GO2ORmtPort3SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("inserted", 1), ("removed", 2), ("na", 3), ("not-support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort3SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2ORmtPort3SFPExist.setDescription("Remote card's SFP3 indication")
mc1GO2ORmtPortHWPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPortHWPri.setStatus('current')
if mibBuilder.loadTexts: mc1GO2ORmtPortHWPri.setDescription("Remote card's Port Hardware Pri state")
mc1GO2ORmtPort3HWSpd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("M100", 1), ("M1000", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort3HWSpd.setStatus('current')
if mibBuilder.loadTexts: mc1GO2ORmtPort3HWSpd.setDescription("Remote card's Port3 Hardware Speed state")
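# --- Illustrative note (editor's sketch, not part of the generated MIB) ---
# The fiber-backup cards expose both a writable priority (mc1GO2OPortPri /
# mc1GE2OPortPri) and a read-only hardware priority (...PortHWPri).  The MIB
# does not state which wins when they disagree; a cautious manager might
# treat the hardware value as authoritative unless it reads not-support(3).
def _example_effective_priority(cfg_pri, hw_pri):
    NOT_SUPPORT = 3
    return cfg_pri if hw_pri == NOT_SUPPORT else hw_pri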
mc1GO2OSFP3Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2))
mc1GO2OSfp3Table = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1), )
if mibBuilder.loadTexts: mc1GO2OSfp3Table.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OSfp3Table.setDescription('MC 1G SFP table')
mc1GO2OSfp3Entry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcLoOrRmtFg"))
if mibBuilder.loadTexts: mc1GO2OSfp3Entry.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OSfp3Entry.setDescription('MC 1G O2O SFP3 entry definition')
mc1go2o_getSfpCmd = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setLabel("mc1go2o-getSfpCmd").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc1go2o_getSfpCmd.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_getSfpCmd.setDescription('This command will get the updated sfp information. Please send this command prior to getting the following params, otherwise the history sfp information will be sent back.')
mc1go2o_sfpCompliance = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 2), Integer32()).setLabel("mc1go2o-sfpCompliance").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpCompliance.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpCompliance.setDescription('SFP compliance (one byte) if 0 then the attributes of sfpTemperature/sfpTranPower/sfpRecvPower should be ignored')
mc1go2o_sfpConnector = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 3), Integer32()).setLabel("mc1go2o-sfpConnector").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpConnector.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpConnector.setDescription('SFP connector type (one byte) 0x01: SC 0x07: LC 0x22: RJ45 others: unsupported')
mc1go2o_sfpTransCode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 4), Integer32()).setLabel("mc1go2o-sfpTransCode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpTransCode.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpTransCode.setDescription('SFP transceiver code (one byte) bit0: SingleMode bit2: MultiMode bit3: MultiMode others: unsupported')
mc1go2o_sfpSmLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 5), Integer32()).setLabel("mc1go2o-sfpSmLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpSmLength.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpSmLength.setDescription('SFP link length for SingleMode, units of km. (one byte) applicable only when sfpTransCode is SingleMode')
mc1go2o_sfpMmLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 6), Integer32()).setLabel("mc1go2o-sfpMmLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpMmLength.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpMmLength.setDescription('SFP link length for MultiMode, units of 10m (one byte) applicable only when sfpTransCode is MultiMode')
mc1go2o_sfpCopperLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 7), Integer32()).setLabel("mc1go2o-sfpCopperLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpCopperLength.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpCopperLength.setDescription('SFP link length for Copper, units of m (one byte) applicable only when sfpConnector is RJ45')
mc1go2o_sfpBrSpeed = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 8), Integer32()).setLabel("mc1go2o-sfpBrSpeed").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpBrSpeed.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpBrSpeed.setDescription('SFP nominal signalling rate, units of 100Mbit/s (one byte)')
mc1go2o_sfpWavelength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 9), Integer32()).setLabel("mc1go2o-sfpWavelength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpWavelength.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpWavelength.setDescription('SFP laser wavelength (one word)')
mc1go2o_sfpTemperature = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 10), Integer32()).setLabel("mc1go2o-sfpTemperature").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpTemperature.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpTemperature.setDescription('SFP temperature (one type, signed)')
mc1go2o_sfpTranPower = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 11), Integer32()).setLabel("mc1go2o-sfpTranPower").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpTranPower.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpTranPower.setDescription('SFP tx power (one type, signed)')
mc1go2o_sfpRecvPower = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 12), Integer32()).setLabel("mc1go2o-sfpRecvPower").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpRecvPower.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpRecvPower.setDescription('SFP rx power (one type, signed)')
mc1go2o_sfpVoltage = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 13), Integer32()).setLabel("mc1go2o-sfpVoltage").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpVoltage.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpVoltage.setDescription('SFP voltage, units of 0.1mV (one word)')
mc10GOEO1RObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14))
mc10GOEO1RCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1))
mc10GOEO1RCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1), )
if mibBuilder.loadTexts: mc10GOEO1RCardTable.setStatus('current')
if mibBuilder.loadTexts: mc10GOEO1RCardTable.setDescription('MC 10G OEO 1R Configuration table')
mc10GOEO1RCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc10GOEO1RCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc10GOEO1RCardEntry.setDescription('MC Configuration entry definition')
mcAccXFP1WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcAccXFP1WaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: mcAccXFP1WaveLengthTunability.setDescription("XFP1's wavelength tunability")
mcAccXFP1WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcAccXFP1WaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: mcAccXFP1WaveLengthTunable.setDescription("XFP1's wavelength tunable status")
mcAccXFP1WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcAccXFP1WaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: mcAccXFP1WaveLength.setDescription("XFP1's wavelength")
mcNtwXFP2WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcNtwXFP2WaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: mcNtwXFP2WaveLengthTunability.setDescription("XFP2's wavelength tunability")
mcNtwXFP2WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcNtwXFP2WaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: mcNtwXFP2WaveLengthTunable.setDescription("XFP2's wavelength tunable status")
mcNtwXFP2WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcNtwXFP2WaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: mcNtwXFP2WaveLength.setDescription("XFP2's wavelength")
mcAccXFP1TunableType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcAccXFP1TunableType.setStatus('mandatory')
if mibBuilder.loadTexts: mcAccXFP1TunableType.setDescription("XFP1's wavelength tunable type")
mcNtwXFP2TunableType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcNtwXFP2TunableType.setStatus('mandatory')
if mibBuilder.loadTexts: mcNtwXFP2TunableType.setDescription("XFP2's wavelength tunable type")
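# --- Illustrative helper (editor's sketch, not part of the generated MIB) ---
# The 10G OEO 1R columns model wavelength tuning as: write the target to
# mcAccXFP1WaveLength (column 3), then poll mcAccXFP1WaveLengthTunable
# (column 2) until it reads Completed(2).  A hedged polling sketch; `target`
# is a (SnmpEngine, CommunityData, UdpTransportTarget, ContextData) tuple as
# in the QSFP example above, the shelf/card index suffix (1, 1) is a
# placeholder, and the wavelength encoding (the MIB only says "XFP1's
# wavelength") is left to the caller.
def _example_tune_acc_xfp1(target, wavelength, retries=10):
    import time
    from pysnmp.hlapi import (ObjectType, ObjectIdentity, getCmd, setCmd,
                              Integer32)
    base = (1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1)
    next(setCmd(*target, ObjectType(ObjectIdentity(base + (3, 1, 1)),
                                    Integer32(wavelength))))
    for _ in range(retries):
        _, _, _, varBinds = next(
            getCmd(*target, ObjectType(ObjectIdentity(base + (2, 1, 1)))))
        if int(varBinds[0][1]) == 2:  # Completed
            return True
        time.sleep(1)
    return False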
mc10GOEO3RObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15))
mc10GOEO3RCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1))
mc10GOEO3RCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1), )
if mibBuilder.loadTexts: mc10GOEO3RCardTable.setStatus('current')
if mibBuilder.loadTexts: mc10GOEO3RCardTable.setDescription('MC 10G OEO 3R tunable wavelength Configuration table')
mc10GOEO3RCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc10GOEO3RCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc10GOEO3RCardEntry.setDescription('MC Configuration entry definition')
accXFP1WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: accXFP1WaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: accXFP1WaveLengthTunability.setDescription("XFP1's wavelength tunability")
accXFP1WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: accXFP1WaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: accXFP1WaveLengthTunable.setDescription("XFP1's wavelength tunable status")
accXFP1WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: accXFP1WaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: accXFP1WaveLength.setDescription("XFP1's wavelength")
ntwXFP2WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntwXFP2WaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: ntwXFP2WaveLengthTunability.setDescription("XFP2's wavelength tunability")
ntwXFP2WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntwXFP2WaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: ntwXFP2WaveLengthTunable.setDescription("XFP2's wavelength tunable status")
ntwXFP2WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ntwXFP2WaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: ntwXFP2WaveLength.setDescription("XFP2's wavelength")
accXFP1TunableType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: accXFP1TunableType.setStatus('mandatory')
if mibBuilder.loadTexts: accXFP1TunableType.setDescription("XFP1's wavelength tunable type")
ntwXFP2TunableType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntwXFP2TunableType.setStatus('mandatory')
if mibBuilder.loadTexts: ntwXFP2TunableType.setDescription("XFP2's wavelength tunable type")
mcCWDMObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16))
mcCWDMCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1))
mcCWDMCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1), )
if mibBuilder.loadTexts: mcCWDMCardTable.setStatus('current')
if mibBuilder.loadTexts: mcCWDMCardTable.setDescription('MC CWDM table')
mcCWDMCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcCWDMCardEntry.setStatus('current')
if mibBuilder.loadTexts: mcCWDMCardEntry.setDescription('MC Configuration entry definition')
cwdmWavelengthCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelengthCount.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelengthCount.setDescription('wavelength count')
cwdmWavelength1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength1.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength1.setDescription('CWDM Card wavelength 1')
cwdmWavelength2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength2.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength2.setDescription('CWDM Card wavelength 2')
cwdmWavelength3 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength3.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength3.setDescription('CWDM Card wavelength 3')
cwdmWavelength4 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength4.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength4.setDescription('CWDM Card wavelength 4')
cwdmWavelength5 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength5.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength5.setDescription('CWDM Card wavelength 5')
cwdmWavelength6 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength6.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength6.setDescription('CWDM Card wavelength 6')
cwdmWavelength7 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength7.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength7.setDescription('CWDM Card wavelength 7')
cwdmWavelength8 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength8.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength8.setDescription('CWDM Card wavelength 8')
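# --- Illustrative helper (editor's sketch, not part of the generated MIB) ---
# cwdmWavelengthCount says how many of the eight DisplayString slots
# (cwdmWavelength1..8, columns 2..9) are meaningful.  A hedged sketch that
# gathers them; `target` is the usual (SnmpEngine, CommunityData,
# UdpTransportTarget, ContextData) tuple and the shelf/card index suffix
# (1, 1) is a placeholder.
def _example_read_cwdm_wavelengths(target):
    from pysnmp.hlapi import ObjectType, ObjectIdentity, getCmd
    base = (1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1)
    _, _, _, varBinds = next(
        getCmd(*target, ObjectType(ObjectIdentity(base + (1, 1, 1)))))
    count = int(varBinds[0][1])
    waves = []
    for col in range(2, 2 + min(count, 8)):
        _, _, _, vbs = next(
            getCmd(*target, ObjectType(ObjectIdentity(base + (col, 1, 1)))))
        waves.append(str(vbs[0][1]))
    return waves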
MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 4), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: cwdmWavelength3.setStatus('mandatory') if mibBuilder.loadTexts: cwdmWavelength3.setDescription('CWDM Card wavelenth 3') cwdmWavelength4 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 5), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: cwdmWavelength4.setStatus('mandatory') if mibBuilder.loadTexts: cwdmWavelength4.setDescription('CWDM Card wavelenth 4') cwdmWavelength5 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 6), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: cwdmWavelength5.setStatus('mandatory') if mibBuilder.loadTexts: cwdmWavelength5.setDescription('CWDM Card wavelenth 5') cwdmWavelength6 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 7), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: cwdmWavelength6.setStatus('mandatory') if mibBuilder.loadTexts: cwdmWavelength6.setDescription('CWDM Card wavelenth 6') cwdmWavelength7 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 8), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: cwdmWavelength7.setStatus('mandatory') if mibBuilder.loadTexts: cwdmWavelength7.setDescription('CWDM Card wavelenth 7') cwdmWavelength8 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 9), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: cwdmWavelength8.setStatus('mandatory') if mibBuilder.loadTexts: cwdmWavelength8.setDescription('CWDM Card wavelenth 8') mc10G_OEO2RObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17)).setLabel("mc10G-OEO2RObjects") mc10G_OEO2RCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1)).setLabel("mc10G-OEO2RCardObjects") mc10G_OEO2RCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1), ).setLabel("mc10G-OEO2RCardTable") if mibBuilder.loadTexts: mc10G_OEO2RCardTable.setStatus('current') if mibBuilder.loadTexts: mc10G_OEO2RCardTable.setDescription('MC 10G OEO 2R Configuration table') mc10G_OEO2RCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1), ).setLabel("mc10G-OEO2RCardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx")) if mibBuilder.loadTexts: mc10G_OEO2RCardEntry.setStatus('current') if mibBuilder.loadTexts: mc10G_OEO2RCardEntry.setDescription('MC Configuration entry definition') mc10G_OEO2RCurSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("Speed85", 1), ("Speed103to117", 2), ("Speed995to113", 3), ("not-support", 4)))).setLabel("mc10G-OEO2RCurSpdMode").setMaxAccess("readonly") if mibBuilder.loadTexts: mc10G_OEO2RCurSpdMode.setStatus('mandatory') if mibBuilder.loadTexts: mc10G_OEO2RCurSpdMode.setDescription("Center card's current speed mode 1: 8.5G 2: 10.3G-11.7G 3: 9.95G-11.3G ") mc10G_OEO2RCfgSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("Speed85", 1), ("Speed103to117", 2), ("Speed995to113", 3), ("not-support", 4)))).setLabel("mc10G-OEO2RCfgSpdMode").setMaxAccess("readwrite") if mibBuilder.loadTexts: mc10G_OEO2RCfgSpdMode.setStatus('mandatory') if mibBuilder.loadTexts: 
mc10G_OEO2RCfgSpdMode.setDescription("Center card's current speed mode 1: 8.5G 2: 10.3G-11.7G 3: 9.95G-11.3G ") mc10G_OEO2RSFP1Loopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RSFP1Loopback").setMaxAccess("readwrite") if mibBuilder.loadTexts: mc10G_OEO2RSFP1Loopback.setStatus('current') if mibBuilder.loadTexts: mc10G_OEO2RSFP1Loopback.setDescription("card's SFP1 Loopback state") mc10G_OEO2RSFP2Loopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RSFP2Loopback").setMaxAccess("readwrite") if mibBuilder.loadTexts: mc10G_OEO2RSFP2Loopback.setStatus('current') if mibBuilder.loadTexts: mc10G_OEO2RSFP2Loopback.setDescription("card's SFP2 Loopback state") mc10G_OEO2RSFP1 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RSFP1").setMaxAccess("readonly") if mibBuilder.loadTexts: mc10G_OEO2RSFP1.setStatus('current') if mibBuilder.loadTexts: mc10G_OEO2RSFP1.setDescription("Center card's SFP1 link status") mc10G_OEO2RSFP2 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RSFP2").setMaxAccess("readonly") if mibBuilder.loadTexts: mc10G_OEO2RSFP2.setStatus('current') if mibBuilder.loadTexts: mc10G_OEO2RSFP2.setDescription("Center card's SFP2 link status") mc10G_OEO2RHWSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("Speed85", 1), ("Speed103to117", 2), ("Speed995to113", 3), ("not-support", 4)))).setLabel("mc10G-OEO2RHWSpdMode").setMaxAccess("readonly") if mibBuilder.loadTexts: mc10G_OEO2RHWSpdMode.setStatus('current') if mibBuilder.loadTexts: mc10G_OEO2RHWSpdMode.setDescription("Center card's current speed mode 1: 8.5G 2: 10.3G-11.7G 3: 9.95G-11.3G ") mc10G_OEO2RHWSFP1Loopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RHWSFP1Loopback").setMaxAccess("readonly") if mibBuilder.loadTexts: mc10G_OEO2RHWSFP1Loopback.setStatus('current') if mibBuilder.loadTexts: mc10G_OEO2RHWSFP1Loopback.setDescription("card's HW Loopback state") mc10G_OEO2RHWSFP2Loopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RHWSFP2Loopback").setMaxAccess("readonly") if mibBuilder.loadTexts: mc10G_OEO2RHWSFP2Loopback.setStatus('current') if mibBuilder.loadTexts: mc10G_OEO2RHWSFP2Loopback.setDescription("card's HW Loopback state") mc10G_OEO2RVersion = MibScalar((1, 3, 6, 1, 4, 1, 
6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 10), DisplayString()).setLabel("mc10G-OEO2RVersion").setMaxAccess("readonly") if mibBuilder.loadTexts: mc10G_OEO2RVersion.setStatus('mandatory') if mibBuilder.loadTexts: mc10G_OEO2RVersion.setDescription('MC version') mc10GXFP1WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mc10GXFP1WaveLengthTunability.setStatus('mandatory') if mibBuilder.loadTexts: mc10GXFP1WaveLengthTunability.setDescription("XFP1's wavelength tunability") mc10GXFP1WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mc10GXFP1WaveLengthTunable.setStatus('mandatory') if mibBuilder.loadTexts: mc10GXFP1WaveLengthTunable.setDescription("XFP1's wavelength tunable status") mc10GXFP1WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 13), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: mc10GXFP1WaveLength.setStatus('mandatory') if mibBuilder.loadTexts: mc10GXFP1WaveLength.setDescription("XFP1's wavelength") mc10GXFP2WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mc10GXFP2WaveLengthTunability.setStatus('mandatory') if mibBuilder.loadTexts: mc10GXFP2WaveLengthTunability.setDescription("XFP2's wavelength tunability") mc10GXFP2WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mc10GXFP2WaveLengthTunable.setStatus('mandatory') if mibBuilder.loadTexts: mc10GXFP2WaveLengthTunable.setDescription("XFP2's wavelength tunable status") mc10GXFP2WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 16), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: mc10GXFP2WaveLength.setStatus('mandatory') if mibBuilder.loadTexts: mc10GXFP2WaveLength.setDescription("XFP2's wavelength") mc10G_OEO2R_accType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mc10G-OEO2R-accType").setMaxAccess("readonly") if mibBuilder.loadTexts: mc10G_OEO2R_accType.setStatus('current') if mibBuilder.loadTexts: mc10G_OEO2R_accType.setDescription('') mc10G_OEO2R_ntwType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mc10G-OEO2R-ntwType").setMaxAccess("readonly") if mibBuilder.loadTexts: mc10G_OEO2R_ntwType.setStatus('current') if mibBuilder.loadTexts: 
mc10G_OEO2R_ntwType.setDescription('') mc10G_OEO2R_accTunableType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setLabel("mc10G-OEO2R-accTunableType").setMaxAccess("readonly") if mibBuilder.loadTexts: mc10G_OEO2R_accTunableType.setStatus('mandatory') if mibBuilder.loadTexts: mc10G_OEO2R_accTunableType.setDescription("XFP1's wavelength tunable type") mc10G_OEO2R_ntwTunableType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setLabel("mc10G-OEO2R-ntwTunableType").setMaxAccess("readonly") if mibBuilder.loadTexts: mc10G_OEO2R_ntwTunableType.setStatus('mandatory') if mibBuilder.loadTexts: mc10G_OEO2R_ntwTunableType.setDescription("XFP2's wavelength tunable type") mcQCA8334Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18)) mcQCA8334CardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 1)) mcQCA8334CardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 1, 1), ) if mibBuilder.loadTexts: mcQCA8334CardTable.setStatus('current') if mibBuilder.loadTexts: mcQCA8334CardTable.setDescription('MC QCA8334 Configuration table') mcQCA8334CardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx")) if mibBuilder.loadTexts: mcQCA8334CardEntry.setStatus('current') if mibBuilder.loadTexts: mcQCA8334CardEntry.setDescription('MC Configuration entry definition') mcQCA8334VlanMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("Normal", 1), ("mode1", 2), ("mode2", 3), ("not-support", 4)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcQCA8334VlanMode.setStatus('current') if mibBuilder.loadTexts: mcQCA8334VlanMode.setDescription("Center card's vlan mode") mcQCA8334PortObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2)) mcQCA8334PortTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1), ) if mibBuilder.loadTexts: mcQCA8334PortTable.setStatus('current') if mibBuilder.loadTexts: mcQCA8334PortTable.setDescription('MC QCA8334 Configuration table') mcQCA8334PortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcQCA8334PortIdx")) if mibBuilder.loadTexts: mcQCA8334PortEntry.setStatus('current') if mibBuilder.loadTexts: mcQCA8334PortEntry.setDescription('MC Configuration entry definition') mcQCA8334PortIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("port1", 1), ("port2", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcQCA8334PortIdx.setStatus('current') if mibBuilder.loadTexts: mcQCA8334PortIdx.setDescription('Port index') mcQCA8334CurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("m100-full", 2), ("m100-half", 3), 
("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcQCA8334CurWorkMode.setStatus('mandatory') if mibBuilder.loadTexts: mcQCA8334CurWorkMode.setDescription("Center card's port current work mode") mcQCA8334CfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcQCA8334CfgWorkMode.setStatus('mandatory') if mibBuilder.loadTexts: mcQCA8334CfgWorkMode.setDescription("Center card's port configurable work mode") mcQCA8334UpStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 4), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(32, 1000000))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcQCA8334UpStream.setStatus('current') if mibBuilder.loadTexts: mcQCA8334UpStream.setDescription("Center card's port up stream of MC") mcQCA8334DownStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 5), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(32, 1000000))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcQCA8334DownStream.setStatus('current') if mibBuilder.loadTexts: mcQCA8334DownStream.setDescription("Center card's port down stream of MC") mcQCA8334Txlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcQCA8334Txlink.setStatus('current') if mibBuilder.loadTexts: mcQCA8334Txlink.setDescription("Center card's port 1 electrical port's link status") mcQCA8334RmtCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcQCA8334RmtCurWorkMode.setStatus('mandatory') if mibBuilder.loadTexts: mcQCA8334RmtCurWorkMode.setDescription("Remote card's port 1 current work mode") mcQCA8334RmtCfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcQCA8334RmtCfgWorkMode.setStatus('mandatory') if mibBuilder.loadTexts: mcQCA8334RmtCfgWorkMode.setDescription("Remote card's port1 configurable work mode") mcQCA8334RmtTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcQCA8334RmtTxlink.setStatus('current') if mibBuilder.loadTexts: mcQCA8334RmtTxlink.setDescription("Remote card's port electrical port status")
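# --- Hand-added illustrative sketch (not part of the generated MIB code) ---
# The mcQCA8334UpStream/mcQCA8334DownStream columns above are read-write
# Gauge32 values constrained to 32..1000000 and indexed by
# (mcShelfIdx, mcCardIdx, mcQCA8334PortIdx); the unit of the rate is not
# stated by this MIB. A minimal pysnmp (v4 hlapi) SET against
# mcQCA8334UpStream could look like the function below. The agent address,
# community string, index values (shelf 1, card 1, port 1) and the rate
# value 1024 are placeholders, not values taken from this MIB.
def _example_set_mcQCA8334UpStream():
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Gauge32, setCmd)
    error_indication, error_status, error_index, var_binds = next(setCmd(
        SnmpEngine(),
        CommunityData('private'),                # placeholder write community
        UdpTransportTarget(('192.0.2.1', 161)),  # placeholder agent address
        ContextData(),
        # OID = mcQCA8334UpStream (...4.2.18.2.1.1.4) + index shelf.card.port
        ObjectType(ObjectIdentity('1.3.6.1.4.1.6688.1.1.1.4.2.18.2.1.1.4.1.1.1'),
                   Gauge32(1024))))
    return error_indication, error_status, error_index, var_binds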
mcE1T1Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19)) mcE1T1CardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1)) mcE1T1CardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1), ) if mibBuilder.loadTexts: mcE1T1CardTable.setStatus('current') if mibBuilder.loadTexts: mcE1T1CardTable.setDescription('MC E1T1 Configuration table') mcE1T1CardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx")) if mibBuilder.loadTexts: mcE1T1CardEntry.setStatus('current') if mibBuilder.loadTexts: mcE1T1CardEntry.setDescription('MC Configuration entry definition') mcE1T1Type = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("E1", 1), ("T1", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcE1T1Type.setStatus('mandatory') if mibBuilder.loadTexts: mcE1T1Type.setDescription("Center card's current type") mcE1T1FLink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Up", 1), ("Down", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcE1T1FLink.setStatus('mandatory') if mibBuilder.loadTexts: mcE1T1FLink.setDescription("Center card's current link") mcE1T1FLossAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcE1T1FLossAlarm.setStatus('current') if mibBuilder.loadTexts: mcE1T1FLossAlarm.setDescription('') mcE1T1TLossAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcE1T1TLossAlarm.setStatus('current') if mibBuilder.loadTexts: mcE1T1TLossAlarm.setDescription('') mcE1T1AISAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcE1T1AISAlarm.setStatus('current') if mibBuilder.loadTexts: mcE1T1AISAlarm.setDescription('') mcE1T1TLoop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcE1T1TLoop.setStatus('current') if mibBuilder.loadTexts: mcE1T1TLoop.setDescription('Tx Loopback state') mcE1T1FLoop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcE1T1FLoop.setStatus('current') if mibBuilder.loadTexts: mcE1T1FLoop.setDescription('Fx Loopback state') mcE1T1CodeType = 
MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("E1-HDB3-Or-T1-B8ZS", 1), ("AMI", 2), ("not-support", 3)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcE1T1CodeType.setStatus('current') if mibBuilder.loadTexts: mcE1T1CodeType.setDescription('coding type') mcE1T1Version = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 9), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: mcE1T1Version.setStatus('mandatory') if mibBuilder.loadTexts: mcE1T1Version.setDescription('MC version') mcE1T1RmtFLink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Up", 1), ("Down", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcE1T1RmtFLink.setStatus('mandatory') if mibBuilder.loadTexts: mcE1T1RmtFLink.setDescription("Center card's current link") mcE1T1RmtFLossAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcE1T1RmtFLossAlarm.setStatus('current') if mibBuilder.loadTexts: mcE1T1RmtFLossAlarm.setDescription('') mcE1T1RmtTLossAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcE1T1RmtTLossAlarm.setStatus('current') if mibBuilder.loadTexts: mcE1T1RmtTLossAlarm.setDescription('') mcE1T1RmtAISAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: mcE1T1RmtAISAlarm.setStatus('current') if mibBuilder.loadTexts: mcE1T1RmtAISAlarm.setDescription('') mcE1T1RmtTLoop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcE1T1RmtTLoop.setStatus('current') if mibBuilder.loadTexts: mcE1T1RmtTLoop.setDescription('Tx Loopback state') mcE1T1RmtFLoop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcE1T1RmtFLoop.setStatus('current') if mibBuilder.loadTexts: mcE1T1RmtFLoop.setDescription('Fx Loopback state') mcE1T1RmtCodeType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("E1-HDB3-Or-T1-B8ZS", 1), ("AMI", 2), ("not-support", 3)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcE1T1RmtCodeType.setStatus('current') if mibBuilder.loadTexts: 
mcE1T1RmtCodeType.setDescription('coding type') mc10GOEEXFPTunableObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20)) mc10GOEEXFPTunableCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1)) mc10GOEEXFPTunableCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1), ) if mibBuilder.loadTexts: mc10GOEEXFPTunableCardTable.setStatus('current') if mibBuilder.loadTexts: mc10GOEEXFPTunableCardTable.setDescription('MC 10G OEE tunable wavelength Configuration table') mc10GOEEXFPTunableCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx")) if mibBuilder.loadTexts: mc10GOEEXFPTunableCardEntry.setStatus('current') if mibBuilder.loadTexts: mc10GOEEXFPTunableCardEntry.setDescription('MC Configuration entry definition') xfpWaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: xfpWaveLengthTunability.setStatus('mandatory') if mibBuilder.loadTexts: xfpWaveLengthTunability.setDescription("XFP's wavelength tunability") xfpWaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: xfpWaveLengthTunable.setStatus('mandatory') if mibBuilder.loadTexts: xfpWaveLengthTunable.setDescription("XFP's wavelength tunable status") xfpWaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1, 1, 3), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: xfpWaveLength.setStatus('mandatory') if mibBuilder.loadTexts: xfpWaveLength.setDescription("XFP's wavelength") xfpTunableType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: xfpTunableType.setStatus('mandatory') if mibBuilder.loadTexts: xfpTunableType.setDescription("XFP's wavelength tunable type") mcPmObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3)) mcPmTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1), ) if mibBuilder.loadTexts: mcPmTable.setStatus('current') if mibBuilder.loadTexts: mcPmTable.setDescription('MC Performance table') mcPmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx")) if mibBuilder.loadTexts: mcPmEntry.setStatus('current') if mibBuilder.loadTexts: mcPmEntry.setDescription('MC Performance entry definition') mcRxByteHi = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1, 1), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: mcRxByteHi.setStatus('current') if mibBuilder.loadTexts: mcRxByteHi.setDescription('The total number of receive bytes (high)') mcRxByteLo = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1, 2), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: mcRxByteLo.setStatus('current') if mibBuilder.loadTexts: mcRxByteLo.setDescription('The total number of receive bytes (low)')
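# --- Hand-added illustrative note (not part of the generated MIB code) ---
# mcRxByteHi/mcRxByteLo (and mcTxByteHi/mcTxByteLo below) expose one byte
# counter as two 32-bit Counter32 halves, per their "(high)"/"(low)"
# descriptions. A manager would typically recombine them as hi * 2**32 + lo;
# the helper below is a minimal sketch of that assumed convention.
def _combine_hi_lo_counter(hi, lo):
    """Recombine a (high, low) pair of 32-bit counter values into one int."""
    return (int(hi) << 32) | (int(lo) & 0xFFFFFFFF)
# e.g. _combine_hi_lo_counter(1, 2) == 0x100000002 == 4294967298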
mcTxByteHi = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1, 3), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: mcTxByteHi.setStatus('current') if mibBuilder.loadTexts: mcTxByteHi.setDescription('The total number of transmit bytes (high)') mcTxByteLo = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1, 4), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: mcTxByteLo.setStatus('current') if mibBuilder.loadTexts: mcTxByteLo.setDescription('The total number of transmit bytes (low)') mcPmRest = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("idle", 1), ("reset", 2), ("not-support", 3)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mcPmRest.setStatus('current') if mibBuilder.loadTexts: mcPmRest.setDescription('reset counter') shelf_Detected = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 1)).setLabel("shelf-Detected").setObjects(("XXX-MIB", "shelfIdx")) if mibBuilder.loadTexts: shelf_Detected.setStatus('current') if mibBuilder.loadTexts: shelf_Detected.setDescription('A slave shelf is detected (1~19)') shelf_Lost = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 2)).setLabel("shelf-Lost").setObjects(("XXX-MIB", "shelfIdx")) if mibBuilder.loadTexts: shelf_Lost.setStatus('current') if mibBuilder.loadTexts: shelf_Lost.setDescription('A shelf is lost') shelf_psuA_On = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 3)).setLabel("shelf-psuA-On").setObjects(("XXX-MIB", "shelfIdx")) if mibBuilder.loadTexts: shelf_psuA_On.setStatus('current') if mibBuilder.loadTexts: shelf_psuA_On.setDescription('PSU A is detected') shelf_psuA_Off = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 4)).setLabel("shelf-psuA-Off").setObjects(("XXX-MIB", "shelfIdx")) if mibBuilder.loadTexts: shelf_psuA_Off.setStatus('current') if mibBuilder.loadTexts: shelf_psuA_Off.setDescription('PSU A is lost') shelf_psuB_On = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 5)).setLabel("shelf-psuB-On").setObjects(("XXX-MIB", "shelfIdx")) if mibBuilder.loadTexts: shelf_psuB_On.setStatus('current') if mibBuilder.loadTexts: shelf_psuB_On.setDescription('PSU B is detected') shelf_psuB_Off = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 6)).setLabel("shelf-psuB-Off").setObjects(("XXX-MIB", "shelfIdx")) if mibBuilder.loadTexts: shelf_psuB_Off.setStatus('current') if mibBuilder.loadTexts: shelf_psuB_Off.setDescription('PSU B is lost') shelf_fan_On = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 7)).setLabel("shelf-fan-On").setObjects(("XXX-MIB", "shelfIdx")) if mibBuilder.loadTexts: shelf_fan_On.setStatus('current') if mibBuilder.loadTexts: shelf_fan_On.setDescription('Fan A is detected') shelf_fan_Off = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 8)).setLabel("shelf-fan-Off").setObjects(("XXX-MIB", "shelfIdx")) if mibBuilder.loadTexts: shelf_fan_Off.setStatus('current') if mibBuilder.loadTexts: shelf_fan_Off.setDescription('Fan A is lost') card_Detected = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 20)).setLabel("card-Detected").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_Detected.setStatus('current') if mibBuilder.loadTexts: card_Detected.setDescription('A card is detected (20~29)') card_Lost = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 21)).setLabel("card-Lost").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
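# --- Hand-added descriptive note (derived from the trap descriptions) ---
# Notification numbering convention in this MIB, as stated in the
# descriptions themselves: 1~19 are shelf-level traps (shelf detected/lost,
# PSU A/B, fan), 20~29 are card presence traps (card detected/lost), and
# 30 upwards are per-card link and transceiver (SFP/XFP/QSFP) state traps.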
if mibBuilder.loadTexts: card_Lost.setStatus('current') if mibBuilder.loadTexts: card_Lost.setDescription('A card is lost') card_MC_Co_Tx_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 30)).setLabel("card-MC-Co-Tx-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Tx_Up.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Tx_Up.setDescription('The tx link of mc in center side is up (above 30)') card_MC_Co_Tx_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 31)).setLabel("card-MC-Co-Tx-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Tx_Down.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Tx_Down.setDescription('The tx link of mc in center side is broken') card_MC_Co_Fx_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 32)).setLabel("card-MC-Co-Fx-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Fx_Up.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Fx_Up.setDescription('The fx link of mc in center side is up') card_MC_Co_Fx_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 33)).setLabel("card-MC-Co-Fx-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Fx_Down.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Fx_Down.setDescription('The fx link of mc in center side is broken') card_MC_Rmt_Tx_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 34)).setLabel("card-MC-Rmt-Tx-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up.setDescription('The tx link of mc in customer side is up') card_MC_Rmt_Tx_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 35)).setLabel("card-MC-Rmt-Tx-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down.setDescription('The tx link of mc in customer side is broken') card_MC_Rmt_PwrDown = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 36)).setLabel("card-MC-Rmt-PwrDown").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_PwrDown.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_PwrDown.setDescription('Remote mc power down detected') card_MC_Co_Ntw_SFP_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 37)).setLabel("card-MC-Co-Ntw-SFP-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Ntw_SFP_Inserted.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Ntw_SFP_Inserted.setDescription('Local network port SFP inserted') card_MC_Co_Ntw_SFP_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 38)).setLabel("card-MC-Co-Ntw-SFP-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Ntw_SFP_Removed.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Ntw_SFP_Removed.setDescription('Local network port SFP removed') card_MC_Co_Acc_SFP_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 39)).setLabel("card-MC-Co-Acc-SFP-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Acc_SFP_Inserted.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Acc_SFP_Inserted.setDescription('Local access port SFP inserted') card_MC_Co_Acc_SFP_Removed = 
NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 40)).setLabel("card-MC-Co-Acc-SFP-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Acc_SFP_Removed.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Acc_SFP_Removed.setDescription('Local access port SFP removed') card_MC_Rmt_Acc_SFP_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 41)).setLabel("card-MC-Rmt-Acc-SFP-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_Acc_SFP_Inserted.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_Acc_SFP_Inserted.setDescription('Remote access port SFP inserted') card_MC_Rmt_Acc_SFP_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 42)).setLabel("card-MC-Rmt-Acc-SFP-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_Acc_SFP_Removed.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_Acc_SFP_Removed.setDescription('Remote access port SFP removed') card_MC_Co_Tx_Up1 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 43)).setLabel("card-MC-Co-Tx-Up1").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Tx_Up1.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Tx_Up1.setDescription('The tx1 link of mc in center side is up') card_MC_Co_Tx_Down1 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 44)).setLabel("card-MC-Co-Tx-Down1").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Tx_Down1.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Tx_Down1.setDescription('The tx1 link of mc in center side is broken') card_MC_Co_Tx_Up2 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 45)).setLabel("card-MC-Co-Tx-Up2").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Tx_Up2.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Tx_Up2.setDescription('The tx2 link of mc in center side is up') card_MC_Co_Tx_Down2 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 46)).setLabel("card-MC-Co-Tx-Down2").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Tx_Down2.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Tx_Down2.setDescription('The tx2 link of mc in center side is broken') card_MC_Rmt_Tx_Up1 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 47)).setLabel("card-MC-Rmt-Tx-Up1").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up1.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up1.setDescription('The tx1 link of mc in customer side is up') card_MC_Rmt_Tx_Down1 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 48)).setLabel("card-MC-Rmt-Tx-Down1").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down1.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down1.setDescription('The tx1 link of mc in customer side is broken') card_MC_Rmt_Tx_Up2 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 49)).setLabel("card-MC-Rmt-Tx-Up2").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up2.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up2.setDescription('The tx2 link of mc in customer side is up') card_MC_Rmt_Tx_Down2 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 
50)).setLabel("card-MC-Rmt-Tx-Down2").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down2.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down2.setDescription('The tx2 link of mc in customer side is broken') card_MC_Co_SFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 51)).setLabel("card-MC-Co-SFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFP1_Inserted.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFP1_Inserted.setDescription('Local SFP1 inserted') card_MC_Co_SFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 52)).setLabel("card-MC-Co-SFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFP1_Removed.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFP1_Removed.setDescription('Local SFP1 removed') card_MC_Co_SFP2_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 53)).setLabel("card-MC-Co-SFP2-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFP2_Inserted.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFP2_Inserted.setDescription('Local SFP2 inserted') card_MC_Co_SFP2_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 54)).setLabel("card-MC-Co-SFP2-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFP2_Removed.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFP2_Removed.setDescription('Local SFP2 removed') card_MC_Co_SFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 55)).setLabel("card-MC-Co-SFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFP1_Up.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFP1_Up.setDescription('The SFP1 link of mc in center side is up') card_MC_Co_SFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 56)).setLabel("card-MC-Co-SFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFP1_Down.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFP1_Down.setDescription('The SFP1 link of mc in center side is broken') card_MC_Co_SFP2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 57)).setLabel("card-MC-Co-SFP2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFP2_Up.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFP2_Up.setDescription('The SFP2 link of mc in center side is up') card_MC_Co_SFP2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 58)).setLabel("card-MC-Co-SFP2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFP2_Down.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFP2_Down.setDescription('The SFP2 link of mc in center side is broken') card_MC_Rmt_SFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 59)).setLabel("card-MC-Rmt-SFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Inserted.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Inserted.setDescription('Remote SFP1 inserted') card_MC_Rmt_SFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 60)).setLabel("card-MC-Rmt-SFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: 
card_MC_Rmt_SFP1_Removed.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Removed.setDescription('Remote SFP1 removed') card_MC_Rmt_SFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 61)).setLabel("card-MC-Rmt-SFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Up.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Up.setDescription('The SFP1 link of mc in customer side is up') card_MC_Rmt_SFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 62)).setLabel("card-MC-Rmt-SFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Down.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Down.setDescription('The SFP1 link of mc in customer side is broken') card_MC_Co_SFPSFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 63)).setLabel("card-MC-Co-SFPSFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Inserted.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Inserted.setDescription('Local SFP+1 inserted') card_MC_Co_SFPSFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 64)).setLabel("card-MC-Co-SFPSFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Removed.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Removed.setDescription('Local SFP+1 removed') card_MC_Co_SFPSFP2_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 65)).setLabel("card-MC-Co-SFPSFP2-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Inserted.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Inserted.setDescription('Local SFPSFP2 inserted') card_MC_Co_SFPSFP2_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 66)).setLabel("card-MC-Co-SFPSFP2-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Removed.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Removed.setDescription('Local SFP+2 removed') card_MC_Rmt_SFPSFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 67)).setLabel("card-MC-Rmt-SFPSFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Inserted.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Inserted.setDescription('Remote SFP+1 inserted') card_MC_Rmt_SFPSFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 68)).setLabel("card-MC-Rmt-SFPSFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Removed.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Removed.setDescription('Remote SFP+1 removed') card_MC_Co_XFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 69)).setLabel("card-MC-Co-XFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_XFP1_Inserted.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_XFP1_Inserted.setDescription('Local XFP+1 inserted') card_MC_Co_XFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 70)).setLabel("card-MC-Co-XFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_XFP1_Removed.setStatus('current') if mibBuilder.loadTexts: 
card_MC_Co_XFP1_Removed.setDescription('Local XFP+1 removed') card_MC_Co_XFP2_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 71)).setLabel("card-MC-Co-XFP2-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_XFP2_Inserted.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_XFP2_Inserted.setDescription('Local XFP2 inserted') card_MC_Co_XFP2_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 72)).setLabel("card-MC-Co-XFP2-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_XFP2_Removed.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_XFP2_Removed.setDescription('Local XFP+2 removed') card_MC_Rmt_XFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 73)).setLabel("card-MC-Rmt-XFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Inserted.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Inserted.setDescription('Remote XFP+1 inserted') card_MC_Rmt_XFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 74)).setLabel("card-MC-Rmt-XFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Removed.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Removed.setDescription('Remote XFP+1 removed') card_MC_Co_SFPSFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 75)).setLabel("card-MC-Co-SFPSFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Up.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Up.setDescription('The SFP+1 link of mc in center side is up') card_MC_Co_SFPSFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 76)).setLabel("card-MC-Co-SFPSFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Down.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Down.setDescription('The SFP+1 link of mc in center side is broken') card_MC_Co_SFPSFP2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 77)).setLabel("card-MC-Co-SFPSFP2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Up.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Up.setDescription('The SFP+2 link of mc in center side is up') card_MC_Co_SFPSFP2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 78)).setLabel("card-MC-Co-SFPSFP2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Down.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Down.setDescription('The SFP+2 link of mc in center side is broken') card_MC_Rmt_SFPSFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 79)).setLabel("card-MC-Rmt-SFPSFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Up.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Up.setDescription('The SFPSFP1 link of mc in customer side is up') card_MC_Rmt_SFPSFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 80)).setLabel("card-MC-Rmt-SFPSFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Down.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Down.setDescription('The SFP+1 link of mc in customer side 
is broken') card_MC_Co_XFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 81)).setLabel("card-MC-Co-XFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_XFP1_Up.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_XFP1_Up.setDescription('The XFP1 link of mc in center side is up') card_MC_Co_XFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 82)).setLabel("card-MC-Co-XFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_XFP1_Down.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_XFP1_Down.setDescription('The XFP1 link of mc in center side is broken') card_MC_Co_XFP2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 83)).setLabel("card-MC-Co-XFP2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_XFP2_Up.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_XFP2_Up.setDescription('The XFP2 link of mc in center side is up') card_MC_Co_XFP2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 84)).setLabel("card-MC-Co-XFP2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_XFP2_Down.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_XFP2_Down.setDescription('The XFP2 link of mc in center side is broken') card_MC_Rmt_XFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 85)).setLabel("card-MC-Rmt-XFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Up.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Up.setDescription('The XFP1 link of mc in customer side is up') card_MC_Rmt_XFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 86)).setLabel("card-MC-Rmt-XFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Down.setStatus('current') if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Down.setDescription('The XFP link of mc in customer side is broken') card_MC_Co_SFP3_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 87)).setLabel("card-MC-Co-SFP3-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFP3_Inserted.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFP3_Inserted.setDescription('Local SFP3 inserted') card_MC_Co_SFP3_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 88)).setLabel("card-MC-Co-SFP3-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_SFP3_Removed.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_SFP3_Removed.setDescription('Local SFP3 removed') card_MC_Co_Port1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 89)).setLabel("card-MC-Co-Port1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Port1_Up.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Port1_Up.setDescription('The Port1 link of mc in center side is up') card_MC_Co_Port1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 90)).setLabel("card-MC-Co-Port1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Port1_Down.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Port1_Down.setDescription('The Port1 link of mc in center side is broken') card_MC_Co_Port2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 
91)).setLabel("card-MC-Co-Port2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Port2_Up.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Port2_Up.setDescription('The Port2 link of mc in center side is up') card_MC_Co_Port2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 92)).setLabel("card-MC-Co-Port2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Port2_Down.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Port2_Down.setDescription('The Port2 link of mc in center side is broken') card_MC_Co_Port3_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 93)).setLabel("card-MC-Co-Port3-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Port3_Up.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Port3_Up.setDescription('The Port3 link of mc in center side is up') card_MC_Co_Port3_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 94)).setLabel("card-MC-Co-Port3-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_Port3_Down.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_Port3_Down.setDescription('The Port3 link of mc in center side is broken') card_MC_FAN_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 100)).setLabel("card-MC-FAN-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_FAN_Normal.setStatus('current') if mibBuilder.loadTexts: card_MC_FAN_Normal.setDescription('Fan card work normally') card_MC_FAN_Abnormal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 101)).setLabel("card-MC-FAN-Abnormal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_FAN_Abnormal.setStatus('current') if mibBuilder.loadTexts: card_MC_FAN_Abnormal.setDescription('Fan card work abnormally') card_MC_Co_QSFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 102)).setLabel("card-MC-Co-QSFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_QSFP1_Inserted.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_QSFP1_Inserted.setDescription('Local QSFP1 inserted') card_MC_Co_QSFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 103)).setLabel("card-MC-Co-QSFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_QSFP1_Removed.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_QSFP1_Removed.setDescription('Local QSFP1 removed') card_MC_Co_QSFP2_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 104)).setLabel("card-MC-Co-QSFP2-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_QSFP2_Inserted.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_QSFP2_Inserted.setDescription('Local QSFP2 inserted') card_MC_Co_QSFP2_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 105)).setLabel("card-MC-Co-QSFP2-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: card_MC_Co_QSFP2_Removed.setStatus('current') if mibBuilder.loadTexts: card_MC_Co_QSFP2_Removed.setDescription('Local QSFP2 removed') card_MC_Co_QSFP1_Lane1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 106)).setLabel("card-MC-Co-QSFP1-Lane1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx")) if mibBuilder.loadTexts: 
card_MC_Co_QSFP1_Lane1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane1_Up.setDescription('The QSFP1 Lane1 link of mc in center side is up')
card_MC_Co_QSFP1_Lane1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 107)).setLabel("card-MC-Co-QSFP1-Lane1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane1_Down.setDescription('The QSFP1 lane1 link of mc in center side is broken')
card_MC_Co_QSFP1_Lane2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 108)).setLabel("card-MC-Co-QSFP1-Lane2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane2_Up.setDescription('The QSFP1 Lane2 link of mc in center side is up')
card_MC_Co_QSFP1_Lane2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 109)).setLabel("card-MC-Co-QSFP1-Lane2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane2_Down.setDescription('The QSFP1 lane2 link of mc in center side is broken')
card_MC_Co_QSFP1_Lane3_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 110)).setLabel("card-MC-Co-QSFP1-Lane3-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane3_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane3_Up.setDescription('The QSFP1 Lane3 link of mc in center side is up')
card_MC_Co_QSFP1_Lane3_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 111)).setLabel("card-MC-Co-QSFP1-Lane3-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane3_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane3_Down.setDescription('The QSFP1 lane3 link of mc in center side is broken')
card_MC_Co_QSFP1_Lane4_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 112)).setLabel("card-MC-Co-QSFP1-Lane4-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane4_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane4_Up.setDescription('The QSFP1 Lane4 link of mc in center side is up')
card_MC_Co_QSFP1_Lane4_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 113)).setLabel("card-MC-Co-QSFP1-Lane4-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane4_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane4_Down.setDescription('The QSFP1 lane4 link of mc in center side is broken')
card_MC_Co_QSFP2_Lane1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 114)).setLabel("card-MC-Co-QSFP2-Lane1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane1_Up.setDescription('The QSFP2 Lane1 link of mc in center side is up')
card_MC_Co_QSFP2_Lane1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 115)).setLabel("card-MC-Co-QSFP2-Lane1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane1_Down.setDescription('The QSFP2 lane1 link of mc in center side is broken')
card_MC_Co_QSFP2_Lane2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 116)).setLabel("card-MC-Co-QSFP2-Lane2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane2_Up.setDescription('The QSFP2 Lane2 link of mc in center side is up')
card_MC_Co_QSFP2_Lane2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 117)).setLabel("card-MC-Co-QSFP2-Lane2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane2_Down.setDescription('The QSFP2 lane2 link of mc in center side is broken')
card_MC_Co_QSFP2_Lane3_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 118)).setLabel("card-MC-Co-QSFP2-Lane3-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane3_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane3_Up.setDescription('The QSFP2 Lane3 link of mc in center side is up')
card_MC_Co_QSFP2_Lane3_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 119)).setLabel("card-MC-Co-QSFP2-Lane3-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane3_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane3_Down.setDescription('The QSFP2 lane3 link of mc in center side is broken')
card_MC_Co_QSFP2_Lane4_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 120)).setLabel("card-MC-Co-QSFP2-Lane4-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane4_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane4_Up.setDescription('The QSFP2 Lane4 link of mc in center side is up')
card_MC_Co_QSFP2_Lane4_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 121)).setLabel("card-MC-Co-QSFP2-Lane4-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane4_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane4_Down.setDescription('The QSFP2 lane4 link of mc in center side is broken')
card_MC_Rmt_SFP2_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 122)).setLabel("card-MC-Rmt-SFP2-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Inserted.setDescription('Remote SFP2 inserted')
card_MC_Rmt_SFP2_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 123)).setLabel("card-MC-Rmt-SFP2-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Removed.setDescription('Remote SFP2 removed')
card_MC_Rmt_SFP3_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 124)).setLabel("card-MC-Rmt-SFP3-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Inserted.setDescription('Remote SFP3 inserted')
card_MC_Rmt_SFP3_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 125)).setLabel("card-MC-Rmt-SFP3-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Removed.setDescription('Remote SFP3 removed')
card_MC_Rmt_SFP2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 126)).setLabel("card-MC-Rmt-SFP2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Up.setDescription('The SFP2 link of mc in customer side is up')
card_MC_Rmt_SFP2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 127)).setLabel("card-MC-Rmt-SFP2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Down.setDescription('The SFP2 link of mc in customer side is broken')
card_MC_Rmt_SFP3_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 128)).setLabel("card-MC-Rmt-SFP3-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Up.setDescription('The SFP3 link of mc in customer side is up')
card_MC_Rmt_SFP3_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 129)).setLabel("card-MC-Rmt-SFP3-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Down.setDescription('The SFP3 link of mc in customer side is broken')
card_MC_E1_Co_Port1_LOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 130)).setLabel("card-MC-E1-Co-Port1-LOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_LOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_LOS_Alarm.setDescription('Port1 LOS alarm in center side')
card_MC_E1_Co_Port1_LOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 131)).setLabel("card-MC-E1-Co-Port1-LOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_LOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_LOS_Normal.setDescription('Port1 LOS normal in center side')
card_MC_E1_Co_Port1_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 132)).setLabel("card-MC-E1-Co-Port1-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_AIS_Alarm.setDescription('Port1 AIS alarm in center side')
card_MC_E1_Co_Port1_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 133)).setLabel("card-MC-E1-Co-Port1-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_AIS_Normal.setDescription('Port1 AIS normal in center side')
card_MC_E1_Co_Port1_CV_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 134)).setLabel("card-MC-E1-Co-Port1-CV-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_CV_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_CV_Alarm.setDescription('Port1 CV alarm in center side')
card_MC_E1_Co_Port1_CV_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 135)).setLabel("card-MC-E1-Co-Port1-CV-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_CV_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_CV_Normal.setDescription('Port1 CV normal in center side')
card_MC_E1_Co_Port2_LOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 136)).setLabel("card-MC-E1-Co-Port2-LOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_LOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_LOS_Alarm.setDescription('Port2 LOS alarm in center side')
card_MC_E1_Co_Port2_LOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 137)).setLabel("card-MC-E1-Co-Port2-LOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_LOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_LOS_Normal.setDescription('Port2 LOS normal in center side')
card_MC_E1_Co_Port2_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 138)).setLabel("card-MC-E1-Co-Port2-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_AIS_Alarm.setDescription('Port2 AIS alarm in center side')
card_MC_E1_Co_Port2_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 139)).setLabel("card-MC-E1-Co-Port2-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_AIS_Normal.setDescription('Port2 AIS normal in center side')
card_MC_E1_Co_Port2_CV_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 140)).setLabel("card-MC-E1-Co-Port2-CV-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_CV_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_CV_Alarm.setDescription('Port2 CV alarm in center side')
card_MC_E1_Co_Port2_CV_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 141)).setLabel("card-MC-E1-Co-Port2-CV-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_CV_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_CV_Normal.setDescription('Port2 CV normal in center side')
card_MC_E1_Rmt_Port1_LOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 142)).setLabel("card-MC-E1-Rmt-Port1-LOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_LOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_LOS_Alarm.setDescription('Port1 LOS alarm in customer side')
card_MC_E1_Rmt_Port1_LOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 143)).setLabel("card-MC-E1-Rmt-Port1-LOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_LOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_LOS_Normal.setDescription('Port1 LOS normal in customer side')
card_MC_E1_Rmt_Port1_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 144)).setLabel("card-MC-E1-Rmt-Port1-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_AIS_Alarm.setDescription('Port1 AIS alarm in customer side')
card_MC_E1_Rmt_Port1_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 145)).setLabel("card-MC-E1-Rmt-Port1-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_AIS_Normal.setDescription('Port1 AIS normal in customer side')
card_MC_E1_Rmt_Port1_CV_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 146)).setLabel("card-MC-E1-Rmt-Port1-CV-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_CV_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_CV_Alarm.setDescription('Port1 CV alarm in customer side')
card_MC_E1_Rmt_Port1_CV_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 147)).setLabel("card-MC-E1-Rmt-Port1-CV-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_CV_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_CV_Normal.setDescription('Port1 CV normal in customer side')
card_MC_E1_Rmt_Port2_LOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 148)).setLabel("card-MC-E1-Rmt-Port2-LOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_LOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_LOS_Alarm.setDescription('Port2 LOS alarm in customer side')
card_MC_E1_Rmt_Port2_LOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 149)).setLabel("card-MC-E1-Rmt-Port2-LOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_LOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_LOS_Normal.setDescription('Port2 LOS normal in customer side')
card_MC_E1_Rmt_Port2_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 150)).setLabel("card-MC-E1-Rmt-Port2-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_AIS_Alarm.setDescription('Port2 AIS alarm in customer side')
card_MC_E1_Rmt_Port2_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 151)).setLabel("card-MC-E1-Rmt-Port2-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_AIS_Normal.setDescription('Port2 AIS normal in customer side')
card_MC_E1_Rmt_Port2_CV_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 152)).setLabel("card-MC-E1-Rmt-Port2-CV-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_CV_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_CV_Alarm.setDescription('Port2 CV alarm in customer side')
card_MC_E1_Rmt_Port2_CV_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 153)).setLabel("card-MC-E1-Rmt-Port2-CV-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_CV_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_CV_Normal.setDescription('Port2 CV normal in customer side')
card_MC_Co_SFP3_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 154)).setLabel("card-MC-Co-SFP3-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP3_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP3_Up.setDescription('The SFP3 link of mc in center side is up')
card_MC_Co_SFP3_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 155)).setLabel("card-MC-Co-SFP3-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP3_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP3_Down.setDescription('The SFP3 link of mc in center side is broken')
card_MC_E1T1_Co_TXLOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 156)).setLabel("card-MC-E1T1-Co-TXLOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_TXLOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_TXLOS_Alarm.setDescription('Tx LOS alarm in center side')
card_MC_E1T1_Co_TXLOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 157)).setLabel("card-MC-E1T1-Co-TXLOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_TXLOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_TXLOS_Normal.setDescription('Tx LOS normal in center side')
card_MC_E1T1_Co_FXLOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 158)).setLabel("card-MC-E1T1-Co-FXLOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_FXLOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_FXLOS_Alarm.setDescription('Fx LOS alarm in center side')
card_MC_E1T1_Co_FXLOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 159)).setLabel("card-MC-E1T1-Co-FXLOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_FXLOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_FXLOS_Normal.setDescription('Fx LOS normal in center side')
card_MC_E1T1_Co_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 160)).setLabel("card-MC-E1T1-Co-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_AIS_Alarm.setDescription('AIS alarm in center side')
card_MC_E1T1_Co_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 161)).setLabel("card-MC-E1T1-Co-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_AIS_Normal.setDescription('AIS normal in center side')
card_MC_E1T1_Rmt_TXLOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 162)).setLabel("card-MC-E1T1-Rmt-TXLOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_TXLOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_TXLOS_Alarm.setDescription('Tx LOS alarm in customer side')
card_MC_E1T1_Rmt_TXLOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 163)).setLabel("card-MC-E1T1-Rmt-TXLOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_TXLOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_TXLOS_Normal.setDescription('Tx LOS normal in customer side')
card_MC_E1T1_Rmt_FXLOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 164)).setLabel("card-MC-E1T1-Rmt-FXLOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_FXLOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_FXLOS_Alarm.setDescription('Fx LOS alarm in customer side')
card_MC_E1T1_Rmt_FXLOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 165)).setLabel("card-MC-E1T1-Rmt-FXLOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_FXLOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_FXLOS_Normal.setDescription('Fx LOS normal in customer side')
card_MC_E1T1_Rmt_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 166)).setLabel("card-MC-E1T1-Rmt-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_AIS_Alarm.setDescription('AIS alarm in customer side')
card_MC_E1T1_Rmt_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 167)).setLabel("card-MC-E1T1-Rmt-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_AIS_Normal.setDescription('AIS normal in customer side')
mibBuilder.exportSymbols("XXX-MIB", mc40G_OEOLane2LoopMode=mc40G_OEOLane2LoopMode, company=company, mcFanCardObjects=mcFanCardObjects, mc10G_OEECurSpd=mc10G_OEECurSpd, card_MC_Co_QSFP1_Lane2_Up=card_MC_Co_QSFP1_Lane2_Up, mc4_25G_OEONtwPD=mc4_25G_OEONtwPD, mcCm1gSfpEntry=mcCm1gSfpEntry, card_MC_Co_QSFP1_Lane1_Down=card_MC_Co_QSFP1_Lane1_Down, mc10G_OEO2RObjects=mc10G_OEO2RObjects, mcTransceiverDist=mcTransceiverDist, mc2_5GMCCardObjects=mc2_5GMCCardObjects, mcPortState=mcPortState, mc1GE2OPort2SFPExist=mc1GE2OPort2SFPExist, shelfNum=shelfNum, mcIP175DCurWorkMode=mcIP175DCurWorkMode, card_MC_Rmt_Tx_Up1=card_MC_Rmt_Tx_Up1, mc1go2o_sfpTransCode=mc1go2o_sfpTransCode, sysName=sysName, psuB=psuB, ntwXFP2WaveLength=ntwXFP2WaveLength, mcAccXFP1WaveLengthTunability=mcAccXFP1WaveLengthTunability, mc10G_OEO_Test_Error_Counter=mc10G_OEO_Test_Error_Counter, mc2_5g_sfpWavelength=mc2_5g_sfpWavelength, mc2_5g_sfpCompliance=mc2_5g_sfpCompliance, card_MC_E1_Co_Port1_CV_Alarm=card_MC_E1_Co_Port1_CV_Alarm, mcIP175DCardTable=mcIP175DCardTable, mc4_25G_OEO_Test_Result=mc4_25G_OEO_Test_Result, mc40G_OEOHWLoopMode=mc40G_OEOHWLoopMode, mcE1T1RmtFLink=mcE1T1RmtFLink, ntwXFP2WaveLengthTunability=ntwXFP2WaveLengthTunability, card_MC_E1T1_Co_TXLOS_Normal=card_MC_E1T1_Co_TXLOS_Normal, mcCmEntry=mcCmEntry, mc1go2o_sfpCompliance=mc1go2o_sfpCompliance, shelfIdx=shelfIdx, trapHost1=trapHost1, mcCm1gSfpObjects=mcCm1gSfpObjects, volA=volA, getNtwQSfpCmd=getNtwQSfpCmd, mc1GO2OPortHWPri=mc1GO2OPortHWPri, card_MC_Co_SFP3_Up=card_MC_Co_SFP3_Up, mcE1Port2Loop=mcE1Port2Loop, xfpWaveLengthTunable=xfpWaveLengthTunable, rmtCardNum=rmtCardNum, shelf_fan_Off=shelf_fan_Off, mcDownStream=mcDownStream, mcE1SFP1Link=mcE1SFP1Link, mc10G_OEOCardEntry=mc10G_OEOCardEntry, mcAccXFP1WaveLengthTunable=mcAccXFP1WaveLengthTunable, nmuType=nmuType, mc10G_OEO_Get_Test_Rst=mc10G_OEO_Get_Test_Rst, mc1GE2ORmtPort1SFPlink=mc1GE2ORmtPort1SFPlink, slotObjects=slotObjects, mcIP175DObjects=mcIP175DObjects, card_MC_Co_Tx_Up1=card_MC_Co_Tx_Up1, mcRmtE1Port1CV=mcRmtE1Port1CV, mcRmtHWTransmitMode=mcRmtHWTransmitMode, mcIP175DTxlink=mcIP175DTxlink, mc10G_OEOCurSpdMode=mc10G_OEOCurSpdMode, mc10G_OEELoopMode=mc10G_OEELoopMode, mcE1T1FLossAlarm=mcE1T1FLossAlarm, shelf_psuA_On=shelf_psuA_On, mcQCA8334RmtCfgWorkMode=mcQCA8334RmtCfgWorkMode, shelfTable=shelfTable, ipaddr=ipaddr, mc1GE2OPort1SFPlink=mc1GE2OPort1SFPlink, mc10G_OEO_accType=mc10G_OEO_accType, card_MC_E1_Rmt_Port1_AIS_Normal=card_MC_E1_Rmt_Port1_AIS_Normal, sfpVoltage=sfpVoltage, mc10GOEO1RCardTable=mc10GOEO1RCardTable, mc10GXFP2WaveLengthTunable=mc10GXFP2WaveLengthTunable, mcRmtE1Port1Loop=mcRmtE1Port1Loop, mc4_25G_OEOCardEntry=mc4_25G_OEOCardEntry, card_MC_E1_Rmt_Port1_AIS_Alarm=card_MC_E1_Rmt_Port1_AIS_Alarm, mcRmt4_25G_OEOHWWorkMode=mcRmt4_25G_OEOHWWorkMode, mcCm1gIpEntry=mcCm1gIpEntry, cwdmWavelength5=cwdmWavelength5, mcE1Port1AIS=mcE1Port1AIS, accXFP1TunableType=accXFP1TunableType, mcRmtPwrDown=mcRmtPwrDown, mc10G_OEOObjects=mc10G_OEOObjects, card_MC_Rmt_Tx_Down=card_MC_Rmt_Tx_Down, mc10G_OEEHWLoopback=mc10G_OEEHWLoopback, card_MC_Co_Port3_Down=card_MC_Co_Port3_Down, card_MC_Co_SFP3_Down=card_MC_Co_SFP3_Down, mcQCA8334RmtTxlink=mcQCA8334RmtTxlink, card_MC_E1T1_Co_AIS_Normal=card_MC_E1T1_Co_AIS_Normal, mcRmtE1TxCurWorkMode=mcRmtE1TxCurWorkMode, qsfpAccRxPower4=qsfpAccRxPower4, mcRmt10G_OEOLoopback=mcRmt10G_OEOLoopback, card_MC_E1_Rmt_Port2_CV_Normal=card_MC_E1_Rmt_Port2_CV_Normal, card_MC_E1_Co_Port2_CV_Normal=card_MC_E1_Co_Port2_CV_Normal, mcTxByteLo=mcTxByteLo, mcCardIdx=mcCardIdx, card_MC_Rmt_SFP3_Up=card_MC_Rmt_SFP3_Up, card_MC_E1T1_Co_FXLOS_Alarm=card_MC_E1T1_Co_FXLOS_Alarm, mcRmt4_25G_OEOHWSpdMode=mcRmt4_25G_OEOHWSpdMode, card_MC_Co_QSFP1_Lane4_Up=card_MC_Co_QSFP1_Lane4_Up, mcUtility=mcUtility, mc4_25G_OEOObjects=mc4_25G_OEOObjects, mc10GOEO1RCardEntry=mc10GOEO1RCardEntry, getAccSfpCmd=getAccSfpCmd, mc1GO2OPort3SFPExist=mc1GO2OPort3SFPExist, mcRmtCfgWorkMode=mcRmtCfgWorkMode, mcNtwQSfpObjects=mcNtwQSfpObjects, mcShelfIdx=mcShelfIdx, mc40G_OEOCardEntry=mc40G_OEOCardEntry, mcE1T1CardTable=mcE1T1CardTable, accsfpRecvPower=accsfpRecvPower, mc10GXFP1WaveLengthTunability=mc10GXFP1WaveLengthTunability, cwdmWavelength7=cwdmWavelength7, cwdmWavelength1=cwdmWavelength1, mc40G_OEOQsfp2Lane3_link=mc40G_OEOQsfp2Lane3_link, mcE1Port2CV=mcE1Port2CV, mcAccQSfpObjects=mcAccQSfpObjects, mcRmtE1Port1AIS=mcRmtE1Port1AIS, mcIP175DCardEntry=mcIP175DCardEntry, accsfpTemperature=accsfpTemperature, mc10G_OEO2R_accTunableType=mc10G_OEO2R_accTunableType, card_MC_Co_QSFP2_Lane2_Up=card_MC_Co_QSFP2_Lane2_Up, mc4_25G_OEO_Test_Lock=mc4_25G_OEO_Test_Lock, mcRmtUtility=mcRmtUtility, mc2_5GMCSfp3Exist=mc2_5GMCSfp3Exist, mc4_25G_OEO_Start_Test=mc4_25G_OEO_Start_Test, qsfpAccTemperature=qsfpAccTemperature, card_MC_Rmt_Tx_Down2=card_MC_Rmt_Tx_Down2, sfpConnector=sfpConnector, mcFanObjects=mcFanObjects, card_MC_Co_SFP2_Inserted=card_MC_Co_SFP2_Inserted, mc1GE2OPortPri=mc1GE2OPortPri, mc1GO2OPort1SFPExist=mc1GO2OPort1SFPExist, card_MC_Rmt_SFP3_Down=card_MC_Rmt_SFP3_Down, mcRmt10G_OEOCfgSpdMode=mcRmt10G_OEOCfgSpdMode, mc1go2o_sfpWavelength=mc1go2o_sfpWavelength, card_MC_Co_SFP1_Removed=card_MC_Co_SFP1_Removed, mc40G_OEOQsfp1Lane1_link=mc40G_OEOQsfp1Lane1_link, card_MC_Rmt_XFP1_Up=card_MC_Rmt_XFP1_Up, card_MC_E1T1_Co_TXLOS_Alarm=card_MC_E1T1_Co_TXLOS_Alarm, sfpCopperLength=sfpCopperLength, mc2_5GMCCardTable=mc2_5GMCCardTable, mc2_5g_getSfpCmd=mc2_5g_getSfpCmd, mcE1T1RmtTLossAlarm=mcE1T1RmtTLossAlarm, trapHost3=trapHost3, mcRmtE1Port2LOS=mcRmtE1Port2LOS, card_MC_Co_SFPSFP1_Removed=card_MC_Co_SFPSFP1_Removed, card_MC_E1T1_Co_FXLOS_Normal=card_MC_E1T1_Co_FXLOS_Normal, mc1GO2OPort3SFPlink=mc1GO2OPort3SFPlink, mc1go2o_getSfpCmd=mc1go2o_getSfpCmd, sfpCompliance=sfpCompliance, card_MC_Co_QSFP2_Lane2_Down=card_MC_Co_QSFP2_Lane2_Down, mc10GXFP1WaveLength=mc10GXFP1WaveLength, mc10GOEO3RCardTable=mc10GOEO3RCardTable, mc1GO2ORmtPortHWPri=mc1GO2ORmtPortHWPri, card_MC_Rmt_Tx_Up2=card_MC_Rmt_Tx_Up2, mcRmtHWLFP=mcRmtHWLFP, card_MC_Co_QSFP1_Lane1_Up=card_MC_Co_QSFP1_Lane1_Up, accsfpSmLength=accsfpSmLength, mcIP175DPortObjects=mcIP175DPortObjects, mcType=mcType, accXFP1WaveLength=accXFP1WaveLength, mc40G_OEOQsfp1Lane3_link=mc40G_OEOQsfp1Lane3_link, mc10GXFP2WaveLengthTunability=mc10GXFP2WaveLengthTunability, card_MC_E1T1_Rmt_TXLOS_Normal=card_MC_E1T1_Rmt_TXLOS_Normal, mcRmtHWWorkMode=mcRmtHWWorkMode, mcQCA8334UpStream=mcQCA8334UpStream, card_MC_Rmt_SFP1_Down=card_MC_Rmt_SFP1_Down, mc4_25G_OEOHWLoopback=mc4_25G_OEOHWLoopback, mc10G_OEEFxlink=mc10G_OEEFxlink, mcE1Port1LOS=mcE1Port1LOS, mc2_5GMCPort1link=mc2_5GMCPort1link, qsfpAccRxPower3=qsfpAccRxPower3, mcNtwSfpExist=mcNtwSfpExist, mcNtwXFP2WaveLength=mcNtwXFP2WaveLength, mcRmt10G_OEOHWSpdMode=mcRmt10G_OEOHWSpdMode, cwdmWavelengthCount=cwdmWavelengthCount, card_MC_Rmt_SFPSFP1_Up=card_MC_Rmt_SFPSFP1_Up, mc10G_OEOHWSpdMode=mc10G_OEOHWSpdMode, mc40G_OEOQsfp2Lane4_link=mc40G_OEOQsfp2Lane4_link, mcNtwXFP2WaveLengthTunable=mcNtwXFP2WaveLengthTunable, mc2_5GMCSFP3Objects=mc2_5GMCSFP3Objects, mcHWTransmitMode=mcHWTransmitMode, slotIdx=slotIdx, qsfpNtwTxPower1=qsfpNtwTxPower1, mcHWLFP=mcHWLFP, mcE1T1RmtAISAlarm=mcE1T1RmtAISAlarm, volB=volB, mcRmtCurWorkMode=mcRmtCurWorkMode, mc1GE2OCardTable=mc1GE2OCardTable, accsfpWavelength=accsfpWavelength, mcAccQSfpEntry=mcAccQSfpEntry, mcTxByteHi=mcTxByteHi, mc10G_OEO2RHWSFP1Loopback=mc10G_OEO2RHWSFP1Loopback, sysContact=sysContact, slotEntry=slotEntry, mcCurWorkMode=mcCurWorkMode, card_MC_E1_Rmt_Port2_AIS_Alarm=card_MC_E1_Rmt_Port2_AIS_Alarm, mcIP175DUpStream=mcIP175DUpStream, mcRmtDetect=mcRmtDetect, mc10G_OEO_Test_Lock=mc10G_OEO_Test_Lock, mc2_5g_sfpTranPower=mc2_5g_sfpTranPower, mc2_5g_sfpBrSpeed=mc2_5g_sfpBrSpeed, mc40G_OEOSpeedMode=mc40G_OEOSpeedMode, mc1GO2OSfp3Table=mc1GO2OSfp3Table, mc1GE2ORmtPort2SFPlink=mc1GE2ORmtPort2SFPlink, mcCm1gAccSfpObjects=mcCm1gAccSfpObjects, rmtCardType=rmtCardType, card_MC_Co_SFP2_Removed=card_MC_Co_SFP2_Removed, card_MC_Co_Tx_Down1=card_MC_Co_Tx_Down1, card_MC_Co_QSFP1_Lane2_Down=card_MC_Co_QSFP1_Lane2_Down, card_MC_Co_QSFP2_Lane4_Down=card_MC_Co_QSFP2_Lane4_Down, mcRxByteHi=mcRxByteHi, card_MC_Rmt_SFP3_Inserted=card_MC_Rmt_SFP3_Inserted, mcE1CardEntry=mcE1CardEntry, card_Detected=card_Detected, card_MC_Co_XFP2_Up=card_MC_Co_XFP2_Up, card_MC_Rmt_SFP2_Up=card_MC_Rmt_SFP2_Up, trapHost2=trapHost2, mc2_5Cm1gSfpTable=mc2_5Cm1gSfpTable, mcCWDMCardEntry=mcCWDMCardEntry, mcE1T1TLoop=mcE1T1TLoop, mc1GO2OPortPri=mc1GO2OPortPri, mcLFPCfg=mcLFPCfg, qsfpAccRxPower1=qsfpAccRxPower1, temperature=temperature, qsfpAccTxPower3=qsfpAccTxPower3, card_MC_Co_SFP1_Down=card_MC_Co_SFP1_Down, mc4_25G_OEOCurSpdMode=mc4_25G_OEOCurSpdMode, mc10G_OEECardEntry=mc10G_OEECardEntry, mcFanStatus=mcFanStatus, mc2_5GMCPort3link=mc2_5GMCPort3link, systemMIB=systemMIB, mcTxlink=mcTxlink, mcCmTable=mcCmTable, mcRmtTxlink=mcRmtTxlink, mc40G_OEOQsfp1Lane4_link=mc40G_OEOQsfp1Lane4_link, sfpTranPower=sfpTranPower, coCardType=coCardType, mcFanCardEntry=mcFanCardEntry, cwdmWavelength3=cwdmWavelength3, card_MC_E1_Co_Port1_AIS_Normal=card_MC_E1_Co_Port1_AIS_Normal, mc1GE2OPortHWPri=mc1GE2OPortHWPri, mcE1T1TLossAlarm=mcE1T1TLossAlarm, mcQCA8334DownStream=mcQCA8334DownStream, mcIP175DPortEntry=mcIP175DPortEntry, mcE1T1Version=mcE1T1Version, card_MC_Co_QSFP1_Lane3_Down=card_MC_Co_QSFP1_Lane3_Down, card_MC_E1_Rmt_Port1_CV_Normal=card_MC_E1_Rmt_Port1_CV_Normal, card_MC_Rmt_SFP2_Removed=card_MC_Rmt_SFP2_Removed, card_MC_Co_SFPSFP1_Down=card_MC_Co_SFPSFP1_Down, accsfpBrSpeed=accsfpBrSpeed, cwdmWavelength8=cwdmWavelength8, mcRmt10G_OEOCurSpdMode=mcRmt10G_OEOCurSpdMode, mc10GOEEXFPTunableCardObjects=mc10GOEEXFPTunableCardObjects, mc1GE2ORmtPortHWPri=mc1GE2ORmtPortHWPri, mc10G_OEO_Test_Continue_Time=mc10G_OEO_Test_Continue_Time)
mibBuilder.exportSymbols("XXX-MIB", mc4_25G_OEO_Test_Error_Counter=mc4_25G_OEO_Test_Error_Counter, mc1GO2OPort1SFPlink=mc1GO2OPort1SFPlink, card_MC_E1_Rmt_Port1_LOS_Normal=card_MC_E1_Rmt_Port1_LOS_Normal, mc1GO2OSFP3Objects=mc1GO2OSFP3Objects, xfpWaveLength=xfpWaveLength, card_MC_Co_XFP1_Removed=card_MC_Co_XFP1_Removed, card_MC_Rmt_SFP2_Inserted=card_MC_Rmt_SFP2_Inserted, mc4_25G_OEOWorkMode=mc4_25G_OEOWorkMode, card_MC_Co_XFP2_Inserted=card_MC_Co_XFP2_Inserted, mc2_5g_sfpSmLength=mc2_5g_sfpSmLength, card_MC_Co_Port2_Down=card_MC_Co_Port2_Down, card_MC_Rmt_Acc_SFP_Removed=card_MC_Rmt_Acc_SFP_Removed, card_MC_Co_QSFP2_Removed=card_MC_Co_QSFP2_Removed, mc10GOEEXFPTunableCardTable=mc10GOEEXFPTunableCardTable, card_MC_Co_SFPSFP2_Down=card_MC_Co_SFPSFP2_Down, mcQCA8334PortIdx=mcQCA8334PortIdx, card_MC_E1T1_Rmt_AIS_Alarm=card_MC_E1T1_Rmt_AIS_Alarm, mcIP175DPortTable=mcIP175DPortTable, mc1go2o_sfpRecvPower=mc1go2o_sfpRecvPower, mc10G_OEO2R_ntwType=mc10G_OEO2R_ntwType, card_MC_Rmt_Tx_Up=card_MC_Rmt_Tx_Up, mcCm1gAccSfpTable=mcCm1gAccSfpTable, mcE1T1RmtFLoop=mcE1T1RmtFLoop, card_MC_E1_Co_Port1_CV_Normal=card_MC_E1_Co_Port1_CV_Normal, mc10GXFP1WaveLengthTunable=mc10GXFP1WaveLengthTunable, card_MC_Co_Tx_Up2=card_MC_Co_Tx_Up2, mcRmt10G_OEOHWLoopback=mcRmt10G_OEOHWLoopback, mc10G_OEECardObjects=mc10G_OEECardObjects, mcRmt4_25G_OEOWorkMode=mcRmt4_25G_OEOWorkMode, card_MC_Co_Port1_Up=card_MC_Co_Port1_Up, mc10GXFP2WaveLength=mc10GXFP2WaveLength, mc10G_OEOCardTable=mc10G_OEOCardTable, mc10G_OEE_ntwType=mc10G_OEE_ntwType, mcRmt10G_OEO_ntwType=mcRmt10G_OEO_ntwType, card_MC_Co_SFPSFP1_Up=card_MC_Co_SFPSFP1_Up, mc1GO2OPort2SFPlink=mc1GO2OPort2SFPlink, mcE1T1AISAlarm=mcE1T1AISAlarm, mcLoOrRmtFg=mcLoOrRmtFg, mc10G_OEOCfgSpdMode=mc10G_OEOCfgSpdMode, mcE1T1CardObjects=mcE1T1CardObjects, card_MC_Co_SFP3_Removed=card_MC_Co_SFP3_Removed, mcE1T1FLink=mcE1T1FLink, nmuObjects=nmuObjects, mc1go2o_sfpCopperLength=mc1go2o_sfpCopperLength, mc1GO2OPort3HWSpd=mc1GO2OPort3HWSpd, mc1GO2ORmtPort3SFPExist=mc1GO2ORmtPort3SFPExist, mc10G_OEO2R_ntwTunableType=mc10G_OEO2R_ntwTunableType, shelf_psuB_On=shelf_psuB_On, mcPmTable=mcPmTable, mcCm1gSpecificObjects=mcCm1gSpecificObjects, mc4_25G_OEO_Get_Test_Rst=mc4_25G_OEO_Get_Test_Rst, mcE1T1CardEntry=mcE1T1CardEntry, mc4_25G_OEO_Test_Continue_Time=mc4_25G_OEO_Test_Continue_Time, mcQsfpSpecificObjects=mcQsfpSpecificObjects, mc10GOEO3RCardObjects=mc10GOEO3RCardObjects, card_MC_E1_Co_Port2_AIS_Alarm=card_MC_E1_Co_Port2_AIS_Alarm, mc1GE2OCardObjects=mc1GE2OCardObjects, mc2_5g_sfpRecvPower=mc2_5g_sfpRecvPower, height2HU=height2HU, mc2_5g_sfpMmLength=mc2_5g_sfpMmLength, mc1GE2OObjects=mc1GE2OObjects, mc1GO2OObjects=mc1GO2OObjects, mcCm1gIpTable=mcCm1gIpTable, mcNtwQSfpEntry=mcNtwQSfpEntry, mc40G_OEOLane1LoopMode=mc40G_OEOLane1LoopMode, mc1go2o_sfpConnector=mc1go2o_sfpConnector, card_MC_Co_SFPSFP2_Up=card_MC_Co_SFPSFP2_Up, mcE1Port1CV=mcE1Port1CV, shelfEntry=shelfEntry, sfpBrSpeed=sfpBrSpeed, mcRmtE1SFP1Link=mcRmtE1SFP1Link, card_MC_Co_Acc_SFP_Removed=card_MC_Co_Acc_SFP_Removed, mc10G_OEOSFP2=mc10G_OEOSFP2, card_MC_E1_Co_Port2_LOS_Normal=card_MC_E1_Co_Port2_LOS_Normal, mcUpStream=mcUpStream, mc4_25G_OEOCardObjects=mc4_25G_OEOCardObjects, card_MC_Co_SFP2_Up=card_MC_Co_SFP2_Up, mc2_5g_sfpTransCode=mc2_5g_sfpTransCode, card_MC_E1_Rmt_Port2_LOS_Normal=card_MC_E1_Rmt_Port2_LOS_Normal, mc10G_OEO_Start_Test=mc10G_OEO_Start_Test, accXFP1WaveLengthTunable=accXFP1WaveLengthTunable, mcIpAddr=mcIpAddr, mc40G_OEOCardTable=mc40G_OEOCardTable, mc1GO2ORmtPort3SFPlink=mc1GO2ORmtPort3SFPlink, mcRmt4_25G_OEOLoopback=mcRmt4_25G_OEOLoopback, mcE1T1Type=mcE1T1Type, qsfpNtwRxPower3=qsfpNtwRxPower3, mc10G_OEOCardObjects=mc10G_OEOCardObjects, shelfName=shelfName, card_Lost=card_Lost, accsfpCopperLength=accsfpCopperLength, card_MC_E1_Co_Port1_LOS_Alarm=card_MC_E1_Co_Port1_LOS_Alarm, mcIP175DVlanMode=mcIP175DVlanMode, card_MC_E1T1_Rmt_TXLOS_Alarm=card_MC_E1T1_Rmt_TXLOS_Alarm, mc10G_OEESpdMode=mc10G_OEESpdMode, card_MC_Co_Fx_Up=card_MC_Co_Fx_Up, rmtCardDesc=rmtCardDesc, mcAccXFP1TunableType=mcAccXFP1TunableType, card_MC_Rmt_SFPSFP1_Removed=card_MC_Rmt_SFPSFP1_Removed, mcNtwXFP2WaveLengthTunability=mcNtwXFP2WaveLengthTunability, card_MC_Co_QSFP1_Removed=card_MC_Co_QSFP1_Removed, card_MC_Co_Ntw_SFP_Removed=card_MC_Co_Ntw_SFP_Removed, mcPmRest=mcPmRest, card_MC_Co_Acc_SFP_Inserted=card_MC_Co_Acc_SFP_Inserted, mcE1T1RmtFLossAlarm=mcE1T1RmtFLossAlarm, mc10G_OEO2RCardTable=mc10G_OEO2RCardTable, card_MC_E1_Rmt_Port2_CV_Alarm=card_MC_E1_Rmt_Port2_CV_Alarm, card_MC_Co_Port3_Up=card_MC_Co_Port3_Up, mc1GE2OPort2SFPlink=mc1GE2OPort2SFPlink, mc10GOEO3RObjects=mc10GOEO3RObjects, card_MC_E1_Co_Port1_AIS_Alarm=card_MC_E1_Co_Port1_AIS_Alarm, mc10G_OEO_Test_Result=mc10G_OEO_Test_Result, card_MC_Co_SFPSFP1_Inserted=card_MC_Co_SFPSFP1_Inserted, mc4_25G_OEOHWWorkMode=mc4_25G_OEOHWWorkMode, card_MC_Co_Tx_Down=card_MC_Co_Tx_Down, mcCWDMObjects=mcCWDMObjects, mcHWRmtCtrlMode=mcHWRmtCtrlMode, mcCfgWorkMode=mcCfgWorkMode, mcQCA8334PortEntry=mcQCA8334PortEntry, mcE1CardTable=mcE1CardTable, mc40G_OEOQsfp1Lane2_link=mc40G_OEOQsfp1Lane2_link, mc10G_OEO2RCurSpdMode=mc10G_OEO2RCurSpdMode, PYSNMP_MODULE_ID=company, mc10G_OEOLoopback=mc10G_OEOLoopback, mcE1T1FLoop=mcE1T1FLoop, card_MC_Rmt_SFP1_Up=card_MC_Rmt_SFP1_Up, mcRmt4_25G_OEOCfgSpdMode=mcRmt4_25G_OEOCfgSpdMode, card_MC_Rmt_PwrDown=card_MC_Rmt_PwrDown, mc1go2o_sfpMmLength=mc1go2o_sfpMmLength, mcQCA8334CfgWorkMode=mcQCA8334CfgWorkMode, mcRmtType=mcRmtType, mcQCA8334VlanMode=mcQCA8334VlanMode, mc4_25G_OEOHWSpdMode=mc4_25G_OEOHWSpdMode, card_MC_Co_QSFP2_Lane1_Up=card_MC_Co_QSFP2_Lane1_Up, mcE1Port2LOS=mcE1Port2LOS, mc1GO2OCardObjects=mc1GO2OCardObjects, mcRmt4_25G_OEOHWLoopback=mcRmt4_25G_OEOHWLoopback, mcRmt10G_OEO_accType=mcRmt10G_OEO_accType, mc2_5GMCObjects=mc2_5GMCObjects, mcTransceiverMode=mcTransceiverMode, mc40G_OEOObjects=mc40G_OEOObjects, mcCm1gAccSfpEntry=mcCm1gAccSfpEntry, mcIP175DCfgWorkMode=mcIP175DCfgWorkMode, mc1GE2ORmtTxlink=mc1GE2ORmtTxlink, mcE1T1RmtTLoop=mcE1T1RmtTLoop, qsfpNtwTxPower2=qsfpNtwTxPower2, mc1GO2OPort2SFPExist=mc1GO2OPort2SFPExist, ntwXFP2WaveLengthTunable=ntwXFP2WaveLengthTunable, qsfpNtwRxPower2=qsfpNtwRxPower2, card_MC_Co_XFP1_Inserted=card_MC_Co_XFP1_Inserted, mc10G_OEOSFP1=mc10G_OEOSFP1, mcQCA8334CardObjects=mcQCA8334CardObjects, card_MC_Co_QSFP1_Lane3_Up=card_MC_Co_QSFP1_Lane3_Up, mcAccQSfpTable=mcAccQSfpTable, mc10G_OEO_ntwType=mc10G_OEO_ntwType, mc40G_OEOLane3LoopMode=mc40G_OEOLane3LoopMode, card_MC_Rmt_SFP3_Removed=card_MC_Rmt_SFP3_Removed, card_MC_Co_QSFP2_Lane3_Down=card_MC_Co_QSFP2_Lane3_Down, psuA=psuA, ipProduct=ipProduct, mcFanCardTable=mcFanCardTable, mcRmtE1Port2CV=mcRmtE1Port2CV, mc40G_OEOQsfp2Lane2_link=mc40G_OEOQsfp2Lane2_link, qsfpAccRxPower2=qsfpAccRxPower2, mc10G_OEO2RCfgSpdMode=mc10G_OEO2RCfgSpdMode, card_MC_E1_Rmt_Port2_LOS_Alarm=card_MC_E1_Rmt_Port2_LOS_Alarm, card_MC_E1T1_Rmt_FXLOS_Normal=card_MC_E1T1_Rmt_FXLOS_Normal, mc2_5g_sfpCopperLength=mc2_5g_sfpCopperLength, mcCmObjects=mcCmObjects, mc10GOEEXFPTunableObjects=mc10GOEEXFPTunableObjects, mc10GOEO1RObjects=mc10GOEO1RObjects, mc40G_OEOHWSpeedMode=mc40G_OEOHWSpeedMode, getAccQSfpCmd=getAccQSfpCmd, mcQCA8334CardTable=mcQCA8334CardTable, mc10G_OEO2R_accType=mc10G_OEO2R_accType, card_MC_E1_Co_Port1_LOS_Normal=card_MC_E1_Co_Port1_LOS_Normal, card_MC_Rmt_SFPSFP1_Inserted=card_MC_Rmt_SFPSFP1_Inserted, mc2_5g_sfpTemperature=mc2_5g_sfpTemperature, mcRmtE1Txlink=mcRmtE1Txlink, accsfpTranPower=accsfpTranPower, mcE1T1CodeType=mcE1T1CodeType, sfpRecvPower=sfpRecvPower, card_MC_Co_Fx_Down=card_MC_Co_Fx_Down, card_MC_Co_QSFP2_Inserted=card_MC_Co_QSFP2_Inserted, mcRmtLFP=mcRmtLFP, card_MC_Rmt_SFP1_Inserted=card_MC_Rmt_SFP1_Inserted, accsfpTransCode=accsfpTransCode, sysLocation=sysLocation, qsfpNtwRxPower4=qsfpNtwRxPower4, card_MC_E1_Rmt_Port1_LOS_Alarm=card_MC_E1_Rmt_Port1_LOS_Alarm, sfpSmLength=sfpSmLength, cwdmWavelength6=cwdmWavelength6, mcE1TxCurWorkMode=mcE1TxCurWorkMode, mc1GE2ORmtPort1SFPExist=mc1GE2ORmtPort1SFPExist, mcCm1gIpObjects=mcCm1gIpObjects, mcRmtE1Port2Loop=mcRmtE1Port2Loop, mc10G_OEO2RCardEntry=mc10G_OEO2RCardEntry, card_MC_Co_SFP1_Up=card_MC_Co_SFP1_Up, qsfpAccTxPower2=qsfpAccTxPower2, card_MC_Rmt_XFP1_Removed=card_MC_Rmt_XFP1_Removed, qsfpAccTxPower1=qsfpAccTxPower1, qsfpAccConnector=qsfpAccConnector, mcRmtE1Port2AIS=mcRmtE1Port2AIS, card_MC_E1T1_Rmt_FXLOS_Alarm=card_MC_E1T1_Rmt_FXLOS_Alarm, card_MC_Co_QSFP1_Inserted=card_MC_Co_QSFP1_Inserted, card_MC_FAN_Normal=card_MC_FAN_Normal, mcNtwQSfpTable=mcNtwQSfpTable, mc10G_OEE_checkResult=mc10G_OEE_checkResult, card_MC_E1T1_Rmt_AIS_Normal=card_MC_E1T1_Rmt_AIS_Normal, mc2_5GMCCardEntry=mc2_5GMCCardEntry, mcE1Txlink=mcE1Txlink, mcQCA8334CardEntry=mcQCA8334CardEntry, mcRmtLoopback=mcRmtLoopback, mcQCA8334CurWorkMode=mcQCA8334CurWorkMode, card_MC_Rmt_SFP2_Down=card_MC_Rmt_SFP2_Down, card_MC_Rmt_SFP1_Removed=card_MC_Rmt_SFP1_Removed, mcRmtE1Port1LOS=mcRmtE1Port1LOS, card_MC_Co_SFP1_Inserted=card_MC_Co_SFP1_Inserted, qsfpNtwRxPower1=qsfpNtwRxPower1, mc10G_OEO2RSFP2=mc10G_OEO2RSFP2, slotTable=slotTable, mc10G_OEECardTable=mc10G_OEECardTable, mc40G_OEOLoopMode=mc40G_OEOLoopMode, mc1go2o_sfpTranPower=mc1go2o_sfpTranPower, mc1GO2ORmtPort2SFPlink=mc1GO2ORmtPort2SFPlink, mc10G_OEO2RCardObjects=mc10G_OEO2RCardObjects, mcCm1gSfpTable=mcCm1gSfpTable, mc1go2o_sfpTemperature=mc1go2o_sfpTemperature, mc1GO2ORmtPort2SFPExist=mc1GO2ORmtPort2SFPExist, card_MC_E1_Co_Port2_CV_Alarm=card_MC_E1_Co_Port2_CV_Alarm, mc1GE2OPort1SFPExist=mc1GE2OPort1SFPExist, accsfpConnector=accsfpConnector, coCardNum=coCardNum, mc1GO2OCardTable=mc1GO2OCardTable, gateway=gateway, qsfpNtwTemperature=qsfpNtwTemperature, card_MC_Rmt_XFP1_Inserted=card_MC_Rmt_XFP1_Inserted, mcIP175DRmtCurWorkMode=mcIP175DRmtCurWorkMode, mc2_5g_sfpConnector=mc2_5g_sfpConnector, mcE1T1RmtCodeType=mcE1T1RmtCodeType, mc1GO2ORmtPort1SFPExist=mc1GO2ORmtPort1SFPExist, mc4_25G_OEOCfgSpdMode=mc4_25G_OEOCfgSpdMode, card_MC_Co_SFP2_Down=card_MC_Co_SFP2_Down, sfpMmLength=sfpMmLength, mc10GOEEXFPTunableCardEntry=mc10GOEEXFPTunableCardEntry, mcIP175DPortIdx=mcIP175DPortIdx, sfpWavelength=sfpWavelength, shelf_psuA_Off=shelf_psuA_Off, card_MC_Co_Port1_Down=card_MC_Co_Port1_Down, mcRmtAccSfpExist=mcRmtAccSfpExist, mcAccXFP1WaveLength=mcAccXFP1WaveLength, card_MC_Co_QSFP2_Lane4_Up=card_MC_Co_QSFP2_Lane4_Up, card_MC_Co_QSFP2_Lane3_Up=card_MC_Co_QSFP2_Lane3_Up, mcE1CardObjects=mcE1CardObjects)
mibBuilder.exportSymbols("XXX-MIB", ntwXFP2TunableType=ntwXFP2TunableType, card_MC_Co_Ntw_SFP_Inserted=card_MC_Co_Ntw_SFP_Inserted, card_MC_E1_Rmt_Port2_AIS_Normal=card_MC_E1_Rmt_Port2_AIS_Normal, mc2_5GMCPort2link=mc2_5GMCPort2link, mc1GE2ORmtPort2SFPExist=mc1GE2ORmtPort2SFPExist, getSfpCmd=getSfpCmd, mc10G_OEO2RSFP1=mc10G_OEO2RSFP1, card_MC_Co_SFPSFP2_Removed=card_MC_Co_SFPSFP2_Removed, mc1GO2ORmtPort1SFPlink=mc1GO2ORmtPort1SFPlink, mcPmObjects=mcPmObjects, card_MC_Co_Tx_Up=card_MC_Co_Tx_Up, qsfpAccTxPower4=qsfpAccTxPower4, shelf_Lost=shelf_Lost, mcPmEntry=mcPmEntry, mc2_5Cm1gSfpEntry=mc2_5Cm1gSfpEntry, mc1GO2OSfp3Entry=mc1GO2OSfp3Entry, cwdmWavelength4=cwdmWavelength4, xfpWaveLengthTunability=xfpWaveLengthTunability, card_MC_FAN_Abnormal=card_MC_FAN_Abnormal, mc10G_OEO2RSFP2Loopback=mc10G_OEO2RSFP2Loopback, subnet=subnet, card_MC_Co_Port2_Up=card_MC_Co_Port2_Up, card_MC_Co_XFP2_Removed=card_MC_Co_XFP2_Removed, shelf_fan_On=shelf_fan_On, mcQCA8334PortObjects=mcQCA8334PortObjects, mcE1Port1Loop=mcE1Port1Loop, mc10G_OEETxlink=mc10G_OEETxlink, mc10G_OEOHWLoopback=mc10G_OEOHWLoopback, mcRmt4_25G_OEOCurSpdMode=mcRmt4_25G_OEOCurSpdMode, coCardDesc=coCardDesc, nmuConfig=nmuConfig, mc1GO2OCardEntry=mc1GO2OCardEntry, mc1go2o_sfpBrSpeed=mc1go2o_sfpBrSpeed, fan=fan, mc10GOEO3RCardEntry=mc10GOEO3RCardEntry, card_MC_Co_XFP1_Down=card_MC_Co_XFP1_Down, card_MC_Rmt_XFP1_Down=card_MC_Rmt_XFP1_Down, mcE1Objects=mcE1Objects, card_MC_Rmt_Tx_Down1=card_MC_Rmt_Tx_Down1, mcHWWorkMode=mcHWWorkMode, card_MC_Rmt_SFPSFP1_Down=card_MC_Rmt_SFPSFP1_Down, card_MC_Co_QSFP2_Lane1_Down=card_MC_Co_QSFP2_Lane1_Down, mc4_25G_OEOAccPD=mc4_25G_OEOAccPD, cwdmWavelength2=cwdmWavelength2, trapHost4=trapHost4, mcQCA8334PortTable=mcQCA8334PortTable, mcCWDMCardObjects=mcCWDMCardObjects, mcTransmitMode=mcTransmitMode, sfpTemperature=sfpTemperature, qsfpNtwConnector=qsfpNtwConnector, mc4_25G_OEOCardTable=mc4_25G_OEOCardTable, mc40G_OEOLane4LoopMode=mc40G_OEOLane4LoopMode, mcQCA8334Objects=mcQCA8334Objects, mc40G_OEOCardObjects=mc40G_OEOCardObjects, shelf_psuB_Off=shelf_psuB_Off, mcIP175DRmtTxlink=mcIP175DRmtTxlink, mc2_5g_sfpVoltage=mc2_5g_sfpVoltage, mcCWDMCardTable=mcCWDMCardTable, mcRmtTransmitMode=mcRmtTransmitMode, mcFxlink=mcFxlink, mcRxByteLo=mcRxByteLo, mc1GE2OTxlink=mc1GE2OTxlink, mc1go2o_sfpVoltage=mc1go2o_sfpVoltage, mc10GOEO1RCardObjects=mc10GOEO1RCardObjects, card_MC_E1_Rmt_Port1_CV_Alarm=card_MC_E1_Rmt_Port1_CV_Alarm, card_MC_Co_XFP1_Up=card_MC_Co_XFP1_Up, mc10G_OEEObjects=mc10G_OEEObjects, card_MC_E1T1_Co_AIS_Alarm=card_MC_E1T1_Co_AIS_Alarm, mc1go2o_sfpSmLength=mc1go2o_sfpSmLength, shelf_Detected=shelf_Detected, card_MC_Co_SFPSFP2_Inserted=card_MC_Co_SFPSFP2_Inserted, card_MC_Rmt_Acc_SFP_Inserted=card_MC_Rmt_Acc_SFP_Inserted, mcIP175DCardObjects=mcIP175DCardObjects, card_MC_Co_XFP2_Down=card_MC_Co_XFP2_Down, mcQCA8334Txlink=mcQCA8334Txlink, mcE1Port2AIS=mcE1Port2AIS, card_MC_Co_Tx_Down2=card_MC_Co_Tx_Down2, sfpTransCode=sfpTransCode, mc4_25G_OEOLoopback=mc4_25G_OEOLoopback, mcNtwXFP2TunableType=mcNtwXFP2TunableType, accsfpVoltage=accsfpVoltage, mcRmt10G_OEOSFP1=mcRmt10G_OEOSFP1, mc1GO2ORmtPort3HWSpd=mc1GO2ORmtPort3HWSpd, mc10G_OEO2RHWSFP2Loopback=mc10G_OEO2RHWSFP2Loopback, qsfpNtwTxPower3=qsfpNtwTxPower3, mc10G_OEO2RVersion=mc10G_OEO2RVersion, accsfpMmLength=accsfpMmLength, mc10G_OEO2RHWSpdMode=mc10G_OEO2RHWSpdMode, mc10G_OEO2RSFP1Loopback=mc10G_OEO2RSFP1Loopback, mc1GE2OCardEntry=mc1GE2OCardEntry, mcQCA8334RmtCurWorkMode=mcQCA8334RmtCurWorkMode, xfpTunableType=xfpTunableType, card_MC_Co_QSFP1_Lane4_Down=card_MC_Co_QSFP1_Lane4_Down, mcIP175DDownStream=mcIP175DDownStream, cardObjects=cardObjects, alarmMIB=alarmMIB, accXFP1WaveLengthTunability=accXFP1WaveLengthTunability, qsfpNtwTxPower4=qsfpNtwTxPower4, mcE1T1Objects=mcE1T1Objects, mcAccSfpExist=mcAccSfpExist, card_MC_E1_Co_Port2_AIS_Normal=card_MC_E1_Co_Port2_AIS_Normal, card_MC_E1_Co_Port2_LOS_Alarm=card_MC_E1_Co_Port2_LOS_Alarm, mcIP175DRmtCfgWorkMode=mcIP175DRmtCfgWorkMode, mc40G_OEOQsfp2Lane1_link=mc40G_OEOQsfp2Lane1_link, accsfpCompliance=accsfpCompliance, card_MC_Co_SFP3_Inserted=card_MC_Co_SFP3_Inserted)
141.884835
10,350
0.750826
0
0
0
0
0
0
0
0
55,977
0.224928
d9e551f94d290cc9b470d1fddfc0e91666dab7ba
444
py
Python
setup.py
zhanghang1989/notedown
b0fa1eac88d1cd7fa2261d6c454f82669e6f552b
[ "BSD-2-Clause" ]
null
null
null
setup.py
zhanghang1989/notedown
b0fa1eac88d1cd7fa2261d6c454f82669e6f552b
[ "BSD-2-Clause" ]
null
null
null
setup.py
zhanghang1989/notedown
b0fa1eac88d1cd7fa2261d6c454f82669e6f552b
[ "BSD-2-Clause" ]
null
null
null
from setuptools import setup

# create __version__
exec(open('./_version.py').read())

setup(
    name="notedown",
    version=__version__,
    description="Convert markdown to IPython notebook.",
    author="Aaron O'Leary",
    author_email='[email protected]',
    url='http://github.com/aaren/notedown',
    install_requires=['ipython', ],
    entry_points={
        'console_scripts': [
            'notedown = notedown:cli',
        ],
    }
)
22.2
56
0.628378
0
0
0
0
0
0
0
0
198
0.445946
d9e5c18f6a37dd4a96dd21f7ddefb31b197848dd
2,853
py
Python
multithreaded_webcrawler.py
the-muses-ltd/Multithreaded-Webcrawler-Cassandra-
eee68faf3c6ecb548edd0e96ce445dcd366fb735
[ "MIT" ]
null
null
null
multithreaded_webcrawler.py
the-muses-ltd/Multithreaded-Webcrawler-Cassandra-
eee68faf3c6ecb548edd0e96ce445dcd366fb735
[ "MIT" ]
null
null
null
multithreaded_webcrawler.py
the-muses-ltd/Multithreaded-Webcrawler-Cassandra-
eee68faf3c6ecb548edd0e96ce445dcd366fb735
[ "MIT" ]
null
null
null
# This is a reusable webcrawler architecture that can be adapted to scrape any website.
# RESULTS:
# Roughly 24 seconds per thousand courses scraped for ThreadPoolExecutor vs 63s for unthreaded script.
# This is a very basic implementation of multithreading in order to show the proof of concept, but is a good base to build off of.

import requests
from bs4 import BeautifulSoup
import csv
from concurrent.futures import ProcessPoolExecutor, as_completed, ThreadPoolExecutor
import time
import logging

from mitopencourseware_crawler_worker import mit_crawler


def courses_spider(max_pages):
    data_to_csv = []  # holds all data to send to csv
    print("Webcrawler workers have started, please wait while we finish crawling...")
    # remove max pages loop (unnecessary)
    page = 1
    while page <= max_pages:
        url = 'https://ocw.mit.edu/courses/'
        source_code = requests.get(url)
        plain_text = source_code.text
        soup = BeautifulSoup(plain_text, 'html.parser')
        # Multithread only the work:
        # Tuning is required to find the most efficient amount of workers in the thread pool.
        with ThreadPoolExecutor(max_workers=30) as executor:
            start = time.time()
            futures = [
                executor.submit(work, link)
                for link in soup.findAll('h4', {'class': 'course_title'}, limit=100)
            ]
            for result in as_completed(futures):
                data_to_csv.append(result.result())
            end = time.time()
            print("Time Taken to complete: {:.6f}s".format(end - start))
            print("Courses extracted: ", len(data_to_csv))
        page += 1
    export_to_csv(data_to_csv)


def work(link):
    # replace this function with the specific crawler you want to use:
    return mit_crawler(link)


# Exports data to a formatted csv file; this will be replaced with multithreaded API calls to the Cassandra Prisma Database,
# or on the cloud in production it will be sent to the S3 temporary database to be picked up by the AWS Lambda function which will push it to the Cassandra Database
def export_to_csv(csv_data):
    with open('web_crawl_data.csv', mode='w') as csv_file:
        field_names = ['Title', 'URL extension', 'External Website Logo', 'URL(href)', 'Description', 'Course logo URL']
        csv_writer = csv.DictWriter(csv_file, fieldnames=field_names)  # delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
        csv_writer.writeheader()
        for course in csv_data:
            course_data = {
                'Title': course[0],
                'URL extension': course[1],
                'External Website Logo': course[2],
                'URL(href)': course[3],
                'Description': course[4],
                'Course logo URL': course[5],
            }
            csv_writer.writerow(course_data)
42.58209
164
0.667368
0
0
0
0
0
0
0
0
1,311
0.459516
d9e62b20786a73ca86ccde01bde160623cc32657
3,710
py
Python
genyrator/entities/Template.py
jumblesale/genyrator
c4429f689e92e8447b0b944e7d9b434f99cae51d
[ "MIT" ]
1
2020-07-01T16:54:39.000Z
2020-07-01T16:54:39.000Z
genyrator/entities/Template.py
jumblesale/genyrator
c4429f689e92e8447b0b944e7d9b434f99cae51d
[ "MIT" ]
10
2018-11-16T15:04:21.000Z
2021-06-01T22:27:38.000Z
genyrator/entities/Template.py
jumblesale/genyrator
c4429f689e92e8447b0b944e7d9b434f99cae51d
[ "MIT" ]
2
2018-08-08T10:42:35.000Z
2019-07-25T11:56:06.000Z
from typing import List, Optional, NewType, Tuple, NamedTuple, Type

import attr
from jinja2 import Template as JinjaTemplate, StrictUndefined

from genyrator.entities.Entity import Entity
from genyrator.path import create_relative_path

OutPath = NewType('OutPath', Tuple[List[str], str])
Import = NamedTuple('Import', [('module_name', str),
                               ('imports', List[str]), ])


@attr.s
class Template(object):
    template_name: str = attr.ib()
    template_file_name: str = attr.ib()
    template_file_path: List[str] = attr.ib()
    relative_path: List[str] = attr.ib()
    out_path: Optional[OutPath] = attr.ib()

    def create_template(self):
        path = create_relative_path(
            [*self.template_file_path, self.template_file_name]
        )
        with open(path) as f:
            template = JinjaTemplate(f.read(), undefined=StrictUndefined)
        return template

    def render(self):
        return self.create_template().render(template=self)


def create_template(
        constructor,
        template_path: Optional[List[str]] = None,
        out_path: Optional[OutPath] = None,
        **kwargs,
) -> Template:
    relative_path = template_path[0:-1]
    path = ['genyrator', 'templates'] + relative_path
    template_name = template_path[-1]
    return constructor(
        template_name=template_name,
        template_file_name='{}.j2'.format(template_name),
        template_file_path=path,
        out_path=out_path,
        relative_path=relative_path,
        **kwargs,
    )


@attr.s
class RootInit(Template):
    db_import_path: str = attr.ib()
    module_name: str = attr.ib()


@attr.s
class RootSchema(Template):
    module_name: str = attr.ib()
    entities: List[Entity] = attr.ib()


@attr.s
class ConvertDict(Template):
    module_name: str = attr.ib()


@attr.s
class SQLAlchemyModel(Template):
    module_name: str = attr.ib()
    db_import_path: str = attr.ib()
    entity: Entity = attr.ib()


@attr.s
class ModelToDict(Template):
    module_name: str = attr.ib()


@attr.s
class Config(Template):
    module_name: str = attr.ib()


@attr.s
class SQLAlchemyModelInit(Template):
    module_name: str = attr.ib()
    db_import_path: str = attr.ib()
    imports: List[Import] = attr.ib()


@attr.s
class RestplusModel(Template):
    entity: Entity = attr.ib()


@attr.s
class Resource(Template):
    module_name: str = attr.ib()
    db_import_path: str = attr.ib()
    entity: Entity = attr.ib()
    restplus_template: str = attr.ib()
    TypeOption: Type = attr.ib()


@attr.s
class ResourcesInit(Template):
    entities: List[Entity] = attr.ib()
    module_name: str = attr.ib()
    api_name: str = attr.ib()
    api_description: str = attr.ib()


@attr.s
class DomainModel(Template):
    entity: Entity = attr.ib()
    module_name: str = attr.ib()

    def sqlalchemy_model_imports(self):
        return list(set([
            rel.target_entity_class_name for rel in self.entity.relationships
        ]))


@attr.s
class ConvertProperties(Template):
    module_name: str = attr.ib()


@attr.s
class ConvertModels(Template):
    module_name: str = attr.ib()


@attr.s
class JoinEntities(Template):
    module_name: str = attr.ib()


@attr.s
class ConvertDictToMarshmallow(Template):
    module_name: str = attr.ib()
    db_import_path: str = attr.ib()


@attr.s
class Fixture(Template):
    db_import_path: str = attr.ib()
    module_name: str = attr.ib()
    entity: Entity = attr.ib()
24.090909
73
0.616712
2,560
0.690027
0
0
2,696
0.726685
0
0
68
0.018329
d9e78859b4482aaef1db18210493138799d91b2f
1,969
py
Python
MIDI Remote Scripts/Push2/mode_collector.py
aarkwright/ableton_devices
fe5df3bbd64ccbc136bba722ba1e131a02969798
[ "MIT" ]
null
null
null
MIDI Remote Scripts/Push2/mode_collector.py
aarkwright/ableton_devices
fe5df3bbd64ccbc136bba722ba1e131a02969798
[ "MIT" ]
null
null
null
MIDI Remote Scripts/Push2/mode_collector.py
aarkwright/ableton_devices
fe5df3bbd64ccbc136bba722ba1e131a02969798
[ "MIT" ]
null
null
null
# uncompyle6 version 3.3.5
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.7.3 (default, Apr 24 2019, 15:29:51) [MSC v.1915 64 bit (AMD64)]
# Embedded file name: c:\Jenkins\live\output\win_64_static\Release\python-bundle\MIDI Remote Scripts\Push2\mode_collector.py
# Compiled at: 2018-11-30 15:48:11
from __future__ import absolute_import, print_function, unicode_literals
from ableton.v2.base import listenable_property, listens, EventObject

class ModeCollector(EventObject):

    def __init__(self, main_modes=None, mix_modes=None, global_mix_modes=None, device_modes=None, *a, **k):
        super(ModeCollector, self).__init__(*a, **k)
        self._main_modes = main_modes
        self._mix_modes = mix_modes
        self._global_mix_modes = global_mix_modes
        self._device_modes = device_modes
        self._on_selected_main_mode_changed.subject = main_modes
        self._on_selected_mix_mode_changed.subject = mix_modes
        self._on_selected_global_mix_mode_changed.subject = global_mix_modes
        self._on_selected_device_mode_changed.subject = device_modes

    @listenable_property
    def main_mode(self):
        return self._main_modes.selected_mode

    @listens(b'selected_mode')
    def _on_selected_main_mode_changed(self, mode):
        self.notify_main_mode()

    @listenable_property
    def mix_mode(self):
        return self._mix_modes.selected_mode

    @listens(b'selected_mode')
    def _on_selected_mix_mode_changed(self, mode):
        self.notify_mix_mode()

    @listenable_property
    def global_mix_mode(self):
        return self._global_mix_modes.selected_mode

    @listens(b'selected_mode')
    def _on_selected_global_mix_mode_changed(self, mode):
        self.notify_global_mix_mode()

    @listenable_property
    def device_mode(self):
        return self._device_modes.selected_mode

    @listens(b'selected_mode')
    def _on_selected_device_mode_changed(self, mode):
        self.notify_device_mode()
37.865385
124
0.742509
1,515
0.769426
0
0
832
0.42255
0
0
369
0.187405
d9e7a46d631c672aae25d04f18b75876427b787e
817
py
Python
src/topicModel.py
daidaotong/SingleView
db3249ca5afba97f750495cccbc185de88bf2287
[ "MIT" ]
null
null
null
src/topicModel.py
daidaotong/SingleView
db3249ca5afba97f750495cccbc185de88bf2287
[ "MIT" ]
null
null
null
src/topicModel.py
daidaotong/SingleView
db3249ca5afba97f750495cccbc185de88bf2287
[ "MIT" ]
null
null
null
from gensim import corpora, models, similarities, matutils, utils
from gensim.models import KeyedVectors
import numpy as np

# Word2vec Experiment
testString = ['PAST_MEDICAL_HISTORY', 'PAST_SURGICAL_HISTORY', 'PHYSICAL_EXAMINATION']
'''
word_vectors = KeyedVectors.load_word2vec_format('~/Downloads/GoogleNews-vectors-negative300.bin', binary=True)
# model.save("file.txt")
print(word_vectors.most_similar(positive=['woman', 'king'], negative=['man']))
print("******************************************************")
print(word_vectors.similarity('woman', 'man'))
# print(word_vectors.most_similar(positive=['san_francisco']))
print(word_vectors.most_similar(positive=['SURGICAL']))
# word_vectors.similarity(testString[0], testString[1])
'''
a = [1, 4, 3, 6, 3, 6]
print(a[:-1])
# print(list(zip(a[:-1], a[1:])))
print(np.random.randn(3, 2))
35.521739
111
0.71481
0
0
0
0
0
0
0
0
615
0.752754
d9e8867f9d8fa5dbea3f62a0b298eac5f535d37a
9,499
py
Python
src/bots/test/test_inputs.py
drewbitt/lightnovel-crawler
fa9546ad9dcff49c75296b0b8772f6578689adcc
[ "Apache-2.0" ]
1
2019-03-10T13:02:23.000Z
2019-03-10T13:02:23.000Z
src/bots/test/test_inputs.py
drewbitt/lightnovel-crawler
fa9546ad9dcff49c75296b0b8772f6578689adcc
[ "Apache-2.0" ]
null
null
null
src/bots/test/test_inputs.py
drewbitt/lightnovel-crawler
fa9546ad9dcff49c75296b0b8772f6578689adcc
[ "Apache-2.0" ]
null
null
null
from base64 import b64decode  # base64.decodestring was removed in Python 3.9

allowed_failures = [
    'https://ranobelib.me/',
    'https://www.aixdzs.com/',
    'https://webnovelindonesia.com/',
    b64decode("aHR0cHM6Ly9jb21yYWRlbWFvLmNvbS8=".encode()).decode()
]

# Note: the original literal repeated some source keys (wuxiaworld.co,
# machine-translation.org, webnovelindonesia.com); later entries silently
# shadowed earlier ones. They are merged below so no test input is lost.
test_user_inputs = {
    b64decode("aHR0cHM6Ly9jb21yYWRlbWFvLmNvbS8=".encode()).decode(): [
        b64decode("aHR0cHM6Ly9jb21yYWRlbWFvLmNvbS9ub3ZlbC90c3VydWdpLW5vLWpvb3UtdG8tcmFrdWluLW5vLWtvLw==".encode()).decode()
    ],
    'https://novelsrock.com/': ['https://novelsrock.com/novel/the-returner/', 'kuro'],
    'http://gravitytales.com/': ['http://gravitytales.com/posts/novel/a-dragons-curiosity'],
    'http://novelfull.com/': ['http://novelfull.com/dungeon-defense.html', 'Sinister Ex Girlfriend'],
    'http://www.machinenoveltranslation.com/': ['http://www.machinenoveltranslation.com/a-thought-through-eternity'],
    'http://zenithnovels.com/': ['http://zenithnovels.com/infinity-armament/'],
    'https://anythingnovel.com/': ['https://anythingnovel.com/novel/king-of-gods/'],
    'https://boxnovel.com/': ['https://boxnovel.com/novel/the-rest-of-my-life-is-for-you/', 'cultivation chat'],
    'https://crescentmoon.blog/': ['https://crescentmoon.blog/dark-blue-and-moonlight/'],
    'https://litnet.com/': ['https://litnet.com/en/book/candy-lips-1-b106232', 'candy lips'],
    'https://lnmtl.com/': ['https://lnmtl.com/novel/the-strongest-dan-god'],
    'https://m.chinesefantasynovels.com/': ['https://m.chinesefantasynovels.com/3838/'],
    'https://m.novelspread.com/': ['https://m.novelspread.com/novel/the-legend-of-the-concubine-s-daughter-minglan'],
    'https://m.romanticlovebooks.com/': ['https://m.romanticlovebooks.com/xuanhuan/207.html'],
    'http://www.tiknovel.com/': ['http://www.tiknovel.com/book/index?id=717'],
    'https://m.wuxiaworld.co/': ['https://m.wuxiaworld.co/Reincarnation-Of-The-Strongest-Sword-God/'],
    'https://meionovel.id/': ['https://meionovel.id/novel/the-legendary-mechanic/'],
    'https://mtled-novels.com/': ['https://mtled-novels.com/novels/great-ruler/', 'great ruler'],
    'https://bestlightnovel.com/': ['https://bestlightnovel.com/novel_888103800', 'martial'],
    'https://novelplanet.com/': ['https://novelplanet.com/Novel/Returning-from-the-Immortal-World', 'immortal'],
    'https://www.volarenovels.com/': ['https://www.volarenovels.com/novel/adorable-creature-attacks'],
    'https://webnovel.online/': ['https://webnovel.online/full-marks-hidden-marriage-pick-up-a-son-get-a-free-husband'],
    'https://www.idqidian.us/': ['https://www.idqidian.us/novel/peerless-martial-god/'],
    'https://www.novelall.com/': ['https://www.novelall.com/novel/Virtual-World-Close-Combat-Mage.html', 'combat'],
    'https://www.novelspread.com/': ['https://www.novelspread.com/novel/the-legend-of-the-concubine-s-daughter-minglan'],
    'https://www.readlightnovel.org/': ['https://www.readlightnovel.org/top-furious-doctor-soldier'],
    'https://www.romanticlovebooks.com/': ['https://www.romanticlovebooks.com/xianxia/251.html'],
    'https://www.royalroad.com/': ['https://www.royalroad.com/fiction/21220/mother-of-learning', 'mother'],
    'https://www.scribblehub.com/': ['https://www.scribblehub.com/series/73550/modern-life-of-the-exalted-immortal/', 'cultivation'],
    'https://www.webnovel.com/': ['https://www.webnovel.com/book/8212987205006305/Trial-Marriage-Husband%3A-Need-to-Work-Hard', 'martial'],
    'https://www.worldnovel.online/': ['https://www.worldnovel.online/novel/solo-leveling/'],
    'https://www.wuxiaworld.co/': ['https://www.wuxiaworld.co/Reincarnation-Of-The-Strongest-Sword-God/', 'sword'],
    'https://rewayat.club/': ['https://rewayat.club/novel/almighty-sword-domain/'],
    'https://www.wuxiaworld.com/': ['https://www.wuxiaworld.com/novel/martial-god-asura', 'martial'],
    'https://creativenovels.com/': ['https://creativenovels.com/novel/eternal-reverence/'],
    'https://www.tapread.com/': ['https://www.tapread.com/book/detail/80'],
    'http://www.tapread.com/': ['http://www.tapread.com/book/detail/80'],
    'https://readnovelfull.com/': ['https://readnovelfull.com/lord-of-all-realms.html', 'cultivation'],
    'https://myoniyonitranslations.com/': [
        'https://myoniyonitranslations.com/top-management/',
        'https://myoniyonitranslations.com/category/god-of-tennis'
    ],
    'https://babelnovel.com/': ['https://babelnovel.com/books/ceo-let-me-go', 'dazzle Good'],
    'https://wuxiaworld.online/': ['https://wuxiaworld.online/trial-marriage-husband-need-to-work-hard', 'cultivation'],
    'https://www.novelv.com/': ['https://www.novelv.com/0/349/'],
    'http://fullnovel.live/': ['http://fullnovel.live/novel-a-will-eternal', 'will eternal'],
    'https://www.noveluniverse.com/': ['https://www.noveluniverse.com/index/novel/info/id/15.html'],
    'https://novelraw.blogspot.com/': ['https://novelraw.blogspot.com/2019/03/dragon-king-son-in-law-mtl.html'],
    'https://light-novel.online/': ['https://light-novel.online/great-tyrannical-deity', 'tyrannical'],
    'https://www.rebirth.online/': ['https://www.rebirth.online/novel/upside-down'],
    'https://www.jieruihao.cn/': ['https://www.jieruihao.cn/novel/against-the-gods/'],
    'https://www.wattpad.com/': ['https://www.wattpad.com/story/87505567-loving-mr-jerkface-%E2%9C%94%EF%B8%8F'],
    'https://novelgo.id/': ['https://novelgo.id/novel/the-mightiest-leveling-system/'],
    'https://yukinovel.me/': ['https://yukinovel.me/novel/the-second-coming-of-avarice/'],
    'https://www.asianhobbyist.com/': ['https://www.asianhobbyist.com/series/that-time-i-got-reincarnated-as-a-slime/'],
    'https://kisslightnovels.info/': ['https://kisslightnovels.info/novel/solo-leveling/'],
    'https://novelonlinefull.com/': ['https://novelonlinefull.com/novel/abo1520855001564322110'],
    'https://www.fanfiction.net/': ['https://www.fanfiction.net/s/7268451/1/Facebook-For-wizards'],
    'https://www.mtlnovel.com/': ['https://www.mtlnovel.com/trapped-in-a-typical-idol-drama/'],
    'https://wordexcerpt.com/': ['https://wordexcerpt.com/series/transmigration-raising-the-child-of-the-male-lead-boss/'],
    'https://www.translateindo.com/': ['https://www.translateindo.com/demon-wang-golden-status-favoured-fei/'],
    'https://ranobelib.me/': ['https://ranobelib.me/sozvezdie-klinka'],
    'https://novelringan.com/': ['https://novelringan.com/series/the-most-loving-marriage-in-history-master-mus-pampered-wife/'],
    'https://wuxiaworld.site/': ['https://wuxiaworld.site/novel/only-i-level-up/'],
    'https://id.mtlnovel.com/': ['https://id.mtlnovel.com/the-strongest-plane-becomes-god/'],
    'https://www.shinsori.com/': ['https://www.shinsori.com/akuyaku-reijou-ni-nanka-narimasen/'],
    'https://www.flying-lines.com/': ['https://www.flying-lines.com/novel/one-useless-rebirth'],
    'https://book.qidian.com/': ['https://book.qidian.com/info/1016597088'],
    'https://kiss-novel.com/': ['https://kiss-novel.com/the-first-order'],
    'https://www.machine-translation.org/': [
        'https://www.machine-translation.org/novel/bace21c9b10d34e9/world-of-cultivation.html',
        'https://www.machine-translation.org/novel/a5eee127d75da0d2/long-live-summons.html'
    ],
    'https://www.aixdzs.com/': ['https://www.aixdzs.com/d/66/66746/'],
    'https://webnovelonline.com/': ['https://webnovelonline.com/novel/the_anarchic_consort'],
    'https://4scanlation.com/': ['https://4scanlation.com/tensei-shitara-slime-datta-ken-wn/'],
    'https://listnovel.com/': ['https://listnovel.com/novel/my-sassy-crown-princess/'],
    'https://tomotranslations.com/': ['https://tomotranslations.com/this-hero-is-invincible-but-too-cautious/'],
    'https://www.wuxialeague.com/': ['https://www.wuxialeague.com/novel/245/'],
    'http://liberspark.com/': ['http://liberspark.com/novel/black-irons-glory'],
    'https://webnovelindonesia.com/': ['https://webnovelindonesia.com/nv/almighty-student'],
    'http://tiknovel.com/': ['http://tiknovel.com/book/index?id=717'],
    'http://boxnovel.org/': ['http://boxnovel.org/novel/martial-god-asura']
}
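A minimal sketch of how these fixtures are presumably consumed; the harness loop and the url-vs-search distinction are assumptions, only the base64 decoding is taken verbatim from the data above:

from base64 import b64decode

# Decode the obfuscated source URL exactly as the fixture does.
site = b64decode("aHR0cHM6Ly9jb21yYWRlbWFvLmNvbS8=".encode()).decode()
print(site)  # https://comrademao.com/

# Hypothetical consumer: entries starting with "http" look like novel URLs,
# anything else like a search query for that source.
for source, inputs in test_user_inputs.items():
    for value in inputs:
        kind = "url" if value.startswith("http") else "search"
        print(source, kind, value)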
34.922794
117
0.596694
0
0
0
0
0
0
0
0
7,146
0.75229
d9ea76a8227b5405cef7b2e6991bcba1911971f4
5,819
py
Python
wikisourcesort.py
ostropunk/wikisourcesort
3af2d086df0818a75b3e6c34550e2cc1382911a5
[ "MIT" ]
null
null
null
wikisourcesort.py
ostropunk/wikisourcesort
3af2d086df0818a75b3e6c34550e2cc1382911a5
[ "MIT" ]
null
null
null
wikisourcesort.py
ostropunk/wikisourcesort
3af2d086df0818a75b3e6c34550e2cc1382911a5
[ "MIT" ]
null
null
null
#!/usr/bin/env python
# coding: utf-8

# In[1]:

import pandas as pd
import re


# In[2]:

def get_excel_dict(excelfile, key=None, index_col=0, header=0):
    dataframe = pd.read_excel(excelfile, index_col=index_col, header=header)
    dictionary = dataframe.to_dict()
    if key is None:
        return dictionary
    else:
        return dictionary[key]


# In[3]:

def textreader(text):
    '''Opens textfile and returns the content as a string'''
    with open(text, 'rt', encoding="utf8") as wiki:
        txtstring = wiki.read()
    return txtstring


# In[44]:

def replace_from_dict(text, dictionary):
    '''Replaces words in text with new words in dictionary'''
    for word in dictionary:
        text = text.replace(word, dictionary[word])
    return text


# In[172]:

def get_ref(text):
    '''
    Finds references between the <ref>- and </ref>-tags
    and returns them as a list of strings
    '''
    ref = re.findall(r"\<ref.+?\<\/ref\>", text)
    return ref


# In[171]:

def getrefurl(ref):
    '''Finds the reference url in references and returns it as a string'''
    url = re.search(r"http.+?(?=\s|\|title=|\|titel|\}\})", ref)
    url = url.group()
    return url


# In[30]:

def get_domain_name(url):
    '''
    Finds the domain name of the reference url
    and returns that name as a string.
    '''
    domain_name = re.search(r'(?<=\/\/).+?(?=\/)', url)
    domain_name = domain_name.group()
    if domain_name.startswith('www.'):
        domain_name = domain_name.replace('www.', '')
    return domain_name


# In[32]:

def update_ref_dict(ref, ref_dict, ref_counts):
    refurl = getrefurl(ref)
    domain_name = get_domain_name(refurl)
    if refurl not in ref_dict:
        if domain_name not in ref_counts:
            ref_counts.update({domain_name: 1})
            refname = domain_name + '.' + str(ref_counts[domain_name])
        else:
            ref_counts[domain_name] = ref_counts[domain_name] + 1
            refname = domain_name + '.' + str(ref_counts[domain_name])
        ref_dict.update({refurl: {'refs': [ref],
                                  'refname': refname,
                                  'refurl': refurl}})
    else:
        if ref not in ref_dict[refurl]['refs']:
            ref_dict[refurl]['refs'].append(ref)
    return ref_dict, ref_counts


# In[36]:

def create_ref_dict(refs):
    '''
    Takes a list of references, extracts the reference url and name,
    and returns a dictionary sorted on the referenceurl as key.
    '''
    ref_dict = {}
    ref_counts = {}
    for ref in refs:
        ref_dict, ref_counts = update_ref_dict(ref, ref_dict, ref_counts)
    return ref_dict


# In[79]:

def get_ref_tag(text):
    '''
    Finds self-closing <ref name=.../> tags
    and returns them as a set of strings
    '''
    ref = re.findall(r"\<ref name\=.+?\/\>", text)
    return set(ref)


# In[130]:

def get_spec_ref(text, ref_tag):
    '''
    Finds the full <ref name="...">...</ref> reference for the
    given tag name and returns it as a string
    '''
    ref = re.findall(rf'\<ref name\=\"{ref_tag}\"\>.+?\<\/ref\>', text)
    ref = ref[0]
    return ref


# In[115]:

def get_ref_tag_name(ref_tag):
    ref_tag_name = re.findall(r'\".+\"', ref_tag)
    ref_tag_name = ref_tag_name[0].replace('"', '')
    return ref_tag_name


# In[136]:

def replace_tags(text):
    ref_tags = get_ref_tag(text)
    for tag in ref_tags:
        name = get_ref_tag_name(tag)
        spec_ref = get_spec_ref(text, name)
        text = text.replace(tag, spec_ref)
    return text


# In[49]:

def replace_countries(text):
    countries = get_excel_dict('countries2.xlsx', 'Länder')
    text = replace_from_dict(text, countries)
    return text


# In[66]:

def replace_headers(text):
    headers = {'English title': 'Engelsk titel',
               'Original title': 'Originaltitel',
               'Director(s)': 'Regissör(er)',
               'Country': 'Land',
               'School': 'Skola'}
    text = replace_from_dict(text, headers)
    return text


# In[169]:

def reference_sorter(text):
    '''
    Does a bunch of stuff that should be broken out in different functions.
    '''
    references = get_ref(text)
    reference_dict = create_ref_dict(references)
    reference_list = []
    reference_text = '== Referenser ==\n<references>\n'
    text = text.replace('== Källor ==', '== Referenser ==')
    text = text.replace('<references/>', '')
    for entry in reference_dict:
        for reference in reference_dict[entry]['refs']:
            text = text.replace(
                reference,
                '<ref name="{}" />'.format(reference_dict[entry]['refname']))
        reference_list.append('<ref name="{}">{}</ref>'.format(
            reference_dict[entry]['refname'], entry))
    for reference in reference_list:
        reference_text += reference + '\n'
    reference_text += '</references>'
    text = re.split('== Referenser ==', text)
    text = text[0] + reference_text + text[-1]
    return text


# In[134]:

def fix_wiki_entry(textfile):
    with open(textfile, 'r', encoding="utf8") as txt:
        text = txt.read()
    text = replace_tags(text)
    text = reference_sorter(text)
    text = replace_countries(text)
    text = replace_headers(text)
    with open('new_' + textfile, 'w', encoding='utf8') as new_text:
        new_text.write(text)
    return text


# In[173]:

def main():
    fix_wiki_entry(input('Please enter input textfile:'))

if __name__ == "__main__":
    main()
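A short, self-contained run of the reference pipeline above; the wikitext sample is invented for illustration:

sample = ('Some claim.<ref>{{cite web|url=http://example.com/a |title=A}}</ref>\n'
          '== Referenser ==\n<references/>\n')
refs = get_ref(sample)           # one '<ref>...</ref>' string
url = getrefurl(refs[0])         # 'http://example.com/a'
print(get_domain_name(url))      # 'example.com'
# Rewrites the inline ref as <ref name="example.com.1" /> and appends a
# named <references> block after '== Referenser =='.
print(reference_sorter(sample))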
23.75102
104
0.598385
0
0
0
0
0
0
0
0
2,007
0.344727
d9ea7ffbac1c307ae6a48a478a94b12a44b81de1
3,325
py
Python
backend/radar/engine/body_objects.py
me-anton/radar-app
cc7d1e876e0ce9b6173b6d7b484d5553e247166e
[ "MIT" ]
null
null
null
backend/radar/engine/body_objects.py
me-anton/radar-app
cc7d1e876e0ce9b6173b6d7b484d5553e247166e
[ "MIT" ]
null
null
null
backend/radar/engine/body_objects.py
me-anton/radar-app
cc7d1e876e0ce9b6173b6d7b484d5553e247166e
[ "MIT" ]
null
null
null
import logging
import json
from dataclasses import dataclass
from redis import Redis
from typing import Iterable, Tuple, List, Iterator, Union, Dict
from typing_extensions import TypedDict

from backend import settings
from caching.scripts import RedisScriptsPool
from share.metaclasses import Singleton
from radar.models import AlienBody
from radar.validation import validate_body_str_profile

logger = logging.getLogger(__name__)

BodiesUpdate = TypedDict('BodiesUpdate', {'dropped_keys': List[str],
                                          'new_records': Dict[str, str]})


@dataclass(frozen=True)
class BodyObject:
    key: str
    matrix: List[List[str]]
    width: int
    height: int

    @staticmethod
    def generate(key: str, body: str) -> 'BodyObject':
        line_list = body.splitlines()
        matrix = [list(line) for line in line_list]
        return BodyObject(key=key,
                          matrix=matrix,
                          width=len(matrix[0]),
                          height=len(matrix))


class BodyObjectsPool(metaclass=Singleton):
    """
    An object for getting BodyObject instances from database or cache
    """
    body_key_prefix = 'body:'
    body_lookup_pattern = body_key_prefix + '*'
    body_expiration = 10  # in seconds

    def __init__(self, num_of_default_bodies=3):
        self.num_of_default_bodies = num_of_default_bodies
        self.__default_bodies: Tuple[BodyObject, ...] = \
            self._generate_defaults(num_of_default_bodies)
        self._redis = Redis(host=settings.REDIS_HOSTNAME)
        self._scripts = RedisScriptsPool()

    def add_body(self, body: Union[str, bytes], body_id: str) -> None:
        """Cache the requested body string in Redis db"""
        validate_body_str_profile(body)
        key = self.make_body_key(body_id)
        self._redis.set(key, body, self.body_expiration)

    def ping_body(self, body_id: str):
        """Reset expiration time of a body"""
        key = self.make_body_key(body_id)
        self._redis.expire(key, self.body_expiration)

    def update_bodies(self, known_bodies_keys: Iterable[str],
                      max_capacity: int) -> BodiesUpdate:
        """
        Give update on state of body objects' records in Redis db
        :param known_bodies_keys: redis keys of already known bodies
        :param max_capacity: maximum relevant for requester number of bodies
                             including already known ones
        """
        return json.loads(
            self._scripts.update_records(keys=known_bodies_keys,
                                         args=[max_capacity,
                                               self.body_lookup_pattern])
        )

    def make_body_key(self, body_id: str):
        return self.body_key_prefix + body_id

    @property
    def first(self):
        return self._get_default(0)

    @property
    def second(self):
        return self._get_default(1)

    @property
    def third(self):
        return self._get_default(2)

    def _get_default(self, index) -> BodyObject:
        return self.__default_bodies[index]

    @staticmethod
    def _generate_defaults(num_of_defaults):
        logger.info('Generating default bodies')
        query = AlienBody.objects.filter(id__lte=num_of_defaults)
        return tuple(BodyObject.generate(str(body.id), body.body_str)
                     for body in query)
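BodyObject.generate just splits an ASCII-art body into a character matrix; the same parsing can be sketched standalone, with no Redis or Django needed (the body string here is invented):

body = "***\n* *\n***"
matrix = [list(line) for line in body.splitlines()]
width, height = len(matrix[0]), len(matrix)
print(width, height)  # 3 3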
33.25
76
0.657143
2,719
0.817744
0
0
878
0.26406
0
0
533
0.160301
d9eb0ee449a6b916e969b15c42a07550484f36ad
959
py
Python
djangocms_baseplugins/spacer/cms_plugins.py
benzkji/djangocms-baseplugins
7f041a030ed93dcdec70e4ca777b841846b8f2f2
[ "MIT" ]
2
2019-04-14T01:31:22.000Z
2020-03-05T13:06:57.000Z
djangocms_baseplugins/spacer/cms_plugins.py
benzkji/djangocms-baseplugins
7f041a030ed93dcdec70e4ca777b841846b8f2f2
[ "MIT" ]
32
2017-04-04T09:28:06.000Z
2021-08-18T16:23:02.000Z
djangocms_baseplugins/spacer/cms_plugins.py
bnzk/djangocms-baseplugins
7f041a030ed93dcdec70e4ca777b841846b8f2f2
[ "MIT" ]
null
null
null
# coding: utf-8
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django import forms
from django.utils.translation import ugettext_lazy as _

from djangocms_baseplugins.baseplugin import defaults
from djangocms_baseplugins.baseplugin.cms_plugins import BasePluginMixin
from djangocms_baseplugins.baseplugin.utils import get_fields_from_fieldsets, get_baseplugin_widgets

from . import conf
from .models import Spacer


class SpacerPluginForm(forms.ModelForm):
    class Meta:
        model = Spacer
        fields = get_fields_from_fieldsets(conf.FIELDSETS)
        # exclude = []
        widgets = get_baseplugin_widgets(conf)


class SpacerPlugin(BasePluginMixin, CMSPluginBase):
    model = Spacer
    form = SpacerPluginForm
    module = defaults.SPECIAL_LABEL
    name = _(u'Spacer')
    render_template = "djangocms_baseplugins/spacer.html"
    fieldsets = conf.FIELDSETS


plugin_pool.register_plugin(SpacerPlugin)
29.96875
100
0.788321
455
0.474453
0
0
0
0
0
0
73
0.076121
d9ec253823566d98d214c4860b8c8d8ac8c80515
2,188
py
Python
python_utilities/plotting/util.py
sdaxen/python_utilities
7b9d6cc21bfc31be83629d2ac02b27e886ebc2bb
[ "MIT" ]
2
2020-04-13T20:17:36.000Z
2020-05-12T01:13:12.000Z
python_utilities/plotting/util.py
sethaxen/python_utilities
7b9d6cc21bfc31be83629d2ac02b27e886ebc2bb
[ "MIT" ]
5
2015-10-20T22:57:51.000Z
2017-09-07T01:10:23.000Z
python_utilities/plotting/util.py
sethaxen/python_utilities
7b9d6cc21bfc31be83629d2ac02b27e886ebc2bb
[ "MIT" ]
3
2015-08-17T17:55:41.000Z
2018-09-19T13:56:42.000Z
"""Utility functions for plotting. Author: Seth Axen E-mail: [email protected]""" from collections import deque import numpy as np def rgb_to_hsv(rgb): """Convert RGB colors to HSV colors.""" r, g, b = tuple(map(float, rgb)) if any([r > 1, g > 1, b > 1]): r /= 255. g /= 255. b /= 255. mmax = max(r, g, b) mmin = min(r, g, b) c = mmax - mmin if (c == 0.): hp = 0. elif (mmax == r): hp = ((g - b) / c) % 6 elif (mmax == g): hp = ((b - r) / c) + 2 elif (mmax == b): hp = ((r - g) / c) + 4 h = 60 * hp v = mmax if (c == 0): s = 0 else: s = c / v return (h, s, v) def hsv_to_rgb(hsv): """Convert HSV colors to RGB colors.""" h, s, v = tuple(map(float, hsv)) c = v * s m = v - c hp = h / 60. x = c * (1. - abs((hp % 2) - 1.)) hp = int(hp) rgb = deque((c + m, x + m, m)) if (hp % 2): rgb.reverse() rgb.rotate((hp - 3) / 2) else: rgb.rotate(hp / 2) return tuple(rgb) def rgb_to_yuv(rgb): """Convert RGB colors to Y'UV colors, useful for comparison.""" rgbv = np.array(rgb).reshape(3, 1) if np.any(rgbv > 1.): rgbv = rgbv / 255. yuv = np.dot(np.array([[ .299, .587, .114], [-.14713, -.28886, .436], [ .615, -.51499, -.10001]], dtype=np.double), rgbv) return list(yuv) def yuv_to_rgb(yuv): """Convert Y'UV colors to RGB colors.""" yuvv = np.array(yuv).reshape(3, 1) rgb = np.dot(np.array([[1., 0., 1.13983], [1., -.39465, -.58060], [1., 2.03211, 0.]], dtype=np.double), yuvv) return list(rgb) def compute_yuv_dist(rgb1, rgb2): """Compute Euclidean Y'UV distance between RGB colors.""" yuv1 = rgb_to_yuv(rgb1) yuv2 = rgb_to_yuv(rgb2) return float(sum((np.array(yuv1) - np.array(yuv2))**2)**.5) def lighten_rgb(rgb, p=0.): """Lighten RGB colors by percentage p of total.""" h, s, v = rgb_to_hsv(rgb) hsv = (h, s, min(1, v + p)) return hsv_to_rgb(hsv)
24.863636
74
0.472121
0
0
0
0
0
0
0
0
372
0.170018
d9ec2cc7a1a6ba6f4583fe5b1a6bc53ffc63f837
618
py
Python
tests/test_process.py
confluentinc/utils-core
6001b4c61f7d923d273a23dc5a1580e0fa277d2c
[ "MIT" ]
null
null
null
tests/test_process.py
confluentinc/utils-core
6001b4c61f7d923d273a23dc5a1580e0fa277d2c
[ "MIT" ]
null
null
null
tests/test_process.py
confluentinc/utils-core
6001b4c61f7d923d273a23dc5a1580e0fa277d2c
[ "MIT" ]
1
2021-01-14T11:33:35.000Z
2021-01-14T11:33:35.000Z
import pytest

from utils.process import run, silent_run, RunError
from utils.fs import in_temp_dir


def test_run(capsys):
    with in_temp_dir():
        assert run('echo hello > hello.txt; echo world >> hello.txt', shell=True)

        out = run('ls', return_output=True)
        assert out == 'hello.txt\n'

        out = run(['cat', 'hello.txt'], return_output=True)
        assert out == 'hello\nworld\n'

        with pytest.raises(RunError):
            run('blah')

        assert not run('blah', raises=False)

        assert silent_run('ls -l')
        out, _ = capsys.readouterr()
        assert out == ''
24.72
81
0.600324
0
0
0
0
0
0
0
0
119
0.192557
d9ec7fb034397cf9a445f613d02c81768a1461eb
3,410
py
Python
bokeh/client/util.py
areaweb/bokeh
9d131e45d626a912e85aee5b2647139c194dc893
[ "BSD-3-Clause" ]
1
2021-01-31T22:13:13.000Z
2021-01-31T22:13:13.000Z
bokeh/client/util.py
adsbxchange/bokeh
47aa8f8420944c47e876c1c36be182d257c14b87
[ "BSD-3-Clause" ]
1
2017-01-12T00:37:38.000Z
2017-01-12T00:37:38.000Z
bokeh/client/util.py
adsbxchange/bokeh
47aa8f8420944c47e876c1c36be182d257c14b87
[ "BSD-3-Clause" ]
null
null
null
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2017, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Internal utility functions used by ``bokeh.client``

'''

#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals

import logging
log = logging.getLogger(__name__)

from bokeh.util.api import public, internal ; public, internal

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

# Standard library imports

# External imports

# Bokeh imports

#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Public API
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Internal API
#-----------------------------------------------------------------------------

@internal((1,0,0))
def server_url_for_websocket_url(url):
    ''' Convert a ``ws(s)`` URL for a Bokeh server websocket endpoint into
    the corresponding ``http(s)`` server URL.

    Args:
        url (str):
            A ``ws(s)`` URL ending in ``/ws``

    Returns:
        str:
            The corresponding ``http(s)`` URL.

    Raises:
        ValueError:
            If the input URL is not of the proper form.

    '''
    if url.startswith("ws:"):
        reprotocoled = "http" + url[2:]
    elif url.startswith("wss:"):
        reprotocoled = "https" + url[3:]
    else:
        raise ValueError("URL has non-websocket protocol " + url)
    if not reprotocoled.endswith("/ws"):
        raise ValueError("websocket URL does not end in /ws")
    return reprotocoled[:-2]

@internal((1,0,0))
def websocket_url_for_server_url(url):
    ''' Convert an ``http(s)`` URL for a Bokeh server into the appropriate
    ``ws(s)`` URL for its websocket endpoint.

    Args:
        url (str):
            An ``http(s)`` URL

    Returns:
        str:
            The corresponding ``ws(s)`` URL ending in ``/ws``

    Raises:
        ValueError:
            If the input URL is not of the proper form.

    '''
    if url.startswith("http:"):
        reprotocoled = "ws" + url[4:]
    elif url.startswith("https:"):
        reprotocoled = "wss" + url[5:]
    else:
        raise ValueError("URL has unknown protocol " + url)
    if reprotocoled.endswith("/"):
        return reprotocoled + "ws"
    else:
        return reprotocoled + "/ws"

#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
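The two helpers are inverses of each other, assuming the 0.12.x-era module layout shown above:

from bokeh.client.util import (
    server_url_for_websocket_url, websocket_url_for_server_url)

server = "http://localhost:5006/"
ws = websocket_url_for_server_url(server)   # "ws://localhost:5006/ws"
assert server_url_for_websocket_url(ws) == server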
31.574074
82
0.389443
0
0
0
0
1,526
0.447507
0
0
2,521
0.739296
d9ed79fef6ca74a4e312f154a876ffa2123179f7
16,276
py
Python
slim/nets/inception_resnet_v2.py
PPTMiao/mtl-ssl
b61449c3f902414304657de6ec217077e441a6b9
[ "Apache-2.0" ]
90
2019-06-12T06:11:39.000Z
2022-03-21T22:28:38.000Z
slim/nets/inception_resnet_v2.py
PPTMiao/mtl-ssl
b61449c3f902414304657de6ec217077e441a6b9
[ "Apache-2.0" ]
3
2020-03-24T17:01:25.000Z
2021-02-02T22:00:11.000Z
slim/nets/inception_resnet_v2.py
PPTMiao/mtl-ssl
b61449c3f902414304657de6ec217077e441a6b9
[ "Apache-2.0" ]
17
2019-06-15T08:49:46.000Z
2022-01-24T06:46:23.000Z
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Contains the definition of the Inception Resnet V2 architecture. As described in http://arxiv.org/abs/1602.07261. Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf slim = tf.contrib.slim def block35(net, scale=1.0, activation_fn=tf.nn.relu, scope=None, reuse=None): """Builds the 35x35 resnet block.""" with tf.variable_scope(scope, 'Block35', [net], reuse=reuse): with tf.variable_scope('Branch_0'): tower_conv = slim.conv2d(net, 32, 1, scope='Conv2d_1x1') with tf.variable_scope('Branch_1'): tower_conv1_0 = slim.conv2d(net, 32, 1, scope='Conv2d_0a_1x1') tower_conv1_1 = slim.conv2d(tower_conv1_0, 32, 3, scope='Conv2d_0b_3x3') with tf.variable_scope('Branch_2'): tower_conv2_0 = slim.conv2d(net, 32, 1, scope='Conv2d_0a_1x1') tower_conv2_1 = slim.conv2d(tower_conv2_0, 48, 3, scope='Conv2d_0b_3x3') tower_conv2_2 = slim.conv2d(tower_conv2_1, 64, 3, scope='Conv2d_0c_3x3') mixed = tf.concat(axis=3, values=[tower_conv, tower_conv1_1, tower_conv2_2]) up = slim.conv2d(mixed, net.get_shape()[3], 1, normalizer_fn=None, activation_fn=None, scope='Conv2d_1x1') net += scale * up if activation_fn: net = activation_fn(net) return net def block17(net, scale=1.0, activation_fn=tf.nn.relu, scope=None, reuse=None): """Builds the 17x17 resnet block.""" with tf.variable_scope(scope, 'Block17', [net], reuse=reuse): with tf.variable_scope('Branch_0'): tower_conv = slim.conv2d(net, 192, 1, scope='Conv2d_1x1') with tf.variable_scope('Branch_1'): tower_conv1_0 = slim.conv2d(net, 128, 1, scope='Conv2d_0a_1x1') tower_conv1_1 = slim.conv2d(tower_conv1_0, 160, [1, 7], scope='Conv2d_0b_1x7') tower_conv1_2 = slim.conv2d(tower_conv1_1, 192, [7, 1], scope='Conv2d_0c_7x1') mixed = tf.concat(axis=3, values=[tower_conv, tower_conv1_2]) up = slim.conv2d(mixed, net.get_shape()[3], 1, normalizer_fn=None, activation_fn=None, scope='Conv2d_1x1') net += scale * up if activation_fn: net = activation_fn(net) return net def block8(net, scale=1.0, activation_fn=tf.nn.relu, scope=None, reuse=None): """Builds the 8x8 resnet block.""" with tf.variable_scope(scope, 'Block8', [net], reuse=reuse): with tf.variable_scope('Branch_0'): tower_conv = slim.conv2d(net, 192, 1, scope='Conv2d_1x1') with tf.variable_scope('Branch_1'): tower_conv1_0 = slim.conv2d(net, 192, 1, scope='Conv2d_0a_1x1') tower_conv1_1 = slim.conv2d(tower_conv1_0, 224, [1, 3], scope='Conv2d_0b_1x3') tower_conv1_2 = slim.conv2d(tower_conv1_1, 256, [3, 1], scope='Conv2d_0c_3x1') mixed = tf.concat(axis=3, values=[tower_conv, tower_conv1_2]) up = slim.conv2d(mixed, net.get_shape()[3], 1, normalizer_fn=None, activation_fn=None, scope='Conv2d_1x1') net += scale * up if activation_fn: 
net = activation_fn(net) return net def inception_resnet_v2_base(inputs, final_endpoint='Conv2d_7b_1x1', output_stride=16, align_feature_maps=False, scope=None): """Inception model from http://arxiv.org/abs/1602.07261. Constructs an Inception Resnet v2 network from inputs to the given final endpoint. This method can construct the network up to the final inception block Conv2d_7b_1x1. Args: inputs: a tensor of size [batch_size, height, width, channels]. final_endpoint: specifies the endpoint to construct the network up to. It can be one of ['Conv2d_1a_3x3', 'Conv2d_2a_3x3', 'Conv2d_2b_3x3', 'MaxPool_3a_3x3', 'Conv2d_3b_1x1', 'Conv2d_4a_3x3', 'MaxPool_5a_3x3', 'Mixed_5b', 'Mixed_6a', 'PreAuxLogits', 'Mixed_7a', 'Conv2d_7b_1x1'] output_stride: A scalar that specifies the requested ratio of input to output spatial resolution. Only supports 8 and 16. align_feature_maps: When true, changes all the VALID paddings in the network to SAME padding so that the feature maps are aligned. scope: Optional variable_scope. Returns: tensor_out: output tensor corresponding to the final_endpoint. end_points: a set of activations for external use, for example summaries or losses. Raises: ValueError: if final_endpoint is not set to one of the predefined values, or if the output_stride is not 8 or 16, or if the output_stride is 8 and we request an end point after 'PreAuxLogits'. """ if output_stride != 8 and output_stride != 16: raise ValueError('output_stride must be 8 or 16.') padding = 'SAME' if align_feature_maps else 'VALID' end_points = {} def add_and_check_final(name, net): end_points[name] = net return name == final_endpoint with tf.variable_scope(scope, 'InceptionResnetV2', [inputs]): with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d], stride=1, padding='SAME'): # 149 x 149 x 32 net = slim.conv2d(inputs, 32, 3, stride=2, padding=padding, scope='Conv2d_1a_3x3') if add_and_check_final('Conv2d_1a_3x3', net): return net, end_points # 147 x 147 x 32 net = slim.conv2d(net, 32, 3, padding=padding, scope='Conv2d_2a_3x3') if add_and_check_final('Conv2d_2a_3x3', net): return net, end_points # 147 x 147 x 64 net = slim.conv2d(net, 64, 3, scope='Conv2d_2b_3x3') if add_and_check_final('Conv2d_2b_3x3', net): return net, end_points # 73 x 73 x 64 net = slim.max_pool2d(net, 3, stride=2, padding=padding, scope='MaxPool_3a_3x3') if add_and_check_final('MaxPool_3a_3x3', net): return net, end_points # 73 x 73 x 80 net = slim.conv2d(net, 80, 1, padding=padding, scope='Conv2d_3b_1x1') if add_and_check_final('Conv2d_3b_1x1', net): return net, end_points # 71 x 71 x 192 net = slim.conv2d(net, 192, 3, padding=padding, scope='Conv2d_4a_3x3') if add_and_check_final('Conv2d_4a_3x3', net): return net, end_points # 35 x 35 x 192 net = slim.max_pool2d(net, 3, stride=2, padding=padding, scope='MaxPool_5a_3x3') if add_and_check_final('MaxPool_5a_3x3', net): return net, end_points # 35 x 35 x 320 with tf.variable_scope('Mixed_5b'): with tf.variable_scope('Branch_0'): tower_conv = slim.conv2d(net, 96, 1, scope='Conv2d_1x1') with tf.variable_scope('Branch_1'): tower_conv1_0 = slim.conv2d(net, 48, 1, scope='Conv2d_0a_1x1') tower_conv1_1 = slim.conv2d(tower_conv1_0, 64, 5, scope='Conv2d_0b_5x5') with tf.variable_scope('Branch_2'): tower_conv2_0 = slim.conv2d(net, 64, 1, scope='Conv2d_0a_1x1') tower_conv2_1 = slim.conv2d(tower_conv2_0, 96, 3, scope='Conv2d_0b_3x3') tower_conv2_2 = slim.conv2d(tower_conv2_1, 96, 3, scope='Conv2d_0c_3x3') with tf.variable_scope('Branch_3'): tower_pool = slim.avg_pool2d(net, 3, stride=1, 
padding='SAME', scope='AvgPool_0a_3x3') tower_pool_1 = slim.conv2d(tower_pool, 64, 1, scope='Conv2d_0b_1x1') net = tf.concat( [tower_conv, tower_conv1_1, tower_conv2_2, tower_pool_1], 3) if add_and_check_final('Mixed_5b', net): return net, end_points # TODO(alemi): Register intermediate endpoints net = slim.repeat(net, 10, block35, scale=0.17) # 17 x 17 x 1088 if output_stride == 8, # 33 x 33 x 1088 if output_stride == 16 use_atrous = output_stride == 8 with tf.variable_scope('Mixed_6a'): with tf.variable_scope('Branch_0'): tower_conv = slim.conv2d(net, 384, 3, stride=1 if use_atrous else 2, padding=padding, scope='Conv2d_1a_3x3') with tf.variable_scope('Branch_1'): tower_conv1_0 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1') tower_conv1_1 = slim.conv2d(tower_conv1_0, 256, 3, scope='Conv2d_0b_3x3') tower_conv1_2 = slim.conv2d(tower_conv1_1, 384, 3, stride=1 if use_atrous else 2, padding=padding, scope='Conv2d_1a_3x3') with tf.variable_scope('Branch_2'): tower_pool = slim.max_pool2d(net, 3, stride=1 if use_atrous else 2, padding=padding, scope='MaxPool_1a_3x3') net = tf.concat([tower_conv, tower_conv1_2, tower_pool], 3) if add_and_check_final('Mixed_6a', net): return net, end_points # TODO(alemi): register intermediate endpoints with slim.arg_scope([slim.conv2d], rate=2 if use_atrous else 1): net = slim.repeat(net, 20, block17, scale=0.10) if add_and_check_final('PreAuxLogits', net): return net, end_points if output_stride == 8: # TODO(gpapan): Properly support output_stride for the rest of the net. raise ValueError('output_stride==8 is only supported up to the ' 'PreAuxlogits end_point for now.') # 8 x 8 x 2080 with tf.variable_scope('Mixed_7a'): with tf.variable_scope('Branch_0'): tower_conv = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1') tower_conv_1 = slim.conv2d(tower_conv, 384, 3, stride=2, padding=padding, scope='Conv2d_1a_3x3') with tf.variable_scope('Branch_1'): tower_conv1 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1') tower_conv1_1 = slim.conv2d(tower_conv1, 288, 3, stride=2, padding=padding, scope='Conv2d_1a_3x3') with tf.variable_scope('Branch_2'): tower_conv2 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1') tower_conv2_1 = slim.conv2d(tower_conv2, 288, 3, scope='Conv2d_0b_3x3') tower_conv2_2 = slim.conv2d(tower_conv2_1, 320, 3, stride=2, padding=padding, scope='Conv2d_1a_3x3') with tf.variable_scope('Branch_3'): tower_pool = slim.max_pool2d(net, 3, stride=2, padding=padding, scope='MaxPool_1a_3x3') net = tf.concat( [tower_conv_1, tower_conv1_1, tower_conv2_2, tower_pool], 3) if add_and_check_final('Mixed_7a', net): return net, end_points # TODO(alemi): register intermediate endpoints net = slim.repeat(net, 9, block8, scale=0.20) net = block8(net, activation_fn=None) # 8 x 8 x 1536 net = slim.conv2d(net, 1536, 1, scope='Conv2d_7b_1x1') if add_and_check_final('Conv2d_7b_1x1', net): return net, end_points raise ValueError('final_endpoint (%s) not recognized', final_endpoint) def inception_resnet_v2(inputs, num_classes=1001, is_training=True, dropout_keep_prob=0.8, reuse=None, scope='InceptionResnetV2', create_aux_logits=True): """Creates the Inception Resnet V2 model. Args: inputs: a 4-D tensor of size [batch_size, height, width, 3]. num_classes: number of predicted classes. is_training: whether is training or not. dropout_keep_prob: float, the fraction to keep before final layer. reuse: whether or not the network and its variables should be reused. To be able to reuse 'scope' must be given. scope: Optional variable_scope. 
create_aux_logits: Whether to include the auxilliary logits. Returns: logits: the logits outputs of the model. end_points: the set of end_points from the inception model. """ end_points = {} with tf.variable_scope(scope, 'InceptionResnetV2', [inputs, num_classes], reuse=reuse) as scope: with slim.arg_scope([slim.batch_norm, slim.dropout], is_training=is_training): net, end_points = inception_resnet_v2_base(inputs, scope=scope) if create_aux_logits: with tf.variable_scope('AuxLogits'): aux = end_points['PreAuxLogits'] aux = slim.avg_pool2d(aux, 5, stride=3, padding='VALID', scope='Conv2d_1a_3x3') aux = slim.conv2d(aux, 128, 1, scope='Conv2d_1b_1x1') aux = slim.conv2d(aux, 768, aux.get_shape()[1:3], padding='VALID', scope='Conv2d_2a_5x5') aux = slim.flatten(aux) aux = slim.fully_connected(aux, num_classes, activation_fn=None, scope='Logits') end_points['AuxLogits'] = aux with tf.variable_scope('Logits'): net = slim.avg_pool2d(net, net.get_shape()[1:3], padding='VALID', scope='AvgPool_1a_8x8') net = slim.flatten(net) net = slim.dropout(net, dropout_keep_prob, is_training=is_training, scope='Dropout') end_points['PreLogitsFlatten'] = net logits = slim.fully_connected(net, num_classes, activation_fn=None, scope='Logits') end_points['Logits'] = logits end_points['Predictions'] = tf.nn.softmax(logits, name='Predictions') return logits, end_points inception_resnet_v2.default_image_size = 299 def inception_resnet_v2_arg_scope(weight_decay=0.00004, batch_norm_decay=0.9997, batch_norm_epsilon=0.001, trainable=True): """Returns the scope with the default parameters for inception_resnet_v2. Args: weight_decay: the weight decay for weights variables. batch_norm_decay: decay for the moving average of batch_norm momentums. batch_norm_epsilon: small float added to variance to avoid dividing by zero. Returns: a arg_scope with the parameters needed for inception_resnet_v2. """ # Set weight_decay for weights in conv2d and fully_connected layers. with slim.arg_scope([slim.conv2d, slim.fully_connected], weights_regularizer=slim.l2_regularizer(weight_decay), biases_regularizer=slim.l2_regularizer(weight_decay), trainable=trainable): batch_norm_params = { 'decay': batch_norm_decay, 'epsilon': batch_norm_epsilon, 'trainable': trainable } # Set activation_fn and parameters for batch_norm. with slim.arg_scope([slim.conv2d], activation_fn=tf.nn.relu, normalizer_fn=slim.batch_norm, normalizer_params=batch_norm_params) as scope: return scope
45.085873
80
0.616552
0
0
0
0
0
0
0
0
5,496
0.337675
d9ee27c57dbf76a3c2165139cae647ead0e58c46
6,479
py
Python
tests/boilerplate_client/boilerplate_cmd.py
LedgerHQ/ledger-app-neo3
48e1e0dec3e4801fc3ab1b07c4fe4ed86735a642
[ "MIT" ]
null
null
null
tests/boilerplate_client/boilerplate_cmd.py
LedgerHQ/ledger-app-neo3
48e1e0dec3e4801fc3ab1b07c4fe4ed86735a642
[ "MIT" ]
5
2021-09-13T16:41:52.000Z
2022-01-12T16:00:21.000Z
tests/boilerplate_client/boilerplate_cmd.py
isabella232/app-neo3
c48ec5032143fe606d694372c2cfc02082b2ce03
[ "MIT" ]
3
2021-09-01T11:40:09.000Z
2022-03-06T06:45:13.000Z
import struct
from typing import Tuple

from ledgercomm import Transport

from boilerplate_client.boilerplate_cmd_builder import BoilerplateCommandBuilder, InsType
from boilerplate_client.button import Button
from boilerplate_client.exception import DeviceException
from boilerplate_client.transaction import Transaction
from neo3.network import payloads


class BoilerplateCommand:
    def __init__(self,
                 transport: Transport,
                 debug: bool = False) -> None:
        self.transport = transport
        self.builder = BoilerplateCommandBuilder(debug=debug)
        self.debug = debug

    def get_app_and_version(self) -> Tuple[str, str]:
        sw, response = self.transport.exchange_raw(
            self.builder.get_app_and_version()
        )  # type: int, bytes

        if sw != 0x9000:
            raise DeviceException(error_code=sw, ins=0x01)

        # response = format_id (1) ||
        #            app_name_len (1) ||
        #            app_name (var) ||
        #            version_len (1) ||
        #            version (var) ||
        offset: int = 0

        format_id: int = response[offset]
        offset += 1
        app_name_len: int = response[offset]
        offset += 1
        app_name: str = response[offset:offset + app_name_len].decode("ascii")
        offset += app_name_len
        version_len: int = response[offset]
        offset += 1
        version: str = response[offset:offset + version_len].decode("ascii")
        offset += version_len

        return app_name, version

    def get_version(self) -> Tuple[int, int, int]:
        sw, response = self.transport.exchange_raw(
            self.builder.get_version()
        )  # type: int, bytes

        if sw != 0x9000:
            raise DeviceException(error_code=sw, ins=InsType.INS_GET_VERSION)

        # response = MAJOR (1) || MINOR (1) || PATCH (1)
        assert len(response) == 3

        major, minor, patch = struct.unpack(
            "BBB",
            response
        )  # type: int, int, int

        return major, minor, patch

    def get_app_name(self) -> str:
        sw, response = self.transport.exchange_raw(
            self.builder.get_app_name()
        )  # type: int, bytes

        if sw != 0x9000:
            raise DeviceException(error_code=sw, ins=InsType.INS_GET_APP_NAME)

        return response.decode("ascii")

    def get_public_key(self, bip44_path: str, display: bool = False) -> bytes:
        sw, response = self.transport.exchange_raw(
            self.builder.get_public_key(bip44_path=bip44_path)
        )  # type: int, bytes

        if sw != 0x9000:
            raise DeviceException(error_code=sw, ins=InsType.INS_GET_PUBLIC_KEY)

        assert len(response) == 65  # 04 + 64 bytes of uncompressed key

        return response

    def sign_tx(self, bip44_path: str, transaction: payloads.Transaction,
                network_magic: int, button: Button) -> bytes:
        # Annotated as bytes: only the response payload is returned, so the
        # original Tuple[int, bytes] annotation did not match the body.
        sw: int
        response: bytes = b""

        for is_last, chunk in self.builder.sign_tx(bip44_path=bip44_path,
                                                   transaction=transaction,
                                                   network_magic=network_magic):
            self.transport.send_raw(chunk)

            if is_last:
                # Review Transaction
                button.right_click()
                # Destination address
                button.right_click()
                button.right_click()
                button.right_click()
                # Token Amount
                button.right_click()
                # Target network
                button.right_click()
                # System fee
                button.right_click()
                # Network fee
                button.right_click()
                # Total fees
                button.right_click()
                # Valid until
                button.right_click()
                # Signer 1 of 1
                button.right_click()
                # Account 1/3, 2/3, 3/3
                button.right_click()
                button.right_click()
                button.right_click()
                # Scope
                button.right_click()
                # custom contracts
                if (len(transaction.signers) > 0
                        and payloads.WitnessScope.CUSTOM_CONTRACTS in transaction.signers[0].scope):
                    for _ in range(len(transaction.signers[0].allowed_contracts)):
                        button.right_click()
                        button.right_click()
                        button.right_click()
                # Approve
                button.both_click()

            sw, response = self.transport.recv()  # type: int, bytes

            if sw != 0x9000:
                raise DeviceException(error_code=sw, ins=InsType.INS_SIGN_TX)

        return response

    def sign_vote_tx(self, bip44_path: str, transaction: Transaction,
                     network_magic: int, button: Button) -> bytes:
        sw: int
        response: bytes = b""

        for is_last, chunk in self.builder.sign_tx(bip44_path=bip44_path,
                                                   transaction=transaction,
                                                   network_magic=network_magic):
            self.transport.send_raw(chunk)

            if is_last:
                # Review Transaction
                button.right_click()
                # Vote to public key
                button.right_click()
                button.right_click()
                button.right_click()
                button.right_click()
                # Target network
                button.right_click()
                # System fee
                button.right_click()
                # Network fee
                button.right_click()
                # Total fees
                button.right_click()
                # Valid until
                button.right_click()
                # Signer 1 of 1
                button.right_click()
                # Account 1/3, 2/3, 3/3
                button.right_click()
                button.right_click()
                button.right_click()
                # Scope
                button.right_click()
                # Approve
                button.both_click()

            sw, response = self.transport.recv()  # type: int, bytes

            if sw != 0x9000:
                raise DeviceException(error_code=sw, ins=InsType.INS_SIGN_TX)

        return response
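The byte layout that get_app_and_version parses, demonstrated on a fabricated response (these bytes are illustrative, not from a real device):

response = bytes([1, 4]) + b"NEO3" + bytes([5]) + b"1.2.3"
format_id, name_len = response[0], response[1]
offset = 2
app_name = response[offset:offset + name_len].decode("ascii")
offset += name_len
ver_len = response[offset]
offset += 1
version = response[offset:offset + ver_len].decode("ascii")
print(app_name, version)  # NEO3 1.2.3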
34.462766
131
0.533416
6,122
0.944899
0
0
0
0
0
0
748
0.11545
d9efa4ffda8cacd286187e29ce110d292c7a1e64
946
py
Python
clpy/sparse/util.py
fixstars/clpy
693485f85397cc110fa45803c36c30c24c297df0
[ "BSD-3-Clause" ]
142
2018-06-07T07:43:10.000Z
2021-10-30T21:06:32.000Z
clpy/sparse/util.py
fixstars/clpy
693485f85397cc110fa45803c36c30c24c297df0
[ "BSD-3-Clause" ]
282
2018-06-07T08:35:03.000Z
2021-03-31T03:14:32.000Z
clpy/sparse/util.py
fixstars/clpy
693485f85397cc110fa45803c36c30c24c297df0
[ "BSD-3-Clause" ]
19
2018-06-19T11:07:53.000Z
2021-05-13T20:57:04.000Z
import clpy
import clpy.sparse.base

_preamble_atomic_add = '''
#if __CUDA_ARCH__ < 600
__device__ double atomicAdd(double* address, double val) {
    unsigned long long* address_as_ull = (unsigned long long*)address;
    unsigned long long old = *address_as_ull, assumed;
    do {
        assumed = old;
        old = atomicCAS(address_as_ull, assumed,
                        __double_as_longlong(val + __longlong_as_double(assumed)));
    } while (assumed != old);
    return __longlong_as_double(old);
}
#endif
'''


def isintlike(x):
    try:
        return bool(int(x) == x)
    except (TypeError, ValueError):
        return False


def isscalarlike(x):
    return clpy.isscalar(x) or (clpy.sparse.base.isdense(x) and x.ndim == 0)


def isshape(x):
    if not isinstance(x, tuple) or len(x) != 2:
        return False
    m, n = x
    return isintlike(m) and isintlike(n)
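The shape helpers above in action on plain Python values (no GPU required):

print(isintlike(3.0))      # True  -- int(3.0) == 3.0
print(isintlike(3.5))      # False
print(isshape((2, 3)))     # True
print(isshape((2, 3, 4)))  # False -- only 2-tuples count as shapes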
24.25641
76
0.60148
0
0
0
0
0
0
0
0
524
0.553911
d9efc68d74f0ff6411265258b8ee1094b0fa820e
1,316
py
Python
test/test_cartesian.py
hwazni/discopy
812a4c77de4c766591bad74306720b518cdc54fc
[ "BSD-3-Clause" ]
205
2019-12-29T09:45:09.000Z
2022-03-24T09:29:13.000Z
test/test_cartesian.py
hwazni/discopy
812a4c77de4c766591bad74306720b518cdc54fc
[ "BSD-3-Clause" ]
61
2019-12-11T10:46:38.000Z
2022-03-28T17:10:52.000Z
test/test_cartesian.py
hwazni/discopy
812a4c77de4c766591bad74306720b518cdc54fc
[ "BSD-3-Clause" ]
46
2020-04-08T23:33:31.000Z
2022-03-18T21:58:35.000Z
from pytest import raises

from discopy.cartesian import *


def test_Box_repr():
    f = Box('f', 1, 2, lambda x: (x, x))
    assert "Box('f', 1, 2" in repr(f)


def test_Function_str():
    f = Function(2, 1, lambda x, y: x + y)
    assert 'Function(dom=2, cod=1,' in str(f)


def test_Function_call():
    f = Swap(2, 1)
    values = (2, 3)
    with raises(TypeError) as err:
        f(*values)
    assert str(err.value) == messages.expected_input_length(f, values)


def test_Function_then():
    f, g = Function(2, 1, lambda x, y: x + y), Function(1, 1, lambda x: x + 1)
    assert Function.id(2).then(*(f, g))(20, 21) == 42


def test_Function_then_err():
    f = Function(2, 1, lambda x, y: x + y)
    g = (lambda x: x, )
    with raises(TypeError) as err:
        f >> g
    assert str(err.value) == messages.type_err(Function, g)
    g = Function.id(2)
    with raises(AxiomError) as err:
        f >> g
    assert str(err.value) == messages.does_not_compose(f, g)


def test_Function_tensor():
    assert Function.id(3)(1, 2, 3)\
        == Function.id(0).tensor(*(3 * [Function.id(1)]))(1, 2, 3)


def test_Function_tensor_err():
    f = Function(2, 1, lambda x, y: x + y)
    g = (lambda x: x, )
    with raises(TypeError) as err:
        f @ g
    assert str(err.value) == messages.type_err(Function, g)
25.803922
78
0.595745
0
0
0
0
0
0
0
0
42
0.031915
d9f04eac1f39d4c14950ae0caf3dff21f18defd4
84,990
py
Python
source/browseMode.py
neal-hub/nvda-test
4c3a67b2eafa9721c5de3f671d10e60ab2d43865
[ "bzip2-1.0.6" ]
1
2022-02-20T23:10:39.000Z
2022-02-20T23:10:39.000Z
source/browseMode.py
neal-hub/nvda-test
4c3a67b2eafa9721c5de3f671d10e60ab2d43865
[ "bzip2-1.0.6" ]
null
null
null
source/browseMode.py
neal-hub/nvda-test
4c3a67b2eafa9721c5de3f671d10e60ab2d43865
[ "bzip2-1.0.6" ]
null
null
null
# A part of NonVisual Desktop Access (NVDA) # Copyright (C) 2007-2021 NV Access Limited, Babbage B.V., James Teh, Leonard de Ruijter, # Thomas Stivers, Accessolutions, Julien Cochuyt # This file is covered by the GNU General Public License. # See the file COPYING for more details. from typing import Any, Callable, Union import os import itertools import collections import winsound import time import weakref import wx import core from logHandler import log import documentBase import review import scriptHandler import eventHandler import nvwave import queueHandler import gui import ui import cursorManager from scriptHandler import script, isScriptWaiting, willSayAllResume import aria import controlTypes from controlTypes import OutputReason import config import textInfos import braille import vision import speech from speech import sayAll import treeInterceptorHandler import inputCore import api import gui.guiHelper from gui.dpiScalingHelper import DpiScalingHelperMixinWithoutInit from NVDAObjects import NVDAObject import gui.contextHelp from abc import ABCMeta, abstractmethod import globalVars from typing import Optional def reportPassThrough(treeInterceptor,onlyIfChanged=True): """Reports the pass through mode if it has changed. @param treeInterceptor: The current Browse Mode treeInterceptor. @type treeInterceptor: L{BrowseModeTreeInterceptor} @param onlyIfChanged: if true reporting will not happen if the last reportPassThrough reported the same thing. @type onlyIfChanged: bool """ if not onlyIfChanged or treeInterceptor.passThrough != reportPassThrough.last: if config.conf["virtualBuffers"]["passThroughAudioIndication"]: sound = "focusMode.wav" if treeInterceptor.passThrough else "browseMode.wav" nvwave.playWaveFile(os.path.join(globalVars.appDir, "waves", sound)) else: if treeInterceptor.passThrough: # Translators: The mode to interact with controls in documents ui.message(_("Focus mode")) else: # Translators: The mode that presents text in a flat representation # that can be navigated with the cursor keys like in a text document ui.message(_("Browse mode")) reportPassThrough.last = treeInterceptor.passThrough reportPassThrough.last = False def mergeQuickNavItemIterators(iterators,direction="next"): """ Merges multiple iterators that emit L{QuickNavItem} objects, yielding them from first to last. They are sorted using min or max (__lt__ should be implemented on the L{QuickNavItem} objects). @param iters: the iterators you want to merge. @type iters: sequence of iterators that emit L{QuicknavItem} objects. @param direction: the direction these iterators are searching (e.g. next, previous) @type direction: string """ finder=min if direction=="next" else max curValues=[] # Populate a list with all iterators and their corisponding first value for it in iterators: try: val=next(it) except StopIteration: continue curValues.append((it,val)) # Until all iterators have been used up, # Find the first (minimum or maximum) of all the values, # emit that, and update the list with the next available value for the iterator whose value was emitted. while len(curValues)>0: first=finder(curValues,key=lambda x: x[1]) curValues.remove(first) it,val=first yield val try: newVal=next(it) except StopIteration: continue curValues.append((it,newVal)) class QuickNavItem(object, metaclass=ABCMeta): """ Emitted by L{BrowseModeTreeInterceptor._iterNodesByType}, this represents one of many positions in a browse mode document, based on the type of item being searched for (e.g. 
link, heading, table etc).""" itemType=None #: The type of items searched for (e.g. link, heading, table etc) label=None #: The label that should represent this item in the Elements list. isAfterSelection=False #: Is this item positioned after the caret in the document? Used by the elements list to place its own selection. def __init__(self,itemType,document): """ @param itemType: the type that was searched for (e.g. link, heading, table etc) @type itemType: string @param document: the browse mode document this item is a part of. @type document: L{BrowseModeTreeInterceptor} """ self.itemType=itemType self.document=document @abstractmethod def isChild(self,parent): """ Is this item a child of the given parent? This is used when representing items in a hierarchical tree structure, such as the Elements List. @param parent: the item of whom this item may be a child of. @type parent: L{QuickNavItem} @return: True if this item is a child, false otherwise. @rtype: bool """ raise NotImplementedError @abstractmethod def report(self,readUnit=None): """ Reports the contents of this item. @param readUnit: the optional unit (e.g. line, paragraph) that should be used to announce the item position when moved to. If not given, then the full sise of the item is used. @type readUnit: a L{textInfos}.UNIT_* constant. """ raise NotImplementedError @abstractmethod def moveTo(self): """ Moves the browse mode caret or focus to this item. """ raise NotImplementedError def activate(self): """ Activates this item's position. E.g. follows a link, presses a button etc. """ raise NotImplementedError def rename(self,newName): """ Renames this item with the new name. """ raise NotImplementedError @property def isRenameAllowed(self): return False class TextInfoQuickNavItem(QuickNavItem): """ Represents a quick nav item in a browse mode document who's positions are represented by a L{textInfos.TextInfo}. """ def __init__(self,itemType,document,textInfo): """ See L{QuickNavItem.__init__} for itemType and document argument definitions. @param textInfo: the textInfo position this item represents. @type textInfo: L{textInfos.TextInfo} """ self.textInfo=textInfo super(TextInfoQuickNavItem,self).__init__(itemType,document) def __lt__(self,other): return self.textInfo.compareEndPoints(other.textInfo,"startToStart")<0 @property def obj(self): return self.textInfo.basePosition if isinstance(self.textInfo.basePosition,NVDAObject) else None @property def label(self): return self.textInfo.text.strip() def isChild(self,parent): if parent.textInfo.isOverlapping(self.textInfo): return True return False def report(self,readUnit=None): info=self.textInfo # If we are dealing with a form field, ensure we don't read the whole content if it's an editable text. if self.itemType == "formField": if self.obj.role == controlTypes.Role.EDITABLETEXT: readUnit = textInfos.UNIT_LINE if readUnit: fieldInfo = info.copy() info.collapse() info.move(readUnit, 1, endPoint="end") if info.compareEndPoints(fieldInfo, "endToEnd") > 0: # We've expanded past the end of the field, so limit to the end of the field. 
info.setEndPoint(fieldInfo, "endToEnd") speech.speakTextInfo(info, reason=OutputReason.QUICKNAV) def activate(self): self.textInfo.obj._activatePosition(info=self.textInfo) def moveTo(self): if self.document.passThrough and getattr(self, "obj", False): if controlTypes.State.FOCUSABLE in self.obj.states: self.obj.setFocus() return self.document.passThrough = False reportPassThrough(self.document) info = self.textInfo.copy() info.collapse() self.document._set_selection(info, reason=OutputReason.QUICKNAV) @property def isAfterSelection(self): caret=self.document.makeTextInfo(textInfos.POSITION_CARET) return self.textInfo.compareEndPoints(caret, "startToStart") > 0 def _getLabelForProperties(self, labelPropertyGetter: Callable[[str], Optional[Any]]): """ Fetches required properties for this L{TextInfoQuickNavItem} and constructs a label to be shown in an elements list. This can be used by subclasses to implement the L{label} property. @Param labelPropertyGetter: A callable taking 1 argument, specifying the property to fetch. For example, if L{itemType} is landmark, the callable must return the landmark type when "landmark" is passed as the property argument. Alternative property names might be name or value. The callable must return None if the property doesn't exist. An expected callable might be get method on a L{Dict}, or "lambda property: getattr(self.obj, property, None)" for an L{NVDAObject}. """ content = self.textInfo.text.strip() if self.itemType == "heading": # Output: displayed text of the heading. return content labelParts = None name = labelPropertyGetter("name") if self.itemType == "landmark": landmark = aria.landmarkRoles.get(labelPropertyGetter("landmark")) # Example output: main menu; navigation labelParts = (name, landmark) else: role: Union[controlTypes.Role, int] = labelPropertyGetter("role") role = controlTypes.Role(role) roleText = role.displayString # Translators: Reported label in the elements list for an element which which has no name and value unlabeled = _("Unlabeled") realStates = labelPropertyGetter("states") labeledStates = " ".join(controlTypes.processAndLabelStates(role, realStates, OutputReason.FOCUS)) if self.itemType == "formField": if role in ( controlTypes.Role.BUTTON, controlTypes.Role.DROPDOWNBUTTON, controlTypes.Role.TOGGLEBUTTON, controlTypes.Role.SPLITBUTTON, controlTypes.Role.MENUBUTTON, controlTypes.Role.DROPDOWNBUTTONGRID, controlTypes.Role.TREEVIEWBUTTON ): # Example output: Mute; toggle button; pressed labelParts = (content or name or unlabeled, roleText, labeledStates) else: # Example output: Find a repository...; edit; has auto complete; NVDA labelParts = (name or unlabeled, roleText, labeledStates, content) elif self.itemType in ("link", "button"): # Example output: You have unread notifications; visited labelParts = (content or name or unlabeled, labeledStates) if labelParts: label = "; ".join(lp for lp in labelParts if lp) else: label = content return label class BrowseModeTreeInterceptor(treeInterceptorHandler.TreeInterceptor): scriptCategory = inputCore.SCRCAT_BROWSEMODE _disableAutoPassThrough = False APPLICATION_ROLES = (controlTypes.Role.APPLICATION, controlTypes.Role.DIALOG) def _get_currentNVDAObject(self): raise NotImplementedError def _get_currentFocusableNVDAObject(self): return self.makeTextInfo(textInfos.POSITION_CARET).focusableNVDAObjectAtStart def event_treeInterceptor_gainFocus(self): """Triggered when this browse mode interceptor gains focus. 
This event is only fired upon entering this treeInterceptor when it was not the current treeInterceptor before. This is different to L{event_gainFocus}, which is fired when an object inside this treeInterceptor gains focus, even if that object is in the same treeInterceptor. """ reportPassThrough(self) ALWAYS_SWITCH_TO_PASS_THROUGH_ROLES = frozenset({ controlTypes.Role.COMBOBOX, controlTypes.Role.EDITABLETEXT, controlTypes.Role.LIST, controlTypes.Role.LISTITEM, controlTypes.Role.SLIDER, controlTypes.Role.TABCONTROL, controlTypes.Role.MENUBAR, controlTypes.Role.POPUPMENU, controlTypes.Role.TREEVIEW, controlTypes.Role.TREEVIEWITEM, controlTypes.Role.SPINBUTTON, controlTypes.Role.TABLEROW, controlTypes.Role.TABLECELL, controlTypes.Role.TABLEROWHEADER, controlTypes.Role.TABLECOLUMNHEADER, }) SWITCH_TO_PASS_THROUGH_ON_FOCUS_ROLES = frozenset({ controlTypes.Role.LISTITEM, controlTypes.Role.RADIOBUTTON, controlTypes.Role.TAB, controlTypes.Role.MENUITEM, controlTypes.Role.RADIOMENUITEM, controlTypes.Role.CHECKMENUITEM, }) IGNORE_DISABLE_PASS_THROUGH_WHEN_FOCUSED_ROLES = frozenset({ controlTypes.Role.MENUITEM, controlTypes.Role.RADIOMENUITEM, controlTypes.Role.CHECKMENUITEM, controlTypes.Role.TABLECELL, }) def shouldPassThrough(self, obj, reason: Optional[OutputReason] = None): """Determine whether pass through mode should be enabled (focus mode) or disabled (browse mode) for a given object. @param obj: The object in question. @type obj: L{NVDAObjects.NVDAObject} @param reason: The reason for this query; one of the output reasons, or C{None} for manual pass through mode activation by the user. @return: C{True} if pass through mode (focus mode) should be enabled, C{False} if it should be disabled (browse mode). """ if reason and ( self.disableAutoPassThrough or (reason == OutputReason.FOCUS and not config.conf["virtualBuffers"]["autoPassThroughOnFocusChange"]) or (reason == OutputReason.CARET and not config.conf["virtualBuffers"]["autoPassThroughOnCaretMove"]) ): # This check relates to auto pass through and auto pass through is disabled, so don't change the pass through state. return self.passThrough if reason == OutputReason.QUICKNAV: return False states = obj.states role = obj.role if controlTypes.State.EDITABLE in states and controlTypes.State.UNAVAILABLE not in states: return True # Menus sometimes get focus due to menuStart events even though they don't report as focused/focusable. if not obj.isFocusable and controlTypes.State.FOCUSED not in states and role != controlTypes.Role.POPUPMENU: return False # many controls that are read-only should not switch to passThrough. # However, there are exceptions. if controlTypes.State.READONLY in states: # #13221: For Slack message lists, and the MS Edge downloads window, switch to passthrough # even though the list item and list are read-only, but focusable. if ( role == controlTypes.Role.LISTITEM and controlTypes.State.FOCUSED in states and obj.parent.role == controlTypes.Role.LIST and controlTypes.State.FOCUSABLE in obj.parent.states ): return True # Certain controls such as combo boxes and readonly edits are read-only but still interactive. # #5118: read-only ARIA grids should also be allowed (focusable table cells, rows and headers). 
			if role not in (
				controlTypes.Role.EDITABLETEXT,
				controlTypes.Role.COMBOBOX,
				controlTypes.Role.TABLEROW,
				controlTypes.Role.TABLECELL,
				controlTypes.Role.TABLEROWHEADER,
				controlTypes.Role.TABLECOLUMNHEADER
			):
				return False
		# Any roles or states for which we always switch to passThrough
		if role in self.ALWAYS_SWITCH_TO_PASS_THROUGH_ROLES or controlTypes.State.EDITABLE in states:
			return True
		# focus is moving to this control. Perhaps after pressing tab or clicking a button that brings up a menu (via javascript)
		if reason == OutputReason.FOCUS:
			if role in self.SWITCH_TO_PASS_THROUGH_ON_FOCUS_ROLES:
				return True
			# If this is a focus change, pass through should be enabled for certain ancestor containers.
			# This is done last for performance considerations. Walking up through the parents could be costly.
			while obj and obj != self.rootNVDAObject:
				if obj.role == controlTypes.Role.TOOLBAR:
					return True
				obj = obj.parent
		return False

	def _get_shouldTrapNonCommandGestures(self):
		return config.conf['virtualBuffers']['trapNonCommandGestures']

	def script_trapNonCommandGesture(self,gesture):
		winsound.PlaySound("default",1)

	singleLetterNavEnabled=True #: Whether single letter navigation scripts should be active (true) or whether these letters should fall through to the application.

	def getAlternativeScript(self,gesture,script):
		if self.passThrough or not gesture.isCharacter:
			return script
		if not self.singleLetterNavEnabled:
			return None
		if not script and self.shouldTrapNonCommandGestures:
			script=self.script_trapNonCommandGesture
		return script

	def script_toggleSingleLetterNav(self,gesture):
		if self.singleLetterNavEnabled:
			self.singleLetterNavEnabled=False
			# Translators: Reported when single letter navigation in browse mode is turned off.
			ui.message(_("Single letter navigation off"))
		else:
			self.singleLetterNavEnabled=True
			# Translators: Reported when single letter navigation in browse mode is turned on.
			ui.message(_("Single letter navigation on"))
	# Translators: the description for the toggleSingleLetterNavigation command in browse mode.
	script_toggleSingleLetterNav.__doc__=_("Toggles single letter navigation on and off. When on, single letter keys in browse mode jump to various kinds of elements on the page. When off, these keys are passed to the application")

	def _get_ElementsListDialog(self):
		return ElementsListDialog

	def _iterNodesByType(self,itemType,direction="next",pos=None):
		"""
		Yields L{QuickNavItem} objects representing the ordered positions in this document according to the type being searched for (e.g. link, heading, table etc).
		@param itemType: the type being searched for (e.g. link, heading, table etc)
		@type itemType: string
		@param direction: the direction in which to search (next, previous, up)
		@type direction: string
		@param pos: the position in the document from where to start the search.
		@type pos: Usually an L{textInfos.TextInfo}
		@raise NotImplementedError: This type is not supported by this BrowseMode implementation
		"""
		raise NotImplementedError

	def _iterNotLinkBlock(self, direction="next", pos=None):
		raise NotImplementedError

	def _quickNavScript(self,gesture, itemType, direction, errorMessage, readUnit):
		if itemType=="notLinkBlock":
			iterFactory=self._iterNotLinkBlock
		else:
			iterFactory=lambda direction,info: self._iterNodesByType(itemType,direction,info)
		info=self.selection
		try:
			item = next(iterFactory(direction, info))
		except NotImplementedError:
			# Translators: a message when a particular quick nav command is not supported in the current document.
ui.message(_("Not supported in this document")) return except StopIteration: ui.message(errorMessage) return # #8831: Report before moving because moving might change the focus, which # might mutate the document, potentially invalidating info if it is # offset-based. if not gesture or not willSayAllResume(gesture): item.report(readUnit=readUnit) item.moveTo() @classmethod def addQuickNav( cls, itemType: str, key: Optional[str], nextDoc: str, nextError: str, prevDoc: str, prevError: str, readUnit: Optional[str] = None ): """Adds a script for the given quick nav item. @param itemType: The type of item, I.E. "heading" "Link" ... @param key: The quick navigation key to bind to the script. Shift is automatically added for the previous item gesture. E.G. h for heading. If C{None} is provided, the script is unbound by default. @param nextDoc: The command description to bind to the script that yields the next quick nav item. @param nextError: The error message if there are no more quick nav items of type itemType in this direction. @param prevDoc: The command description to bind to the script that yields the previous quick nav item. @param prevError: The error message if there are no more quick nav items of type itemType in this direction. @param readUnit: The unit (one of the textInfos.UNIT_* constants) to announce when moving to this type of item. For example, only the line is read when moving to tables to avoid reading a potentially massive table. If None, the entire item will be announced. """ scriptSuffix = itemType[0].upper() + itemType[1:] scriptName = "next%s" % scriptSuffix funcName = "script_%s" % scriptName script = lambda self,gesture: self._quickNavScript(gesture, itemType, "next", nextError, readUnit) script.__doc__ = nextDoc script.__name__ = funcName script.resumeSayAllMode = sayAll.CURSOR.CARET setattr(cls, funcName, script) if key is not None: cls.__gestures["kb:%s" % key] = scriptName scriptName = "previous%s" % scriptSuffix funcName = "script_%s" % scriptName script = lambda self,gesture: self._quickNavScript(gesture, itemType, "previous", prevError, readUnit) script.__doc__ = prevDoc script.__name__ = funcName script.resumeSayAllMode = sayAll.CURSOR.CARET setattr(cls, funcName, script) if key is not None: cls.__gestures["kb:shift+%s" % key] = scriptName def script_elementsList(self, gesture): # We need this to be a modal dialog, but it mustn't block this script. def run(): gui.mainFrame.prePopup() d = self.ElementsListDialog(self) d.ShowModal() d.Destroy() gui.mainFrame.postPopup() wx.CallAfter(run) # Translators: the description for the Elements List command in browse mode. script_elementsList.__doc__ = _("Lists various types of elements in this document") script_elementsList.ignoreTreeInterceptorPassThrough = True def _activateNVDAObject(self, obj): """Activate an object in response to a user request. This should generally perform the default action or click on the object. @param obj: The object to activate. 
@type obj: L{NVDAObjects.NVDAObject} """ try: obj.doAction() except NotImplementedError: log.debugWarning("doAction not implemented") def _activatePosition(self, obj=None): if not obj: obj=self.currentNVDAObject if not obj: return if obj.role == controlTypes.Role.MATH: import mathPres try: return mathPres.interactWithMathMl(obj.mathMl) except (NotImplementedError, LookupError): pass return if self.shouldPassThrough(obj): obj.setFocus() self.passThrough = True reportPassThrough(self) elif obj.role == controlTypes.Role.EMBEDDEDOBJECT or obj.role in self.APPLICATION_ROLES: obj.setFocus() speech.speakObject(obj, reason=OutputReason.FOCUS) else: self._activateNVDAObject(obj) def script_activatePosition(self,gesture): if config.conf["virtualBuffers"]["autoFocusFocusableElements"]: self._activatePosition() else: self._focusLastFocusableObject(activatePosition=True) # Translators: the description for the activatePosition script on browseMode documents. script_activatePosition.__doc__ = _("Activates the current object in the document") def _focusLastFocusableObject(self, activatePosition=False): """Used when auto focus focusable elements is disabled to sync the focus to the browse mode cursor. When auto focus focusable elements is disabled, NVDA doesn't focus elements as the user moves the browse mode cursor. However, there are some cases where the user always wants to interact with the focus; e.g. if they press the applications key to open the context menu. In these cases, this method is called first to sync the focus to the browse mode cursor. """ obj = self.currentFocusableNVDAObject if obj!=self.rootNVDAObject and self._shouldSetFocusToObj(obj) and obj!= api.getFocusObject(): obj.setFocus() # We might be about to activate or pass through a key which will cause # this object to change (e.g. checking a check box). However, we won't # actually get the focus event until after the change has occurred. # Therefore, we must cache properties for speech before the change occurs. speech.speakObject(obj, OutputReason.ONLYCACHE) self._objPendingFocusBeforeActivate = obj if activatePosition: # Make sure we activate the object at the caret, which is not necessarily focusable. self._activatePosition() def script_passThrough(self,gesture): if not config.conf["virtualBuffers"]["autoFocusFocusableElements"]: self._focusLastFocusableObject() gesture.send() # Translators: the description for the passThrough script on browseMode documents. script_passThrough.__doc__ = _("Passes gesture through to the application") def script_disablePassThrough(self, gesture): if not self.passThrough or self.disableAutoPassThrough: return gesture.send() # #3215 ARIA menus should get the Escape key unconditionally so they can handle it without invoking browse mode first obj = api.getFocusObject() if obj and obj.role in self.IGNORE_DISABLE_PASS_THROUGH_WHEN_FOCUSED_ROLES: return gesture.send() self.passThrough = False self.disableAutoPassThrough = False reportPassThrough(self) script_disablePassThrough.ignoreTreeInterceptorPassThrough = True def _set_disableAutoPassThrough(self, state): # If the user manually switches to focus mode with NVDA+space, that enables # pass-through and disables auto pass-through. If auto focusing of focusable # elements is disabled, NVDA won't have synced the focus to the browse mode # cursor. However, since the user is switching to focus mode, they probably # want to interact with the focus, so sync the focus here. 
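		# Hypothetical usage sketch (for exposition only, not part of the original source):
		# because NVDA's AutoPropertyObject exposes _get_/_set_ pairs as plain attributes,
		# callers toggle this with a simple assignment, which routes through this setter:
		# 	interceptor.disableAutoPassThrough = True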
if ( state and not config.conf["virtualBuffers"]["autoFocusFocusableElements"] and self.passThrough ): self._focusLastFocusableObject() self._disableAutoPassThrough = state def _get_disableAutoPassThrough(self): return self._disableAutoPassThrough __gestures={ "kb:NVDA+f7": "elementsList", "kb:enter": "activatePosition", "kb:numpadEnter": "activatePosition", "kb:space": "activatePosition", "kb:NVDA+shift+space":"toggleSingleLetterNav", "kb:escape": "disablePassThrough", "kb:control+enter": "passThrough", "kb:control+numpadEnter": "passThrough", "kb:shift+enter": "passThrough", "kb:shift+numpadEnter": "passThrough", "kb:control+shift+enter": "passThrough", "kb:control+shift+numpadEnter": "passThrough", "kb:alt+enter": "passThrough", "kb:alt+numpadEnter": "passThrough", "kb:applications": "passThrough", "kb:shift+applications": "passThrough", "kb:shift+f10": "passThrough", } # Add quick navigation scripts. qn = BrowseModeTreeInterceptor.addQuickNav qn("heading", key="h", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next heading"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next heading"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous heading"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous heading")) qn("heading1", key="1", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next heading at level 1"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next heading at level 1"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous heading at level 1"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous heading at level 1")) qn("heading2", key="2", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next heading at level 2"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next heading at level 2"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous heading at level 2"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous heading at level 2")) qn("heading3", key="3", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next heading at level 3"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next heading at level 3"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous heading at level 3"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous heading at level 3")) qn("heading4", key="4", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next heading at level 4"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next heading at level 4"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous heading at level 4"), # Translators: Message presented when the browse mode element is not found. 
prevError=_("no previous heading at level 4")) qn("heading5", key="5", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next heading at level 5"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next heading at level 5"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous heading at level 5"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous heading at level 5")) qn("heading6", key="6", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next heading at level 6"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next heading at level 6"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous heading at level 6"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous heading at level 6")) qn("table", key="t", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next table"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next table"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous table"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous table"), readUnit=textInfos.UNIT_LINE) qn("link", key="k", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next link"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next link"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous link"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous link")) qn("visitedLink", key="v", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next visited link"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next visited link"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous visited link"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous visited link")) qn("unvisitedLink", key="u", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next unvisited link"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next unvisited link"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous unvisited link"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous unvisited link")) qn("formField", key="f", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next form field"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next form field"), # Translators: Input help message for a quick navigation command in browse mode. 
prevDoc=_("moves to the previous form field"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous form field")) qn("list", key="l", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next list"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next list"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous list"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous list"), readUnit=textInfos.UNIT_LINE) qn("listItem", key="i", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next list item"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next list item"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous list item"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous list item")) qn("button", key="b", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next button"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next button"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous button"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous button")) qn("edit", key="e", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next edit field"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next edit field"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous edit field"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous edit field"), readUnit=textInfos.UNIT_LINE) qn("frame", key="m", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next frame"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next frame"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous frame"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous frame"), readUnit=textInfos.UNIT_LINE) qn("separator", key="s", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next separator"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next separator"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous separator"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous separator")) qn("radioButton", key="r", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next radio button"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next radio button"), # Translators: Input help message for a quick navigation command in browse mode. 
prevDoc=_("moves to the previous radio button"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous radio button")) qn("comboBox", key="c", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next combo box"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next combo box"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous combo box"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous combo box")) qn("checkBox", key="x", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next check box"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next check box"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous check box"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous check box")) qn("graphic", key="g", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next graphic"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next graphic"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous graphic"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous graphic")) qn("blockQuote", key="q", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next block quote"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next block quote"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous block quote"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous block quote")) qn("notLinkBlock", key="n", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("skips forward past a block of links"), # Translators: Message presented when the browse mode element is not found. nextError=_("no more text after a block of links"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("skips backward past a block of links"), # Translators: Message presented when the browse mode element is not found. prevError=_("no more text before a block of links"), readUnit=textInfos.UNIT_LINE) qn("landmark", key="d", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next landmark"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next landmark"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous landmark"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous landmark"), readUnit=textInfos.UNIT_LINE) qn("embeddedObject", key="o", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next embedded object"), # Translators: Message presented when the browse mode element is not found. 
nextError=_("no next embedded object"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous embedded object"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous embedded object")) qn("annotation", key="a", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next annotation"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next annotation"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous annotation"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous annotation")) qn("error", key="w", # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next error"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next error"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous error"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous error")) qn( "article", key=None, # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next article"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next article"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous article"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous article") ) qn( "grouping", key=None, # Translators: Input help message for a quick navigation command in browse mode. nextDoc=_("moves to the next grouping"), # Translators: Message presented when the browse mode element is not found. nextError=_("no next grouping"), # Translators: Input help message for a quick navigation command in browse mode. prevDoc=_("moves to the previous grouping"), # Translators: Message presented when the browse mode element is not found. prevError=_("no previous grouping") ) del qn class ElementsListDialog( DpiScalingHelperMixinWithoutInit, gui.contextHelp.ContextHelpMixin, wx.Dialog # wxPython does not seem to call base class initializer, put last in MRO ): helpId = "ElementsList" ELEMENT_TYPES = ( # Translators: The label of a radio button to select the type of element # in the browse mode Elements List dialog. ("link", _("Lin&ks")), # Translators: The label of a radio button to select the type of element # in the browse mode Elements List dialog. ("heading", _("&Headings")), # Translators: The label of a radio button to select the type of element # in the browse mode Elements List dialog. ("formField", _("&Form fields")), # Translators: The label of a radio button to select the type of element # in the browse mode Elements List dialog. ("button", _("&Buttons")), # Translators: The label of a radio button to select the type of element # in the browse mode Elements List dialog. ("landmark", _("Lan&dmarks")), ) Element = collections.namedtuple("Element", ("item", "parent")) lastSelectedElementType=0 def __init__(self, document): super().__init__( parent=gui.mainFrame, # Translators: The title of the browse mode Elements List dialog. 
title=_("Elements List") ) self.document = document mainSizer = wx.BoxSizer(wx.VERTICAL) contentsSizer = wx.BoxSizer(wx.VERTICAL) # Translators: The label of a group of radio buttons to select the type of element # in the browse mode Elements List dialog. child = wx.RadioBox(self, wx.ID_ANY, label=_("Type:"), choices=tuple(et[1] for et in self.ELEMENT_TYPES)) child.SetSelection(self.lastSelectedElementType) child.Bind(wx.EVT_RADIOBOX, self.onElementTypeChange) contentsSizer.Add(child, flag=wx.EXPAND) contentsSizer.AddSpacer(gui.guiHelper.SPACE_BETWEEN_VERTICAL_DIALOG_ITEMS) self.tree = wx.TreeCtrl( self, size=self.scaleSize((500, 300)), # height is chosen to ensure the dialog will fit on an 800x600 screen style=wx.TR_HAS_BUTTONS | wx.TR_HIDE_ROOT | wx.TR_LINES_AT_ROOT | wx.TR_SINGLE | wx.TR_EDIT_LABELS ) self.tree.Bind(wx.EVT_SET_FOCUS, self.onTreeSetFocus) self.tree.Bind(wx.EVT_CHAR, self.onTreeChar) self.tree.Bind(wx.EVT_TREE_BEGIN_LABEL_EDIT, self.onTreeLabelEditBegin) self.tree.Bind(wx.EVT_TREE_END_LABEL_EDIT, self.onTreeLabelEditEnd) self.treeRoot = self.tree.AddRoot("root") contentsSizer.Add(self.tree,flag=wx.EXPAND) contentsSizer.AddSpacer(gui.guiHelper.SPACE_BETWEEN_VERTICAL_DIALOG_ITEMS) # Translators: The label of an editable text field to filter the elements # in the browse mode Elements List dialog. filterText = _("Filter b&y:") labeledCtrl = gui.guiHelper.LabeledControlHelper(self, filterText, wx.TextCtrl) self.filterEdit = labeledCtrl.control self.filterEdit.Bind(wx.EVT_TEXT, self.onFilterEditTextChange) contentsSizer.Add(labeledCtrl.sizer) contentsSizer.AddSpacer(gui.guiHelper.SPACE_BETWEEN_VERTICAL_DIALOG_ITEMS) bHelper = gui.guiHelper.ButtonHelper(wx.HORIZONTAL) # Translators: The label of a button to activate an element in the browse mode Elements List dialog. # Beware not to set an accelerator that would collide with other controls in this dialog, such as an # element type radio label. self.activateButton = bHelper.addButton(self, label=_("Activate")) self.activateButton.Bind(wx.EVT_BUTTON, lambda evt: self.onAction(True)) # Translators: The label of a button to move to an element # in the browse mode Elements List dialog. self.moveButton = bHelper.addButton(self, label=_("&Move to")) self.moveButton.Bind(wx.EVT_BUTTON, lambda evt: self.onAction(False)) bHelper.addButton(self, id=wx.ID_CANCEL) contentsSizer.Add(bHelper.sizer, flag=wx.ALIGN_RIGHT) mainSizer.Add(contentsSizer, border=gui.guiHelper.BORDER_FOR_DIALOGS, flag=wx.ALL) mainSizer.Fit(self) self.SetSizer(mainSizer) self.tree.SetFocus() self.initElementType(self.ELEMENT_TYPES[self.lastSelectedElementType][0]) self.CentreOnScreen() def onElementTypeChange(self, evt): elementType=evt.GetInt() # We need to make sure this gets executed after the focus event. # Otherwise, NVDA doesn't seem to get the event. queueHandler.queueFunction(queueHandler.eventQueue, self.initElementType, self.ELEMENT_TYPES[elementType][0]) self.lastSelectedElementType=elementType def initElementType(self, elType): if elType in ("link","button"): # Links and buttons can be activated. self.activateButton.Enable() self.SetAffirmativeId(self.activateButton.GetId()) else: # No other element type can be activated. self.activateButton.Disable() self.SetAffirmativeId(self.moveButton.GetId()) # Gather the elements of this type. self._elements = [] self._initialElement = None parentElements = [] isAfterSelection=False for item in self.document._iterNodesByType(elType): # Find the parent element, if any. 
for parent in reversed(parentElements): if item.isChild(parent.item): break else: # We're not a child of this parent, so this parent has no more children and can be removed from the stack. parentElements.pop() else: # No parent found, so we're at the root. # Note that parentElements will be empty at this point, as all parents are no longer relevant and have thus been removed from the stack. parent = None element=self.Element(item,parent) self._elements.append(element) if not isAfterSelection: isAfterSelection=item.isAfterSelection if not isAfterSelection: # The element immediately preceding or overlapping the caret should be the initially selected element. # Since we have not yet passed the selection, use this as the initial element. try: self._initialElement = self._elements[-1] except IndexError: # No previous element. pass # This could be the parent of a subsequent element, so add it to the parents stack. parentElements.append(element) # Start with no filtering. self.filterEdit.ChangeValue("") self.filter("", newElementType=True) def filter(self, filterText, newElementType=False): # If this is a new element type, use the element nearest the cursor. # Otherwise, use the currently selected element. # #8753: wxPython 4 returns "invalid tree item" when the tree view is empty, so use initial element if appropriate. try: defaultElement = self._initialElement if newElementType else self.tree.GetItemData(self.tree.GetSelection()) except: defaultElement = self._initialElement # Clear the tree. self.tree.DeleteChildren(self.treeRoot) # Populate the tree with elements matching the filter text. elementsToTreeItems = {} defaultItem = None matched = False #Do case-insensitive matching by lowering both filterText and each element's text. filterText=filterText.lower() for element in self._elements: label=element.item.label if filterText and filterText not in label.lower(): continue matched = True parent = element.parent if parent: parent = elementsToTreeItems.get(parent) item = self.tree.AppendItem(parent or self.treeRoot, label) self.tree.SetItemData(item, element) elementsToTreeItems[element] = item if element == defaultElement: defaultItem = item self.tree.ExpandAll() if not matched: # No items, so disable the buttons. self.activateButton.Disable() self.moveButton.Disable() return # If there's no default item, use the first item in the tree. self.tree.SelectItem(defaultItem or self.tree.GetFirstChild(self.treeRoot)[0]) # Enable the button(s). # If the activate button isn't the default button, it is disabled for this element type and shouldn't be enabled here. if self.AffirmativeId == self.activateButton.Id: self.activateButton.Enable() self.moveButton.Enable() def onTreeSetFocus(self, evt): # Start with no search. self._searchText = "" self._searchCallLater = None evt.Skip() def onTreeChar(self, evt): key = evt.KeyCode if key == wx.WXK_RETURN: # The enter key should be propagated to the dialog and thus activate the default button, # but this is broken (wx ticket #3725). # Therefore, we must catch the enter key here. # Activate the current default button. evt = wx.CommandEvent(wx.wxEVT_COMMAND_BUTTON_CLICKED, wx.ID_ANY) button = self.FindWindowById(self.AffirmativeId) if button.Enabled: button.ProcessEvent(evt) else: wx.Bell() elif key == wx.WXK_F2: item=self.tree.GetSelection() if item: selectedItemType=self.tree.GetItemData(item).item self.tree.EditLabel(item) evt.Skip() elif key >= wx.WXK_START or key == wx.WXK_BACK: # Non-printable character. 
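			# For example, arrow and function keys land here; they reset the incremental search.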
			self._searchText = ""
			evt.Skip()
		else:
			# Search the list.
			# We have to implement this ourselves, as tree views don't accept space as a search character.
			char = chr(evt.UnicodeKey).lower()
			# If the same character is typed twice, do the same search.
			if self._searchText != char:
				self._searchText += char
			if self._searchCallLater:
				self._searchCallLater.Restart()
			else:
				self._searchCallLater = wx.CallLater(1000, self._clearSearchText)
			self.search(self._searchText)

	def onTreeLabelEditBegin(self,evt):
		item=self.tree.GetSelection()
		selectedItemType = self.tree.GetItemData(item).item
		if not selectedItemType.isRenameAllowed:
			evt.Veto()

	def onTreeLabelEditEnd(self,evt):
		selectedItemNewName=evt.GetLabel()
		item=self.tree.GetSelection()
		selectedItemType = self.tree.GetItemData(item).item
		selectedItemType.rename(selectedItemNewName)

	def _clearSearchText(self):
		self._searchText = ""

	def search(self, searchText):
		item = self.tree.GetSelection()
		if not item:
			# No items.
			return
		# First try searching from the current item.
		# Failing that, search from the first item.
		items = itertools.chain(self._iterReachableTreeItemsFromItem(item), self._iterReachableTreeItemsFromItem(self.tree.GetFirstChild(self.treeRoot)[0]))
		if len(searchText) == 1:
			# If only a single character has been entered, skip (search after) the current item.
			next(items)
		for item in items:
			if self.tree.GetItemText(item).lower().startswith(searchText):
				self.tree.SelectItem(item)
				return
		# Not found.
		wx.Bell()

	def _iterReachableTreeItemsFromItem(self, item):
		while item:
			yield item
			childItem = self.tree.GetFirstChild(item)[0]
			if childItem and self.tree.IsExpanded(item):
				# Has children and is reachable, so recurse.
				for childItem in self._iterReachableTreeItemsFromItem(childItem):
					yield childItem
			item = self.tree.GetNextSibling(item)

	def onFilterEditTextChange(self, evt):
		self.filter(self.filterEdit.GetValue())
		evt.Skip()

	def onAction(self, activate):
		prevFocus = gui.mainFrame.prevFocus
		self.Close()
		# Save off the last selected element type onto the class so it's used in initialization next time.
		self.__class__.lastSelectedElementType=self.lastSelectedElementType
		item = self.tree.GetSelection()
		item = self.tree.GetItemData(item).item
		if activate:
			item.activate()
		else:
			def move():
				speech.cancelSpeech()
				# Avoid double announce if item.obj is about to gain focus.
				if not (
					self.document.passThrough
					and getattr(item, "obj", False)
					and item.obj != prevFocus
					and controlTypes.State.FOCUSABLE in item.obj.states
				):
					# #8831: Report before moving because moving might change the focus, which
					# might mutate the document, potentially invalidating info if it is
					# offset-based.
					item.report()
				item.moveTo()
			# We must use core.callLater rather than wx.CallLater to ensure that the callback runs within NVDA's core pump.
			# If it didn't, and it directly or indirectly called wx.Yield, it could start executing NVDA's core pump from within the yield, causing recursion.
			core.callLater(100, move)


class BrowseModeDocumentTextInfo(textInfos.TextInfo):

	def _get_focusableNVDAObjectAtStart(self):
		try:
			item = next(self.obj._iterNodesByType("focusable", "up", self))
		except StopIteration:
			return self.obj.rootNVDAObject
		if not item:
			return self.obj.rootNVDAObject
		return item.obj


class BrowseModeDocumentTreeInterceptor(documentBase.DocumentWithTableNavigation,cursorManager.CursorManager,BrowseModeTreeInterceptor,treeInterceptorHandler.DocumentTreeInterceptor):

	programmaticScrollMayFireEvent = False

	def __init__(self,obj):
		super(BrowseModeDocumentTreeInterceptor,self).__init__(obj)
		self._lastProgrammaticScrollTime = None
		# We need to cache this because it will be unavailable once the document dies.
		self.documentConstantIdentifier = self.documentConstantIdentifier
		self._lastFocusObj = None
		self._objPendingFocusBeforeActivate = None
		self._hadFirstGainFocus = False
		self._enteringFromOutside = True
		if not hasattr(self.rootNVDAObject.appModule, "_browseModeRememberedCaretPositions"):
			self.rootNVDAObject.appModule._browseModeRememberedCaretPositions = {}
		self._lastCaretPosition = None
		#: True if the last caret move was due to a focus change.
		self._lastCaretMoveWasFocus = False

	def terminate(self):
		if self.shouldRememberCaretPositionAcrossLoads and self._lastCaretPosition:
			try:
				self.rootNVDAObject.appModule._browseModeRememberedCaretPositions[self.documentConstantIdentifier] = self._lastCaretPosition
			except AttributeError:
				# The app module died.
				pass

	def _get_currentNVDAObject(self):
		return self.makeTextInfo(textInfos.POSITION_CARET).NVDAObjectAtStart

	def event_treeInterceptor_gainFocus(self):
		doSayAll=False
		hadFirstGainFocus=self._hadFirstGainFocus
		if not hadFirstGainFocus:
			# This treeInterceptor is gaining focus for the first time.
			# Fake a focus event on the focus object, as the treeInterceptor may have missed the actual focus event.
			focus = api.getFocusObject()
			self.event_gainFocus(focus, lambda: focus.event_gainFocus())
			if not self.passThrough:
				# We only set the caret position if in browse mode.
				# If in focus mode, the document must have forced the focus somewhere,
				# so we don't want to override it.
				initialPos = self._getInitialCaretPos()
				if initialPos:
					self.selection = self.makeTextInfo(initialPos)
				reportPassThrough(self)
				doSayAll=config.conf['virtualBuffers']['autoSayAllOnPageLoad']
			self._hadFirstGainFocus = True
		if not self.passThrough:
			if doSayAll:
				speech.speakObjectProperties(self.rootNVDAObject, name=True, states=True, reason=OutputReason.FOCUS)
				sayAll.SayAllHandler.readText(sayAll.CURSOR.CARET)
			else:
				# Speak it like we would speak focus on any other document object.
				# This includes when entering the treeInterceptor for the first time:
				if not hadFirstGainFocus:
					speech.speakObject(self.rootNVDAObject, reason=OutputReason.FOCUS)
				else:
					# And when coming in from an outside object
					# #4069 But not when coming up from a non-rendered descendant.
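					# getFocusDifferenceLevel() is the index of the first ancestor that differs
					# from the previous focus; if it is at or above the root's own index, focus
					# genuinely entered from outside, so the root is spoken below.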
					ancestors=api.getFocusAncestors()
					fdl=api.getFocusDifferenceLevel()
					try:
						tl=ancestors.index(self.rootNVDAObject)
					except ValueError:
						tl=len(ancestors)
					if fdl<=tl:
						speech.speakObject(self.rootNVDAObject, reason=OutputReason.FOCUS)
				info = self.selection
				if not info.isCollapsed:
					speech.speakPreselectedText(info.text)
				else:
					info.expand(textInfos.UNIT_LINE)
					speech.speakTextInfo(info, reason=OutputReason.CARET, unit=textInfos.UNIT_LINE)
		reportPassThrough(self)
		braille.handler.handleGainFocus(self)

	def event_caret(self, obj, nextHandler):
		if self.passThrough:
			nextHandler()

	def _activateLongDesc(self,controlField):
		"""
		Activates (presents) the long description for a particular field (usually a graphic).
		@param controlField: the field whose long description should be activated.
		This field is guaranteed to have a states set containing the HASLONGDESC state.
		@type controlField: dict
		"""
		raise NotImplementedError

	def _activatePosition(self, obj=None, info=None):
		if info:
			obj=info.NVDAObjectAtStart
			if not obj:
				return
		super(BrowseModeDocumentTreeInterceptor,self)._activatePosition(obj=obj)

	def _set_selection(self, info, reason=OutputReason.CARET):
		super(BrowseModeDocumentTreeInterceptor, self)._set_selection(info)
		if isScriptWaiting() or not info.isCollapsed:
			return
		# Save the last caret position for use in terminate().
		# This must be done here because the buffer might be cleared just before terminate() is called,
		# causing the last caret position to be lost.
		caret = info.copy()
		caret.collapse()
		self._lastCaretPosition = caret.bookmark
		review.handleCaretMove(caret)
		if reason == OutputReason.FOCUS:
			self._lastCaretMoveWasFocus = True
			focusObj = api.getFocusObject()
			if focusObj==self.rootNVDAObject:
				return
		else:
			self._lastCaretMoveWasFocus = False
			focusObj=info.focusableNVDAObjectAtStart
			obj=info.NVDAObjectAtStart
			if not obj:
				log.debugWarning("Invalid NVDAObjectAtStart")
				return
			if obj==self.rootNVDAObject:
				return
			obj.scrollIntoView()
			if self.programmaticScrollMayFireEvent:
				self._lastProgrammaticScrollTime = time.time()
		if focusObj:
			self.passThrough = self.shouldPassThrough(focusObj, reason=reason)
			if (
				not eventHandler.isPendingEvents("gainFocus")
				and focusObj != self.rootNVDAObject
				and focusObj != api.getFocusObject()
				and self._shouldSetFocusToObj(focusObj)
			):
				followBrowseModeFocus = config.conf["virtualBuffers"]["autoFocusFocusableElements"]
				if followBrowseModeFocus or self.passThrough:
					focusObj.setFocus()
			# Queue the reporting of pass through mode so that it will be spoken after the actual content.
			queueHandler.queueFunction(queueHandler.eventQueue, reportPassThrough, self)

	def _shouldSetFocusToObj(self, obj):
		"""Determine whether an object should receive focus.
		Subclasses may extend or override this method.
		@param obj: The object in question.
		@type obj: L{NVDAObjects.NVDAObject}
		"""
		return obj.role not in self.APPLICATION_ROLES and obj.isFocusable and obj.role!=controlTypes.Role.EMBEDDEDOBJECT

	def script_activateLongDesc(self,gesture):
		info=self.makeTextInfo(textInfos.POSITION_CARET)
		info.expand("character")
		for field in reversed(info.getTextWithFields()):
			if isinstance(field,textInfos.FieldCommand) and field.command=="controlStart":
				states=field.field.get('states')
				if states and controlTypes.State.HASLONGDESC in states:
					self._activateLongDesc(field.field)
					break
		else:
			# Translators: the message presented when the activateLongDescription script cannot locate a long description to activate.
ui.message(_("No long description")) # Translators: the description for the activateLongDescription script on browseMode documents. script_activateLongDesc.__doc__=_("Shows the long description at this position if one is found.") def event_caretMovementFailed(self, obj, nextHandler, gesture=None): if not self.passThrough or not gesture or not config.conf["virtualBuffers"]["autoPassThroughOnCaretMove"]: return nextHandler() if gesture.mainKeyName in ("home", "end"): # Home, end, control+home and control+end should not disable pass through. return nextHandler() script = self.getScript(gesture) if not script: return nextHandler() # We've hit the edge of the focused control. # Therefore, move the virtual caret to the same edge of the field. info = self.makeTextInfo(textInfos.POSITION_CARET) info.expand(textInfos.UNIT_CONTROLFIELD) if gesture.mainKeyName in ("leftArrow", "upArrow", "pageUp"): info.collapse() else: info.collapse(end=True) info.move(textInfos.UNIT_CHARACTER, -1) info.updateCaret() scriptHandler.queueScript(script, gesture) currentExpandedControl=None #: an NVDAObject representing the control that has just been expanded with the collapseOrExpandControl script. def script_collapseOrExpandControl(self, gesture): if not config.conf["virtualBuffers"]["autoFocusFocusableElements"]: self._focusLastFocusableObject() oldFocus = api.getFocusObject() oldFocusStates = oldFocus.states gesture.send() if controlTypes.State.COLLAPSED in oldFocusStates: self.passThrough = True # When a control (such as a combo box) is expanded, we expect that its descendants will be classed as being outside the browseMode document. # We save off the expanded control so that the next focus event within the browseMode document can see if it is for the control, # and if so, it disables passthrough, as the control has obviously been collapsed again. self.currentExpandedControl=oldFocus elif not self.disableAutoPassThrough: self.passThrough = False reportPassThrough(self) def _tabOverride(self, direction): """Override the tab order if the virtual caret is not within the currently focused node. This is done because many nodes are not focusable and it is thus possible for the virtual caret to be unsynchronised with the focus. In this case, we want tab/shift+tab to move to the next/previous focusable node relative to the virtual caret. If the virtual caret is within the focused node, the tab/shift+tab key should be passed through to allow normal tab order navigation. Note that this method does not pass the key through itself if it is not overridden. This should be done by the calling script if C{False} is returned. @param direction: The direction in which to move. @type direction: str @return: C{True} if the tab order was overridden, C{False} if not. @rtype: bool """ if self._lastCaretMoveWasFocus: # #5227: If the caret was last moved due to a focus change, don't override tab. # This ensures that tabbing behaves as expected after tabbing hits an iframe document. return False focus = api.getFocusObject() try: focusInfo = self.makeTextInfo(focus) except: return False # We only want to override the tab order if the caret is not within the focused node. caretInfo=self.makeTextInfo(textInfos.POSITION_CARET) #Only check that the caret is within the focus for things that ar not documents #As for documents we should always override if focus.role!=controlTypes.Role.DOCUMENT or controlTypes.State.EDITABLE in focus.states: # Expand to one character, as isOverlapping() doesn't yield the desired results with collapsed ranges. 
caretInfo.expand(textInfos.UNIT_CHARACTER) if focusInfo.isOverlapping(caretInfo): return False # If we reach here, we do want to override tab/shift+tab if possible. # Find the next/previous focusable node. try: item = next(self._iterNodesByType("focusable", direction, caretInfo)) except StopIteration: return False obj=item.obj newInfo=item.textInfo if obj == api.getFocusObject(): # This node is already focused, so we need to move to and speak this node here. newCaret = newInfo.copy() newCaret.collapse() self._set_selection(newCaret, reason=OutputReason.FOCUS) if self.passThrough: obj.event_gainFocus() else: speech.speakTextInfo(newInfo, reason=OutputReason.FOCUS) else: # This node doesn't have the focus, so just set focus to it. The gainFocus event will handle the rest. obj.setFocus() return True def script_tab(self, gesture): if not self._tabOverride("next"): gesture.send() def script_shiftTab(self, gesture): if not self._tabOverride("previous"): gesture.send() def event_focusEntered(self,obj,nextHandler): if obj==self.rootNVDAObject: self._enteringFromOutside = True # Even if passThrough is enabled, we still completely drop focusEntered events here. # In order to get them back when passThrough is enabled, we replay them with the _replayFocusEnteredEvents method in event_gainFocus. # The reason for this is to ensure that focusEntered events are delayed until a focus event has had a chance to disable passthrough mode. # As in this case we would not want them. def _shouldIgnoreFocus(self, obj): """Determines whether focus on a given object should be ignored. @param obj: The object in question. @type obj: L{NVDAObjects.NVDAObject} @return: C{True} if focus on L{obj} should be ignored, C{False} otherwise. @rtype: bool """ return False def _postGainFocus(self, obj): """Executed after a gainFocus within the browseMode document. This will not be executed if L{event_gainFocus} determined that it should abort and call nextHandler. @param obj: The object that gained focus. @type obj: L{NVDAObjects.NVDAObject} """ def _replayFocusEnteredEvents(self): # We blocked the focusEntered events because we were in browse mode, # but now that we've switched to focus mode, we need to fire them. for parent in api.getFocusAncestors()[api.getFocusDifferenceLevel():]: try: parent.event_focusEntered() except: log.exception("Error executing focusEntered event: %s" % parent) def event_gainFocus(self, obj, nextHandler): enteringFromOutside=self._enteringFromOutside self._enteringFromOutside=False if not self.isReady: if self.passThrough: self._replayFocusEnteredEvents() nextHandler() return # If a control has been expanded by the collapseOrExpandControl script, and this focus event is for it, # disable passThrough and report the control, as the control has obviously been collapsed again. # Note that whether or not this focus event was for that control, the last expanded control is forgotten, so that only the next focus event for the browseMode document can handle the collapsed control. lastExpandedControl=self.currentExpandedControl self.currentExpandedControl=None if self.passThrough and obj==lastExpandedControl: self.passThrough=False reportPassThrough(self) nextHandler() return if enteringFromOutside and not self.passThrough and self._lastFocusObj==obj: # We're entering the document from outside (not returning from an inside object/application; #3145) # and this was the last non-root node with focus, so ignore this focus event. 
# Otherwise, if the user switches away and back to this document, the cursor will jump to this node. # This is not ideal if the user was positioned over a node which cannot receive focus. return if obj==self.rootNVDAObject: if self.passThrough: self._replayFocusEnteredEvents() return nextHandler() return if not self.passThrough and self._shouldIgnoreFocus(obj): return # If the previous focus object was removed, we might hit a false positive for overlap detection. # Track the previous focus target so that we can account for this scenario. previousFocusObjIsDefunct = False if self._lastFocusObj: try: states = self._lastFocusObj.states previousFocusObjIsDefunct = controlTypes.State.DEFUNCT in states except Exception: log.debugWarning( "Error fetching states when checking for defunct object. Treating object as defunct anyway.", exc_info=True ) previousFocusObjIsDefunct = True self._lastFocusObj=obj try: focusInfo = self.makeTextInfo(obj) except: # This object is not in the treeInterceptor, even though it resides beneath the document. # Automatic pass through should be enabled in certain circumstances where this occurs. if not self.passThrough and self.shouldPassThrough(obj, reason=OutputReason.FOCUS): self.passThrough=True reportPassThrough(self) self._replayFocusEnteredEvents() return nextHandler() #We only want to update the caret and speak the field if we're not in the same one as before caretInfo=self.makeTextInfo(textInfos.POSITION_CARET) # Expand to one character, as isOverlapping() doesn't treat, for example, (4,4) and (4,5) as overlapping. caretInfo.expand(textInfos.UNIT_CHARACTER) isOverlapping = focusInfo.isOverlapping(caretInfo) if not self._hadFirstGainFocus or not isOverlapping or (isOverlapping and previousFocusObjIsDefunct): # The virtual caret is not within the focus node. oldPassThrough=self.passThrough passThrough = self.shouldPassThrough(obj, reason=OutputReason.FOCUS) if not oldPassThrough and (passThrough or sayAll.SayAllHandler.isRunning()): # If pass-through is disabled, cancel speech, as a focus change should cause page reading to stop. # This must be done before auto-pass-through occurs, as we want to stop page reading even if pass-through will be automatically enabled by this focus change. speech.cancelSpeech() self.passThrough=passThrough if not self.passThrough: # We read the info from the browseMode document instead of the control itself. speech.speakTextInfo(focusInfo, reason=OutputReason.FOCUS) # However, we still want to update the speech property cache so that property changes will be spoken properly. speech.speakObject(obj, controlTypes.OutputReason.ONLYCACHE) # As we do not call nextHandler which would trigger the vision framework to handle gain focus, # we need to call it manually here. vision.handler.handleGainFocus(obj) else: # Although we are going to speak the object rather than textInfo content, we still need to silently speak the textInfo content so that the textInfo speech cache is updated correctly. # Not doing this would cause later browseMode speaking to either not speak controlFields it had entered, or speak controlField exits after having already exited. # See #7435 for a discussion on this. speech.speakTextInfo(focusInfo, reason=OutputReason.ONLYCACHE) self._replayFocusEnteredEvents() nextHandler() focusInfo.collapse() self._set_selection(focusInfo, reason=OutputReason.FOCUS) else: # The virtual caret was already at the focused node. 
if not self.passThrough: # This focus change was caused by a virtual caret movement, so don't speak the focused node to avoid double speaking. # However, we still want to update the speech property cache so that property changes will be spoken properly. speech.speakObject(obj, OutputReason.ONLYCACHE) if config.conf["virtualBuffers"]["autoFocusFocusableElements"]: # As we do not call nextHandler which would trigger the vision framework to handle gain focus, # we need to call it manually here. # Note: this is usually called after the caret movement. vision.handler.handleGainFocus(obj) elif ( self._objPendingFocusBeforeActivate and obj == self._objPendingFocusBeforeActivate and obj is not self._objPendingFocusBeforeActivate ): # With auto focus focusable elements disabled, when the user activates # an element (e.g. by pressing enter) or presses a key which we pass # through (e.g. control+enter), we call _focusLastFocusableObject. # However, the activation/key press might cause a property change # before we get the focus event, so NVDA's normal reporting of # changes to the focus won't pick it up. # The speech property cache on _objPendingFocusBeforeActivate reflects # the properties before the activation/key, so use that to speak any # changes. speech.speakObject( self._objPendingFocusBeforeActivate, OutputReason.CHANGE ) self._objPendingFocusBeforeActivate = None else: self._replayFocusEnteredEvents() return nextHandler() self._postGainFocus(obj) event_gainFocus.ignoreIsReady=True def _handleScrollTo( self, obj: Union[NVDAObject, textInfos.TextInfo], ) -> bool: """Handle scrolling the browseMode document to a given object in response to an event. Subclasses should call this from an event which indicates that the document has scrolled. @postcondition: The virtual caret is moved to L{obj} and the buffer content for L{obj} is reported. @param obj: The object to which the document should scroll. @return: C{True} if the document was scrolled, C{False} if not. @note: If C{False} is returned, calling events should probably call their nextHandler. """ if self.programmaticScrollMayFireEvent and self._lastProgrammaticScrollTime and time.time() - self._lastProgrammaticScrollTime < 0.4: # This event was probably caused by this browseMode document's call to scrollIntoView(). # Therefore, ignore it. Otherwise, the cursor may bounce back to the scroll point. # However, pretend we handled it, as we don't want it to be passed on to the object either. return True if isinstance(obj, NVDAObject): try: scrollInfo = self.makeTextInfo(obj) except (NotImplementedError, RuntimeError): return False elif isinstance(obj, textInfos.TextInfo): scrollInfo = obj.copy() else: raise ValueError(f"{obj} is not a supported type") #We only want to update the caret and speak the field if we're not in the same one as before caretInfo=self.makeTextInfo(textInfos.POSITION_CARET) # Expand to one character, as isOverlapping() doesn't treat, for example, (4,4) and (4,5) as overlapping. caretInfo.expand(textInfos.UNIT_CHARACTER) if not scrollInfo.isOverlapping(caretInfo): if scrollInfo.isCollapsed: scrollInfo.expand(textInfos.UNIT_LINE) speech.speakTextInfo(scrollInfo, reason=OutputReason.CARET) scrollInfo.collapse() self.selection = scrollInfo return True return False def _isNVDAObjectInApplication_noWalk(self, obj): """Determine whether a given object is within an application without walking ancestors. The base implementation simply checks whether the object has an application role. 
Subclasses can override this if they can provide a definite answer without needing to walk. For example, for virtual buffers, if the object is in the buffer, it definitely isn't in an application. L{_isNVDAObjectInApplication} calls this and walks to the next ancestor if C{None} is returned. @return: C{True} if definitely in an application, C{False} if definitely not in an application, C{None} if this can't be determined without walking ancestors. """ if ( # roles such as application and dialog should be treated as being within an "application" and therefore outside of the browseMode document. obj.role in self.APPLICATION_ROLES # Anything other than an editable text box inside a combo box should be # treated as being outside a browseMode document. or ( obj.role != controlTypes.Role.EDITABLETEXT and obj.container and obj.container.role == controlTypes.Role.COMBOBOX ) ): return True return None def _isNVDAObjectInApplication(self, obj): """Determine whether a given object is within an application. The object is considered to be within an application if it or one of its ancestors has an application role. This should only be called on objects beneath the treeInterceptor's root NVDAObject. @param obj: The object in question. @type obj: L{NVDAObjects.NVDAObject} @return: C{True} if L{obj} is within an application, C{False} otherwise. @rtype: bool """ # We cache the result for each object we walk. # There can be browse mode documents within other documents and the result might be different between these, # so the cache must be maintained on the TreeInterceptor rather than the object itself. try: cache = self._isInAppCache except AttributeError: # Create this lazily, as this method isn't used by all browse mode implementations. cache = self._isInAppCache = weakref.WeakKeyDictionary() objs = [] def doResult(result): # Cache this on descendants we've walked over. for obj in objs: cache[obj] = result return result while obj and obj != self.rootNVDAObject: inApp = cache.get(obj) if inApp is not None: # We found a cached result. return doResult(inApp) objs.append(obj) inApp = self._isNVDAObjectInApplication_noWalk(obj) if inApp is not None: return doResult(inApp) # We must walk ancestors. # Cache container. container = obj.container obj.container = container obj = container return doResult(False) def _get_documentConstantIdentifier(self): """Get the constant identifier for this document. This identifier should uniquely identify all instances (not just one instance) of a document for at least the current session of the hosting application. Generally, the document URL should be used. @return: The constant identifier for this document, C{None} if there is none. """ return None def _get_shouldRememberCaretPositionAcrossLoads(self): """Specifies whether the position of the caret should be remembered when this document is loaded again. This is useful when the browser remembers the scroll position for the document, but does not communicate this information via APIs. The remembered caret position is associated with this document using L{documentConstantIdentifier}. @return: C{True} if the caret position should be remembered, C{False} if not. @rtype: bool """ docConstId = self.documentConstantIdentifier # Return True if the URL indicates that this is probably a web browser document. # We do this check because we don't want to remember caret positions for email messages, etc. 
if isinstance(docConstId, str): protocols=("http", "https", "ftp", "ftps", "file") protocol=docConstId.split("://", 1)[0] return protocol in protocols return False def _getInitialCaretPos(self): """Retrieve the initial position of the caret after the buffer has been loaded. This position, if any, will be passed to L{makeTextInfo}. Subclasses should extend this method. @return: The initial position of the caret, C{None} if there isn't one. @rtype: TextInfo position """ if self.shouldRememberCaretPositionAcrossLoads: try: return self.rootNVDAObject.appModule._browseModeRememberedCaretPositions[self.documentConstantIdentifier] except KeyError: pass return None def getEnclosingContainerRange(self, textRange): textRange = textRange.copy() textRange.collapse() try: item = next(self._iterNodesByType("container", "up", textRange)) except (NotImplementedError,StopIteration): try: item = next(self._iterNodesByType("landmark", "up", textRange)) except (NotImplementedError,StopIteration): return return item.textInfo def script_moveToStartOfContainer(self,gesture): info=self.makeTextInfo(textInfos.POSITION_CARET) info.expand(textInfos.UNIT_CHARACTER) container=self.getEnclosingContainerRange(info) if not container: # Translators: Reported when the user attempts to move to the start or end of a container # (list, table, etc.) but there is no container. ui.message(_("Not in a container")) return container.collapse() self._set_selection(container, reason=OutputReason.QUICKNAV) if not willSayAllResume(gesture): container.expand(textInfos.UNIT_LINE) speech.speakTextInfo(container, reason=OutputReason.FOCUS) script_moveToStartOfContainer.resumeSayAllMode = sayAll.CURSOR.CARET # Translators: Description for the Move to start of container command in browse mode. script_moveToStartOfContainer.__doc__=_("Moves to the start of the container element, such as a list or table") def script_movePastEndOfContainer(self,gesture): info=self.makeTextInfo(textInfos.POSITION_CARET) info.expand(textInfos.UNIT_CHARACTER) container=self.getEnclosingContainerRange(info) if not container: # Translators: Reported when the user attempts to move to the start or end of a container # (list, table, etc.) but there is no container. ui.message(_("Not in a container")) return container.collapse(end=True) docEnd=container.obj.makeTextInfo(textInfos.POSITION_LAST) if container.compareEndPoints(docEnd,"endToEnd")>=0: container=docEnd # Translators: a message reported when: # Review cursor is at the bottom line of the current navigator object. # Landing at the end of a browse mode document when trying to jump to the end of the current container. ui.message(_("Bottom")) self._set_selection(container, reason=OutputReason.QUICKNAV) if not willSayAllResume(gesture): container.expand(textInfos.UNIT_LINE) speech.speakTextInfo(container, reason=OutputReason.FOCUS) script_movePastEndOfContainer.resumeSayAllMode = sayAll.CURSOR.CARET # Translators: Description for the Move past end of container command in browse mode. script_movePastEndOfContainer.__doc__=_("Moves past the end of the container element, such as a list or table") NOT_LINK_BLOCK_MIN_LEN = 30 def _isSuitableNotLinkBlock(self, textRange): return len(textRange.text) >= self.NOT_LINK_BLOCK_MIN_LEN def _iterNotLinkBlock(self, direction="next", pos=None): links = self._iterNodesByType("link", direction=direction, pos=pos) # We want to compare each link against the next link. 
item1 = next(links, None) if item1 is None: return for item2 in links: # If the distance between the links is small, this is probably just a piece of non-link text within a block of links; e.g. an inactive link of a nav bar. if direction=="previous": textRange=item1.textInfo.copy() textRange.collapse() textRange.setEndPoint(item2.textInfo,"startToEnd") else: textRange=item2.textInfo.copy() textRange.collapse() textRange.setEndPoint(item1.textInfo,"startToEnd") if self._isSuitableNotLinkBlock(textRange): yield TextInfoQuickNavItem("notLinkBlock", self, textRange) item1=item2 __gestures={ "kb:NVDA+d": "activateLongDesc", "kb:alt+upArrow": "collapseOrExpandControl", "kb:alt+downArrow": "collapseOrExpandControl", "kb:tab": "tab", "kb:shift+tab": "shiftTab", "kb:shift+,": "moveToStartOfContainer", "kb:,": "movePastEndOfContainer", } @script( description=_( # Translators: the description for the toggleScreenLayout script. "Toggles on and off if the screen layout is preserved while rendering the document content" ), gesture="kb:NVDA+v", ) def script_toggleScreenLayout(self, gesture): # Translators: The message reported for not supported toggling of screen layout ui.message(_("Not supported in this document."))
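# --- Editor's illustration (not part of the NVDA source above): a minimal, hedged
# sketch of the pairwise-gap heuristic behind _iterNotLinkBlock. That method walks
# consecutive links and treats the text between two adjacent links as a
# "notLinkBlock" quick-nav target only when it is at least NOT_LINK_BLOCK_MIN_LEN
# (30) characters long. The helper below models the same scan over assumed
# (start, end) character offsets; the names are hypothetical, not NVDA API.
def iter_not_link_blocks(link_spans, min_len=30):
    """Yield (start, end) spans of non-link text between consecutive links."""
    prev = None
    for span in link_spans:
        if prev is not None:
            gap_start, gap_end = prev[1], span[0]
            if gap_end - gap_start >= min_len:
                # Long enough to be a genuine block of plain text, not just a
                # short non-link gap inside a nav bar of links.
                yield (gap_start, gap_end)
        prev = span

# Two links separated by 40 characters of plain text yield exactly one block:
assert list(iter_not_link_blocks([(0, 10), (50, 60)])) == [(10, 50)]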
44.265625
229
0.740899
65,512
0.77082
2,383
0.028039
3,804
0.044758
0
0
44,423
0.522685
d9f0ba759404ab21f8b93c6f40fde8e030bbf8a1
12,904
py
Python
qiskit_metal/qlibrary/qubits/Transmon_Interdigitated.py
PatrickSJacobs/qiskit-metal
9628369c4b880d1e13199e559f898c5e0b96eecb
[ "Apache-2.0" ]
null
null
null
qiskit_metal/qlibrary/qubits/Transmon_Interdigitated.py
PatrickSJacobs/qiskit-metal
9628369c4b880d1e13199e559f898c5e0b96eecb
[ "Apache-2.0" ]
null
null
null
qiskit_metal/qlibrary/qubits/Transmon_Interdigitated.py
PatrickSJacobs/qiskit-metal
9628369c4b880d1e13199e559f898c5e0b96eecb
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # This code is part of Qiskit. # # (C) Copyright IBM 2017, 2021. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. #from math import * from math import sin, cos from qiskit_metal import draw, Dict from qiskit_metal.qlibrary.core.base import QComponent import numpy as np #from ... import config #if not config.is_building_docs(): # from qiskit_metal import is_true class TransmonInterdigitated(QComponent): """ The base "TransmonInterdigitated" inherits the "QComponent" class. This creates a transmon pocket with two large pads connected by a Josephson junction. Both pads have four interdigitated "fingers" which increase the capacitance of the structure. There are three coupling capacitor pads with qpins defined; these can be connected to other structures in a design using CPWs. Default Options: * pad_width: '1000um' -- width of the large rectangular pads on either side of the junction * pad_height: '300um' -- height of the large rectangular pads on either side of the junction * finger_width: '50um' -- width of the "finger" on either side of the junction * finger_height: '100um' -- height of the "finger" on either side of the junction * finger_space: '50um' -- height of the Josephson Junction (equivalently, the space between the two fingers) * pad_pos_x: '0um' -- the internal coordinate defining the center of the bottom rectangular pad * pad_pos_y: '0um' -- the internal coordinate defining the center of the bottom rectangular pad * comb_width: '50um' -- the width of the four interdigitated combs connected to either pad * comb_space_vert: '50um' -- the space between the edge of a comb and the edge of the opposite rectangular pad * comb_space_hor: '50um' -- the space between adjacent interdigitated comb structures * jj_width: '20um' -- the width of the Josephson Junction located between the two fingers of the device * cc_space: '50um' -- the space between the lower rectangular pad and the coupling capacitor below it * cc_width: '100um' -- the width of the coupling capacitor located below the bottom rectangular pad * cc_height: '100um' -- the height of the coupling capacitor located below the bottom rectangular pad * cc_topleft_space: '50um' -- the space between the upper rectangular pad and the top left coupling capacitor * cc_topleft_width: '100um' -- the width of the top left coupling capacitor pad * cc_topleft_height: '100um' -- the height of the top left coupling capacitor pad * cc_topright_space: '50um' -- the space between the upper rectangular pad and the top right coupling capacitor * cc_topright_width: '100um' -- the width of the top right coupling capacitor pad * cc_topright_height: '100um' -- the height of the top right coupling capacitor pad * position_x: '0um' -- the x-coordinate defining the center of the transmon pocket on the chip * position_y: '0um' -- the y-coordinate defining the center of the transmon pocket on the chip * rotation: '0.0' -- the angle at which the entire structure is rotated * rotation_top_pad: '180' -- internal coordinate defining the angle of rotation between top and bottom pads * layer: '1' -- all objects are drawn assuming they are part of the same layer on the chip """ # Default drawing options
default_options = Dict(pad_width='1000um', pad_height='300um', finger_width='50um', finger_height='100um', finger_space='50um', pad_pos_x='0um', pad_pos_y='0um', comb_width='50um', comb_space_vert='50um', comb_space_hor='50um', jj_width='20um', cc_space='50um', cc_width='100um', cc_height='100um', cc_topleft_space='50um', cc_topleft_width='100um', cc_topleft_height='100um', cc_topright_space='50um', cc_topright_width='100um', cc_topright_height='100um', position_x='0um', position_y='0um', rotation='0.0', rotation_top_pad='180', layer='1') """Default drawing options""" # Name prefix of component, if user doesn't provide name component_metadata = Dict(short_name='component') """Component metadata""" def make(self): """Convert self.options into QGeometry.""" p = self.parse_options() # Parse the string options into numbers # draw the lower pad as a rectangle pad_lower = draw.rectangle(p.pad_width, p.pad_height, p.pad_pos_x, p.pad_pos_y) # draw the lower finger as a rectangle finger_lower = draw.rectangle( p.finger_width, p.finger_height, p.pad_pos_x, p.pad_pos_y + 0.49999 * (p.pad_height) + 0.49999 * (p.finger_height)) # draw the Josephson Junction rect_jj = draw.rectangle( p.jj_width, p.finger_space, p.pad_pos_x, 0.5 * (p.pad_height) + p.finger_height + 0.5 * (p.finger_space)) # draw the first comb to the right of the lower finger as a rectangle comb1_lower = draw.rectangle( p.comb_width, (2 * p.finger_height + p.finger_space - p.comb_space_vert), (0.5 * p.finger_width + p.comb_space_hor + 0.5 * p.comb_width), (0.5 * p.pad_height + 0.5 * (p.pad_pos_y + 0.5 * (p.pad_height) + 0.5 * (p.finger_height)))) # draw the second comb to the right of the lower finger by translating the first comb comb2_lower = draw.translate(comb1_lower, 2.0 * (p.comb_space_hor + p.comb_width), 0.0) # draw the first comb to the left of the lower finger comb3_lower = draw.rectangle( p.comb_width, (2 * p.finger_height + p.finger_space - p.comb_space_vert), (-0.5 * p.finger_width - 2.0 * p.comb_space_hor - 1.5 * p.comb_width), (0.5 * p.pad_height + 0.5 * (p.pad_pos_y + 0.5 * (p.pad_height) + 0.5 * (p.finger_height)))) # draw the second comb to the left of the lower finger comb4_lower = draw.translate(comb3_lower, -2.0 * (p.comb_space_hor + p.comb_width), 0.0) coupling_capacitor = draw.rectangle( p.cc_width, p.cc_height, p.pad_pos_x, p.pad_pos_y - 0.5 * (p.pad_height) - p.cc_space - 0.5 * p.cc_height) cc_topleft = draw.rectangle( p.cc_topleft_width, p.cc_topleft_height, p.pad_pos_x - 0.5 * p.pad_width + 0.5 * p.cc_topleft_width, p.pad_pos_y + 1.5 * p.pad_height + 2.0 * p.finger_height + p.finger_space + p.cc_topleft_space + 0.5 * p.cc_topleft_height) cc_topright = draw.translate( cc_topleft, p.pad_width - 0.5 * p.cc_topleft_width - 0.5 * p.cc_topright_width, 0.0) # merge the bottom elements bottom = draw.union(pad_lower, finger_lower, comb1_lower, comb2_lower, comb3_lower, comb4_lower) # create the top portion of the comb by translating and rotating # the bottom portion of the comb top = draw.translate(bottom, 0.0, p.pad_height + p.finger_space) top = draw.rotate(top, p.rotation_top_pad) # merge everything into a single design design = draw.union(bottom, top, rect_jj, coupling_capacitor, cc_topleft, cc_topright) # draw the transmon pocket bounding box pocket = draw.rectangle(1.5 * p.pad_width, 5.0 * p.pad_height) # the origin is originally set to the middle of the lower pad. # Let's move it to the center of the JJ. 
design = draw.translate( design, 0.0, -0.5 * p.pad_height - p.finger_height - 0.5 * p.finger_space) # now translate the final structure according to the user input design = draw.rotate(design, p.rotation, origin=(0, 0)) design = draw.translate(design, p.position_x, p.position_y) pocket = draw.rotate(pocket, p.rotation, origin=(0, 0)) pocket = draw.translate(pocket, p.position_x, p.position_y) geom = {'design': design} geom_pocket = {'pocket': pocket} self.add_qgeometry('poly', geom, layer=p.layer, subtract=False) self.add_qgeometry('poly', geom_pocket, layer=p.layer, subtract=True) ################################################################### # Add Qpin connections for coupling capacitors # define a function that both rotates and translates the # qpin coordinates def qpin_rotate_translate(x): """ This function rotates the coordinates of the three qpins according to the user inputs for "position_x", "position_y" and "rotation". """ y = list(x) z = [0.0, 0.0] z[0] = y[0] * cos(p.rotation * 3.14159 / 180) - y[1] * sin( p.rotation * 3.14159 / 180) z[1] = y[0] * sin(p.rotation * 3.14159 / 180) + y[1] * cos( p.rotation * 3.14159 / 180) z[0] = z[0] + p.position_x z[1] = z[1] + p.position_y x = (z[0], z[1]) return x # Add Qpin connections for the bottom coupling capacitor qp1a = (0.0, -0.5 * p.pad_height - p.finger_height - 0.5 * p.finger_space) qp1b = (0.0, -0.5 * p.pad_height - p.cc_space - p.cc_height - 0.5 * p.pad_height - p.finger_height - 0.5 * p.finger_space) # rotate and translate the qpin coordinates qp1a = qpin_rotate_translate(qp1a) qp1b = qpin_rotate_translate(qp1b) self.add_pin('pin1', points=np.array([qp1a, qp1b]), width=0.01, input_as_norm=True) # Add Qpin connections for top left coupling capacitor qp2a = (p.pad_pos_x - 0.5 * p.pad_width + 0.5 * p.cc_topleft_width, p.pad_pos_y + 1.5 * p.pad_height + 2.0 * p.finger_height + p.finger_space + p.cc_topleft_space + 0.5 * p.cc_topleft_height - 0.5 * p.pad_height - p.finger_height - 0.5 * p.finger_space) qp2b = (p.pad_pos_x - 0.5 * p.pad_width, p.pad_pos_y + 1.5 * p.pad_height + 2.0 * p.finger_height + p.finger_space + p.cc_topleft_space + 0.5 * p.cc_topleft_height - 0.5 * p.pad_height - p.finger_height - 0.5 * p.finger_space) qp2a = qpin_rotate_translate(qp2a) qp2b = qpin_rotate_translate(qp2b) self.add_pin('pin2', points=np.array([qp2a, qp2b]), width=0.01, input_as_norm=True) # Add Qpin connections for top right coupling capacitor qp3a = (p.pad_pos_x + 0.5 * p.pad_width - 0.5 * p.cc_topleft_width, p.pad_pos_y + 1.5 * p.pad_height + 2.0 * p.finger_height + p.finger_space + p.cc_topleft_space + 0.5 * p.cc_topleft_height - 0.5 * p.pad_height - p.finger_height - 0.5 * p.finger_space) qp3b = (p.pad_pos_x + 0.5 * p.pad_width, p.pad_pos_y + 1.5 * p.pad_height + 2.0 * p.finger_height + p.finger_space + p.cc_topleft_space + 0.5 * p.cc_topleft_height - 0.5 * p.pad_height - p.finger_height - 0.5 * p.finger_space) qp3a = qpin_rotate_translate(qp3a) qp3b = qpin_rotate_translate(qp3b) self.add_pin('pin3', points=np.array([qp3a, qp3b]), width=0.01, input_as_norm=True)
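# --- Editor's illustration (not part of the Qiskit Metal source above): a hedged
# usage sketch showing how this component is typically placed in a qiskit-metal
# planar design. DesignPlanar, overwrite_enabled and rebuild() are standard
# qiskit-metal API; the import path for TransmonInterdigitated is assumed from
# this file's path and may differ between versions.
from qiskit_metal import designs
from qiskit_metal.qlibrary.qubits.Transmon_Interdigitated import TransmonInterdigitated

design = designs.DesignPlanar()
design.overwrite_enabled = True

# Place one transmon at (1 mm, 2 mm), rotated by 90 degrees; every option not
# given here falls back to the class-level default_options.
q1 = TransmonInterdigitated(design,
                            'Q1',
                            options=dict(position_x='1mm',
                                         position_y='2mm',
                                         rotation='90'))
design.rebuild()
# After rebuilding, q1.pins exposes 'pin1', 'pin2' and 'pin3' for CPW routing.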
45.43662
93
0.577805
12,138
0.940639
0
0
0
0
0
0
5,476
0.424365
d9f1f15178cb9e26d9b4f91695b333a07eaa59d6
74,778
py
Python
sqlova/model/nl2sql/wikisql_models.py
guotong1988/Rule-SQL
e826c0d659c8b35a72b64aa2b50d4d943fdd70f1
[ "Apache-2.0" ]
15
2019-07-25T12:13:31.000Z
2020-10-17T13:42:58.000Z
sqlova/model/nl2sql/wikisql_models.py
guotong1988/Rule-SQL
e826c0d659c8b35a72b64aa2b50d4d943fdd70f1
[ "Apache-2.0" ]
1
2020-01-07T05:49:15.000Z
2020-04-22T01:22:00.000Z
sqlova/model/nl2sql/wikisql_models.py
guotong1988/Rule-SQL
e826c0d659c8b35a72b64aa2b50d4d943fdd70f1
[ "Apache-2.0" ]
3
2019-10-01T09:14:35.000Z
2020-07-18T08:39:48.000Z
# Copyright 2019-present NAVER Corp. # Apache License v2.0 # Wonseok Hwang import os, json from copy import deepcopy from matplotlib.pylab import * import torch import torch.nn as nn import torch.nn.functional as F device = torch.device("cuda" if torch.cuda.is_available() else "cpu") from sqlova.utils.utils import topk_multi_dim from sqlova.utils.utils_wikisql import * class Seq2SQL_v1(nn.Module): def __init__(self, input_size, hidden_size, num_layer, dropout, number_cond_ops, number_agg_ops, old=False): super(Seq2SQL_v1, self).__init__() self.input_size = input_size self.hidden_size = hidden_size self.num_layer = num_layer self.dropout = dropout self.max_where_number = 4 self.number_cond_ops = number_cond_ops self.number_agg_ops = number_agg_ops self.select_column_predict = SelectColumnPredict(input_size, hidden_size, num_layer, dropout) self.select_agg_predict = SelectAggPredict(input_size, hidden_size, num_layer, dropout, number_agg_ops, old=old) self.where_number_predict = WhereNumberPredict(input_size, hidden_size, num_layer, dropout) self.wcp = WhereColumnPredict(input_size, hidden_size, num_layer, dropout) self.wop = WhereOpPredict(input_size, hidden_size, num_layer, dropout, number_cond_ops) self.wvp = WhereValuePredict_startend(input_size, hidden_size, num_layer, dropout, number_cond_ops, old=old) # start-end-search-discriminative model # emb_question, [16,26,1536] # len_question, [16] # emb_header, [102,12,1536] # len_header_token, [102] # number_header, [16] def forward(self, emb_question, len_question, emb_header, len_header_token, number_header, g_sc=None, g_sa=None, g_wn=None, g_wc=None, g_wo=None, g_wvi=None, show_p_sc=False, show_p_sa=False, show_p_wn=False, show_p_wc=False, show_p_wo=False, show_p_wv=False): # sc s_sc,s_sc_softmax = self.select_column_predict(emb_question, len_question, emb_header, len_header_token, number_header, show_p_sc=show_p_sc) if g_sc: pr_sc = g_sc else: pr_sc = pred_sc(s_sc) # sa s_sa,s_sa_softmax = self.select_agg_predict(emb_question, len_question, emb_header, len_header_token, number_header, pr_sc, show_p_sa=show_p_sa) if g_sa: # it's not necessary though. pr_sa = g_sa else: pr_sa = pred_sa(s_sa) # wn s_wn,s_wn_softmax = self.where_number_predict(emb_question, len_question, emb_header, len_header_token, number_header, show_p_wn=show_p_wn) if g_wn: pr_wn = g_wn else: pr_wn = pred_wn(s_wn) # wc s_wc,s_wc_softmax = self.wcp(emb_question, len_question, emb_header, len_header_token, number_header, show_p_wc=show_p_wc, penalty=True) if g_wc: pr_wc = g_wc else: pr_wc = pred_wherecolumn(pr_wn, s_wc) # wo s_wo,s_wo_softmax = self.wop(emb_question, len_question, emb_header, len_header_token, number_header, wn=pr_wn, wc=pr_wc, show_p_wo=show_p_wo) if g_wo: pr_wo = g_wo else: pr_wo = pred_wo(pr_wn, s_wo) # wv s_wv,s_wv_softmax = self.wvp(emb_question, len_question, emb_header, len_header_token, number_header, wn=pr_wn, wc=pr_wc, wo=pr_wo, show_p_wv=show_p_wv) return s_sc, s_sa, s_wn, s_wc, s_wo, s_wv, s_sc_softmax, s_sa_softmax, s_wn_softmax, s_wc_softmax, s_wo_softmax, s_wv_softmax def beam_forward(self, emb_question, len_question, emb_header, len_header_token, l_header, engine, tb, nlu_t, nlu_wp_t, wp_to_wh_index, nlu, beam_size=4, show_p_sc=False, show_p_sa=False, show_p_wn=False, show_p_wc=False, show_p_wo=False, show_p_wv=False): """ Execution-guided beam decoding. 
""" # sc s_sc,_ = self.select_column_predict(emb_question, len_question, emb_header, len_header_token, l_header, show_p_sc=show_p_sc) prob_sc = F.softmax(s_sc, dim=-1) bS, mcL = s_sc.shape # minimum_header_length = min(l_header) # beam_size = minimum_header_length if beam_size > minimum_header_length else beam_size # sa # Construct all possible sc_sa_score prob_sc_sa = torch.zeros([bS, beam_size, self.number_agg_ops]).to(device) prob_sca = torch.zeros_like(prob_sc_sa).to(device) # get the top-k indices. pr_sc_beam = [B, beam_size] pr_sc_beam = pred_sc_beam(s_sc, beam_size) # calculate and predict s_sa. for i_beam in range(beam_size): pr_sc = list( array(pr_sc_beam)[:,i_beam] ) s_sa,_ = self.select_agg_predict(emb_question, len_question, emb_header, len_header_token, l_header, pr_sc, show_p_sa=show_p_sa) prob_sa = F.softmax(s_sa, dim=-1) prob_sc_sa[:, i_beam, :] = prob_sa prob_sc_selected = prob_sc[range(bS), pr_sc] # [B] prob_sca[:,i_beam,:] = (prob_sa.t() * prob_sc_selected).t() # [mcL, B] * [B] -> [mcL, B] (element-wise multiplication) # [mcL, B] -> [B, mcL] # Calculate the dimension of tensor # tot_dim = len(prob_sca.shape) # First flatten to 1-d idxs = topk_multi_dim(torch.tensor(prob_sca), n_topk=beam_size, batch_exist=True) # Now as sc_idx is already sorted, re-map them properly. idxs = remap_sc_idx(idxs, pr_sc_beam) # [sc_beam_idx, sa_idx] -> [sc_idx, sa_idx] idxs_arr = array(idxs) # [B, beam_size, remainig dim] # idxs[b][0] gives first probable [sc_idx, sa_idx] pairs. # idxs[b][1] gives of second. # Calculate prob_sca, a joint probability beam_idx_sca = [0] * bS beam_meet_the_final = [False] * bS while True: pr_sc = idxs_arr[range(bS),beam_idx_sca,0] pr_sa = idxs_arr[range(bS),beam_idx_sca,1] # map index properly check = check_sc_sa_pairs(tb, pr_sc, pr_sa) if sum(check) == bS: break else: for b, check1 in enumerate(check): if not check1: # wrong pair beam_idx_sca[b] += 1 if beam_idx_sca[b] >= beam_size: beam_meet_the_final[b] = True beam_idx_sca[b] -= 1 else: beam_meet_the_final[b] = True if sum(beam_meet_the_final) == bS: break # Now pr_sc, pr_sa are properly predicted. pr_sc_best = list(pr_sc) pr_sa_best = list(pr_sa) # Now, Where-clause beam search. s_wn,_ = self.where_number_predict(emb_question, len_question, emb_header, len_header_token, l_header, show_p_wn=show_p_wn) prob_wn = F.softmax(s_wn, dim=-1).detach().to('cpu').numpy() # Found "executable" most likely 4(=max_num_of_conditions) where-clauses. # wc s_wc,_ = self.wcp(emb_question, len_question, emb_header, len_header_token, l_header, show_p_wc=show_p_wc, penalty=True) prob_wc = F.sigmoid(s_wc).detach().to('cpu').numpy() # pr_wc_sorted_by_prob = pred_wc_sorted_by_prob(s_wc) # get max_wn # of most probable columns & their prob. 
pr_wn_max = [self.max_where_number] * bS pr_wc_max = pred_wherecolumn(pr_wn_max, s_wc) # if some column does not have an executable where-clause, omit that column prob_wc_max = zeros([bS, self.max_where_number]) for b, pr_wc_max1 in enumerate(pr_wc_max): prob_wc_max[b,:] = prob_wc[b,pr_wc_max1] # get the most probable max_wn where-clauses # wo s_wo_max,_ = self.wop(emb_question, len_question, emb_header, len_header_token, l_header, wn=pr_wn_max, wc=pr_wc_max, show_p_wo=show_p_wo) prob_wo_max = F.softmax(s_wo_max, dim=-1).detach().to('cpu').numpy() # [B, max_wn, n_cond_op] pr_wvi_beam_op_list = [] prob_wvi_beam_op_list = [] for i_op in range(self.number_cond_ops - 1): pr_wo_temp = [[i_op] * self.max_where_number] * bS # wv s_wv,_ = self.wvp(emb_question, len_question, emb_header, len_header_token, l_header, wn=pr_wn_max, wc=pr_wc_max, wo=pr_wo_temp, show_p_wv=show_p_wv) prob_wv = F.softmax(s_wv, dim=-2).detach().to('cpu').numpy() # prob_wv pr_wvi_beam, prob_wvi_beam = pred_wvi_se_beam(self.max_where_number, s_wv, beam_size) pr_wvi_beam_op_list.append(pr_wvi_beam) prob_wvi_beam_op_list.append(prob_wvi_beam) # pr_wvi_beam = [B, max_wn, k_logit**2 [st, ed] pairs] # pred_wv_beam # Calculate joint probability of where-clause # prob_w = [batch, wc, wo, wv] = [B, max_wn, n_cond_op, n_pairs] n_wv_beam_pairs = prob_wvi_beam.shape[2] prob_w = zeros([bS, self.max_where_number, self.number_cond_ops - 1, n_wv_beam_pairs]) for b in range(bS): for i_wn in range(self.max_where_number): for i_op in range(self.number_cond_ops - 1): # do not use final one for i_wv_beam in range(n_wv_beam_pairs): # i_wc = pr_wc_max[b][i_wn] # already done p_wc = prob_wc_max[b, i_wn] p_wo = prob_wo_max[b, i_wn, i_op] p_wv = prob_wvi_beam_op_list[i_op][b, i_wn, i_wv_beam] prob_w[b, i_wn, i_op, i_wv_beam] = p_wc * p_wo * p_wv # Perform execution guided decoding conds_max = [] prob_conds_max = [] # while len(conds_max) < self.max_wn: idxs = topk_multi_dim(torch.tensor(prob_w), n_topk=beam_size, batch_exist=True) # idxs = [B, i_wc_beam, i_op, i_wv_pairs] # Construct conds1 for b, idxs1 in enumerate(idxs): conds_max1 = [] prob_conds_max1 = [] for i_wn, idxs11 in enumerate(idxs1): i_wc = pr_wc_max[b][idxs11[0]] i_op = idxs11[1] wvi = pr_wvi_beam_op_list[i_op][b][idxs11[0]][idxs11[2]] # get wv_str temp_pr_wv_str, _ = convert_pred_wvi_to_string([[wvi]], [nlu_t[b]], [nlu_wp_t[b]], [wp_to_wh_index[b]], [nlu[b]]) merged_wv11 = merge_wv_t1_eng(temp_pr_wv_str[0][0], nlu[b]) conds11 = [i_wc, i_op, merged_wv11] prob_conds11 = prob_w[b, idxs11[0], idxs11[1], idxs11[2] ] # test execution # print(nlu[b]) # print(tb[b]['id'], tb[b]['types'], pr_sc[b], pr_sa[b], [conds11]) pr_ans = engine.execute(tb[b]['id'], pr_sc[b], pr_sa[b], [conds11]) if bool(pr_ans): # pr_ans is not empty! conds_max1.append(conds11) prob_conds_max1.append(prob_conds11) conds_max.append(conds_max1) prob_conds_max.append(prob_conds_max1) # May need to do a more exhaustive search? # i.e. up to getting all executable cases. # Calculate total probability to decide the number of where-clauses pr_sql_i = [] prob_wn_w = [] pr_wn_based_on_prob = [] for b, prob_wn1 in enumerate(prob_wn): max_executable_wn1 = len( conds_max[b] ) prob_wn_w1 = [] prob_wn_w1.append(prob_wn1[0]) # wn=0 case. 
for i_wn in range(max_executable_wn1): prob_wn_w11 = prob_wn1[i_wn+1] * prob_conds_max[b][i_wn] prob_wn_w1.append(prob_wn_w11) pr_wn_based_on_prob.append(argmax(prob_wn_w1)) prob_wn_w.append(prob_wn_w1) pr_sql_i1 = {'agg': pr_sa_best[b], 'sel': pr_sc_best[b], 'conds': conds_max[b][:pr_wn_based_on_prob[b]]} pr_sql_i.append(pr_sql_i1) # s_wv = [B, max_wn, max_nlu_tokens, 2] return prob_sca, prob_w, prob_wn_w, pr_sc_best, pr_sa_best, pr_wn_based_on_prob, pr_sql_i class SelectColumnPredict(nn.Module): def __init__(self, input_size=300, hidden_size=100, num_layer=2, dropout=0.3): super(SelectColumnPredict, self).__init__() self.input_size = input_size self.hidden_size = hidden_size self.num_layer = num_layer self.dropout = dropout self.enc_h = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2), num_layers=num_layer, batch_first=True, dropout=dropout, bidirectional=True) self.enc_n = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2), num_layers=num_layer, batch_first=True, dropout=dropout, bidirectional=True) self.W_att = nn.Linear(hidden_size, hidden_size) self.W_c = nn.Linear(hidden_size, hidden_size) self.W_header = nn.Linear(hidden_size, hidden_size) self.sc_out = nn.Sequential(nn.Tanh(), nn.Linear(2 * hidden_size, 1)) self.softmax_dim1 = nn.Softmax(dim=1) self.softmax_dim2 = nn.Softmax(dim=2) self.softmax_dim_1 = nn.Softmax(dim=-1) # emb_question, [16,26,1536] # len_question, [16] # emb_header, [102,12,1536] # len_header_token, [102] # number_header, [16] def forward(self, emb_question, len_question, emb_header, len_header_token, number_header, show_p_sc=False): # Encode encoded_question = encode(self.enc_n, emb_question, len_question, return_hidden=False, hc0=None, last_only=False) # [b, n, dim] encoded_header = encode_header(self.enc_h, emb_header, len_header_token, number_header) # [b, header, dim] bS = len(number_header) mL_n = max(len_question) # [bS, max_len_header, 100] * [bS, 100, mL_n] -> [bS, max_len_header, mL_n] att_h = torch.bmm(encoded_header, self.W_att(encoded_question).transpose(1, 2)) # Penalty on blank parts for b, l_n1 in enumerate(len_question): if l_n1 < mL_n: att_h[b, :, l_n1:] = -10000000000 p_n = self.softmax_dim2(att_h) if show_p_sc: # p = [b, header, n] if p_n.shape[0] != 1: raise Exception("Batch size should be 1.") fig=figure(2001, figsize=(12,3.5)) # subplot(6,2,7) subplot2grid((7,2), (3, 0), rowspan=2) cla() _color='rgbkcm' _symbol='.......' 
for i_h in range(number_header[0]): color_idx = i_h % len(_color) plot(p_n[0][i_h][:].data.numpy() - i_h, '--'+_symbol[color_idx]+_color[color_idx], ms=7) title('sc: p_n for each h') grid(True) fig.tight_layout() fig.canvas.draw() show() # p_n [ bS, max_len_header, mL_n] -> [ bS, max_len_header, mL_n, 1] # wenc_n [ bS, mL_n, 100] -> [ bS, 1, mL_n, 100] # -> [bS, max_len_header, mL_n, 100] -> [bS, max_len_header, 100] c_n = torch.mul(p_n.unsqueeze(3), encoded_question.unsqueeze(1)).sum(dim=2) vec = torch.cat([self.W_c(c_n), self.W_header(encoded_header)], dim=2) score_select_column = self.sc_out(vec).squeeze(2) # [bS, max_len_header, 1] -> [bS, max_len_header] score_select_column_softmax = self.softmax_dim_1(score_select_column) # Penalty max_len_header = max(number_header) for b, l_header1 in enumerate(number_header): if l_header1 < max_len_header: score_select_column[b, l_header1:] = -10000000000 for b, l_header1 in enumerate(number_header): if l_header1 < max_len_header: score_select_column_softmax[b, l_header1:] = 0 return score_select_column,score_select_column_softmax class SelectAggPredict(nn.Module): def __init__(self, input_size=300, hidden_size=100, num_layer=2, dropout=0.3, n_agg_ops=-1, old=False): super(SelectAggPredict, self).__init__() self.input_size = input_size self.hidden_size = hidden_size self.num_layer = num_layer self.dropout = dropout self.enc_h = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2), num_layers=num_layer, batch_first=True, dropout=dropout, bidirectional=True) self.enc_n = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2), num_layers=num_layer, batch_first=True, dropout=dropout, bidirectional=True) self.W_att = nn.Linear(hidden_size, hidden_size) self.sa_out = nn.Sequential(nn.Linear(hidden_size, hidden_size), nn.Tanh(), nn.Linear(hidden_size, n_agg_ops)) # Fixed number of aggregation operators. self.softmax_dim1 = nn.Softmax(dim=1) self.softmax_dim2 = nn.Softmax(dim=2) self.softmax_dim_1 = nn.Softmax(dim=-1) if old: # for backward compatibility self.W_c = nn.Linear(hidden_size, hidden_size) self.W_header = nn.Linear(hidden_size, hidden_size) def forward(self, emb_question, len_question, emb_header, len_header_token, l_header, pr_sc, show_p_sa=False): # Encode encoded_question = encode(self.enc_n, emb_question, len_question, return_hidden=False, hc0=None, last_only=False) # [b, n, dim] encoded_header = encode_header(self.enc_h, emb_header, len_header_token, l_header) # [b, header, dim] bS = len(l_header) mL_n = max(len_question) wenc_header_ob = encoded_header[list(range(bS)), pr_sc] # list, so one sample for each batch. 
# [bS, question_len, 100] * [bS, 100, 1] -> [bS, question_len] att = torch.bmm(self.W_att(encoded_question), wenc_header_ob.unsqueeze(2)).squeeze(2) # Penalty on blank parts for b, l_n1 in enumerate(len_question): if l_n1 < mL_n: att[b, l_n1:] = -10000000000 # [bS, question_len] p = self.softmax_dim1(att) if show_p_sa: if p.shape[0] != 1: raise Exception("Batch size should be 1.") fig=figure(2001); subplot(7,2,3) cla() plot(p[0].data.numpy(), '--rs', ms=7) title('sa: nlu_weight') grid(True) fig.tight_layout() fig.canvas.draw() show() # [bS, question_len, 100] * ( [bS, question_len, 1] -> [bS, question_len, 100]) # -> [bS, question_len, 100] -> [bS, 100] c_n = torch.mul(encoded_question, p.unsqueeze(2).expand_as(encoded_question)).sum(dim=1) s_sa = self.sa_out(c_n) s_sa_softmax = self.softmax_dim_1(s_sa) return s_sa,s_sa_softmax class WhereNumberPredict(nn.Module): def __init__(self, input_size=300, hidden_size=100, num_layer=2, dropout=0.3, ): super(WhereNumberPredict, self).__init__() self.input_size = input_size self.hidden_size = hidden_size self.num_layer = num_layer self.dropout = dropout self.mL_w = 4 # max where condition number self.enc_h = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2), num_layers=num_layer, batch_first=True, dropout=dropout, bidirectional=True) self.enc_n = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2), num_layers=num_layer, batch_first=True, dropout=dropout, bidirectional=True) self.W_att_h = nn.Linear(hidden_size, 1) self.W_hidden = nn.Linear(hidden_size, num_layer * hidden_size) self.W_cell = nn.Linear(hidden_size, num_layer * hidden_size) self.W_att_n = nn.Linear(hidden_size, 1) self.wn_out = nn.Sequential(nn.Linear(hidden_size, hidden_size), nn.Tanh(), nn.Linear(hidden_size, self.mL_w + 1)) # max number (4 + 1) self.softmax_dim1 = nn.Softmax(dim=1) self.softmax_dim2 = nn.Softmax(dim=2) self.softmax_dim_1 = nn.Softmax(dim=-1) def forward(self, emb_question, len_question, emb_header, len_header_token, l_header, show_p_wn=False): # Encode encoded_header = encode_header(self.enc_h, emb_header, len_header_token, l_header) # [b, max_len_header, dim] bS = len(l_header) max_len_question = max(len_question) max_len_header = max(l_header) # mL_h = max(len_header_token) # (self-attention?) column Embedding? # [B, max_len_header, 100] -> [B, max_len_header, 1] -> [B, max_len_header] att_h = self.W_att_h(encoded_header).squeeze(2) # Penalty for b, l_header1 in enumerate(l_header): if l_header1 < max_len_header: att_h[b, l_header1:] = -10000000000 p_h = self.softmax_dim1(att_h) if show_p_wn: if p_h.shape[0] != 1: raise Exception("Batch size should be 1.") fig=figure(2001); subplot(7,2,5) cla() plot(p_h[0].data.numpy(), '--rs', ms=7) title('wn: header_weight') grid(True) fig.canvas.draw() show() # input('Type Enter to continue.') # [B, max_len_header, 100] * [ B, max_len_header, 1] -> [B, max_len_header, 100] -> [B, 100] c_header = torch.mul(encoded_header, p_h.unsqueeze(2)).sum(1) # [B, 100] --> [B, 2*100] Enlarge because there are two layers. hidden = self.W_hidden(c_header) # [B, 4, 200/2] hidden = hidden.view(bS, self.num_layer * 2, int( self.hidden_size / 2)) # [4, B, 100/2] # num_layer * num_directions (bi-directional) # lstm input convention. 
hidden = hidden.transpose(0, 1).contiguous() cell = self.W_cell(c_header) # [B, 4, 100/2] cell = cell.view(bS, self.num_layer * 2, int(self.hidden_size / 2)) # [4, B, 100/2] cell = cell.transpose(0, 1).contiguous() wenc_n = encode(self.enc_n, emb_question, len_question, return_hidden=False, hc0=(hidden, cell), last_only=False) # [b, n, dim] att_n = self.W_att_n(wenc_n).squeeze(2) # [B, max_len, 100] -> [B, max_len, 1] -> [B, max_len] # Penalty for b, l_n1 in enumerate(len_question): if l_n1 < max_len_question: att_n[b, l_n1:] = -10000000000 p_n = self.softmax_dim1(att_n) if show_p_wn: if p_n.shape[0] != 1: raise Exception("Batch size should be 1.") fig=figure(2001); subplot(7,2,6) cla() plot(p_n[0].data.numpy(), '--rs', ms=7) title('wn: nlu_weight') grid(True) fig.canvas.draw() show() # input('Type Enter to continue.') # [B, mL_n, 100] *([B, mL_n] -> [B, mL_n, 1] -> [B, mL_n, 100] ) -> [B, 100] c_n = torch.mul(wenc_n, p_n.unsqueeze(2).expand_as(wenc_n)).sum(dim=1) s_wn = self.wn_out(c_n) s_wn_softmax = self.softmax_dim_1(s_wn) return s_wn,s_wn_softmax # where column predict class WhereColumnPredict(nn.Module): def __init__(self, input_size=300, hidden_size=100, num_layer=2, dropout=0.3): super(WhereColumnPredict, self).__init__() self.input_size = input_size self.hidden_size = hidden_size self.num_layer = num_layer self.dropout = dropout self.enc_h = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2), num_layers=num_layer, batch_first=True, dropout=dropout, bidirectional=True) self.enc_n = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2), num_layers=num_layer, batch_first=True, dropout=dropout, bidirectional=True) self.W_att = nn.Linear(hidden_size, hidden_size) self.W_c = nn.Linear(hidden_size, hidden_size) self.W_header = nn.Linear(hidden_size, hidden_size) self.W_out = nn.Sequential( nn.Tanh(), nn.Linear(2 * hidden_size, 1) ) self.softmax_dim1 = nn.Softmax(dim=1) self.softmax_dim2 = nn.Softmax(dim=2) self.softmax_dim_1 = nn.Softmax(dim=-1) def forward(self, emb_question, len_question, emb_header, len_header_token, l_header, show_p_wc, penalty=True): # Encode encoded_question = encode(self.enc_n, emb_question, len_question, return_hidden=False, hc0=None, last_only=False) # [b, n, dim] encoded_header = encode_header(self.enc_h, emb_header, len_header_token, l_header) # [b, header, dim] # attention # wenc = [bS, mL, hidden_size] # att = [bS, max_len_header, mL_n] # att[b, i_h, j_n] = p(j_n| i_h) att = torch.bmm(encoded_header, self.W_att(encoded_question).transpose(1, 2)) # penalty to blank part. mL_n = max(len_question) for b_n, l_n1 in enumerate(len_question): if l_n1 < mL_n: att[b_n, :, l_n1:] = -10000000000 # make p(j_n | i_h) p = self.softmax_dim2(att) if show_p_wc: # p = [b, header, n] if p.shape[0] != 1: raise Exception("Batch size should be 1.") fig=figure(2001); # subplot(6,2,7) subplot2grid((7,2), (3, 1), rowspan=2) cla() _color='rgbkcm' _symbol='.......' for i_h in range(l_header[0]): color_idx = i_h % len(_color) plot(p[0][i_h][:].data.numpy() - i_h, '--'+_symbol[color_idx]+_color[color_idx], ms=7) title('wc: p_n for each h') grid(True) fig.tight_layout() fig.canvas.draw() show() # max nlu context vectors # [bS, max_len_header, mL_n]*[bS, max_len_header, mL_n] encoded_question = encoded_question.unsqueeze(1) # [ b, n, dim] -> [b, 1, n, dim] p = p.unsqueeze(3) # [b, header, n] -> [b, header, n, 1] c_n = torch.mul(encoded_question, p).sum(2) # -> [b, header, dim], c_n for each header. 
y = torch.cat([self.W_c(c_n), self.W_header(encoded_header)], dim=2) # [b, header, 2*dim] score = self.W_out(y).squeeze(2) # [b, header] score[torch.isnan(score)] = 0 score_softmax = self.softmax_dim_1(score) if penalty: for b, l_header1 in enumerate(l_header): score[b, l_header1:] = -1e+10 for b, l_header1 in enumerate(l_header): score_softmax[b, l_header1:] = 0 return score,score_softmax # where op predict class WhereOpPredict(nn.Module): def __init__(self, input_size=300, hidden_size=100, num_layer=2, dropout=0.3, n_cond_ops=3): super(WhereOpPredict, self).__init__() self.input_size = input_size self.hidden_size = hidden_size self.num_layer = num_layer self.dropout = dropout self.mL_w = 4 # max where condition number self.enc_h = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2), num_layers=num_layer, batch_first=True, dropout=dropout, bidirectional=True) self.enc_n = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2), num_layers=num_layer, batch_first=True, dropout=dropout, bidirectional=True) self.W_att = nn.Linear(hidden_size, hidden_size) self.W_c = nn.Linear(hidden_size, hidden_size) self.W_header = nn.Linear(hidden_size, hidden_size) self.wo_out = nn.Sequential( nn.Linear(2*hidden_size, hidden_size), nn.Tanh(), nn.Linear(hidden_size, n_cond_ops) ) self.softmax_dim1 = nn.Softmax(dim=1) self.softmax_dim2 = nn.Softmax(dim=2) self.softmax_dim_1 = nn.Softmax(dim=-1) def forward(self, emb_question, len_question, emb_header, len_header_token, l_header, wn, wc, wenc_n=None, show_p_wo=False): # Encode if not wenc_n: wenc_n = encode(self.enc_n, emb_question, len_question, return_hidden=False, hc0=None, last_only=False) # [b, n, dim] encoded_header = encode_header(self.enc_h, emb_header, len_header_token, l_header) # [b, header, dim] bS = len(l_header) # wn wenc_header_ob = [] # observed header for b in range(bS): # [[...], [...]] # Pad list to maximum number of selections real = [encoded_header[b, col] for col in wc[b]] pad = (self.mL_w - wn[b]) * [encoded_header[b, 0]] # this padding could be wrong. Test with zero padding later. wenc_header_ob1 = torch.stack(real + pad) # It is not used in the loss function. wenc_header_ob.append(wenc_header_ob1) # list to [B, 4, dim] tensor. wenc_header_ob = torch.stack(wenc_header_ob) # list to tensor. wenc_header_ob = wenc_header_ob.to(device) # [B, 1, mL_n, dim] * [B, 4, dim, 1] # -> [B, 4, mL_n, 1] -> [B, 4, mL_n] # multiplication between NLq-tokens and selected column att = torch.matmul(self.W_att(wenc_n).unsqueeze(1), wenc_header_ob.unsqueeze(3) ).squeeze(3) # Penalty for blank part. mL_n = max(len_question) for b, l_n1 in enumerate(len_question): if l_n1 < mL_n: att[b, :, l_n1:] = -10000000000 p = self.softmax_dim2(att) # p( n| selected_col ) if show_p_wo: # p = [b, header, n] if p.shape[0] != 1: raise Exception("Batch size should be 1.") fig=figure(2001) # subplot(6,2,7) subplot2grid((7,2), (5, 0), rowspan=2) cla() _color='rgbkcm' _symbol='.......' 
for i_wn in range(self.mL_w): color_idx = i_wn % len(_color) plot(p[0][i_wn][:].data.numpy() - i_wn, '--'+_symbol[color_idx]+_color[color_idx], ms=7) title('wo: p_n for selected h') grid(True) fig.tight_layout() fig.canvas.draw() show() # [B, 1, mL_n, dim] * [B, 4, mL_n, 1] # --> [B, 4, mL_n, dim] # --> [B, 4, dim] c_n = torch.mul(wenc_n.unsqueeze(1), p.unsqueeze(3)).sum(dim=2) # [bS, 5-1, dim] -> [bS, 5-1, 3] vec = torch.cat([self.W_c(c_n), self.W_header(wenc_header_ob)], dim=2) s_wo = self.wo_out(vec) s_wo_softmax = self.softmax_dim_1(s_wo) return s_wo,s_wo_softmax class WhereValuePredict_startend(nn.Module): """ Discriminative model. Gets the start and end positions. Here, a classifier for [ [pitcher], [team1], [team2], [year], ...] Input: Encoded nlu & selected column. Algorithm: Encoded nlu & selected column. -> classifier -> mask scores -> ... """ def __init__(self, input_size=300, hidden_size=100, num_layer=2, dropout=0.3, n_cond_ops=4, old=False): super(WhereValuePredict_startend, self).__init__() self.input_size = input_size self.hidden_size = hidden_size self.num_layer = num_layer self.dropout = dropout self.n_cond_ops = n_cond_ops self.mL_w = 4 # max where condition number self.enc_h = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2), num_layers=num_layer, batch_first=True, dropout=dropout, bidirectional=True) self.enc_n = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2), num_layers=num_layer, batch_first=True, dropout=dropout, bidirectional=True) self.W_att = nn.Linear(hidden_size, hidden_size) self.W_c = nn.Linear(hidden_size, hidden_size) self.W_header = nn.Linear(hidden_size, hidden_size) self.W_op = nn.Linear(n_cond_ops, hidden_size) # self.W_n = nn.Linear(hidden_size, hidden_size) if old: self.wv_out = nn.Sequential( nn.Linear(4 * hidden_size, 2) ) else: self.wv_out = nn.Sequential( nn.Linear(4 * hidden_size, hidden_size), nn.Tanh(), nn.Linear(hidden_size, 2) ) # self.wv_out = nn.Sequential( # nn.Linear(3 * hidden_size, hidden_size), # nn.Tanh(), # nn.Linear(hidden_size, self.gdkL) # ) self.softmax_dim1 = nn.Softmax(dim=1) self.softmax_dim2 = nn.Softmax(dim=2) self.softmax_dim_1 = nn.Softmax(dim=-1) def forward(self, emb_question, len_question, emb_header, len_header_token, l_header, wn, wc, wo, wenc_n=None, show_p_wv=False): # Encode if not wenc_n: wenc_n, hout, cout = encode(self.enc_n, emb_question, len_question, return_hidden=True, hc0=None, last_only=False) # [b, n, dim] encoded_header = encode_header(self.enc_h, emb_header, len_header_token, l_header) # [b, header, dim] bS = len(l_header) wenc_header_ob = [] # observed header for b in range(bS): # [[...], [...]] # Pad list to maximum number of selections real = [encoded_header[b, col] for col in wc[b]] pad = (self.mL_w - wn[b]) * [encoded_header[b, 0]] # this padding could be wrong. Test with zero padding later. wenc_header_ob1 = torch.stack(real + pad) # It is not used in the loss function. wenc_header_ob.append(wenc_header_ob1) # list to [B, 4, dim] tensor. wenc_header_ob = torch.stack(wenc_header_ob) # list to tensor. wenc_header_ob = wenc_header_ob.to(device) # Column attention # [B, 1, mL_n, dim] * [B, 4, dim, 1] # -> [B, 4, mL_n, 1] -> [B, 4, mL_n] # multiplication between NLq-tokens and selected column att = torch.matmul(self.W_att(wenc_n).unsqueeze(1), wenc_header_ob.unsqueeze(3) ).squeeze(3) # Penalty for blank part. 
mL_n = max(len_question) for b, l_n1 in enumerate(len_question): if l_n1 < mL_n: att[b, :, l_n1:] = -10000000000 p = self.softmax_dim2(att) # p( n| selected_col ) if show_p_wv: # p = [b, header, n] if p.shape[0] != 1: raise Exception("Batch size should be 1.") fig=figure(2001) # subplot(6,2,7) subplot2grid((7,2), (5, 1), rowspan=2) cla() _color='rgbkcm' _symbol='.......' for i_wn in range(self.mL_w): color_idx = i_wn % len(_color) plot(p[0][i_wn][:].data.numpy() - i_wn, '--'+_symbol[color_idx]+_color[color_idx], ms=7) title('wv: p_n for selected h') grid(True) fig.tight_layout() fig.canvas.draw() show() # [B, 1, mL_n, dim] * [B, 4, mL_n, 1] # --> [B, 4, mL_n, dim] # --> [B, 4, dim] c_n = torch.mul(wenc_n.unsqueeze(1), p.unsqueeze(3)).sum(dim=2) # Select observed headers only. # Also generate one_hot vector encoding info of the operator # [B, 4, dim] wenc_op = [] for b in range(bS): # [[...], [...]] # Pad list to maximum number of selections wenc_op1 = torch.zeros(self.mL_w, self.n_cond_ops) wo1 = wo[b] idx_scatter = [] l_wo1 = len(wo1) for i_wo11 in range(self.mL_w): if i_wo11 < l_wo1: wo11 = wo1[i_wo11] idx_scatter.append([int(wo11)]) else: idx_scatter.append([0]) # not used anyway wenc_op1 = wenc_op1.scatter(1, torch.tensor(idx_scatter), 1) wenc_op.append(wenc_op1) # list to [B, 4, dim] tensor. wenc_op = torch.stack(wenc_op) # list to tensor. wenc_op = wenc_op.to(device) # Now after concat, calculate logits for each token # [bS, 5-1, 3*hidden_size] = [bS, 4, 300] vec = torch.cat([self.W_c(c_n), self.W_header(wenc_header_ob), self.W_op(wenc_op)], dim=2) # Make extended vector based on encoded nl token containing column and operator information. # wenc_n = [bS, mL, 100] # vec2 = [bS, 4, mL, 400] vec1e = vec.unsqueeze(2).expand(-1,-1, mL_n, -1) # [bS, 4, 1, 300] -> [bS, 4, mL, 300] wenc_ne = wenc_n.unsqueeze(1).expand(-1, 4, -1, -1) # [bS, 1, mL, 100] -> [bS, 4, mL, 100] vec2 = torch.cat( [vec1e, wenc_ne], dim=3) # now make logits s_wv = self.wv_out(vec2) # [bS, 4, mL, 400] -> [bS, 4, mL, 2] s_wv_softmax = self.softmax_dim_1(s_wv) # penalty for spurious tokens for b, l_n1 in enumerate(len_question): if l_n1 < mL_n: s_wv[b, :, l_n1:, :] = -10000000000 for b, l_n1 in enumerate(len_question): if l_n1 < mL_n: s_wv_softmax[b, :, l_n1:, :] = 0 return s_wv,s_wv_softmax def Loss_selectwhere_startend_v2(score_select_column, s_sa, s_wn, s_wc, s_wo, s_wv, ground_truth_select_column, g_sa, g_wn, g_wc, g_wo, g_wvi): """ :param s_wv: score [ B, n_conds, T, score] :param g_wn: [ B ] :param g_wvi: [B, conds, pnt], e.g. [[[0, 6, 7, 8, 15], [0, 1, 2, 3, 4, 15]], [[0, 1, 2, 3, 16], [0, 7, 8, 9, 16]]] :return: """ loss = 0 # loss += Loss_sc(score_select_column, ground_truth_select_column) # loss += Loss_sa(s_sa, g_sa) # loss += Loss_wn(s_wn, g_wn) # loss += Loss_wc(s_wc, g_wc) # loss += Loss_wo(s_wo, g_wn, g_wo) # loss += Loss_wv_se(s_wv, g_wn, g_wvi) return loss def Loss_sw_se(score_select_column, s_sa, s_wn, s_wc, s_wo, s_wv, ground_truth_select_column, g_sa, g_wn, g_wc, g_wo, g_wvi): """ :param s_wv: score [ B, n_conds, T, score] :param g_wn: [ B ] :param g_wvi: [B, conds, pnt], e.g. 
[[[0, 6, 7, 8, 15], [0, 1, 2, 3, 4, 15]], [[0, 1, 2, 3, 16], [0, 7, 8, 9, 16]]] :return: """ loss = 0 loss += Loss_sc(score_select_column, ground_truth_select_column) loss += Loss_sa(s_sa, g_sa) loss += Loss_wn(s_wn, g_wn) loss += Loss_wc(s_wc, g_wc) loss += Loss_wo(s_wo, g_wn, g_wo) loss += Loss_wv_se(s_wv, g_wn, g_wvi) return loss def Loss_sc(s_sc, g_sc): loss = F.cross_entropy(s_sc, torch.tensor(g_sc).to(device)) return loss def Loss_sa(s_sa, g_sa): loss = F.cross_entropy(s_sa, torch.tensor(g_sa).to(device)) return loss def Loss_wn(s_wn, g_wn): loss = F.cross_entropy(s_wn, torch.tensor(g_wn).to(device)) return loss def Loss_wc(s_wc, g_wc): # Construct index matrix bS, max_h_len = s_wc.shape im = torch.zeros([bS, max_h_len]).to(device) for b, g_wc1 in enumerate(g_wc): for g_wc11 in g_wc1: im[b, g_wc11] = 1.0 # Construct prob. p = F.sigmoid(s_wc) loss = F.binary_cross_entropy(p, im) return loss def Loss_wo(s_wo, g_wn, g_wo): # Construct index matrix loss = 0 for b, g_wn1 in enumerate(g_wn): if g_wn1 == 0: continue g_wo1 = g_wo[b] s_wo1 = s_wo[b] loss += F.cross_entropy(s_wo1[:g_wn1], torch.tensor(g_wo1).to(device)) return loss def Loss_wv_se(s_wv, g_wn, g_wvi): """ s_wv: [bS, 4, mL, 2], 4 stands for the maximum number of conditions, 2 stands for start & end logits. g_wvi: [ [1, 3, 2], [4,3] ] (when B=2, wn(b=1) = 3, wn(b=2) = 2). """ loss = 0 # g_wvi = torch.tensor(g_wvi).to(device) for b, g_wvi1 in enumerate(g_wvi): # for i_wn, g_wvi11 in enumerate(g_wvi1): g_wn1 = len(g_wvi1) # modified # g_wn1 = g_wn[b] # modified if g_wn1 == 0: continue g_wvi1 = torch.tensor(g_wvi1)[:g_wn1].to(device) # modified g_st1 = g_wvi1[:,0] g_ed1 = g_wvi1[:,1] # loss from the start position loss += F.cross_entropy(s_wv[b,:g_wn1,:,0], g_st1) # print("st_login: ", s_wv[b,:g_wn1,:,0], g_st1, loss) # loss from the end position loss += F.cross_entropy(s_wv[b,:g_wn1,:,1], g_ed1) # print("ed_login: ", s_wv[b,:g_wn1,:,1], g_ed1, loss) return loss # ========= Decoder-Layer =========== class FT_s2s_1(nn.Module): """ Decoder-Layer """ def __init__(self, input_size, hidden_size, num_layer, dropout, max_seq_length, n_cond_ops, n_agg_ops, old=False): super(FT_s2s_1, self).__init__() self.input_size = input_size # input_size self.hidden_size = hidden_size # hidden_size self.ls = num_layer self.dropout = dropout self.n_cond_ops = n_cond_ops self.n_agg_ops = n_agg_ops self.n_where_num = 4 self.decoder_s2s = Decoder_s2s(input_size, hidden_size, num_layer, dropout, max_seq_length) def forward(self, wenc_s2s, l_input, cls_vec, pnt_start_tok, g_pnt_idxs=None): score = self.decoder_s2s(wenc_s2s, l_input, cls_vec, pnt_start_tok, g_pnt_idxs) return score def EG_forward(self, wenc_s2s, l_input, cls_vec, pnt_start_tok, pnt_end_tok, i_sql_vocab, i_nlu, i_hds, # for EG tokens, nlu, nlu_t, hds, tt_to_t_idx, # for EG tb, engine, beam_size=4, beam_only=True): """ EG-guided beam-search """ score = self.decoder_s2s.EG_forward(wenc_s2s, l_input, cls_vec, pnt_start_tok, pnt_end_tok, i_sql_vocab, i_nlu, i_hds, # for EG tokens, nlu, nlu_t, hds, tt_to_t_idx, # for EG tb, engine, beam_size, beam_only) return score class Decoder_s2s(nn.Module): def __init__(self, input_size=300, hidden_size=100, num_layer=2, dropout=0.3, max_seq_length=222, n_cond_ops=3): super(Decoder_s2s, self).__init__() self.input_size = input_size self.hidden_size = hidden_size self.num_layer = num_layer self.dropout = dropout self.mL = max_seq_length self.Tmax = 200 self.enc_h = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2), num_layers=num_layer, batch_first=True, dropout=dropout, 
bidirectional=True) self.enc_n = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2), num_layers=num_layer, batch_first=True, dropout=dropout, bidirectional=True) self.decode_pn = nn.LSTM(input_size=max_seq_length, hidden_size=hidden_size, num_layers=num_layer, batch_first=True, dropout=dropout) self.W_s2s = nn.Linear(input_size, hidden_size) self.W_pnt = nn.Linear(hidden_size, hidden_size) self.wv_out = nn.Sequential(nn.Tanh(), nn.Linear(hidden_size, 1)) def forward(self, wenc_s2s, l_input, cls_vec, pnt_start_tok, g_pnt_idxs=None,): # Encode bS, mL_input, input_size = wenc_s2s.shape # Now, pointer network. ipnt = wenc_s2s.new_zeros(bS, 1, mL_input).to(device) # [B, 1, 200] ipnt[:, 0, pnt_start_tok] = 1 # 27 is the start token under the current tokenization scheme # initial (current) pointer cpnt = ipnt # reshape wenc_s2s to incorporate T later wenc_s2s = wenc_s2s.unsqueeze(1) # h_0 and c_0 from cls_vec # They are not bidirectional. h_0 = torch.zeros([self.num_layer, bS, self.hidden_size]).to(device) c_0 = torch.zeros([self.num_layer, bS, self.hidden_size]).to(device) for i_layer in range(self.num_layer): h_st = (2*i_layer)*self.hidden_size h_ed = h_st + self.hidden_size c_st = (2*i_layer+1)*self.hidden_size c_ed = c_st + self.hidden_size h_0[i_layer] = cls_vec[:, h_st:h_ed] # [ # of layers, batch, dim] c_0[i_layer] = cls_vec[:, c_st:c_ed] # [ # of layers, batch, dim] if g_pnt_idxs: pnt_n = torch.zeros(bS, self.Tmax, mL_input).to(device) # one hot # assign index for b, g_pnt_idxs1 in enumerate(g_pnt_idxs): for t, g_pnt_idx in enumerate(g_pnt_idxs1): pnt_n[b, t, g_pnt_idx] = 1 # Encode dec_pn, _ = self.decode_pn(pnt_n, (h_0, c_0)) dec_pn = dec_pn.contiguous() # [bS, T, input_size] dec_pn = dec_pn.unsqueeze(2) # Calculate score s_wv = self.wv_out( self.W_s2s(wenc_s2s) + self.W_pnt(dec_pn) ).squeeze(3) # [B, T, mL_input, dim] -> [B, T, mL_input, 1] -> [B, T, mL_input] # s_wv = [B, 4, T, mL_n] = [batch, conds, token idx, score] # penalty for b, l_input1 in enumerate(l_input): if l_input1 < mL_input: s_wv[b, :, l_input1:] = -10000000000 else: t = 0 s_wv_list = [] cpnt_h = (h_0, c_0) while t < self.Tmax: dec_pn, cpnt_h = self.decode_pn(cpnt, cpnt_h) # lstm # [B, 1, 100] -> [B, 1, 1, 100] dec_pn = dec_pn.unsqueeze(2) # [bS, T, input_size] # get score s_wv1 = self.wv_out( self.W_s2s(wenc_s2s) # [B, 1, mL_input, dim] + self.W_pnt(dec_pn) # [B, T=1, 1, dim] Now, T=1 ).squeeze(3) # s_wv = [B, 4, 1, mL_n, 1] = [batch, conds, token idx, score] # -> [B, 4, mL_n] # Masking -- for b, l_input1 in enumerate(l_input): if l_input1 < mL_input: s_wv1[b, :, l_input1:] = -10000000000 # Collect score-- s_wv_list.append(s_wv1) # [B, 1, mL_input] -> [B, mL_n] -> [bS*(5-1)] # (max_val, max_indices) _val, pnt_n = s_wv1.view(bS, -1).max(dim=1) # formatting pnt_n as a one-hot input. cpnt = torch.zeros(bS, mL_input).to(device) # cpnt = cpnt.scatter_(dim=1, index=pnt_n.unsqueeze(1), src=1).to(device) cpnt = cpnt.scatter_(1, pnt_n.unsqueeze(1), 1) cpnt = cpnt.unsqueeze(1) # --> [B * 4, 1, 200] t += 1 s_wv = torch.stack(s_wv_list, 1) # [B, s_wv = s_wv.squeeze(2) # # # Following lines seem to be unnecessary. 
# # Penalty to blank parts # for b, l_input1 in enumerate(l_input): # if l_input1 < mL_input: # s_wv[b, :, l_input1:] = -10000000000 return s_wv def EG_forward(self, wenc_s2s, l_input, cls_vec, pnt_start_tok, pnt_end_tok, i_sql_vocab, i_nlu, i_hds, # for EG tokens, nlu, nlu_t, hds, tt_to_t_idx, # for EG tb, engine, beam_size, beam_only=True): # Encode bS, mL_input, input_size = wenc_s2s.shape # reshape wenc_s2s to incorporate T later wenc_s2s = wenc_s2s.unsqueeze(1) # h_0 and c_0 from cls_vec # They are not bidirectional. h_0 = torch.zeros([self.num_layer, bS, self.hidden_size]).to(device) c_0 = torch.zeros([self.num_layer, bS, self.hidden_size]).to(device) for i_layer in range(self.num_layer): h_st = (2*i_layer)*self.hidden_size h_ed = h_st + self.hidden_size c_st = (2*i_layer+1)*self.hidden_size c_ed = c_st + self.hidden_size h_0[i_layer] = cls_vec[:, h_st:h_ed] # [ # of layers, batch, dim] c_0[i_layer] = cls_vec[:, c_st:c_ed] # [ # of layers, batch, dim] # initial (current) pointer pnt_list_beam = [] cpnt_beam = [] cpnt_h_beam = [] for i_beam in range(beam_size): pnt_list_beam1 = [] for b in range(bS): pnt_list_beam1.append( [ [pnt_start_tok], 0] ) pnt_list_beam.append(pnt_list_beam1) # initial cpnt # Now, initialize pointer network. ipnt = wenc_s2s.new_zeros(bS, 1, mL_input).to(device) # [B, 1, 200] # Distort ipnt by i_beam on purpose to avoid initial duplication of beam-search ipnt[:, 0, pnt_start_tok] = 1 # 27 is the start token under the current tokenization scheme cpnt_beam.append(ipnt) cpnt_h_beam.append( (h_0, c_0) ) t = 0 while t < self.Tmax: # s_wv1_beam = [] candidates = [ [] for b in range(bS) ] # [bS] # Generate beam for i_beam, cpnt in enumerate(cpnt_beam): cpnt_h = cpnt_h_beam[i_beam] pnt_list_beam1 = pnt_list_beam[i_beam] dec_pn, cpnt_h = self.decode_pn(cpnt, cpnt_h) # lstm cpnt_h_beam[i_beam] = cpnt_h # [B, 1, 100] -> [B, 1, 1, 100] dec_pn = dec_pn.unsqueeze(2) # [bS, T, input_size] # get score s_wv1 = self.wv_out( self.W_s2s(wenc_s2s) # [B, 1, mL_input, dim] + self.W_pnt(dec_pn) # [B, T=1, 1, dim] Now, T=1 ).squeeze(3) # s_wv = [B, 4, 1, mL_n, 1] = [batch, conds, token idx, score] # -> [B, 4, mL_n] # Masking -- for b, l_input1 in enumerate(l_input): if l_input1 < mL_input: s_wv1[b, :, l_input1:] = -10000000000 # Get the candidates only among the input space. prob, idxs = F.softmax(s_wv1.view(bS, -1), dim=1).topk(dim=1, k=max(l_input)) log_prob = torch.log(prob) # [bS, beam_size] for b, log_prob1 in enumerate(log_prob): pnt_list11, score = pnt_list_beam1[b] for i_can, log_prob11 in enumerate(log_prob1): # no update if last token was the end-token previous_pnt = pnt_list11[-1] if previous_pnt== pnt_end_tok: new_seq = pnt_list11 new_score = score else: new_seq = pnt_list11 + [idxs[b][i_can].item()] new_score = score + log_prob11.item() _candidate = [new_seq, new_score] candidates[b].append(_candidate) # Execution-guided beam filtering for b, candidates1 in enumerate(candidates): new_pnt_list_batch1 = sorted(candidates1, key=lambda list1: list1[-1], reverse=True) count = 0 selected_candidates1 = [] for new_pnt_list_batch11 in new_pnt_list_batch1: if new_pnt_list_batch11 not in selected_candidates1: if beam_only: selected_candidates1.append(new_pnt_list_batch11) pnt_list_beam[count][b] = new_pnt_list_batch11 count +=1 else: # Need to be modified here. 
executable = False testable = False pr_i_vg_list, pr_i_vg_sub_list = gen_i_vg_from_pnt_idxs([new_pnt_list_batch11[0]], [i_sql_vocab[b]], [i_nlu[b]], [i_hds[b]]) pr_sql_q_s2s, pr_sql_i = gen_sql_q_from_i_vg([tokens[b]], [nlu[b]], [nlu_t[b]], [hds[b]], [tt_to_t_idx[b]], pnt_start_tok, pnt_end_tok, [new_pnt_list_batch11[0]], pr_i_vg_list, pr_i_vg_sub_list) # check testability from select-clause try: # check whether basic elements presents in pr_sql_i # If so, it is testable. idx_agg = pr_sql_i[0]["agg"] idx_sel = pr_sql_i[0]["sel"] testable = True except: testable = False pass # check the presence of conds if testable: try: conds = pr_sql_i[0]["conds"] except: conds = [] try: pr_ans1 = engine.execute(tb[b]['id'], idx_sel, idx_agg, conds) executable = bool(pr_ans1) except: executable = False # if testable: if executable: add_candidate = True else: add_candidate = False else: add_candidate = True if add_candidate: selected_candidates1.append(new_pnt_list_batch11) pnt_list_beam[count][b] = new_pnt_list_batch11 count += 1 if count == beam_size: break if count < beam_size: # not executable at all.. # add junk sequence. for i_junk in range(count, beam_size): pnt_list_beam[i_junk][b] = [[pnt_end_tok],-9999999] # generate cpnt # formatting pnt_n as a one-hot input. for i_beam in range(beam_size): cpnt = torch.zeros(bS, mL_input).to(device) # cpnt = cpnt.scatter_(dim=1, index=pnt_n.unsqueeze(1), src=1).to(device) idx_batch = [seq_score[0][-1] for seq_score in pnt_list_beam[i_beam]] pnt_n = torch.tensor(idx_batch).to(device) cpnt = cpnt.scatter_(1, pnt_n.unsqueeze(1), 1) cpnt = cpnt.unsqueeze(1) # --> [B, t=1, mL_input] cpnt_beam[i_beam] = cpnt t += 1 # Generate best pr_pnt_list, p_tot pr_pnt_idxs = [] p_list = [] for b in range(bS): pnt_list_beam_best = pnt_list_beam[0] pr_pnt_idxs.append(pnt_list_beam_best[b][0]) p_list.append( pnt_list_beam_best[b][1]) return pr_pnt_idxs, p_list, pnt_list_beam # ============= Shallow-Layer =============== class FT_Scalar_1(nn.Module): """ Shallow-Layer """ def __init__(self, input_size, hidden_size, num_layer, dropout, n_cond_ops, n_agg_ops, old=False): super(FT_Scalar_1, self).__init__() self.input_size = input_size # input_size self.hidden_size = hidden_size self.num_layer = num_layer self.dropout = dropout self.n_cond_ops = n_cond_ops self.n_agg_ops = n_agg_ops self.n_where_num = 4 def scp(self, wemb_h, l_header): bS, max_header_len, _ = wemb_h.shape # s_sc s_sc = torch.zeros(bS, max_header_len).to(device) s_sc[:, :] = wemb_h[:, :, 0] # s_sc = [B, max_header length, 1] # s_sc[:,:] = F.tanh(wemb_h[:,:,0]) # s_sc = [B, max_header length, 1] # s_sc = s_sc.squeeze(2) # masking # print(f"s_sc {s_sc}") for b, l_header1 in enumerate(l_header): s_sc[b, l_header1:] = -9999999999.0 return s_sc def sap(self, wemb_h, pr_sc, idx_st, idx_ed): bS, max_header_len, _ = wemb_h.shape # select of aggregation operator s_sa = torch.zeros([bS, self.n_agg_ops]).to(device) for b, pr_sc1 in enumerate(pr_sc): s_sa[b,:] = wemb_h[b,pr_sc1,idx_st:idx_ed] return s_sa def wnp(self, cls_vec): bS = cls_vec.shape[0] # [B,hidden_size] -> [B, n_where_num+1] s_wn = torch.zeros(bS, (self.n_where_num + 1)).to(device) s_wn[:, :] = cls_vec[:, 0:(self.n_where_num + 1)] return s_wn def wcp(self, wemb_h, l_header, idx_st, idx_ed): bS, max_header_len, _ = wemb_h.shape s_wc = torch.zeros(bS, max_header_len, 1).to(device) s_wc[:, :, :] = wemb_h[:, :, idx_st:idx_ed] s_wc = s_wc.squeeze(2) # [B, max_header_length] # masking for b, l_header1 in enumerate(l_header): s_wc[b, l_header1:] = -99999999999.0 return s_wc 
def wop(self, wemb_h, pr_wc, idx_st, idx_ed): bS, max_header_len, _ = wemb_h.shape s_wo = torch.zeros([bS, self.n_where_num, self.n_cond_ops]).to(device) for b, pr_wc1 in enumerate(pr_wc): if len(pr_wc1) > 0: s_wo[b, 0:len(pr_wc1), :] = wemb_h[b, pr_wc1, idx_st:idx_ed] else: pass return s_wo def wvp(self, emb_question, len_question, pr_wc): bS, _, _ = emb_question.shape s_wv = torch.zeros([bS, self.n_where_num, max(len_question), 2]).to(device) for b, pr_wc1 in enumerate(pr_wc): if len(pr_wc1) > 0: # start logit s_wv[b, 0:len(pr_wc1), :, 0] = emb_question[b, :, pr_wc1].transpose(0, 1) # end logit s_wv[b, 0:len(pr_wc1), :, 1] = emb_question[b, :, [pr_wc11 + 100 for pr_wc11 in pr_wc1]].transpose(0, 1) else: pass # masking # penalty for spurious tokens for b, l_n1 in enumerate(len_question): if l_n1 < max(len_question): s_wv[b, :, l_n1:, :] = -1e+11 return s_wv def forward(self, emb_question, len_question, wemb_h, l_header, cls_vec, g_sc=None, g_sa=None, g_wn=None, g_wc=None, g_wo=None, g_wvi=None, show_p_sc=False, show_p_sa=False, show_p_wn=False, show_p_wc=False, show_p_wo=False, show_p_wv=False): # emb_question = [B, max_nlu_token_length, hidden_size] # here, # of target_layer is fixed to 1. # wemb_h = [B, max_header #, hidden_size] s_sc = self.scp(wemb_h, l_header) if g_sc: pr_sc = g_sc else: pr_sc = pred_sc(s_sc) # s_sa idx_st = 1 idx_ed = 1 + self.n_agg_ops s_sa = self.sap(wemb_h, pr_sc, idx_st, idx_ed) if g_sa: pr_sa = g_sa else: pr_sa = pred_sa(s_sa) # where_number s_wn = self.wnp(cls_vec) if g_wn: pr_wn = g_wn else: pr_wn = pred_wn(s_wn) # wc idx_st = idx_ed+1 idx_ed = idx_st+1 s_wc = self.wcp(wemb_h, l_header, idx_st, idx_ed) if g_wc: pr_wc = g_wc else: pr_wc = pred_wherecolumn(pr_wn, s_wc) # wo idx_st = idx_ed+1 idx_ed = idx_st + self.n_cond_ops s_wo = self.wop(wemb_h, pr_wc, idx_st, idx_ed) if g_wo: pr_wo = g_wo else: pr_wo = pred_wo(pr_wn, s_wo) # wv # s_wv = [bS, 4, mL, 2] s_wv = self.wvp(emb_question, len_question, pr_wc) # print(s_wv) # s_wv = F.tanh(s_wv) return s_sc, s_sa, s_wn, s_wc, s_wo, s_wv def forward_EG(self, emb_question, len_question, wemb_h, l_header, cls_vec, engine, tb, nlu_t, nlu_tt, tt_to_t_idx, nlu, beam_size=4): """ Execution-guided beam decoding. Essentially identical with that of NL2SQL Layer. """ # Select-clause prob_sca, pr_sc_best, pr_sa_best, \ p_sc_best, p_sa_best, p_select \ = self.EG_decoding_select(wemb_h, l_header, tb, beam_size=beam_size) # Where-clause prob_w, prob_wn_w, pr_wn_based_on_prob, pr_sql_i, pr_wvi_best, \ p_where, p_wn_best, p_wc_best, p_wo_best, p_wvi_best \ = self.EG_decoding_where(emb_question, len_question, wemb_h, l_header, cls_vec, engine, tb, nlu_t, nlu_tt, tt_to_t_idx, nlu, pr_sc_best, pr_sa_best, beam_size=4) p_tot = cal_prob_tot(p_select, p_where) return pr_sc_best, pr_sa_best, pr_wn_based_on_prob, pr_wvi_best, \ pr_sql_i, p_tot, p_select, p_where, p_sc_best, p_sa_best, \ p_wn_best, p_wc_best, p_wo_best, p_wvi_best def EG_decoding_select(self, wemb_h, l_header, tb, beam_size=4, show_p_sc=False, show_p_sa=False): # sc s_sc = self.scp(wemb_h, l_header) prob_sc = F.softmax(s_sc, dim=-1) bS, mcL = s_sc.shape # minimum_header_length = min(l_header) # beam_size = minimum_header_length if beam_size > minimum_header_length else beam_size # sa # Construct all possible sc_sa_score prob_sc_sa = torch.zeros([bS, beam_size, self.n_agg_ops]).to(device) score_sc_sa = torch.zeros([bS, beam_size, self.n_agg_ops]).to(device) prob_sca = torch.zeros_like(prob_sc_sa).to(device) # get the top-k indices. 
pr_sc_beam = [B, beam_size] pr_sc_beam = pred_sc_beam(s_sc, beam_size) # calculate and predict s_sa. idx_st = 1 idx_ed = 1 + self.n_agg_ops for i_beam in range(beam_size): pr_sc = list(array(pr_sc_beam)[:, i_beam]) s_sa = self.sap(wemb_h, pr_sc, idx_st, idx_ed) prob_sa = F.softmax(s_sa, dim=-1) prob_sc_sa[:, i_beam, :] = prob_sa score_sc_sa[:, i_beam, :] = s_sa prob_sc_selected = prob_sc[range(bS), pr_sc] # [B] prob_sca[:, i_beam, :] = (prob_sa.t() * prob_sc_selected).t() # [mcL, B] * [B] -> [mcL, B] (element-wise multiplication) # [mcL, B] -> [B, mcL] # Calculate the dimension of tensor # tot_dim = len(prob_sca.shape) idxs = topk_multi_dim(torch.tensor(prob_sca), n_topk=beam_size, batch_exist=True) # Now as sc_idx is already sorted, re-map them properly. idxs = remap_sc_idx(idxs, pr_sc_beam) # [sc_beam_idx, sa_idx] -> [sc_idx, sa_idx] idxs_arr = array(idxs) # [B, beam_size, remainig dim] # idxs[b][0] gives first probable [sc_idx, sa_idx] pairs. # idxs[b][1] gives of second. # Calculate prob_sca, a joint probability beam_idx_sca = [0] * bS beam_meet_the_final = [False] * bS while True: pr_sc = idxs_arr[range(bS), beam_idx_sca, 0] pr_sa = idxs_arr[range(bS), beam_idx_sca, 1] # map index properly check = check_sc_sa_pairs(tb, pr_sc, pr_sa) if sum(check) == bS: break else: for b, check1 in enumerate(check): if not check1: # wrong pair beam_idx_sca[b] += 1 if beam_idx_sca[b] >= beam_size: beam_meet_the_final[b] = True beam_idx_sca[b] -= 1 else: beam_meet_the_final[b] = True if sum(beam_meet_the_final) == bS: break # Now pr_sc, pr_sa are properly predicted. pr_sc_best = list(pr_sc) pr_sa_best = list(pr_sa) # output for later analysis. p_sc_best = cal_prob_sc(s_sc, pr_sc_best) p_sa_best = cal_prob_sa(score_sc_sa[range(bS), beam_idx_sca, :].squeeze(1), pr_sa_best) p_select = cal_prob_select(p_sc_best, p_sa_best) # p_select = prob_sca[range(bS),beam_idx_sca,pr_sa_best].detach().to('cpu').numpy() return prob_sca, pr_sc_best, pr_sa_best, p_sc_best, p_sa_best, p_select def EG_decoding_where(self, emb_question, len_question, wemb_h, l_header, cls_vec, engine, tb, nlu_t, nlu_wp_t, tt_to_t_idx, nlu, pr_sc_best, pr_sa_best, beam_size=4, show_p_wn=False, show_p_wc=False, show_p_wo=False, show_p_wv=False): bS, max_header_len, _ = wemb_h.shape # Now, Where-clause beam search. idx_st = 1 idx_ed = 1 + self.n_agg_ops s_wn = self.wnp(cls_vec) prob_wn = F.softmax(s_wn, dim=-1).detach().to('cpu').numpy() # Found "executable" most likely 4(=max_num_of_conditions) where-clauses. # wc idx_st = idx_ed + 1 idx_ed = idx_st + 1 s_wc = self.wcp(wemb_h, l_header, idx_st, idx_ed) prob_wc = torch.sigmoid(s_wc).detach().to('cpu').numpy() # pr_wc_sorted_by_prob = pred_wc_sorted_by_prob(s_wc) # get max_wn # of most probable columns & their prob. pr_wn_max = [self.n_where_num] * bS pr_wc_max = pred_wherecolumn(pr_wn_max, s_wc) # if some column do not have executable where-claouse, omit that column prob_wc_max = zeros([bS, self.n_where_num]) for b, pr_wc_max1 in enumerate(pr_wc_max): prob_wc_max[b, :] = prob_wc[b, pr_wc_max1] # get most probable n_where_num where-clouses # wo idx_st = idx_ed + 1 idx_ed = idx_st + self.n_cond_ops s_wo_max = self.wop(wemb_h, pr_wc_max, idx_st, idx_ed) prob_wo_max = F.softmax(s_wo_max, dim=-1).detach().to('cpu').numpy() # [B, n_where_num, n_cond_op] pr_wvi_beam_op_list = [] prob_wvi_beam_op_list = [] prob_wvi_beam_st_op_list = [] prob_wvi_beam_ed_op_list = [] # To re-use code, repeat the calculation unnecessarily. 
for i_op in range(self.n_cond_ops - 1): pr_wo_temp = [[i_op] * self.n_where_num] * bS # wv s_wv = self.wvp(emb_question, len_question, pr_wc_max) prob_wv = F.softmax(s_wv, dim=-2).detach().to('cpu').numpy() # prob_wv pr_wvi_beam, prob_wvi_beam, prob_wvi_beam_st, prob_wvi_beam_ed = pred_wvi_se_beam(self.n_where_num, s_wv, beam_size) pr_wvi_beam_op_list.append(pr_wvi_beam) prob_wvi_beam_op_list.append(prob_wvi_beam) prob_wvi_beam_st_op_list.append(prob_wvi_beam_st) prob_wvi_beam_ed_op_list.append(prob_wvi_beam_ed) # pr_wvi_beam = [B, n_where_num, k_logit**2 [st, ed] paris] # pred_wv_beam # Calculate joint probability of where-clause # prob_w = [batch, wc, wo, wv] = [B, n_where_num, n_cond_op, n_pairs] n_wv_beam_pairs = prob_wvi_beam.shape[2] prob_w = zeros([bS, self.n_where_num, self.n_cond_ops - 1, n_wv_beam_pairs]) prob_wc_dupl = zeros([bS, self.n_where_num, self.n_cond_ops - 1, n_wv_beam_pairs]) prob_wo_dupl = zeros([bS, self.n_where_num, self.n_cond_ops - 1, n_wv_beam_pairs]) prob_wvi_st_dupl = zeros([bS, self.n_where_num, self.n_cond_ops - 1, n_wv_beam_pairs]) prob_wvi_ed_dupl = zeros([bS, self.n_where_num, self.n_cond_ops - 1, n_wv_beam_pairs]) for b in range(bS): for i_wn in range(self.n_where_num): for i_op in range(self.n_cond_ops - 1): # do not use final one p_wc = prob_wc_max[b, i_wn] for i_wv_beam in range(n_wv_beam_pairs): # i_wc = pr_wc_max[b][i_wn] # already done p_wo = prob_wo_max[b, i_wn, i_op] p_wv = prob_wvi_beam_op_list[i_op][b, i_wn, i_wv_beam] prob_w[b, i_wn, i_op, i_wv_beam] = p_wc * p_wo * p_wv prob_wc_dupl[b, i_wn, i_op, i_wv_beam] = p_wc prob_wo_dupl[b, i_wn, i_op, i_wv_beam] = p_wo p_wv_st = prob_wvi_beam_st_op_list[i_op][b, i_wn, i_wv_beam] p_wv_ed = prob_wvi_beam_ed_op_list[i_op][b, i_wn, i_wv_beam] prob_wvi_st_dupl[b, i_wn, i_op, i_wv_beam] = p_wv_st prob_wvi_ed_dupl[b, i_wn, i_op, i_wv_beam] = p_wv_ed # Perform execution guided decoding conds_max = [] prob_conds_max = [] # while len(conds_max) < self.n_where_num: idxs = topk_multi_dim(torch.tensor(prob_w), n_topk=beam_size, batch_exist=True) # idxs = [B, i_wc_beam, i_op, i_wv_pairs] # Construct conds1. Collect only executable one. It is descending order of the probability. pr_wvi_max = [] p_wc_max = [] p_wo_max = [] p_wvi_max = [] for b, idxs1 in enumerate(idxs): conds_max1 = [] prob_conds_max1 = [] pr_wvi1_max = [] p_wc1_max = [] p_wo1_max = [] p_wvi1_max = [] for i_wn, idxs11 in enumerate(idxs1): i_wc = pr_wc_max[b][idxs11[0]] i_op = idxs11[1] wvi = pr_wvi_beam_op_list[i_op][b][idxs11[0]][idxs11[2]] # idx11[0] # get wv_str temp_pr_wv_str, _ = convert_pred_wvi_to_string([[wvi]], [nlu_t[b]], [nlu_wp_t[b]], [tt_to_t_idx[b]], [nlu[b]]) merged_wv11 = merge_wv_t1_eng(temp_pr_wv_str[0][0], nlu[b]) conds11 = [i_wc, i_op, merged_wv11] prob_conds11 = prob_w[b, idxs11[0], idxs11[1], idxs11[2]] p_wc11_max = prob_wc_dupl[b, idxs11[0], idxs11[1], idxs11[2]] p_wo11_max = prob_wo_dupl[b, idxs11[0], idxs11[1], idxs11[2]] p_wvi11_max = [ prob_wvi_st_dupl[b, idxs11[0], idxs11[1], idxs11[2]], prob_wvi_ed_dupl[b, idxs11[0], idxs11[1], idxs11[2]] ] # test execution # print(nlu[b]) # print(tb[b]['id'], tb[b]['types'], pr_sc[b], pr_sa[b], [conds11]) pr_ans = engine.execute(tb[b]['id'], pr_sc_best[b], pr_sa_best[b], [conds11]) if bool(pr_ans): # pr_ans is not empty! 
conds_max1.append(conds11) prob_conds_max1.append(prob_conds11) pr_wvi1_max.append(wvi) p_wc1_max.append(p_wc11_max) p_wo1_max.append(p_wo11_max) p_wvi1_max.append(p_wvi11_max) conds_max.append(conds_max1) prob_conds_max.append(prob_conds_max1) pr_wvi_max.append(pr_wvi1_max) p_wc_max.append(p_wc1_max) p_wo_max.append(p_wo1_max) p_wvi_max.append(p_wvi1_max) # May need to do more exhuastive search? # i.e. up to.. getting all executable cases. # Calculate total probability to decide the number of where-clauses pr_sql_i = [] prob_wn_w = [] # total where-clause probability pr_wn_based_on_prob = [] pr_wvi_best = [] p_wc = [] p_wo = [] p_wvi = [] for b, prob_wn1 in enumerate(prob_wn): max_executable_wn1 = len(conds_max[b]) prob_wn_w1 = [] prob_wn_w1.append(prob_wn1[0]) # wn=0 case. for i_wn in range(max_executable_wn1): prob_wn_w11 = prob_wn1[i_wn + 1] * prob_conds_max[b][i_wn] prob_wn_w1.append(prob_wn_w11) pr_wn_based_on_prob.append(argmax(prob_wn_w1)) prob_wn_w.append(prob_wn_w1) pr_sql_i1 = {'agg': pr_sa_best[b], 'sel': pr_sc_best[b], 'conds': conds_max[b][:pr_wn_based_on_prob[b]]} pr_wvi_best1 = pr_wvi_max[b][:pr_wn_based_on_prob[b]] pr_sql_i.append(pr_sql_i1) pr_wvi_best.append(pr_wvi_best1) p_wc.append( p_wc_max[b][:pr_wn_based_on_prob[b]] ) p_wo.append( p_wo_max[b][:pr_wn_based_on_prob[b]] ) p_wvi.append( p_wvi_max[b][:pr_wn_based_on_prob[b]] ) # s_wv = [B, n_where_num, max_nlu_tokens, 2] p_wn = cal_prob_wn(s_wn, pr_wn_based_on_prob) p_where = cal_prob_where(p_wn, p_wc, p_wo, p_wvi) return prob_w, prob_wn_w, pr_wn_based_on_prob, pr_sql_i, pr_wvi_best, \ p_where, p_wn, p_wc, p_wo, p_wvi def Loss_s2s(score, g_pnt_idxs): """ score = [B, T, max_seq_length] """ # WHERE string part loss = 0 for b, g_pnt_idxs1 in enumerate(g_pnt_idxs): ed = len(g_pnt_idxs1) - 1 score_part = score[b, :ed] loss += F.cross_entropy(score_part, torch.tensor(g_pnt_idxs1[1:]).to(device)) # +1 shift. return loss
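The "+1 shift" in Loss_s2s above is the usual teacher-forcing alignment: the score produced at step t is matched against the gold pointer selected at step t+1, so the start token is never a prediction target. A minimal self-contained sketch of that loss for a single example; the token ids and tensor sizes below are illustrative only and do not come from this record:

import torch
import torch.nn.functional as F

# Hypothetical gold pointer sequence: [start_tok, 5, 2, end_tok]
g_pnt_idxs1 = [27, 5, 2, 28]
T, vocab = len(g_pnt_idxs1), 50
score_b = torch.randn(T, vocab)          # decoder scores for one example

ed = len(g_pnt_idxs1) - 1
score_part = score_b[:ed]                # predictions at steps 0 .. T-2
target = torch.tensor(g_pnt_idxs1[1:])   # gold tokens at steps 1 .. T-1
loss = F.cross_entropy(score_part, target)
print(loss.item())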
39.419083
161
0.555163
70,692
0.944979
0
0
0
0
0
0
13,762
0.183964
d9f2ed71da13f5b57b61c1c386731f8180c40992
667
py
Python
www/app.py
leeeGreat/xlw_study_python
03d8eb59f6826b4689d6598ede6393ecbb5058fb
[ "MIT" ]
1
2018-03-12T12:29:21.000Z
2018-03-12T12:29:21.000Z
www/app.py
leeeGreat/xlw_study_python
03d8eb59f6826b4689d6598ede6393ecbb5058fb
[ "MIT" ]
null
null
null
www/app.py
leeeGreat/xlw_study_python
03d8eb59f6826b4689d6598ede6393ecbb5058fb
[ "MIT" ]
1
2018-04-13T13:26:50.000Z
2018-04-13T13:26:50.000Z
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

__author__ = 'Michael Liao'

'''
async web application.
'''

import logging; logging.basicConfig(level=logging.INFO)

import asyncio, os, json, time
from datetime import datetime

from aiohttp import web

def index(request):
    return web.Response(body=b'<h1>Awesome</h1>')

async def init(loop):
    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', index)
    srv = await loop.create_server(app.make_handler(), '127.0.0.1', 9000)
    logging.info('server started at http://127.0.0.1:9000...')
    return srv

loop = asyncio.get_event_loop()
loop.run_until_complete(init(loop))
loop.run_forever()
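For context: the `loop=loop` argument to web.Application and the make_handler()/create_server startup used in this file were deprecated in later aiohttp releases. A rough modern equivalent, assuming aiohttp 3.x, would look like this sketch:

import logging
from aiohttp import web

logging.basicConfig(level=logging.INFO)

async def index(request):
    return web.Response(text='<h1>Awesome</h1>', content_type='text/html')

app = web.Application()
app.router.add_get('/', index)
# run_app manages the event loop, server startup, and shutdown itself
web.run_app(app, host='127.0.0.1', port=9000)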
22.233333
73
0.698651
0
0
0
0
0
0
254
0.38081
171
0.256372
d9f306cc03073671d285f885169a3fe6dd743eef
684
py
Python
examples/Testing/flopy3_plotdata.py
ritchie46/flopy
8e7284dcb3aaf5c12293d442248c2c2d9959f835
[ "CC0-1.0", "BSD-3-Clause" ]
1
2021-03-17T09:15:54.000Z
2021-03-17T09:15:54.000Z
examples/Testing/flopy3_plotdata.py
ritchie46/flopy
8e7284dcb3aaf5c12293d442248c2c2d9959f835
[ "CC0-1.0", "BSD-3-Clause" ]
null
null
null
examples/Testing/flopy3_plotdata.py
ritchie46/flopy
8e7284dcb3aaf5c12293d442248c2c2d9959f835
[ "CC0-1.0", "BSD-3-Clause" ]
1
2021-08-05T19:11:27.000Z
2021-08-05T19:11:27.000Z
from __future__ import print_function
import os
import numpy as np
import matplotlib.pyplot as plt
import flopy

fb = flopy.modflow.Modflow.load('freyberg', version='mf2005',
                                model_ws=os.path.join('..', 'data', 'freyberg'),
                                verbose=True)
dis = fb.dis

top = fb.dis.top

fb.dis.top.plot(grid=True, colorbar=True)
fb.dis.botm.plot(grid=True, colorbar=True)
fb.dis.plot()
plt.show()

fb.dis.plot()
plt.show()

fig = plt.figure(figsize=(8, 8))
ax = fig.add_subplot(1, 2, 1, aspect='equal')
fb.dis.top.plot(grid=True, axes=ax, colorbar=True)
ax = fig.add_subplot(1, 2, 2, aspect='equal')
fb.dis.botm.plot(grid=True, axes=ax, colorbar=True)

plt.show()

print('this is the end my friend')
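A small aside on the plotting pattern above: on a headless machine the plt.show() calls display nothing, and the standard matplotlib workaround is to select a non-interactive backend and save figures to disk instead. A sketch under that assumption (the filename is made up):

import matplotlib
matplotlib.use('Agg')          # choose a non-interactive backend before pyplot is imported
import matplotlib.pyplot as plt

fig = plt.figure(figsize=(8, 8))
ax = fig.add_subplot(1, 1, 1)
ax.plot([0, 1], [0, 1])
fig.savefig('freyberg_top_botm.png', dpi=150)  # write to disk instead of plt.show()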
20.727273
124
0.71345
0
0
0
0
0
0
0
0
79
0.115497
d9f32d2b9e677d6893c7269bf23bcedaa4e7f68a
363
py
Python
chia/components/sample_transformers/__init__.py
cabrust/chia
3eaf815b261dc8a85d64fd698e0079515ec0dde9
[ "BSD-3-Clause" ]
null
null
null
chia/components/sample_transformers/__init__.py
cabrust/chia
3eaf815b261dc8a85d64fd698e0079515ec0dde9
[ "BSD-3-Clause" ]
2
2021-10-06T13:19:09.000Z
2021-10-20T17:32:36.000Z
chia/components/sample_transformers/__init__.py
cabrust/chia
3eaf815b261dc8a85d64fd698e0079515ec0dde9
[ "BSD-3-Clause" ]
null
null
null
from chia import components
from chia.components.sample_transformers import identity
from chia.components.sample_transformers.sample_transformer import SampleTransformer


class SampleTransformerFactory(components.Factory):
    name_to_class_mapping = {"identity": identity.IdentitySampleTransformer}


__all__ = ["SampleTransformer", "SampleTransformerFactory"]
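The module above is a name-to-class factory: a mapping from a configuration string to the concrete class to instantiate. The pattern is easy to demonstrate in isolation; all names in this sketch are hypothetical and not part of the chia codebase:

class GreeterA:
    def greet(self):
        return "hello from A"

class GreeterB:
    def greet(self):
        return "hello from B"

class GreeterFactory:
    # map a configuration string to a concrete class
    name_to_class_mapping = {"a": GreeterA, "b": GreeterB}

    @classmethod
    def create(cls, name):
        return cls.name_to_class_mapping[name]()

print(GreeterFactory.create("a").greet())  # -> "hello from A"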
33
84
0.85124
128
0.352617
0
0
0
0
0
0
55
0.151515
d9f3cb72d610ec30e4ecf05d60ba2025dc849112
416
py
Python
3/3.6/add_guest.py
singi2016cn/python-scaffold
274e508d1919da67e599aa73be139800c043bce4
[ "MIT" ]
null
null
null
3/3.6/add_guest.py
singi2016cn/python-scaffold
274e508d1919da67e599aa73be139800c043bce4
[ "MIT" ]
null
null
null
3/3.6/add_guest.py
singi2016cn/python-scaffold
274e508d1919da67e599aa73be139800c043bce4
[ "MIT" ]
null
null
null
# Add guests
names = []
names.append('singi')
names.append('lily')
names.append('sam')

print('I find a big dining-table, I can invite more friends.')
names.insert(0, 'xiaoling')
names.insert(2, 'fangsi')
names.append('zhangqing')

greets = ', would you like to have dinner with me?'
print(names[0] + greets)
print(names[1] + greets)
print(names[2] + greets)
print(names[3] + greets)
print(names[4] + greets)
print(names[5] + greets)
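The six near-identical print calls above would normally be collapsed into a loop. A minimal sketch, assuming the same `names` and `greets` defined in the file:

for name in names:
    print(name + greets)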
20.8
61
0.711538
0
0
0
0
0
0
0
0
157
0.370283
d9f53b3bd4af7f2d655423b3e5a97d903f5c6dac
2,025
py
Python
apps/pypi/tests/test_slurper.py
cartwheelweb/packaginator
f6ce11da22154bce9cba42e896989bdb0fd5e865
[ "MIT" ]
1
2015-11-08T11:31:09.000Z
2015-11-08T11:31:09.000Z
apps/pypi/tests/test_slurper.py
cartwheelweb/packaginator
f6ce11da22154bce9cba42e896989bdb0fd5e865
[ "MIT" ]
null
null
null
apps/pypi/tests/test_slurper.py
cartwheelweb/packaginator
f6ce11da22154bce9cba42e896989bdb0fd5e865
[ "MIT" ]
null
null
null
from django.template.defaultfilters import slugify
from django.test import TestCase

from package.models import Package, Version
from pypi.slurper import Slurper

TEST_PACKAGE_NAME = 'Django'
TEST_PACKAGE_VERSION = '1.3'
TEST_PACKAGE_REPO_NAME = 'django-uni-form'


class SlurpAllTests(TestCase):

    def test_get_latest_version_number(self):
        slurper = Slurper(TEST_PACKAGE_NAME)
        version = slurper.get_latest_version_number(TEST_PACKAGE_NAME)
        self.assertEquals(version, TEST_PACKAGE_VERSION)

    def test_get_or_create_package(self):
        slurper = Slurper(TEST_PACKAGE_NAME)
        version = slurper.get_latest_version_number(TEST_PACKAGE_NAME)
        package, created = slurper.get_or_create_package(TEST_PACKAGE_NAME, version)
        self.assertTrue(created)
        self.assertTrue(isinstance(package, Package))
        self.assertEquals(package.title, TEST_PACKAGE_NAME)
        self.assertEquals(package.slug, slugify(TEST_PACKAGE_NAME))

    def test_get_or_create_with_repo(self):
        slurper = Slurper(TEST_PACKAGE_REPO_NAME)
        version = slurper.get_latest_version_number(TEST_PACKAGE_REPO_NAME)
        package, created = slurper.get_or_create_package(TEST_PACKAGE_REPO_NAME, version)
        self.assertTrue(created)
        self.assertTrue(isinstance(package, Package))
        self.assertEquals(package.title, TEST_PACKAGE_REPO_NAME)
        self.assertEquals(package.slug, slugify(TEST_PACKAGE_REPO_NAME))

    def test_check_versions(self):
        slurper = Slurper(TEST_PACKAGE_REPO_NAME)
        version = slurper.get_latest_version_number(TEST_PACKAGE_REPO_NAME)
        # make me a package (Actually, make me a billionaire)
        slurper.get_or_create_package(TEST_PACKAGE_REPO_NAME, version)

        # fetch the package for testing
        package = Package.objects.get(title=TEST_PACKAGE_REPO_NAME)
        self.assertTrue(package.pypi_downloads > 1000)
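These tests hit the live PyPI index through Slurper, so they break when the index changes or the network is down. One common way to keep such tests deterministic is to stub the remote lookup with unittest.mock; the sketch below assumes Slurper.get_latest_version_number is the method doing the network call:

from unittest import mock

def test_get_latest_version_number_offline():
    # patch the lookup so no HTTP request is made
    with mock.patch.object(Slurper, 'get_latest_version_number', return_value='1.3'):
        slurper = Slurper('Django')
        assert slurper.get_latest_version_number('Django') == '1.3'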
39.705882
89
0.718025
1,760
0.869136
0
0
0
0
0
0
113
0.055802
d9f57949a15383ed2a070813678af904fe2e2df0
1,145
py
Python
azure-mgmt-logic/azure/mgmt/logic/models/recurrence_schedule_occurrence.py
azuresdkci1x/azure-sdk-for-python-1722
e08fa6606543ce0f35b93133dbb78490f8e6bcc9
[ "MIT" ]
1
2017-10-29T15:14:35.000Z
2017-10-29T15:14:35.000Z
azure-mgmt-logic/azure/mgmt/logic/models/recurrence_schedule_occurrence.py
azuresdkci1x/azure-sdk-for-python-1722
e08fa6606543ce0f35b93133dbb78490f8e6bcc9
[ "MIT" ]
null
null
null
azure-mgmt-logic/azure/mgmt/logic/models/recurrence_schedule_occurrence.py
azuresdkci1x/azure-sdk-for-python-1722
e08fa6606543ce0f35b93133dbb78490f8e6bcc9
[ "MIT" ]
null
null
null
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from msrest.serialization import Model


class RecurrenceScheduleOccurrence(Model):
    """RecurrenceScheduleOccurrence.

    :param day: The day of the week. Possible values include: 'Sunday',
     'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'
    :type day: str or :class:`DayOfWeek <azure.mgmt.logic.models.DayOfWeek>`
    :param occurrence: The occurrence.
    :type occurrence: int
    """

    _attribute_map = {
        'day': {'key': 'day', 'type': 'DayOfWeek'},
        'occurrence': {'key': 'occurrence', 'type': 'int'},
    }

    def __init__(self, day=None, occurrence=None):
        self.day = day
        self.occurrence = occurrence
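A quick usage sketch for the generated model above: instantiation plus serialization through msrest. The Serializer calls follow the usual msrest pattern and are an assumption on my part, not code from the generated file:

from msrest import Serializer

occurrence = RecurrenceScheduleOccurrence(day='Monday', occurrence=2)

# msrest serializers take a {class name: class} map of known models (assumed API)
serializer = Serializer({'RecurrenceScheduleOccurrence': RecurrenceScheduleOccurrence})
body = serializer.body(occurrence, 'RecurrenceScheduleOccurrence')
print(body)  # expected: {'day': 'Monday', 'occurrence': 2}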
34.69697
76
0.590393
629
0.549345
0
0
0
0
0
0
862
0.752838
d9f6bdae288edaa527af57b654eafa00cfa5047b
11,757
py
Python
pandas/core/apply.py
AakankshaAshok/pandas
6498bc1e8a12003640139db4794bd5cd2462c116
[ "BSD-3-Clause" ]
null
null
null
pandas/core/apply.py
AakankshaAshok/pandas
6498bc1e8a12003640139db4794bd5cd2462c116
[ "BSD-3-Clause" ]
null
null
null
pandas/core/apply.py
AakankshaAshok/pandas
6498bc1e8a12003640139db4794bd5cd2462c116
[ "BSD-3-Clause" ]
null
null
null
import inspect

import numpy as np

from pandas._libs import reduction as libreduction
from pandas.util._decorators import cache_readonly

from pandas.core.dtypes.common import (
    is_dict_like,
    is_extension_array_dtype,
    is_list_like,
    is_sequence,
)
from pandas.core.dtypes.generic import ABCSeries


def frame_apply(
    obj,
    func,
    axis=0,
    raw=False,
    result_type=None,
    ignore_failures=False,
    args=None,
    kwds=None,
):
    """ construct and return a row or column based frame apply object """

    axis = obj._get_axis_number(axis)
    if axis == 0:
        klass = FrameRowApply
    elif axis == 1:
        klass = FrameColumnApply

    return klass(
        obj,
        func,
        raw=raw,
        result_type=result_type,
        ignore_failures=ignore_failures,
        args=args,
        kwds=kwds,
    )


class FrameApply:
    def __init__(self, obj, func, raw, result_type, ignore_failures, args, kwds):
        self.obj = obj
        self.raw = raw
        self.ignore_failures = ignore_failures
        self.args = args or ()
        self.kwds = kwds or {}

        if result_type not in [None, "reduce", "broadcast", "expand"]:
            raise ValueError(
                "invalid value for result_type, must be one "
                "of {None, 'reduce', 'broadcast', 'expand'}"
            )

        self.result_type = result_type

        # curry if needed
        if (kwds or args) and not isinstance(func, (np.ufunc, str)):

            def f(x):
                return func(x, *args, **kwds)

        else:
            f = func

        self.f = f

        # results
        self.result = None
        self.res_index = None
        self.res_columns = None

    @property
    def columns(self):
        return self.obj.columns

    @property
    def index(self):
        return self.obj.index

    @cache_readonly
    def values(self):
        return self.obj.values

    @cache_readonly
    def dtypes(self):
        return self.obj.dtypes

    @property
    def agg_axis(self):
        return self.obj._get_agg_axis(self.axis)

    def get_result(self):
        """ compute the results """
        # dispatch to agg
        if is_list_like(self.f) or is_dict_like(self.f):
            return self.obj.aggregate(self.f, axis=self.axis, *self.args, **self.kwds)

        # all empty
        if len(self.columns) == 0 and len(self.index) == 0:
            return self.apply_empty_result()

        # string dispatch
        if isinstance(self.f, str):
            # Support for `frame.transform('method')`
            # Some methods (shift, etc.) require the axis argument, others
            # don't, so inspect and insert if necessary.
            func = getattr(self.obj, self.f)
            sig = inspect.getfullargspec(func)
            if "axis" in sig.args:
                self.kwds["axis"] = self.axis
            return func(*self.args, **self.kwds)

        # ufunc
        elif isinstance(self.f, np.ufunc):
            with np.errstate(all="ignore"):
                results = self.obj._data.apply("apply", func=self.f)
            return self.obj._constructor(
                data=results, index=self.index, columns=self.columns, copy=False
            )

        # broadcasting
        if self.result_type == "broadcast":
            return self.apply_broadcast()

        # one axis empty
        elif not all(self.obj.shape):
            return self.apply_empty_result()

        # raw
        elif self.raw and not self.obj._is_mixed_type:
            return self.apply_raw()

        return self.apply_standard()

    def apply_empty_result(self):
        """
        we have an empty result; at least 1 axis is 0

        we will try to apply the function to an empty
        series in order to see if this is a reduction function
        """

        # we are not asked to reduce or infer reduction
        # so just return a copy of the existing object
        if self.result_type not in ["reduce", None]:
            return self.obj.copy()

        # we may need to infer
        should_reduce = self.result_type == "reduce"

        from pandas import Series

        if not should_reduce:
            try:
                r = self.f(Series([]))
            except Exception:
                pass
            else:
                should_reduce = not isinstance(r, Series)

        if should_reduce:
            if len(self.agg_axis):
                r = self.f(Series([]))
            else:
                r = np.nan

            return self.obj._constructor_sliced(r, index=self.agg_axis)
        else:
            return self.obj.copy()

    def apply_raw(self):
        """ apply to the values as a numpy array """
        try:
            result = libreduction.compute_reduction(self.values, self.f, axis=self.axis)
        except ValueError as err:
            if "Function does not reduce" not in str(err):
                # catch only ValueError raised intentionally in libreduction
                raise
            result = np.apply_along_axis(self.f, self.axis, self.values)

        # TODO: mixed type case
        if result.ndim == 2:
            return self.obj._constructor(result, index=self.index, columns=self.columns)
        else:
            return self.obj._constructor_sliced(result, index=self.agg_axis)

    def apply_broadcast(self, target):
        result_values = np.empty_like(target.values)

        # axis which we want to compare compliance
        result_compare = target.shape[0]

        for i, col in enumerate(target.columns):
            res = self.f(target[col])
            ares = np.asarray(res).ndim

            # must be a scalar or 1d
            if ares > 1:
                raise ValueError("too many dims to broadcast")
            elif ares == 1:

                # must match return dim
                if result_compare != len(res):
                    raise ValueError("cannot broadcast result")

            result_values[:, i] = res

        # we *always* preserve the original index / columns
        result = self.obj._constructor(
            result_values, index=target.index, columns=target.columns
        )
        return result

    def apply_standard(self):

        # try to reduce first (by default)
        # this only matters if the reduction in values is of different dtype
        # e.g. if we want to apply to a SparseFrame, then can't directly reduce

        # we cannot reduce using non-numpy dtypes,
        # as demonstrated in gh-12244
        if (
            self.result_type in ["reduce", None]
            and not self.dtypes.apply(is_extension_array_dtype).any()
            # Disallow complex_internals since libreduction shortcut
            # cannot handle MultiIndex
            and not self.agg_axis._has_complex_internals
        ):

            values = self.values
            index = self.obj._get_axis(self.axis)
            labels = self.agg_axis
            empty_arr = np.empty(len(index), dtype=values.dtype)

            # Preserve subclass for e.g. test_subclassed_apply
            dummy = self.obj._constructor_sliced(
                empty_arr, index=index, dtype=values.dtype
            )

            try:
                result = libreduction.compute_reduction(
                    values, self.f, axis=self.axis, dummy=dummy, labels=labels
                )
            except ValueError as err:
                if "Function does not reduce" not in str(err):
                    # catch only ValueError raised intentionally in libreduction
                    raise
            except TypeError:
                # e.g. test_apply_ignore_failures we just ignore
                if not self.ignore_failures:
                    raise
            except ZeroDivisionError:
                # reached via numexpr; fall back to python implementation
                pass
            else:
                return self.obj._constructor_sliced(result, index=labels)

        # compute the result using the series generator
        self.apply_series_generator()

        # wrap results
        return self.wrap_results()

    def apply_series_generator(self):
        series_gen = self.series_generator
        res_index = self.result_index

        i = None
        keys = []
        results = {}
        if self.ignore_failures:
            successes = []
            for i, v in enumerate(series_gen):
                try:
                    results[i] = self.f(v)
                except Exception:
                    pass
                else:
                    keys.append(v.name)
                    successes.append(i)

            # so will work with MultiIndex
            if len(successes) < len(res_index):
                res_index = res_index.take(successes)

        else:
            for i, v in enumerate(series_gen):
                results[i] = self.f(v)
                keys.append(v.name)

        self.results = results
        self.res_index = res_index
        self.res_columns = self.result_columns

    def wrap_results(self):
        results = self.results

        # see if we can infer the results
        if len(results) > 0 and 0 in results and is_sequence(results[0]):

            return self.wrap_results_for_axis()

        # dict of scalars
        result = self.obj._constructor_sliced(results)
        result.index = self.res_index

        return result


class FrameRowApply(FrameApply):
    axis = 0

    def apply_broadcast(self):
        return super().apply_broadcast(self.obj)

    @property
    def series_generator(self):
        return (self.obj._ixs(i, axis=1) for i in range(len(self.columns)))

    @property
    def result_index(self):
        return self.columns

    @property
    def result_columns(self):
        return self.index

    def wrap_results_for_axis(self):
        """ return the results for the rows """
        results = self.results
        result = self.obj._constructor(data=results)

        if not isinstance(results[0], ABCSeries):
            if len(result.index) == len(self.res_columns):
                result.index = self.res_columns

        if len(result.columns) == len(self.res_index):
            result.columns = self.res_index

        return result


class FrameColumnApply(FrameApply):
    axis = 1

    def apply_broadcast(self):
        result = super().apply_broadcast(self.obj.T)
        return result.T

    @property
    def series_generator(self):
        constructor = self.obj._constructor_sliced
        return (
            constructor(arr, index=self.columns, name=name)
            for i, (arr, name) in enumerate(zip(self.values, self.index))
        )

    @property
    def result_index(self):
        return self.index

    @property
    def result_columns(self):
        return self.columns

    def wrap_results_for_axis(self):
        """ return the results for the columns """
        results = self.results

        # we have requested to expand
        if self.result_type == "expand":
            result = self.infer_to_same_shape()

        # we have a non-series and don't want inference
        elif not isinstance(results[0], ABCSeries):
            from pandas import Series

            result = Series(results)
            result.index = self.res_index

        # we may want to infer results
        else:
            result = self.infer_to_same_shape()

        return result

    def infer_to_same_shape(self):
        """ infer the results to the same shape as the input object """
        results = self.results
        result = self.obj._constructor(data=results)
        result = result.T

        # set the index
        result.index = self.res_index

        # infer dtypes
        result = result.infer_objects()

        return result
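The frame_apply machinery above is what backs DataFrame.apply, and its result_type branches ('reduce', 'broadcast', 'expand') are easiest to see from the public API. A small demonstration using standard pandas calls, not code from this record:

import numpy as np
import pandas as pd

df = pd.DataFrame({'a': [1, 2], 'b': [3, 4]})

print(df.apply(np.sum, axis=0))                                       # reduces each column to a scalar
print(df.apply(lambda row: [0, 0], axis=1))                           # list results stay as one object column
print(df.apply(lambda row: [0, 0], axis=1, result_type='expand'))     # list elements become columns
print(df.apply(lambda row: [0, 0], axis=1, result_type='broadcast'))  # result keeps the original shape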
28.745721
88
0.575572
10,893
0.926512
0
0
972
0.082674
0
0
2,178
0.185251
d9f7ffc0611459c276e6f9ae99c70b7e8ba1a1c3
707
py
Python
tests/test_model/test_recognizer/test_shufflenetv1.py
YinAoXiong/ZCls
8aeea3640f8456937db35d043e37cf2c03ac9017
[ "Apache-2.0" ]
null
null
null
tests/test_model/test_recognizer/test_shufflenetv1.py
YinAoXiong/ZCls
8aeea3640f8456937db35d043e37cf2c03ac9017
[ "Apache-2.0" ]
null
null
null
tests/test_model/test_recognizer/test_shufflenetv1.py
YinAoXiong/ZCls
8aeea3640f8456937db35d043e37cf2c03ac9017
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*-

"""
@date: 2021/5/16 10:22 PM
@file: test_shufflenetv1.py
@author: zj
@description:
"""

import torch

from zcls.config import cfg
from zcls.config.key_word import KEY_OUTPUT
from zcls.model.recognizers.build import build_recognizer


def test_data(model):
    data = torch.randn(1, 3, 224, 224)
    outputs = model(data)[KEY_OUTPUT]
    print(outputs.shape)

    assert outputs.shape == (1, 1000)


def test_shufflenet():
    cfg.merge_from_file('configs/benchmarks/shufflenet/shufflenet_v1_3g2x_zcls_imagenet_224.yaml')
    print(cfg)
    model = build_recognizer(cfg, torch.device('cpu'))
    print(model)

    test_data(model)


if __name__ == '__main__':
    test_shufflenet()
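The test above follows a common smoke-test pattern for vision models: push one dummy batch through the network and assert only the output shape, not the values. The same idea reduced to a self-contained sketch; the stand-in model here is purely illustrative so the snippet runs on its own:

import torch
import torch.nn as nn

def smoke_test(model, num_classes=1000):
    data = torch.randn(1, 3, 224, 224)    # one fake RGB image batch
    out = model(data)
    assert out.shape == (1, num_classes)  # check only the output contract

# tiny stand-in model: pool to (1, 3), then project to class logits
model = nn.Sequential(nn.AdaptiveAvgPool2d(1), nn.Flatten(), nn.Linear(3, 1000))
smoke_test(model)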
19.638889
98
0.711457
0
0
0
0
0
0
0
0
202
0.284107
d9f8dcb19533a96faaad26bde1b0790a5c363c97
142,263
py
Python
autotest/gcore/vsis3.py
jpapadakis/gdal
f07aa15fd65af36b04291303cc6834c87f662814
[ "MIT" ]
18
2021-01-27T00:07:35.000Z
2022-03-25T22:20:13.000Z
autotest/gcore/vsis3.py
jpapadakis/gdal
f07aa15fd65af36b04291303cc6834c87f662814
[ "MIT" ]
1
2015-04-14T00:19:57.000Z
2015-04-14T00:29:29.000Z
autotest/gcore/vsis3.py
jpapadakis/gdal
f07aa15fd65af36b04291303cc6834c87f662814
[ "MIT" ]
1
2021-11-21T02:33:51.000Z
2021-11-21T02:33:51.000Z
#!/usr/bin/env pytest ############################################################################### # $Id$ # # Project: GDAL/OGR Test Suite # Purpose: Test /vsis3 # Author: Even Rouault <even dot rouault at spatialys dot com> # ############################################################################### # Copyright (c) 2015, Even Rouault <even dot rouault at spatialys dot com> # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. ############################################################################### import json import os.path import stat import sys from osgeo import gdal import gdaltest import webserver import pytest def open_for_read(uri): """ Opens a test file for reading. """ return gdal.VSIFOpenExL(uri, 'rb', 1) ############################################################################### def test_vsis3_init(): gdaltest.aws_vars = {} for var in ('AWS_SECRET_ACCESS_KEY', 'AWS_ACCESS_KEY_ID', 'AWS_TIMESTAMP', 'AWS_HTTPS', 'AWS_VIRTUAL_HOSTING', 'AWS_S3_ENDPOINT', 'AWS_REQUEST_PAYER', 'AWS_DEFAULT_REGION', 'AWS_DEFAULT_PROFILE', 'AWS_PROFILE', 'AWS_NO_SIGN_REQUEST'): gdaltest.aws_vars[var] = gdal.GetConfigOption(var) if gdaltest.aws_vars[var] is not None: gdal.SetConfigOption(var, "") # To avoid user AWS credentials in ~/.aws/credentials and ~/.aws/config # to mess up our tests gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '') gdal.SetConfigOption('AWS_CONFIG_FILE', '') gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', '') assert gdal.GetSignedURL('/vsis3/foo/bar') is None ############################################################################### # Test AWS_NO_SIGN_REQUEST=YES def test_vsis3_no_sign_request(): if not gdaltest.built_against_curl(): pytest.skip() with gdaltest.config_option('AWS_NO_SIGN_REQUEST', 'YES'): actual_url = gdal.GetActualURL('/vsis3/landsat-pds/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF') assert actual_url == 'https://landsat-pds.s3.amazonaws.com/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF' actual_url = gdal.GetActualURL('/vsis3_streaming/landsat-pds/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF') assert actual_url == 'https://landsat-pds.s3.amazonaws.com/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF' f = open_for_read('/vsis3/landsat-pds/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF') if f is None: if gdaltest.gdalurlopen('https://landsat-pds.s3.amazonaws.com/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF') is None: pytest.skip('cannot open URL') 
pytest.fail() gdal.VSIFCloseL(f) ############################################################################### # Test Sync() and multithreaded download def test_vsis3_sync_multithreaded_download(): if not gdaltest.built_against_curl(): pytest.skip() def cbk(pct, _, tab): assert pct >= tab[0] tab[0] = pct return True tab = [ -1 ] # Use a public bucket with /test_dummy/foo and /test_dummy/bar files with gdaltest.config_option('AWS_NO_SIGN_REQUEST', 'YES'): assert gdal.Sync('/vsis3/cdn.proj.org/test_dummy', '/vsimem/test_vsis3_no_sign_request_sync', options=['NUM_THREADS=2'], callback=cbk, callback_data=tab) assert tab[0] == 1.0 assert gdal.VSIStatL('/vsimem/test_vsis3_no_sign_request_sync/test_dummy/foo').size == 4 assert gdal.VSIStatL('/vsimem/test_vsis3_no_sign_request_sync/test_dummy/bar').size == 4 gdal.RmdirRecursive('/vsimem/test_vsis3_no_sign_request_sync') ############################################################################### # Test Sync() and multithreaded download and CHUNK_SIZE def test_vsis3_sync_multithreaded_download_chunk_size(): if not gdaltest.built_against_curl(): pytest.skip() def cbk(pct, _, tab): assert pct >= tab[0] tab[0] = pct return True tab = [ -1 ] # Use a public bucket with /test_dummy/foo and /test_dummy/bar files with gdaltest.config_option('AWS_NO_SIGN_REQUEST', 'YES'): assert gdal.Sync('/vsis3/cdn.proj.org/test_dummy', '/vsimem/test_vsis3_no_sign_request_sync', options=['NUM_THREADS=2', 'CHUNK_SIZE=3'], callback=cbk, callback_data=tab) assert tab[0] == 1.0 assert gdal.VSIStatL('/vsimem/test_vsis3_no_sign_request_sync/test_dummy/foo').size == 4 assert gdal.VSIStatL('/vsimem/test_vsis3_no_sign_request_sync/test_dummy/bar').size == 4 gdal.RmdirRecursive('/vsimem/test_vsis3_no_sign_request_sync') ############################################################################### # Error cases def test_vsis3_1(): if not gdaltest.built_against_curl(): pytest.skip() # Missing AWS_SECRET_ACCESS_KEY gdal.ErrorReset() with gdaltest.error_handler(): f = open_for_read('/vsis3/foo/bar') assert f is None and gdal.VSIGetLastErrorMsg().find('AWS_SECRET_ACCESS_KEY') >= 0 gdal.ErrorReset() with gdaltest.error_handler(): f = open_for_read('/vsis3_streaming/foo/bar') assert f is None and gdal.VSIGetLastErrorMsg().find('AWS_SECRET_ACCESS_KEY') >= 0 gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY') # Missing AWS_ACCESS_KEY_ID gdal.ErrorReset() with gdaltest.error_handler(): f = open_for_read('/vsis3/foo/bar') assert f is None and gdal.VSIGetLastErrorMsg().find('AWS_ACCESS_KEY_ID') >= 0 gdal.SetConfigOption('AWS_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID') # ERROR 1: The AWS Access Key Id you provided does not exist in our records. 
gdal.ErrorReset() with gdaltest.error_handler(): f = open_for_read('/vsis3/foo/bar.baz') if f is not None or gdal.VSIGetLastErrorMsg() == '': if f is not None: gdal.VSIFCloseL(f) if gdal.GetConfigOption('APPVEYOR') is not None: return pytest.fail(gdal.VSIGetLastErrorMsg()) gdal.ErrorReset() with gdaltest.error_handler(): f = open_for_read('/vsis3_streaming/foo/bar.baz') assert f is None and gdal.VSIGetLastErrorMsg() != '' ############################################################################### def test_vsis3_start_webserver(): gdaltest.webserver_process = None gdaltest.webserver_port = 0 if not gdaltest.built_against_curl(): pytest.skip() (gdaltest.webserver_process, gdaltest.webserver_port) = webserver.launch(handler=webserver.DispatcherHttpHandler) if gdaltest.webserver_port == 0: pytest.skip() gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY') gdal.SetConfigOption('AWS_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID') gdal.SetConfigOption('AWS_TIMESTAMP', '20150101T000000Z') gdal.SetConfigOption('AWS_HTTPS', 'NO') gdal.SetConfigOption('AWS_VIRTUAL_HOSTING', 'NO') gdal.SetConfigOption('AWS_S3_ENDPOINT', '127.0.0.1:%d' % gdaltest.webserver_port) def get_s3_fake_bucket_resource_method(request): request.protocol_version = 'HTTP/1.1' if 'Authorization' not in request.headers: sys.stderr.write('Bad headers: %s\n' % str(request.headers)) request.send_response(403) return expected_authorization_8080 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=38901846b865b12ac492bc005bb394ca8d60c098b68db57c084fac686a932f9e' expected_authorization_8081 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=9f623b7ffce76188a456c70fb4813eb31969e88d130d6b4d801b3accbf050d6c' if request.headers['Authorization'] != expected_authorization_8080 and request.headers['Authorization'] != expected_authorization_8081: sys.stderr.write("Bad Authorization: '%s'\n" % str(request.headers['Authorization'])) request.send_response(403) return request.send_response(200) request.send_header('Content-type', 'text/plain') request.send_header('Content-Length', 3) request.send_header('Connection', 'close') request.end_headers() request.wfile.write("""foo""".encode('ascii')) ############################################################################### # Test with a fake AWS server def test_vsis3_2(): if gdaltest.webserver_port == 0: pytest.skip() signed_url = gdal.GetSignedURL('/vsis3/s3_fake_bucket/resource') expected_url_8080 = 'http://127.0.0.1:8080/s3_fake_bucket/resource?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AWS_ACCESS_KEY_ID%2F20150101%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20150101T000000Z&X-Amz-Expires=3600&X-Amz-Signature=dca239dd95f72ff8c37c15c840afc54cd19bdb07f7aaee2223108b5b0ad35da8&X-Amz-SignedHeaders=host' expected_url_8081 = 'http://127.0.0.1:8081/s3_fake_bucket/resource?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AWS_ACCESS_KEY_ID%2F20150101%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20150101T000000Z&X-Amz-Expires=3600&X-Amz-Signature=ef5216bc5971863414c69f6ca095276c0d62c0da97fa4f6ab80c30bd7fc146ac&X-Amz-SignedHeaders=host' assert signed_url in (expected_url_8080, expected_url_8081) handler = webserver.SequentialHandler() handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method) with webserver.install_http_handler(handler): f = 
open_for_read('/vsis3/s3_fake_bucket/resource') assert f is not None data = gdal.VSIFReadL(1, 4, f).decode('ascii') gdal.VSIFCloseL(f) assert data == 'foo' handler = webserver.SequentialHandler() handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method) with webserver.install_http_handler(handler): f = open_for_read('/vsis3_streaming/s3_fake_bucket/resource') assert f is not None data = gdal.VSIFReadL(1, 4, f).decode('ascii') gdal.VSIFCloseL(f) assert data == 'foo' handler = webserver.SequentialHandler() def method(request): request.protocol_version = 'HTTP/1.1' if 'Authorization' not in request.headers: sys.stderr.write('Bad headers: %s\n' % str(request.headers)) request.send_response(403) return expected_authorization_8080 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-security-token,Signature=464a21835038b4f4d292b6463b8a005b9aaa980513aa8c42fc170abb733dce85' expected_authorization_8081 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-security-token,Signature=b10e91575186342f9f2acfc91c4c2c9938c4a9e8cdcbc043d09d59d9641ad7fb' if request.headers['Authorization'] != expected_authorization_8080 and request.headers['Authorization'] != expected_authorization_8081: sys.stderr.write("Bad Authorization: '%s'\n" % str(request.headers['Authorization'])) request.send_response(403) return request.send_response(200) request.send_header('Content-type', 'text/plain') request.send_header('Content-Length', 3) request.end_headers() request.wfile.write("""foo""".encode('ascii')) handler.add('GET', '/s3_fake_bucket_with_session_token/resource', custom_method=method) # Test with temporary credentials with gdaltest.config_option('AWS_SESSION_TOKEN', 'AWS_SESSION_TOKEN'): with webserver.install_http_handler(handler): f = open_for_read('/vsis3/s3_fake_bucket_with_session_token/resource') assert f is not None data = gdal.VSIFReadL(1, 4, f).decode('ascii') gdal.VSIFCloseL(f) handler = webserver.SequentialHandler() def method(request): request.protocol_version = 'HTTP/1.1' if 'Range' in request.headers: if request.headers['Range'] != 'bytes=0-16383': sys.stderr.write("Bad Range: '%s'\n" % str(request.headers['Range'])) request.send_response(403) return request.send_response(206) request.send_header('Content-type', 'text/plain') request.send_header('Content-Range', 'bytes 0-16383/1000000') request.send_header('Content-Length', 16384) request.send_header('Connection', 'close') request.end_headers() request.wfile.write(('a' * 16384).encode('ascii')) else: request.send_response(200) request.send_header('Content-type', 'text/plain') request.send_header('Content-Length', 1000000) request.send_header('Connection', 'close') request.end_headers() request.wfile.write(('a' * 1000000).encode('ascii')) handler.add('GET', '/s3_fake_bucket/resource2.bin', custom_method=method) with webserver.install_http_handler(handler): # old_val = gdal.GetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN') # gdal.SetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN', 'EMPTY_DIR') stat_res = gdal.VSIStatL('/vsis3/s3_fake_bucket/resource2.bin') # gdal.SetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN', old_val) if stat_res is None or stat_res.size != 1000000: if stat_res is not None: print(stat_res.size) else: print(stat_res) pytest.fail() handler = webserver.SequentialHandler() handler.add('HEAD', '/s3_fake_bucket/resource2.bin', 200, 
{'Content-type': 'text/plain', 'Content-Length': 1000000, 'Connection': 'close'}) with webserver.install_http_handler(handler): stat_res = gdal.VSIStatL('/vsis3_streaming/s3_fake_bucket/resource2.bin') if stat_res is None or stat_res.size != 1000000: if stat_res is not None: print(stat_res.size) else: print(stat_res) pytest.fail() handler = webserver.SequentialHandler() def method(request): request.protocol_version = 'HTTP/1.1' if request.headers['Authorization'].find('us-east-1') >= 0: request.send_response(400) response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>AuthorizationHeaderMalformed</Code><Region>us-west-2</Region></Error>' response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response) request.send_header('Content-type', 'application/xml') request.send_header('Transfer-Encoding', 'chunked') request.send_header('Connection', 'close') request.end_headers() request.wfile.write(response.encode('ascii')) else: sys.stderr.write('Bad headers: %s\n' % str(request.headers)) request.send_response(403) handler.add('GET', '/s3_fake_bucket/redirect', custom_method=method) def method(request): request.protocol_version = 'HTTP/1.1' if request.headers['Authorization'].find('us-west-2') >= 0 and request.headers['Host'].startswith('127.0.0.1'): request.send_response(301) response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>PermanentRedirect</Code><Endpoint>localhost:%d</Endpoint></Error>' % request.server.port response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response) request.send_header('Content-type', 'application/xml') request.send_header('Transfer-Encoding', 'chunked') request.send_header('Connection', 'close') request.end_headers() request.wfile.write(response.encode('ascii')) else: sys.stderr.write('Bad headers: %s\n' % str(request.headers)) request.send_response(403) handler.add('GET', '/s3_fake_bucket/redirect', custom_method=method) def method(request): request.protocol_version = 'HTTP/1.1' if request.headers['Authorization'].find('us-west-2') >= 0 and request.headers['Host'].startswith('localhost'): request.send_response(200) request.send_header('Content-type', 'text/plain') request.send_header('Content-Length', 3) request.send_header('Connection', 'close') request.end_headers() request.wfile.write("""foo""".encode('ascii')) else: sys.stderr.write('Bad headers: %s\n' % str(request.headers)) request.send_response(403) handler.add('GET', '/s3_fake_bucket/redirect', custom_method=method) # Test region and endpoint 'redirects' with webserver.install_http_handler(handler): f = open_for_read('/vsis3/s3_fake_bucket/redirect') assert f is not None data = gdal.VSIFReadL(1, 4, f).decode('ascii') gdal.VSIFCloseL(f) if data != 'foo': if gdaltest.is_travis_branch('trusty'): pytest.skip('Skipped on trusty branch, but should be investigated') pytest.fail(data) # Test region and endpoint 'redirects' gdal.VSICurlClearCache() handler.req_count = 0 with webserver.install_http_handler(handler): f = open_for_read('/vsis3_streaming/s3_fake_bucket/redirect') assert f is not None data = gdal.VSIFReadL(1, 4, f).decode('ascii') gdal.VSIFCloseL(f) assert data == 'foo' handler = webserver.SequentialHandler() def method(request): # /vsis3_streaming/ should have remembered the change of region and endpoint if request.headers['Authorization'].find('us-west-2') < 0 or \ not request.headers['Host'].startswith('localhost'): sys.stderr.write('Bad headers: %s\n' % str(request.headers)) request.send_response(403) request.protocol_version = 'HTTP/1.1' 
request.send_response(400) response = 'bla' response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response) request.send_header('Content-type', 'application/xml') request.send_header('Transfer-Encoding', 'chunked') request.send_header('Connection', 'close') request.end_headers() request.wfile.write(response.encode('ascii')) handler.add('GET', '/s3_fake_bucket/non_xml_error', custom_method=method) gdal.ErrorReset() with webserver.install_http_handler(handler): with gdaltest.error_handler(): f = open_for_read('/vsis3_streaming/s3_fake_bucket/non_xml_error') assert f is None and gdal.VSIGetLastErrorMsg().find('bla') >= 0 handler = webserver.SequentialHandler() response = '<?xml version="1.0" encoding="UTF-8"?><oops>' response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response) handler.add('GET', '/s3_fake_bucket/invalid_xml_error', 400, {'Content-type': 'application/xml', 'Transfer-Encoding': 'chunked', 'Connection': 'close'}, response) gdal.ErrorReset() with webserver.install_http_handler(handler): with gdaltest.error_handler(): f = open_for_read('/vsis3_streaming/s3_fake_bucket/invalid_xml_error') assert f is None and gdal.VSIGetLastErrorMsg().find('<oops>') >= 0 handler = webserver.SequentialHandler() response = '<?xml version="1.0" encoding="UTF-8"?><Error/>' response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response) handler.add('GET', '/s3_fake_bucket/no_code_in_error', 400, {'Content-type': 'application/xml', 'Transfer-Encoding': 'chunked', 'Connection': 'close'}, response) gdal.ErrorReset() with webserver.install_http_handler(handler): with gdaltest.error_handler(): f = open_for_read('/vsis3_streaming/s3_fake_bucket/no_code_in_error') assert f is None and gdal.VSIGetLastErrorMsg().find('<Error/>') >= 0 handler = webserver.SequentialHandler() response = '<?xml version="1.0" encoding="UTF-8"?><Error><Code>AuthorizationHeaderMalformed</Code></Error>' response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response) handler.add('GET', '/s3_fake_bucket/no_region_in_AuthorizationHeaderMalformed_error', 400, {'Content-type': 'application/xml', 'Transfer-Encoding': 'chunked', 'Connection': 'close'}, response) gdal.ErrorReset() with webserver.install_http_handler(handler): with gdaltest.error_handler(): f = open_for_read('/vsis3_streaming/s3_fake_bucket/no_region_in_AuthorizationHeaderMalformed_error') assert f is None and gdal.VSIGetLastErrorMsg().find('<Error>') >= 0 handler = webserver.SequentialHandler() response = '<?xml version="1.0" encoding="UTF-8"?><Error><Code>PermanentRedirect</Code></Error>' response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response) handler.add('GET', '/s3_fake_bucket/no_endpoint_in_PermanentRedirect_error', 400, {'Content-type': 'application/xml', 'Transfer-Encoding': 'chunked', 'Connection': 'close'}, response) gdal.ErrorReset() with webserver.install_http_handler(handler): with gdaltest.error_handler(): f = open_for_read('/vsis3_streaming/s3_fake_bucket/no_endpoint_in_PermanentRedirect_error') assert f is None and gdal.VSIGetLastErrorMsg().find('<Error>') >= 0 handler = webserver.SequentialHandler() response = '<?xml version="1.0" encoding="UTF-8"?><Error><Code>bla</Code></Error>' response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response) handler.add('GET', '/s3_fake_bucket/no_message_in_error', 400, {'Content-type': 'application/xml', 'Transfer-Encoding': 'chunked', 'Connection': 'close'}, response) gdal.ErrorReset() with webserver.install_http_handler(handler): with gdaltest.error_handler(): f = 
open_for_read('/vsis3_streaming/s3_fake_bucket/no_message_in_error') assert f is None and gdal.VSIGetLastErrorMsg().find('<Error>') >= 0 # Test with requester pays handler = webserver.SequentialHandler() def method(request): if 'x-amz-request-payer' not in request.headers: sys.stderr.write('Bad headers: %s\n' % str(request.headers)) request.send_response(403) return expected_authorization_8080 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-request-payer,Signature=cf713a394e1b629ac0e468d60d3d4a12f5236fd72d21b6005c758b0dfc7049cd' expected_authorization_8081 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-request-payer,Signature=4756166679008a1a40cd6ff91dbbef670a71c11bf8e3c998dd7385577c3ac4d9' if request.headers['Authorization'] != expected_authorization_8080 and request.headers['Authorization'] != expected_authorization_8081: sys.stderr.write("Bad Authorization: '%s'\n" % str(request.headers['Authorization'])) request.send_response(403) return if request.headers['x-amz-request-payer'] != 'requester': sys.stderr.write("Bad x-amz-request-payer: '%s'\n" % str(request.headers['x-amz-request-payer'])) request.send_response(403) return request.send_response(200) request.send_header('Content-type', 'text/plain') request.send_header('Content-Length', 3) request.send_header('Connection', 'close') request.end_headers() request.wfile.write("""foo""".encode('ascii')) handler.add('GET', '/s3_fake_bucket_with_requester_pays/resource', custom_method=method) with gdaltest.config_option('AWS_REQUEST_PAYER', 'requester'): with webserver.install_http_handler(handler): with gdaltest.error_handler(): f = open_for_read('/vsis3/s3_fake_bucket_with_requester_pays/resource') assert f is not None data = gdal.VSIFReadL(1, 3, f).decode('ascii') gdal.VSIFCloseL(f) assert data == 'foo' # Test temporary redirect handler = webserver.SequentialHandler() class HandlerClass(object): def __init__(self, response_value): self.old_authorization = None self.response_value = response_value def method_req_1(self, request): if request.headers['Host'].find('127.0.0.1') < 0: sys.stderr.write('Bad headers: %s\n' % str(request.headers)) request.send_response(403) return self.old_authorization = request.headers['Authorization'] request.protocol_version = 'HTTP/1.1' request.send_response(307) response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>TemporaryRedirect</Code><Endpoint>localhost:%d</Endpoint></Error>' % request.server.port response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response) request.send_header('Content-type', 'application/xml') request.send_header('Transfer-Encoding', 'chunked') request.end_headers() request.wfile.write(response.encode('ascii')) def method_req_2(self, request): if request.headers['Host'].find('localhost') < 0: sys.stderr.write('Bad headers: %s\n' % str(request.headers)) request.send_response(403) return if self.old_authorization == request.headers['Authorization']: sys.stderr.write('Should have get a different Authorization. 
    # Test temporary redirect
    handler = webserver.SequentialHandler()

    class HandlerClass(object):
        def __init__(self, response_value):
            self.old_authorization = None
            self.response_value = response_value

        def method_req_1(self, request):
            if request.headers['Host'].find('127.0.0.1') < 0:
                sys.stderr.write('Bad headers: %s\n' % str(request.headers))
                request.send_response(403)
                return
            self.old_authorization = request.headers['Authorization']
            request.protocol_version = 'HTTP/1.1'
            request.send_response(307)
            response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>TemporaryRedirect</Code><Endpoint>localhost:%d</Endpoint></Error>' % request.server.port
            response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
            request.send_header('Content-type', 'application/xml')
            request.send_header('Transfer-Encoding', 'chunked')
            request.end_headers()
            request.wfile.write(response.encode('ascii'))

        def method_req_2(self, request):
            if request.headers['Host'].find('localhost') < 0:
                sys.stderr.write('Bad headers: %s\n' % str(request.headers))
                request.send_response(403)
                return
            if self.old_authorization == request.headers['Authorization']:
                sys.stderr.write('Should have got a different Authorization. Bad headers: %s\n' % str(request.headers))
                request.send_response(403)
                return
            request.protocol_version = 'HTTP/1.1'
            request.send_response(200)
            response = self.response_value
            request.send_header('Content-Length', len(response))
            request.end_headers()
            request.wfile.write(response.encode('ascii'))

    h = HandlerClass('foo')
    handler.add('GET', '/s3_test_temporary_redirect_read/resource', custom_method=h.method_req_1)
    handler.add('GET', '/s3_test_temporary_redirect_read/resource', custom_method=h.method_req_2)

    with webserver.install_http_handler(handler):
        f = open_for_read('/vsis3/s3_test_temporary_redirect_read/resource')
        assert f is not None
        data = gdal.VSIFReadL(1, 3, f).decode('ascii')
        gdal.VSIFCloseL(f)

    assert data == 'foo'

    # Retry on the same bucket and check that the redirection was indeed temporary
    handler = webserver.SequentialHandler()
    h = HandlerClass('bar')
    handler.add('GET', '/s3_test_temporary_redirect_read/resource2', custom_method=h.method_req_1)
    handler.add('GET', '/s3_test_temporary_redirect_read/resource2', custom_method=h.method_req_2)

    with webserver.install_http_handler(handler):
        f = open_for_read('/vsis3/s3_test_temporary_redirect_read/resource2')
        assert f is not None
        data = gdal.VSIFReadL(1, 3, f).decode('ascii')
        gdal.VSIFCloseL(f)

    assert data == 'bar'

###############################################################################
# Test re-opening after changing configuration option (#2294)


def test_vsis3_open_after_config_option_change():

    if gdaltest.webserver_port == 0:
        pytest.skip()

    gdal.VSICurlClearCache()

    handler = webserver.SequentialHandler()
    handler.add('GET', '/test_vsis3_change_config_options/?delimiter=%2F', 403)
    handler.add('GET', '/test_vsis3_change_config_options/test.bin', 403)
    with webserver.install_http_handler(handler):
        with gdaltest.error_handler():
            f = open_for_read('/vsis3/test_vsis3_change_config_options/test.bin')
            assert f is None

    # Does not attempt any network access since we didn't change significant
    # parameters
    f = open_for_read('/vsis3/test_vsis3_change_config_options/test.bin')
    assert f is None

    with gdaltest.config_option('AWS_ACCESS_KEY_ID', 'another_key_id'):
        handler = webserver.SequentialHandler()
        handler.add('GET', '/test_vsis3_change_config_options/?delimiter=%2F', 200,
                    {'Content-type': 'application/xml'},
                    """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix></Prefix>
    <Contents>
        <Key>test.bin</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>123456</Size>
    </Contents>
</ListBucketResult>
""")
        with webserver.install_http_handler(handler):
            f = open_for_read('/vsis3/test_vsis3_change_config_options/test.bin')
            assert f is not None
        gdal.VSIFCloseL(f)
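# Note on the test above: /vsis3/ caches the failed open, so the second
# open_for_read() is expected to perform no network access; changing a
# credential-related configuration option such as AWS_ACCESS_KEY_ID must
# invalidate that cached state so that requests are re-issued with the new
# credentials (#2294).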
###############################################################################
# Test ReadDir() with a fake AWS server


def test_vsis3_readdir():

    if gdaltest.webserver_port == 0:
        pytest.skip()

    handler = webserver.SequentialHandler()

    def method(request):
        request.protocol_version = 'HTTP/1.1'
        if request.headers['Authorization'].find('us-east-1') >= 0:
            request.send_response(400)
            response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>AuthorizationHeaderMalformed</Code><Region>us-west-2</Region></Error>'
            response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
            request.send_header('Content-type', 'application/xml')
            request.send_header('Transfer-Encoding', 'chunked')
            request.end_headers()
            request.wfile.write(response.encode('ascii'))
        elif request.headers['Authorization'].find('us-west-2') >= 0:
            if request.headers['Host'].startswith('127.0.0.1'):
                request.send_response(301)
                response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>PermanentRedirect</Code><Endpoint>localhost:%d</Endpoint></Error>' % request.server.port
                response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
                request.send_header('Content-type', 'application/xml')
                request.send_header('Transfer-Encoding', 'chunked')
                request.end_headers()
                request.wfile.write(response.encode('ascii'))
            elif request.headers['Host'].startswith('localhost'):
                request.send_response(200)
                request.send_header('Content-type', 'application/xml')
                response = """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix>a_dir with_space/</Prefix>
    <NextMarker>bla</NextMarker>
    <Contents>
        <Key>a_dir with_space/resource3 with_space.bin</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>123456</Size>
    </Contents>
</ListBucketResult>
"""
                request.send_header('Content-Length', len(response))
                request.end_headers()
                request.wfile.write(response.encode('ascii'))
            else:
                sys.stderr.write('Bad headers: %s\n' % str(request.headers))
                request.send_response(403)
        else:
            sys.stderr.write('Bad headers: %s\n' % str(request.headers))
            request.send_response(403)

    handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&prefix=a_dir%20with_space%2F', custom_method=method)
    handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&prefix=a_dir%20with_space%2F', custom_method=method)
    handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&prefix=a_dir%20with_space%2F', custom_method=method)

    def method(request):
        # /vsis3/ should have remembered the change of region and endpoint
        if request.headers['Authorization'].find('us-west-2') < 0 or \
                not request.headers['Host'].startswith('localhost'):
            sys.stderr.write('Bad headers: %s\n' % str(request.headers))
            request.send_response(403)

        request.protocol_version = 'HTTP/1.1'
        request.send_response(200)
        request.send_header('Content-type', 'application/xml')
        response = """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix>a_dir with_space/</Prefix>
    <Contents>
        <Key>a_dir with_space/resource4.bin</Key>
        <LastModified>2015-10-16T12:34:56.000Z</LastModified>
        <Size>456789</Size>
    </Contents>
    <Contents>
        <Key>a_dir with_space/i_am_a_glacier_file</Key>
        <LastModified>2015-10-16T12:34:56.000Z</LastModified>
        <Size>456789</Size>
        <StorageClass>GLACIER</StorageClass>
    </Contents>
    <CommonPrefixes>
        <Prefix>a_dir with_space/subdir/</Prefix>
    </CommonPrefixes>
</ListBucketResult>
"""
        request.send_header('Content-Length', len(response))
        request.end_headers()
        request.wfile.write(response.encode('ascii'))

    handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&marker=bla&prefix=a_dir%20with_space%2F', custom_method=method)

    with webserver.install_http_handler(handler):
        f = open_for_read('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin')
    if f is None:
        if gdaltest.is_travis_branch('trusty'):
            pytest.skip('Skipped on trusty branch, but should be investigated')
        pytest.fail()
    gdal.VSIFCloseL(f)

    with webserver.install_http_handler(webserver.SequentialHandler()):
        dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir with_space')
    assert dir_contents == ['resource3 with_space.bin', 'resource4.bin', 'subdir']
    assert gdal.VSIStatL('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin').size == 123456
    assert gdal.VSIStatL('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin').mtime == 1

    # Same as above: cached
    dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir with_space')
    assert dir_contents == ['resource3 with_space.bin', 'resource4.bin', 'subdir']
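    # Note: listing results are cached per directory, hence no network access
    # for the repeated ReadDir() just above. VSICurlPartialClearCache() below
    # only invalidates the given subtree, unlike VSICurlClearCache() which
    # drops the whole cache.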
    # ReadDir on something known to be a file shouldn't cause network access
    dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin')
    assert dir_contents is None

    # Test unrelated partial clear of the cache
    gdal.VSICurlPartialClearCache('/vsis3/s3_fake_bucket_unrelated')

    assert gdal.VSIStatL('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin').size == 123456

    dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir with_space')
    assert dir_contents == ['resource3 with_space.bin', 'resource4.bin', 'subdir']

    # Test partial clear of the cache
    gdal.VSICurlPartialClearCache('/vsis3/s3_fake_bucket2/a_dir with_space')

    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_fake_bucket2/a_dir%20with_space/resource3%20with_space.bin', 400)
    handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&max-keys=100&prefix=a_dir%20with_space%2Fresource3%20with_space.bin%2F', 400)
    with webserver.install_http_handler(handler):
        gdal.VSIStatL('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin')

    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&prefix=a_dir%2F', 200,
                {'Content-type': 'application/xml'},
                """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix>a_dir/</Prefix>
    <Contents>
        <Key>a_dir/test.txt</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>40</Size>
    </Contents>
</ListBucketResult>
""")
    with webserver.install_http_handler(handler):
        dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir')
    assert dir_contents == ['test.txt']

    gdal.VSICurlClearCache()

    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&prefix=a_dir%2F', 200, {},
                """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix>a_dir/</Prefix>
    <Contents>
        <Key>a_dir/resource4.bin</Key>
        <LastModified>2015-10-16T12:34:56.000Z</LastModified>
        <Size>456789</Size>
    </Contents>
    <Contents>
        <Key>a_dir/i_am_a_glacier_file</Key>
        <LastModified>2015-10-16T12:34:56.000Z</LastModified>
        <Size>456789</Size>
        <StorageClass>GLACIER</StorageClass>
    </Contents>
    <CommonPrefixes>
        <Prefix>a_dir/subdir/</Prefix>
    </CommonPrefixes>
</ListBucketResult>
""")
    with gdaltest.config_option('CPL_VSIL_CURL_IGNORE_GLACIER_STORAGE', 'NO'):
        with webserver.install_http_handler(handler):
            dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir')
    assert dir_contents == ['resource4.bin', 'i_am_a_glacier_file', 'subdir']
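    # Note: CPL_VSIL_CURL_NON_CACHED holds a colon-separated list of path
    # prefixes for which file size and content must not be served from the
    # cache, so every access in the loop below re-fetches the file.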
    # Test CPL_VSIL_CURL_NON_CACHED
    for config_option_value in ['/vsis3/s3_non_cached/test.txt',
                                '/vsis3/s3_non_cached',
                                '/vsis3/s3_non_cached:/vsis3/unrelated',
                                '/vsis3/unrelated:/vsis3/s3_non_cached',
                                '/vsis3/unrelated:/vsis3/s3_non_cached:/vsis3/unrelated']:
        with gdaltest.config_option('CPL_VSIL_CURL_NON_CACHED', config_option_value):

            handler = webserver.SequentialHandler()
            handler.add('GET', '/s3_non_cached/test.txt', 200, {}, 'foo')

            with webserver.install_http_handler(handler):
                f = open_for_read('/vsis3/s3_non_cached/test.txt')
                assert f is not None, config_option_value
                data = gdal.VSIFReadL(1, 3, f).decode('ascii')
                gdal.VSIFCloseL(f)
                assert data == 'foo', config_option_value

            handler = webserver.SequentialHandler()
            handler.add('GET', '/s3_non_cached/test.txt', 200, {}, 'bar2')

            with webserver.install_http_handler(handler):
                size = gdal.VSIStatL('/vsis3/s3_non_cached/test.txt').size
            assert size == 4, config_option_value

            handler = webserver.SequentialHandler()
            handler.add('GET', '/s3_non_cached/test.txt', 200, {}, 'foo')

            with webserver.install_http_handler(handler):
                size = gdal.VSIStatL('/vsis3/s3_non_cached/test.txt').size
                if size != 3:
                    print(config_option_value)
                    pytest.fail(data)

            handler = webserver.SequentialHandler()
            handler.add('GET', '/s3_non_cached/test.txt', 200, {}, 'bar2')

            with webserver.install_http_handler(handler):
                f = open_for_read('/vsis3/s3_non_cached/test.txt')
                assert f is not None, config_option_value
                data = gdal.VSIFReadL(1, 4, f).decode('ascii')
                gdal.VSIFCloseL(f)
                assert data == 'bar2', config_option_value

    # Retry without option
    for config_option_value in [None,
                                '/vsis3/s3_non_cached/bar.txt']:
        with gdaltest.config_option('CPL_VSIL_CURL_NON_CACHED', config_option_value):

            handler = webserver.SequentialHandler()
            if config_option_value is None:
                handler.add('GET', '/s3_non_cached/?delimiter=%2F', 200,
                            {'Content-type': 'application/xml'},
                            """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix></Prefix>
    <Contents>
        <Key>test.txt</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>40</Size>
    </Contents>
    <Contents>
        <Key>test2.txt</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>40</Size>
    </Contents>
</ListBucketResult>
""")
                handler.add('GET', '/s3_non_cached/test.txt', 200, {}, 'foo')

            with webserver.install_http_handler(handler):
                f = open_for_read('/vsis3/s3_non_cached/test.txt')
                assert f is not None, config_option_value
                data = gdal.VSIFReadL(1, 3, f).decode('ascii')
                gdal.VSIFCloseL(f)
                assert data == 'foo', config_option_value

            handler = webserver.SequentialHandler()
            with webserver.install_http_handler(handler):
                f = open_for_read('/vsis3/s3_non_cached/test.txt')
                assert f is not None, config_option_value
                data = gdal.VSIFReadL(1, 4, f).decode('ascii')
                gdal.VSIFCloseL(f)
                # We should still get foo because of caching
                assert data == 'foo', config_option_value

    # List buckets (empty result)
    handler = webserver.SequentialHandler()
    handler.add('GET', '/', 200, {'Content-type': 'application/xml'},
                """<?xml version="1.0" encoding="UTF-8"?>
<ListAllMyBucketsResult>
    <Buckets>
    </Buckets>
</ListAllMyBucketsResult>
""")
    with webserver.install_http_handler(handler):
        dir_contents = gdal.ReadDir('/vsis3/')
    assert dir_contents == ['.']

    gdal.VSICurlClearCache()

    # List buckets
    handler = webserver.SequentialHandler()
    handler.add('GET', '/', 200, {'Content-type': 'application/xml'},
                """<?xml version="1.0" encoding="UTF-8"?>
<ListAllMyBucketsResult>
    <Buckets>
        <Bucket>
            <Name>mybucket</Name>
        </Bucket>
    </Buckets>
</ListAllMyBucketsResult>
""")
    with webserver.install_http_handler(handler):
        dir_contents = gdal.ReadDir('/vsis3/')
    assert dir_contents == ['mybucket']
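    # Note: ReadDir('/vsis3/') maps to a GET on the service root and returns
    # the bucket names of the ListAllMyBucketsResult document; an empty bucket
    # list yields ['.'] so that the root is still reported as an existing
    # directory.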
    # Test temporary redirect
    handler = webserver.SequentialHandler()

    class HandlerClass(object):
        def __init__(self, response_value):
            self.old_authorization = None
            self.response_value = response_value

        def method_req_1(self, request):
            if request.headers['Host'].find('127.0.0.1') < 0:
                sys.stderr.write('Bad headers: %s\n' % str(request.headers))
                request.send_response(403)
                return
            self.old_authorization = request.headers['Authorization']
            request.protocol_version = 'HTTP/1.1'
            request.send_response(307)
            response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>TemporaryRedirect</Code><Endpoint>localhost:%d</Endpoint></Error>' % request.server.port
            response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
            request.send_header('Content-type', 'application/xml')
            request.send_header('Transfer-Encoding', 'chunked')
            request.end_headers()
            request.wfile.write(response.encode('ascii'))

        def method_req_2(self, request):
            if request.headers['Host'].find('localhost') < 0:
                sys.stderr.write('Bad headers: %s\n' % str(request.headers))
                request.send_response(403)
                return
            if self.old_authorization == request.headers['Authorization']:
                sys.stderr.write('Should have got a different Authorization. Bad headers: %s\n' % str(request.headers))
                request.send_response(403)
                return
            request.protocol_version = 'HTTP/1.1'
            request.send_response(200)
            request.send_header('Content-type', 'application/xml')
            response = self.response_value
            request.send_header('Content-Length', len(response))
            request.end_headers()
            request.wfile.write(response.encode('ascii'))

    h = HandlerClass("""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix></Prefix>
    <CommonPrefixes>
        <Prefix>test</Prefix>
    </CommonPrefixes>
</ListBucketResult>
""")
    handler.add('GET', '/s3_test_temporary_redirect_read_dir/?delimiter=%2F', custom_method=h.method_req_1)
    handler.add('GET', '/s3_test_temporary_redirect_read_dir/?delimiter=%2F', custom_method=h.method_req_2)

    with webserver.install_http_handler(handler):
        dir_contents = gdal.ReadDir('/vsis3/s3_test_temporary_redirect_read_dir')
    assert dir_contents == ['test']

    # Retry on the same bucket and check that the redirection was indeed temporary
    handler = webserver.SequentialHandler()

    h = HandlerClass("""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix>test/</Prefix>
    <CommonPrefixes>
        <Prefix>test/test2</Prefix>
    </CommonPrefixes>
</ListBucketResult>
""")
    handler.add('GET', '/s3_test_temporary_redirect_read_dir/?delimiter=%2F&prefix=test%2F', custom_method=h.method_req_1)
    handler.add('GET', '/s3_test_temporary_redirect_read_dir/?delimiter=%2F&prefix=test%2F', custom_method=h.method_req_2)

    with webserver.install_http_handler(handler):
        dir_contents = gdal.ReadDir('/vsis3/s3_test_temporary_redirect_read_dir/test')
    assert dir_contents == ['test2']

###############################################################################
# Test OpenDir() with a fake AWS server


def test_vsis3_opendir():

    if gdaltest.webserver_port == 0:
        pytest.skip()

    # Unlimited depth
    handler = webserver.SequentialHandler()
    handler.add('GET', '/vsis3_opendir/', 200, {'Content-type': 'application/xml'},
                """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix/>
    <Marker/>
    <Contents>
        <Key>test.txt</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>40</Size>
    </Contents>
    <Contents>
        <Key>subdir/</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>0</Size>
    </Contents>
    <Contents>
        <Key>subdir/test.txt</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>5</Size>
    </Contents>
</ListBucketResult>
""")
    with webserver.install_http_handler(handler):
        d = gdal.OpenDir('/vsis3/vsis3_opendir')
    assert d is not None

    entry = gdal.GetNextDirEntry(d)
    assert entry.name == 'test.txt'
    assert entry.size == 40
    assert entry.mode == 32768
    assert entry.mtime == 1

    entry = gdal.GetNextDirEntry(d)
    assert entry.name == 'subdir'
    assert entry.mode == 16384

    entry = gdal.GetNextDirEntry(d)
    assert entry.name == 'subdir/test.txt'

    entry = gdal.GetNextDirEntry(d)
    assert entry is None

    gdal.CloseDir(d)

    # Depth = 0
    handler = webserver.SequentialHandler()
    handler.add('GET', '/vsis3_opendir/?delimiter=%2F', 200, {'Content-type': 'application/xml'},
                """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix/>
    <Marker/>
    <Contents>
        <Key>test.txt</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>40</Size>
    </Contents>
    <CommonPrefixes>
        <Prefix>subdir/</Prefix>
    </CommonPrefixes>
</ListBucketResult>
""")
    with webserver.install_http_handler(handler):
        d = gdal.OpenDir('/vsis3/vsis3_opendir', 0)
    assert d is not None

    entry = gdal.GetNextDirEntry(d)
    assert entry.name == 'test.txt'
    assert entry.size == 40
    assert entry.mode == 32768
    assert entry.mtime == 1

    entry = gdal.GetNextDirEntry(d)
    assert entry.name == 'subdir'
    assert entry.mode == 16384

    entry = gdal.GetNextDirEntry(d)
    assert entry is None

    gdal.CloseDir(d)

    # Depth = 1
    handler = webserver.SequentialHandler()
    handler.add('GET', '/vsis3_opendir/?delimiter=%2F', 200, {'Content-type': 'application/xml'},
                """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix/>
    <Marker/>
    <Contents>
        <Key>test.txt</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>40</Size>
    </Contents>
    <CommonPrefixes>
        <Prefix>subdir/</Prefix>
    </CommonPrefixes>
</ListBucketResult>
""")
    with webserver.install_http_handler(handler):
        d = gdal.OpenDir('/vsis3/vsis3_opendir', 1)
    assert d is not None

    entry = gdal.GetNextDirEntry(d)
    assert entry.name == 'test.txt'
    assert entry.size == 40
    assert entry.mode == 32768
    assert entry.mtime == 1

    entry = gdal.GetNextDirEntry(d)
    assert entry.name == 'subdir'
    assert entry.mode == 16384

    handler = webserver.SequentialHandler()
    handler.add('GET', '/vsis3_opendir/?delimiter=%2F&prefix=subdir%2F', 200,
                {'Content-type': 'application/xml'},
                """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix>subdir/</Prefix>
    <Marker/>
    <Contents>
        <Key>subdir/test.txt</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>5</Size>
    </Contents>
</ListBucketResult>
""")
    with webserver.install_http_handler(handler):
        entry = gdal.GetNextDirEntry(d)
        assert entry.name == 'subdir/test.txt'

        entry = gdal.GetNextDirEntry(d)
        assert entry is None

    gdal.CloseDir(d)

###############################################################################
# Test simple PUT support with a fake AWS server


def test_vsis3_4():

    if gdaltest.webserver_port == 0:
        pytest.skip()

    with webserver.install_http_handler(webserver.SequentialHandler()):
        with gdaltest.error_handler():
            f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3', 'wb')
    assert f is None

    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_fake_bucket3/empty_file.bin', 200, {'Connection': 'close'}, 'foo')
    with webserver.install_http_handler(handler):
        assert gdal.VSIStatL('/vsis3/s3_fake_bucket3/empty_file.bin').size == 3

    # Empty file
    handler = webserver.SequentialHandler()

    def method(request):
        if request.headers['Content-Length'] != '0':
            sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
            request.send_response(400)
            return

        request.send_response(200)
        request.send_header('Content-Length', 0)
        request.end_headers()

    handler.add('PUT', '/s3_fake_bucket3/empty_file.bin', custom_method=method)

    with webserver.install_http_handler(handler):
        f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/empty_file.bin', 'wb')
        assert f is not None
        gdal.ErrorReset()
        gdal.VSIFCloseL(f)
    assert gdal.GetLastErrorMsg() == ''

    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_fake_bucket3/empty_file.bin', 200, {'Connection': 'close'}, '')
    with webserver.install_http_handler(handler):
        assert gdal.VSIStatL('/vsis3/s3_fake_bucket3/empty_file.bin').size == 0

    # Invalid seek
    handler = webserver.SequentialHandler()
    with webserver.install_http_handler(handler):
        f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/empty_file.bin', 'wb')
        assert f is not None
        with gdaltest.error_handler():
            ret = gdal.VSIFSeekL(f, 1, 0)
        assert ret != 0
        gdal.VSIFCloseL(f)
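    # Note: files opened on /vsis3/ in 'wb' mode are sequential, write-only
    # streams: seeking anywhere but the current position fails (checked just
    # above), and so does reading back (checked just below).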
    # Invalid read
    handler = webserver.SequentialHandler()
    with webserver.install_http_handler(handler):
        f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/empty_file.bin', 'wb')
        assert f is not None
        with gdaltest.error_handler():
            ret = gdal.VSIFReadL(1, 1, f)
        assert not ret
        gdal.VSIFCloseL(f)

    # Error case
    handler = webserver.SequentialHandler()
    handler.add('PUT', '/s3_fake_bucket3/empty_file_error.bin', 403)
    with webserver.install_http_handler(handler):
        f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/empty_file_error.bin', 'wb')
        assert f is not None
        gdal.ErrorReset()
        with gdaltest.error_handler():
            gdal.VSIFCloseL(f)
    assert gdal.GetLastErrorMsg() != ''

    # Nominal case
    gdal.NetworkStatsReset()
    with gdaltest.config_option('CPL_VSIL_NETWORK_STATS_ENABLED', 'YES'):
        with webserver.install_http_handler(webserver.SequentialHandler()):
            f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/another_file.bin', 'wb')
            assert f is not None
            assert gdal.VSIFSeekL(f, gdal.VSIFTellL(f), 0) == 0
            assert gdal.VSIFSeekL(f, 0, 1) == 0
            assert gdal.VSIFSeekL(f, 0, 2) == 0
            assert gdal.VSIFWriteL('foo', 1, 3, f) == 3
            assert gdal.VSIFSeekL(f, gdal.VSIFTellL(f), 0) == 0
            assert gdal.VSIFWriteL('bar', 1, 3, f) == 3

        handler = webserver.SequentialHandler()

        def method(request):
            if request.headers['Content-Length'] != '6':
                sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
                request.send_response(400)
                request.send_header('Content-Length', 0)
                request.end_headers()
                return

            request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))

            content = request.rfile.read(6).decode('ascii')
            if content != 'foobar':
                sys.stderr.write('Did not get expected content: %s\n' % content)
                request.send_response(400)
                request.send_header('Content-Length', 0)
                request.end_headers()
                return

            request.send_response(200)
            request.send_header('Content-Length', 0)
            request.end_headers()

        handler.add('PUT', '/s3_fake_bucket3/another_file.bin', custom_method=method)

        gdal.ErrorReset()
        with webserver.install_http_handler(handler):
            gdal.VSIFCloseL(f)
        assert gdal.GetLastErrorMsg() == ''

        j = json.loads(gdal.NetworkStatsGetAsSerializedJSON())
        # print(j)
        assert j == {
            "methods": {
                "PUT": {
                    "count": 1,
                    "uploaded_bytes": 6
                }
            },
            "handlers": {
                "vsis3": {
                    "files": {
                        "/vsis3/s3_fake_bucket3/another_file.bin": {
                            "methods": {
                                "PUT": {
                                    "count": 1,
                                    "uploaded_bytes": 6
                                }
                            },
                            "actions": {
                                "Write": {
                                    "methods": {
                                        "PUT": {
                                            "count": 1,
                                            "uploaded_bytes": 6
                                        }
                                    }
                                }
                            }
                        }
                    },
                    "methods": {
                        "PUT": {
                            "count": 1,
                            "uploaded_bytes": 6
                        }
                    }
                }
            }
        }

    gdal.NetworkStatsReset()
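    # Note: the PUT handlers in this test write an 'HTTP/1.1 100 Continue'
    # interim response by hand before reading the request body; this matches
    # clients (such as libcurl) that send an 'Expect: 100-continue' header
    # and wait for that go-ahead before uploading the payload.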
    # Redirect case
    with webserver.install_http_handler(webserver.SequentialHandler()):
        f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/redirect', 'wb')
        assert f is not None
        assert gdal.VSIFWriteL('foobar', 1, 6, f) == 6

    handler = webserver.SequentialHandler()

    def method(request):
        request.protocol_version = 'HTTP/1.1'
        if request.headers['Authorization'].find('us-east-1') >= 0:
            request.send_response(400)
            response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>AuthorizationHeaderMalformed</Code><Region>us-west-2</Region></Error>'
            response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
            request.send_header('Content-type', 'application/xml')
            request.send_header('Transfer-Encoding', 'chunked')
            request.end_headers()
            request.wfile.write(response.encode('ascii'))
        elif request.headers['Authorization'].find('us-west-2') >= 0:
            if request.headers['Content-Length'] != '6':
                sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
                request.send_response(400)
                request.send_header('Content-Length', 0)
                request.end_headers()
                return
            request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))
            content = request.rfile.read(6).decode('ascii')
            if content != 'foobar':
                sys.stderr.write('Did not get expected content: %s\n' % content)
                request.send_response(400)
                request.send_header('Content-Length', 0)
                request.end_headers()
                return
            request.send_response(200)
            request.send_header('Content-Length', 0)
            request.end_headers()
        else:
            sys.stderr.write('Bad headers: %s\n' % str(request.headers))
            request.send_response(403)
            request.send_header('Content-Length', 0)
            request.end_headers()

    handler.add('PUT', '/s3_fake_bucket3/redirect', custom_method=method)
    handler.add('PUT', '/s3_fake_bucket3/redirect', custom_method=method)

    gdal.ErrorReset()
    with webserver.install_http_handler(handler):
        gdal.VSIFCloseL(f)
    assert gdal.GetLastErrorMsg() == ''

###############################################################################
# Test simple PUT support with retry logic


def test_vsis3_write_single_put_retry():

    if gdaltest.webserver_port == 0:
        pytest.skip()

    with gdaltest.config_options({'GDAL_HTTP_MAX_RETRY': '2',
                                  'GDAL_HTTP_RETRY_DELAY': '0.01'}):

        with webserver.install_http_handler(webserver.SequentialHandler()):
            f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/put_with_retry.bin', 'wb')
            assert f is not None
            assert gdal.VSIFWriteL('foo', 1, 3, f) == 3

        handler = webserver.SequentialHandler()

        def method(request):
            if request.headers['Content-Length'] != '3':
                sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
                request.send_response(400)
                request.send_header('Content-Length', 0)
                request.end_headers()
                return

            request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))

            content = request.rfile.read(3).decode('ascii')
            if content != 'foo':
                sys.stderr.write('Did not get expected content: %s\n' % content)
                request.send_response(400)
                request.send_header('Content-Length', 0)
                request.end_headers()
                return

            request.send_response(200)
            request.send_header('Content-Length', 0)
            request.end_headers()

        handler.add('PUT', '/s3_fake_bucket3/put_with_retry.bin', 502)
        handler.add('PUT', '/s3_fake_bucket3/put_with_retry.bin', custom_method=method)

        with gdaltest.error_handler():
            with webserver.install_http_handler(handler):
                gdal.VSIFCloseL(f)

###############################################################################
# Test simple DELETE support with a fake AWS server


def test_vsis3_5():

    if gdaltest.webserver_port == 0:
        pytest.skip()

    with webserver.install_http_handler(webserver.SequentialHandler()):
        with gdaltest.error_handler():
            ret = gdal.Unlink('/vsis3/foo')
    assert ret != 0

    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_delete_bucket/delete_file', 200, {'Connection': 'close'}, 'foo')
    with webserver.install_http_handler(handler):
        assert gdal.VSIStatL('/vsis3/s3_delete_bucket/delete_file').size == 3

    handler = webserver.SequentialHandler()
    with webserver.install_http_handler(handler):
        assert gdal.VSIStatL('/vsis3/s3_delete_bucket/delete_file').size == 3

    handler = webserver.SequentialHandler()
    handler.add('DELETE', '/s3_delete_bucket/delete_file', 204)
    with webserver.install_http_handler(handler):
        ret = gdal.Unlink('/vsis3/s3_delete_bucket/delete_file')
    assert ret == 0

    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_delete_bucket/delete_file', 404, {'Connection': 'close'})
    handler.add('GET', '/s3_delete_bucket/?delimiter=%2F&max-keys=100&prefix=delete_file%2F', 404, {'Connection': 'close'})
    with webserver.install_http_handler(handler):
        assert gdal.VSIStatL('/vsis3/s3_delete_bucket/delete_file') is None
    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_delete_bucket/delete_file_error', 200)
    handler.add('DELETE', '/s3_delete_bucket/delete_file_error', 403)
    with webserver.install_http_handler(handler):
        with gdaltest.error_handler():
            ret = gdal.Unlink('/vsis3/s3_delete_bucket/delete_file_error')
    assert ret != 0

    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_delete_bucket/redirect', 200)

    def method(request):
        request.protocol_version = 'HTTP/1.1'
        if request.headers['Authorization'].find('us-east-1') >= 0:
            request.send_response(400)
            response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>AuthorizationHeaderMalformed</Code><Region>us-west-2</Region></Error>'
            response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
            request.send_header('Content-type', 'application/xml')
            request.send_header('Transfer-Encoding', 'chunked')
            request.end_headers()
            request.wfile.write(response.encode('ascii'))
        elif request.headers['Authorization'].find('us-west-2') >= 0:
            request.send_response(204)
            request.send_header('Content-Length', 0)
            request.end_headers()
        else:
            sys.stderr.write('Bad headers: %s\n' % str(request.headers))
            request.send_response(403)
            request.send_header('Content-Length', 0)
            request.end_headers()

    handler.add('DELETE', '/s3_delete_bucket/redirect', custom_method=method)
    handler.add('DELETE', '/s3_delete_bucket/redirect', custom_method=method)

    with webserver.install_http_handler(handler):
        ret = gdal.Unlink('/vsis3/s3_delete_bucket/redirect')
    assert ret == 0

###############################################################################
# Test DeleteObjects with a fake AWS server


def test_vsis3_unlink_batch():

    if gdaltest.webserver_port == 0:
        pytest.skip()

    def method(request):
        if request.headers['Content-MD5'] != 'Ze0X4LdlTwCsT+WpNxD9FA==':
            sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
            request.send_response(403)
            return

        content = request.rfile.read(int(request.headers['Content-Length'])).decode('ascii')
        if content != """<?xml version="1.0" encoding="UTF-8"?>
<Delete xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
  <Object>
    <Key>foo</Key>
  </Object>
  <Object>
    <Key>bar/baz</Key>
  </Object>
</Delete>
""":
            sys.stderr.write('Did not get expected content: %s\n' % content)
            request.send_response(403)
            return

        request.protocol_version = 'HTTP/1.1'
        request.send_response(200)
        response = """<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Deleted><Key>foo</Key></Deleted><Deleted><Key>bar/baz</Key></Deleted></DeleteResult>"""
        request.send_header('Content-Length', len(response))
        request.send_header('Connection', 'close')
        request.end_headers()
        request.wfile.write(response.encode('ascii'))

    handler = webserver.SequentialHandler()
    handler.add('POST', '/unlink_batch/?delete', custom_method=method)
    handler.add('POST', '/unlink_batch/?delete', 200, {},
                """<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Deleted><Key>baw</Key></Deleted></DeleteResult>""")

    with gdaltest.config_option('CPL_VSIS3_UNLINK_BATCH_SIZE', '2'):
        with webserver.install_http_handler(handler):
            ret = gdal.UnlinkBatch(['/vsis3/unlink_batch/foo',
                                    '/vsis3/unlink_batch/bar/baz',
                                    '/vsis3/unlink_batch/baw'])
    assert ret

    handler = webserver.SequentialHandler()
    handler.add('POST', '/unlink_batch/?delete', 200, {},
                """<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Failed><Key>foo</Key></Failed></DeleteResult>""")

    with webserver.install_http_handler(handler):
        ret = gdal.UnlinkBatch(['/vsis3/unlink_batch/foo'])
    assert not ret
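# Note on the Content-MD5 check in test_vsis3_unlink_batch(): the S3
# DeleteObjects request must carry a Content-MD5 header holding the
# base64-encoded binary MD5 digest of the POST body. An illustrative sketch
# (not used by the tests) of how such a value, e.g. 'Ze0X4LdlTwCsT+WpNxD9FA==',
# can be computed:


def _doc_example_content_md5(body):
    # body: bytes of the XML Delete document posted to ?delete
    import base64
    import hashlib
    return base64.b64encode(hashlib.md5(body).digest()).decode('ascii')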
###############################################################################
# Test RmdirRecursive() with a fake AWS server


def test_vsis3_rmdir_recursive():

    if gdaltest.webserver_port == 0:
        pytest.skip()

    handler = webserver.SequentialHandler()
    handler.add('GET', '/test_rmdir_recursive/?prefix=somedir%2F', 200,
                {'Content-type': 'application/xml'},
                """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix>somedir/</Prefix>
    <Marker/>
    <Contents>
        <Key>somedir/test.txt</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>40</Size>
    </Contents>
    <Contents>
        <Key>somedir/subdir/</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>0</Size>
    </Contents>
    <Contents>
        <Key>somedir/subdir/test.txt</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>5</Size>
    </Contents>
</ListBucketResult>
""")

    def method(request):
        content = request.rfile.read(int(request.headers['Content-Length'])).decode('ascii')
        if content != """<?xml version="1.0" encoding="UTF-8"?>
<Delete xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
  <Object>
    <Key>somedir/test.txt</Key>
  </Object>
  <Object>
    <Key>somedir/subdir/</Key>
  </Object>
</Delete>
""":
            sys.stderr.write('Did not get expected content: %s\n' % content)
            request.send_response(403)
            return

        request.protocol_version = 'HTTP/1.1'
        request.send_response(200)
        response = """<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Deleted><Key>somedir/test.txt</Key></Deleted><Deleted><Key>somedir/subdir/</Key></Deleted></DeleteResult>"""
        request.send_header('Content-Length', len(response))
        request.send_header('Connection', 'close')
        request.end_headers()
        request.wfile.write(response.encode('ascii'))

    handler.add('POST', '/test_rmdir_recursive/?delete', custom_method=method)

    def method(request):
        content = request.rfile.read(int(request.headers['Content-Length'])).decode('ascii')
        if content != """<?xml version="1.0" encoding="UTF-8"?>
<Delete xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
  <Object>
    <Key>somedir/subdir/test.txt</Key>
  </Object>
  <Object>
    <Key>somedir/</Key>
  </Object>
</Delete>
""":
            sys.stderr.write('Did not get expected content: %s\n' % content)
            request.send_response(403)
            return

        request.protocol_version = 'HTTP/1.1'
        request.send_response(200)
        response = """<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Deleted><Key>somedir/subdir/test.txt</Key></Deleted><Deleted><Key>somedir/</Key></Deleted></DeleteResult>"""
        request.send_header('Content-Length', len(response))
        request.send_header('Connection', 'close')
        request.end_headers()
        request.wfile.write(response.encode('ascii'))

    handler.add('POST', '/test_rmdir_recursive/?delete', custom_method=method)

    with gdaltest.config_option('CPL_VSIS3_UNLINK_BATCH_SIZE', '2'):
        with webserver.install_http_handler(handler):
            assert gdal.RmdirRecursive('/vsis3/test_rmdir_recursive/somedir') == 0
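# The multipart upload tests below follow the standard S3 sequence:
# 'POST <key>?uploads' to initiate (the reply carries an UploadId), one
# 'PUT <key>?partNumber=N&uploadId=...' per chunk (each reply carries an
# ETag), then a final 'POST <key>?uploadId=...' with a
# CompleteMultipartUpload document listing part numbers and ETags; on
# failure the upload is aborted with 'DELETE <key>?uploadId=...'. An
# illustrative outline of the happy-path request sequence (hypothetical
# helper, not used by the tests):


def _doc_example_multipart_requests(key, n_parts, upload_id='my_id'):
    yield 'POST %s?uploads' % key
    for part in range(1, n_parts + 1):
        yield 'PUT %s?partNumber=%d&uploadId=%s' % (key, part, upload_id)
    yield 'POST %s?uploadId=%s' % (key, upload_id)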
encoding="UTF-8"?><Error><Message>bla</Message><Code>AuthorizationHeaderMalformed</Code><Region>us-west-2</Region></Error>' response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response) request.send_header('Content-type', 'application/xml') request.send_header('Transfer-Encoding', 'chunked') request.end_headers() request.wfile.write(response.encode('ascii')) elif request.headers['Authorization'].find('us-west-2') >= 0: response = '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>' request.send_response(200) request.send_header('Content-type', 'application/xml') request.send_header('Content-Length', len(response)) request.end_headers() request.wfile.write(response.encode('ascii')) else: sys.stderr.write('Bad headers: %s\n' % str(request.headers)) request.send_response(403) request.send_header('Content-Length', 0) request.end_headers() handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploads', custom_method=method) handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploads', custom_method=method) def method(request): if request.headers['Content-Length'] != '1048576': sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers)) request.send_response(400) request.send_header('Content-Length', 0) request.end_headers() return request.send_response(200) request.send_header('ETag', '"first_etag"') request.send_header('Content-Length', 0) request.end_headers() handler.add('PUT', '/s3_fake_bucket4/large_file.bin?partNumber=1&uploadId=my_id', custom_method=method) with webserver.install_http_handler(handler): ret = gdal.VSIFWriteL(big_buffer, 1, size, f) assert ret == size handler = webserver.SequentialHandler() def method(request): if request.headers['Content-Length'] != '1': sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers)) request.send_response(400) return request.send_response(200) request.send_header('ETag', '"second_etag"') request.send_header('Content-Length', 0) request.end_headers() handler.add('PUT', '/s3_fake_bucket4/large_file.bin?partNumber=2&uploadId=my_id', custom_method=method) def method(request): if request.headers['Content-Length'] != '186': sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers)) request.send_response(400) request.send_header('Content-Length', 0) request.end_headers() return content = request.rfile.read(186).decode('ascii') if content != """<CompleteMultipartUpload> <Part> <PartNumber>1</PartNumber><ETag>"first_etag"</ETag></Part> <Part> <PartNumber>2</PartNumber><ETag>"second_etag"</ETag></Part> </CompleteMultipartUpload> """: sys.stderr.write('Did not get expected content: %s\n' % content) request.send_response(400) request.send_header('Content-Length', 0) request.end_headers() return request.send_response(200) request.send_header('Content-Length', 0) request.end_headers() handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploadId=my_id', custom_method=method) gdal.ErrorReset() with webserver.install_http_handler(handler): gdal.VSIFCloseL(f) assert gdal.GetLastErrorMsg() == '' handler = webserver.SequentialHandler() handler.add('POST', '/s3_fake_bucket4/large_file_initiate_403_error.bin?uploads', 403) handler.add('POST', '/s3_fake_bucket4/large_file_initiate_empty_result.bin?uploads', 200) handler.add('POST', '/s3_fake_bucket4/large_file_initiate_invalid_xml_result.bin?uploads', 200, {}, 'foo') handler.add('POST', '/s3_fake_bucket4/large_file_initiate_no_uploadId.bin?uploads', 200, {}, '<foo/>') with 
    with webserver.install_http_handler(handler):
        for filename in ['/vsis3/s3_fake_bucket4/large_file_initiate_403_error.bin',
                         '/vsis3/s3_fake_bucket4/large_file_initiate_empty_result.bin',
                         '/vsis3/s3_fake_bucket4/large_file_initiate_invalid_xml_result.bin',
                         '/vsis3/s3_fake_bucket4/large_file_initiate_no_uploadId.bin']:
            with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'):  # 1 MB
                f = gdal.VSIFOpenL(filename, 'wb')
            assert f is not None
            with gdaltest.error_handler():
                ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
            assert ret == 0
            gdal.ErrorReset()
            gdal.VSIFCloseL(f)
            assert gdal.GetLastErrorMsg() == ''

    handler = webserver.SequentialHandler()
    handler.add('POST', '/s3_fake_bucket4/large_file_upload_part_403_error.bin?uploads', 200, {},
                '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>')
    handler.add('PUT', '/s3_fake_bucket4/large_file_upload_part_403_error.bin?partNumber=1&uploadId=my_id', 403)
    handler.add('DELETE', '/s3_fake_bucket4/large_file_upload_part_403_error.bin?uploadId=my_id', 204)
    handler.add('POST', '/s3_fake_bucket4/large_file_upload_part_no_etag.bin?uploads', 200, {},
                '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>')
    handler.add('PUT', '/s3_fake_bucket4/large_file_upload_part_no_etag.bin?partNumber=1&uploadId=my_id', 200)
    handler.add('DELETE', '/s3_fake_bucket4/large_file_upload_part_no_etag.bin?uploadId=my_id', 204)

    with webserver.install_http_handler(handler):
        for filename in ['/vsis3/s3_fake_bucket4/large_file_upload_part_403_error.bin',
                         '/vsis3/s3_fake_bucket4/large_file_upload_part_no_etag.bin']:
            with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'):  # 1 MB
                f = gdal.VSIFOpenL(filename, 'wb')
            assert f is not None, filename
            with gdaltest.error_handler():
                ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
            assert ret == 0, filename
            gdal.ErrorReset()
            gdal.VSIFCloseL(f)
            assert gdal.GetLastErrorMsg() == '', filename

    # Simulate failure in AbortMultipart stage
    handler = webserver.SequentialHandler()
    handler.add('POST', '/s3_fake_bucket4/large_file_abortmultipart_403_error.bin?uploads', 200, {},
                '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>')
    handler.add('PUT', '/s3_fake_bucket4/large_file_abortmultipart_403_error.bin?partNumber=1&uploadId=my_id', 403)
    handler.add('DELETE', '/s3_fake_bucket4/large_file_abortmultipart_403_error.bin?uploadId=my_id', 403)

    filename = '/vsis3/s3_fake_bucket4/large_file_abortmultipart_403_error.bin'
    with webserver.install_http_handler(handler):
        with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'):  # 1 MB
            f = gdal.VSIFOpenL(filename, 'wb')
        assert f is not None, filename
        with gdaltest.error_handler():
            ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
        assert ret == 0, filename
        gdal.ErrorReset()
        with gdaltest.error_handler():
            gdal.VSIFCloseL(f)
        assert gdal.GetLastErrorMsg() != '', filename

    # Simulate failure in CompleteMultipartUpload stage
    handler = webserver.SequentialHandler()
    handler.add('POST', '/s3_fake_bucket4/large_file_completemultipart_403_error.bin?uploads', 200, {},
                '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>')
    handler.add('PUT', '/s3_fake_bucket4/large_file_completemultipart_403_error.bin?partNumber=1&uploadId=my_id', 200, {'ETag': 'first_etag'}, '')
    handler.add('PUT', '/s3_fake_bucket4/large_file_completemultipart_403_error.bin?partNumber=2&uploadId=my_id', 200, {'ETag': 'second_etag'}, '')
    handler.add('POST', '/s3_fake_bucket4/large_file_completemultipart_403_error.bin?uploadId=my_id', 403)
    # handler.add('DELETE', '/s3_fake_bucket4/large_file_completemultipart_403_error.bin?uploadId=my_id', 204)

    filename = '/vsis3/s3_fake_bucket4/large_file_completemultipart_403_error.bin'
    with webserver.install_http_handler(handler):
        with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'):  # 1 MB
            f = gdal.VSIFOpenL(filename, 'wb')
        assert f is not None, filename
        ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
        assert ret == size, filename
        gdal.ErrorReset()
        with gdaltest.error_handler():
            gdal.VSIFCloseL(f)
        assert gdal.GetLastErrorMsg() != '', filename

###############################################################################
# Test multipart upload with retry logic


def test_vsis3_write_multipart_retry():

    if gdaltest.webserver_port == 0:
        pytest.skip()

    with gdaltest.config_options({'GDAL_HTTP_MAX_RETRY': '2',
                                  'GDAL_HTTP_RETRY_DELAY': '0.01'}):
        with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'):  # 1 MB
            with webserver.install_http_handler(webserver.SequentialHandler()):
                f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket4/large_file.bin', 'wb')
        assert f is not None
        size = 1024 * 1024 + 1
        big_buffer = 'a' * size

        handler = webserver.SequentialHandler()

        response = '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>'
        handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploads', 502)
        handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploads', 200,
                    {'Content-type': 'application/xml',
                     'Content-Length': len(response),
                     'Connection': 'close'}, response)
        handler.add('PUT', '/s3_fake_bucket4/large_file.bin?partNumber=1&uploadId=my_id', 502)
        handler.add('PUT', '/s3_fake_bucket4/large_file.bin?partNumber=1&uploadId=my_id', 200,
                    {'Content-Length': '0',
                     'ETag': '"first_etag"',
                     'Connection': 'close'}, {})

        with gdaltest.error_handler():
            with webserver.install_http_handler(handler):
                ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
        assert ret == size

        handler = webserver.SequentialHandler()
        handler.add('PUT', '/s3_fake_bucket4/large_file.bin?partNumber=2&uploadId=my_id', 200,
                    {'Content-Length': '0',
                     'ETag': '"second_etag"',
                     'Connection': 'close'}, {})
        handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploadId=my_id', 502)
        handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploadId=my_id', 200,
                    {'Content-Length': '0',
                     'Connection': 'close'}, {})

        with gdaltest.error_handler():
            with webserver.install_http_handler(handler):
                gdal.VSIFCloseL(f)
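# Note for the Mkdir()/Rmdir() tests below: S3 has no real directories, so
# /vsis3/ emulates them with zero-byte objects whose key ends with a slash.
# Mkdir() PUTs such a marker object, Rmdir() DELETEs it, and a directory is
# only removed after a listing has shown it to be empty.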
###############################################################################
# Test Mkdir() / Rmdir()


def test_vsis3_7():

    if gdaltest.webserver_port == 0:
        pytest.skip()

    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_bucket_test_mkdir/dir/', 404, {'Connection': 'close'})
    handler.add('GET', '/s3_bucket_test_mkdir/?delimiter=%2F&max-keys=100&prefix=dir%2F', 404, {'Connection': 'close'})
    handler.add('PUT', '/s3_bucket_test_mkdir/dir/', 200)
    with webserver.install_http_handler(handler):
        ret = gdal.Mkdir('/vsis3/s3_bucket_test_mkdir/dir', 0)
    assert ret == 0

    assert stat.S_ISDIR(gdal.VSIStatL('/vsis3/s3_bucket_test_mkdir/dir').mode)

    dir_content = gdal.ReadDir('/vsis3/s3_bucket_test_mkdir/dir')
    assert dir_content == ['.']

    # Try creating already existing directory
    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_bucket_test_mkdir/dir/', 416, {'Connection': 'close'})
    with webserver.install_http_handler(handler):
        ret = gdal.Mkdir('/vsis3/s3_bucket_test_mkdir/dir', 0)
    assert ret != 0

    handler = webserver.SequentialHandler()
    handler.add('DELETE', '/s3_bucket_test_mkdir/dir/', 204)
    with webserver.install_http_handler(handler):
        ret = gdal.Rmdir('/vsis3/s3_bucket_test_mkdir/dir')
    assert ret == 0

    # Try deleting already deleted directory
    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_bucket_test_mkdir/dir/', 404)
    handler.add('GET', '/s3_bucket_test_mkdir/?delimiter=%2F&max-keys=100&prefix=dir%2F', 404, {'Connection': 'close'})
    with webserver.install_http_handler(handler):
        ret = gdal.Rmdir('/vsis3/s3_bucket_test_mkdir/dir')
    assert ret != 0

    # Try deleting non-empty directory
    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_bucket_test_mkdir/dir_nonempty/', 416)
    handler.add('GET', '/s3_bucket_test_mkdir/?delimiter=%2F&max-keys=100&prefix=dir_nonempty%2F', 200,
                {'Content-type': 'application/xml'},
                """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix>dir_nonempty/</Prefix>
    <Contents>
        <Key>dir_nonempty/test.txt</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>40</Size>
    </Contents>
</ListBucketResult>
""")
    with webserver.install_http_handler(handler):
        ret = gdal.Rmdir('/vsis3/s3_bucket_test_mkdir/dir_nonempty')
    assert ret != 0

    # Try stat'ing a directory not ending with slash
    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_bucket_test_dir_stat/test_dir_stat', 400)
    handler.add('GET', '/s3_bucket_test_dir_stat/?delimiter=%2F&max-keys=100&prefix=test_dir_stat%2F', 200,
                {'Content-type': 'application/xml'},
                """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix>test_dir_stat/</Prefix>
    <Contents>
        <Key>test_dir_stat/test.txt</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>40</Size>
    </Contents>
</ListBucketResult>
""")
    with webserver.install_http_handler(handler):
        assert stat.S_ISDIR(gdal.VSIStatL('/vsis3/s3_bucket_test_dir_stat/test_dir_stat').mode)

    # Try ReadDir'ing a directory not ending with slash
    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_bucket_test_readdir/?delimiter=%2F&prefix=test_dirread%2F', 200,
                {'Content-type': 'application/xml'},
                """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix>test_dirread/</Prefix>
    <Contents>
        <Key>test_dirread/test.txt</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>40</Size>
    </Contents>
</ListBucketResult>
""")
    with webserver.install_http_handler(handler):
        assert gdal.ReadDir('/vsis3/s3_bucket_test_readdir/test_dirread') is not None

    # Try stat'ing a directory ending with slash
    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_bucket_test_dir_stat_2/test_dir_stat/', 400)
    handler.add('GET', '/s3_bucket_test_dir_stat_2/?delimiter=%2F&max-keys=100&prefix=test_dir_stat%2F', 200,
                {'Content-type': 'application/xml'},
                """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix>test_dir_stat/</Prefix>
    <Contents>
        <Key>test_dir_stat/test.txt</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>40</Size>
    </Contents>
</ListBucketResult>
""")
    with webserver.install_http_handler(handler):
        assert stat.S_ISDIR(gdal.VSIStatL('/vsis3/s3_bucket_test_dir_stat_2/test_dir_stat/').mode)
    # Try ReadDir'ing a directory ending with slash
    handler = webserver.SequentialHandler()
    handler.add('GET', '/s3_bucket_test_readdir2/?delimiter=%2F&prefix=test_dirread%2F', 200,
                {'Content-type': 'application/xml'},
                """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix>test_dirread/</Prefix>
    <Contents>
        <Key>test_dirread/test.txt</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>40</Size>
    </Contents>
</ListBucketResult>
""")
    with webserver.install_http_handler(handler):
        assert gdal.ReadDir('/vsis3/s3_bucket_test_readdir2/test_dirread') is not None

###############################################################################
# Test handling of file and directory with same name


def test_vsis3_8():

    if gdaltest.webserver_port == 0:
        pytest.skip()

    handler = webserver.SequentialHandler()
    handler.add('GET', '/vsis3_8/?delimiter=%2F', 200, {'Content-type': 'application/xml'},
                """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix></Prefix>
    <Contents>
        <Key>test</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>40</Size>
    </Contents>
    <CommonPrefixes>
        <Prefix>test/</Prefix>
    </CommonPrefixes>
</ListBucketResult>
""")

    with webserver.install_http_handler(handler):
        listdir = gdal.ReadDir('/vsis3/vsis3_8', 0)
    assert listdir == ['test', 'test/']

    handler = webserver.SequentialHandler()
    with webserver.install_http_handler(handler):
        assert not stat.S_ISDIR(gdal.VSIStatL('/vsis3/vsis3_8/test').mode)

    handler = webserver.SequentialHandler()
    with webserver.install_http_handler(handler):
        assert stat.S_ISDIR(gdal.VSIStatL('/vsis3/vsis3_8/test/').mode)

###############################################################################
# Test vsisync() with SYNC_STRATEGY=ETAG


def test_vsis3_sync_etag():

    if gdaltest.webserver_port == 0:
        pytest.skip()

    gdal.VSICurlClearCache()

    options = ['SYNC_STRATEGY=ETAG']

    with gdaltest.error_handler():
        handler = webserver.SequentialHandler()
        with webserver.install_http_handler(handler):
            assert not gdal.Sync('/i_do/not/exist', '/vsis3/', options=options)

    with gdaltest.error_handler():
        handler = webserver.SequentialHandler()
        handler.add('GET', '/do_not/exist', 404)
        handler.add('GET', '/do_not/?delimiter=%2F&max-keys=100&prefix=exist%2F', 404)
        handler.add('PUT', '/do_not/exist', 404)
        with webserver.install_http_handler(handler):
            assert not gdal.Sync('vsifile.py', '/vsis3/do_not/exist', options=options)

    handler = webserver.SequentialHandler()
    handler.add('GET', '/out/', 200)
    handler.add('GET', '/out/testsync.txt', 404)
    handler.add('GET', '/out/?delimiter=%2F&max-keys=100&prefix=testsync.txt%2F', 404)

    def method(request):
        if request.headers['Content-Length'] != '3':
            sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
            request.send_response(400)
            request.send_header('Content-Length', 0)
            request.end_headers()
            return

        request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))

        content = request.rfile.read(3).decode('ascii')
        if content != 'foo':
            sys.stderr.write('Did not get expected content: %s\n' % content)
            request.send_response(400)
            request.send_header('Content-Length', 0)
            request.end_headers()
            return

        request.send_response(200)
        request.send_header('Content-Length', 0)
        request.send_header('ETag', '"acbd18db4cc2f85cedef654fccc4a4d8"')
        request.end_headers()

    handler.add('PUT', '/out/testsync.txt', custom_method=method)

    gdal.FileFromMemBuffer('/vsimem/testsync.txt', 'foo')

    def cbk(pct, _, tab):
        assert pct > tab[0]
        tab[0] = pct
        return True

    tab = [0]
    with webserver.install_http_handler(handler):
        assert gdal.Sync('/vsimem/testsync.txt', '/vsis3/out', options=options,
                         callback=cbk, callback_data=tab)
    assert tab[0] == 1.0
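    # Note: for a non-multipart PUT, S3 returns an ETag that is the hex MD5
    # digest of the object content ('acbd18db4cc2f85cedef654fccc4a4d8' is
    # md5('foo')), which is what allows SYNC_STRATEGY=ETAG to detect an
    # up-to-date target without re-transferring it.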
    # Re-try with cached ETag. Should generate no network access
    handler = webserver.SequentialHandler()
    with webserver.install_http_handler(handler):
        assert gdal.Sync('/vsimem/testsync.txt', '/vsis3/out', options=options)
        assert gdal.Sync('/vsimem/testsync.txt', '/vsis3/out/testsync.txt', options=options)

    gdal.VSICurlClearCache()

    # Other direction: S3 to /vsimem
    handler = webserver.SequentialHandler()
    handler.add('GET', '/out/testsync.txt', 206,
                {'Content-Length': '3',
                 'Content-Range': 'bytes 0-2/3',
                 'ETag': '"acbd18db4cc2f85cedef654fccc4a4d8"'}, "foo")
    with webserver.install_http_handler(handler):
        assert gdal.Sync('/vsis3/out/testsync.txt', '/vsimem/', options=options)

    # Shouldn't do any copy, but hard to verify
    with webserver.install_http_handler(webserver.SequentialHandler()):
        assert gdal.Sync('/vsis3/out/testsync.txt', '/vsimem/', options=options)
        assert gdal.Sync('/vsis3/out/testsync.txt', '/vsimem/testsync.txt', options=options)

    # Modify target file, and redo synchronization
    gdal.FileFromMemBuffer('/vsimem/testsync.txt', 'bar')

    handler = webserver.SequentialHandler()
    handler.add('GET', '/out/testsync.txt', 200,
                {'Content-Length': '3',
                 'ETag': '"acbd18db4cc2f85cedef654fccc4a4d8"'}, "foo")
    with webserver.install_http_handler(handler):
        assert gdal.Sync('/vsis3/out/testsync.txt', '/vsimem/', options=options)

    f = gdal.VSIFOpenL('/vsimem/testsync.txt', 'rb')
    data = gdal.VSIFReadL(1, 3, f).decode('ascii')
    gdal.VSIFCloseL(f)
    assert data == 'foo'

    # /vsimem to S3, but after cleaning the cache
    gdal.VSICurlClearCache()
    handler = webserver.SequentialHandler()
    handler.add('GET', '/out/', 200)
    handler.add('GET', '/out/testsync.txt', 206,
                {'Content-Length': '3',
                 'Content-Range': 'bytes 0-2/3',
                 'ETag': '"acbd18db4cc2f85cedef654fccc4a4d8"'}, "foo")
    with webserver.install_http_handler(handler):
        assert gdal.Sync('/vsimem/testsync.txt', '/vsis3/out', options=options)

    gdal.Unlink('/vsimem/testsync.txt')

    # Directory copying
    gdal.VSICurlClearCache()

    gdal.Mkdir('/vsimem/subdir', 0)
    gdal.FileFromMemBuffer('/vsimem/subdir/testsync.txt', 'foo')
    handler = webserver.SequentialHandler()
    handler.add('GET', '/out/', 200, {},
                """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
    <Prefix/>
    <Marker/>
    <IsTruncated>false</IsTruncated>
    <Contents>
        <Key>testsync.txt</Key>
        <LastModified>1970-01-01T00:00:01.000Z</LastModified>
        <Size>3</Size>
        <ETag>"acbd18db4cc2f85cedef654fccc4a4d8"</ETag>
    </Contents>
</ListBucketResult>
""")
    with webserver.install_http_handler(handler):
        assert gdal.Sync('/vsimem/subdir/', '/vsis3/out', options=options)
    gdal.RmdirRecursive('/vsimem/subdir')

###############################################################################
# Test vsisync() with SYNC_STRATEGY=TIMESTAMP


def test_vsis3_sync_timestamp():

    if gdaltest.webserver_port == 0:
        pytest.skip()

    options = ['SYNC_STRATEGY=TIMESTAMP']

    gdal.FileFromMemBuffer('/vsimem/testsync.txt', 'foo')

    # S3 to local: S3 file is older -> download
    gdal.VSICurlClearCache()
    handler = webserver.SequentialHandler()
    handler.add('GET', '/out/testsync.txt', 206,
                {'Content-Length': '3',
                 'Content-Range': 'bytes 0-2/3',
                 'Last-Modified': 'Mon, 01 Jan 1970 00:00:01 GMT'}, "foo")
    handler.add('GET', '/out/testsync.txt', 200,
                {'Content-Length': '3',
                 'Last-Modified': 'Mon, 01 Jan 1970 00:00:01 GMT'}, "foo")
    with webserver.install_http_handler(handler):
        assert gdal.Sync('/vsis3/out/testsync.txt', '/vsimem/', options=options)
    # S3 to local: S3 file is newer -> do nothing
    gdal.VSICurlClearCache()
    handler = webserver.SequentialHandler()
    handler.add('GET', '/out/testsync.txt', 206,
                {'Content-Length': '3',
                 'Content-Range': 'bytes 0-2/3',
                 'Last-Modified': 'Mon, 01 Jan 2037 00:00:01 GMT'}, "foo")
    with webserver.install_http_handler(handler):
        assert gdal.Sync('/vsis3/out/testsync.txt', '/vsimem/', options=options)

    # Local to S3: S3 file is older -> upload
    gdal.VSICurlClearCache()
    handler = webserver.SequentialHandler()
    handler.add('GET', '/out/testsync.txt', 206,
                {'Content-Length': '3',
                 'Content-Range': 'bytes 0-2/3',
                 'Last-Modified': 'Mon, 01 Jan 1970 00:00:01 GMT'}, "foo")
    handler.add('PUT', '/out/testsync.txt', 200)
    with webserver.install_http_handler(handler):
        assert gdal.Sync('/vsimem/testsync.txt', '/vsis3/out/testsync.txt', options=options)

    # Local to S3: S3 file is newer -> do nothing
    gdal.VSICurlClearCache()
    handler = webserver.SequentialHandler()
    handler.add('GET', '/out/testsync.txt', 206,
                {'Content-Length': '3',
                 'Content-Range': 'bytes 0-2/3',
                 'Last-Modified': 'Mon, 01 Jan 2037 00:00:01 GMT'}, "foo")
    with webserver.install_http_handler(handler):
        assert gdal.Sync('/vsimem/testsync.txt', '/vsis3/out/testsync.txt', options=options)

    gdal.Unlink('/vsimem/testsync.txt')

###############################################################################
# Test vsisync() with SYNC_STRATEGY=OVERWRITE


def test_vsis3_sync_overwrite():

    if gdaltest.webserver_port == 0:
        pytest.skip()

    options = ['SYNC_STRATEGY=OVERWRITE']

    gdal.FileFromMemBuffer('/vsimem/testsync.txt', 'foo')

    # S3 to local: S3 file is newer
    gdal.VSICurlClearCache()
    handler = webserver.SequentialHandler()
    handler.add('GET', '/out/testsync.txt', 206,
                {'Content-Length': '3',
                 'Content-Range': 'bytes 0-2/3',
                 'Last-Modified': 'Mon, 01 Jan 2037 00:00:01 GMT'}, "foo")
    handler.add('GET', '/out/testsync.txt', 200,
                {'Content-Length': '3',
                 'Last-Modified': 'Mon, 01 Jan 2037 00:00:01 GMT'}, "foo")
    with webserver.install_http_handler(handler):
        assert gdal.Sync('/vsis3/out/testsync.txt', '/vsimem/', options=options)

    # Local to S3: S3 file is newer
    gdal.VSICurlClearCache()
    handler = webserver.SequentialHandler()
    handler.add('GET', '/out/testsync.txt', 206,
                {'Content-Length': '3',
                 'Content-Range': 'bytes 0-2/3',
                 'Last-Modified': 'Mon, 01 Jan 2037 00:00:01 GMT'}, "foo")
    handler.add('PUT', '/out/testsync.txt', 200)
    with webserver.install_http_handler(handler):
        assert gdal.Sync('/vsimem/testsync.txt', '/vsis3/out/testsync.txt', options=options)

    gdal.Unlink('/vsimem/testsync.txt')
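# The tests below exercise S3 server-side copy: when both source and target
# are on /vsis3/, a PUT with an x-amz-copy-source header and an empty body is
# issued on the target instead of downloading and re-uploading the content;
# Rename() uses the same copy followed by a DELETE of the source.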
webserver.install_http_handler(handler): assert gdal.Sync( '/vsis3/in/testsync.txt', '/vsis3/out/') ############################################################################### # Test rename def test_vsis3_fake_rename(): if gdaltest.webserver_port == 0: pytest.skip() gdal.VSICurlClearCache() handler = webserver.SequentialHandler() handler.add('GET', '/test/source.txt', 206, { 'Content-Length' : '3', 'Content-Range': 'bytes 0-2/3' }, "foo") handler.add('GET', '/test/target.txt', 404) handler.add('GET', '/test/?delimiter=%2F&max-keys=100&prefix=target.txt%2F', 200) def method(request): if request.headers['Content-Length'] != '0': sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers)) request.send_response(400) return if request.headers['x-amz-copy-source'] != '/test/source.txt': sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers)) request.send_response(400) return request.send_response(200) request.send_header('Content-Length', 0) request.end_headers() handler.add('PUT', '/test/target.txt', custom_method=method) handler.add('DELETE', '/test/source.txt', 204) with webserver.install_http_handler(handler): assert gdal.Rename( '/vsis3/test/source.txt', '/vsis3/test/target.txt') == 0 ############################################################################### # Test rename def test_vsis3_fake_rename_dir(): if gdaltest.webserver_port == 0: pytest.skip() gdal.VSICurlClearCache() handler = webserver.SequentialHandler() handler.add('GET', '/test/source_dir', 404) handler.add('GET', '/test/?delimiter=%2F&max-keys=100&prefix=source_dir%2F', 200, {'Content-type': 'application/xml'}, """<?xml version="1.0" encoding="UTF-8"?> <ListBucketResult> <Prefix>source_dir/</Prefix> <Contents> <Key>source_dir/test.txt</Key> <LastModified>1970-01-01T00:00:01.000Z</LastModified> <Size>3</Size> </Contents> </ListBucketResult> """) handler.add('GET', '/test/target_dir/', 404) handler.add('GET', '/test/?delimiter=%2F&max-keys=100&prefix=target_dir%2F', 404) def method(request): if request.headers['Content-Length'] != '0': sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers)) request.send_response(400) request.send_header('Content-Length', 0) request.end_headers() return request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii')) request.send_response(200) request.send_header('Content-Length', 0) request.end_headers() handler.add('PUT', '/test/target_dir/', custom_method=method) def method(request): if request.headers['Content-Length'] != '0': sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers)) request.send_response(400) return if request.headers['x-amz-copy-source'] != '/test/source_dir/test.txt': sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers)) request.send_response(400) return request.send_response(200) request.send_header('Content-Length', 0) request.end_headers() handler.add('PUT', '/test/target_dir/test.txt', custom_method=method) handler.add('DELETE', '/test/source_dir/test.txt', 204) handler.add('GET', '/test/source_dir/', 404) handler.add('GET', '/test/?delimiter=%2F&max-keys=100&prefix=source_dir%2F', 404) with webserver.install_http_handler(handler): assert gdal.Rename( '/vsis3/test/source_dir', '/vsis3/test/target_dir') == 0 ############################################################################### # Test rename onto existing dir is not allowed def test_vsis3_fake_rename_on_existing_dir(): if gdaltest.webserver_port == 0: pytest.skip() gdal.VSICurlClearCache() 
handler = webserver.SequentialHandler() handler.add('GET', '/test/source.txt', 206, { 'Content-Length' : '3', 'Content-Range': 'bytes 0-2/3' }, "foo") handler.add('GET', '/test_target_dir/', 200) with webserver.install_http_handler(handler): assert gdal.Rename( '/vsis3/test/source.txt', '/vsis3/test_target_dir') == -1 ############################################################################### # Test Sync() and multithreaded download and CHUNK_SIZE def test_vsis3_fake_sync_multithreaded_upload_chunk_size(): if gdaltest.webserver_port == 0: pytest.skip() gdal.VSICurlClearCache() def cbk(pct, _, tab): assert pct >= tab[0] tab[0] = pct return True gdal.Mkdir('/vsimem/test', 0) gdal.FileFromMemBuffer('/vsimem/test/foo', 'foo\n') tab = [ -1 ] handler = webserver.SequentialHandler() handler.add('GET', '/test_bucket/?prefix=test%2F', 200) handler.add('GET', '/test_bucket/test', 404) handler.add('GET', '/test_bucket/?delimiter=%2F&max-keys=100&prefix=test%2F', 200) handler.add('GET', '/test_bucket/', 200) handler.add('GET', '/test_bucket/test/', 404) handler.add('PUT', '/test_bucket/test/', 200) def method(request): request.protocol_version = 'HTTP/1.1' response = '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>' request.send_response(200) request.send_header('Content-type', 'application/xml') request.send_header('Content-Length', len(response)) request.end_headers() request.wfile.write(response.encode('ascii')) handler.add('POST', '/test_bucket/test/foo?uploads', custom_method=method) def method(request): if request.headers['Content-Length'] != '3': sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers)) request.send_response(400) request.send_header('Content-Length', 0) request.end_headers() return request.send_response(200) request.send_header('ETag', '"first_etag"') request.send_header('Content-Length', 0) request.end_headers() handler.add('PUT', '/test_bucket/test/foo?partNumber=1&uploadId=my_id', custom_method=method) def method(request): if request.headers['Content-Length'] != '1': sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers)) request.send_response(400) request.send_header('Content-Length', 0) request.end_headers() return request.send_response(200) request.send_header('ETag', '"second_etag"') request.send_header('Content-Length', 0) request.end_headers() handler.add('PUT', '/test_bucket/test/foo?partNumber=2&uploadId=my_id', custom_method=method) def method(request): if request.headers['Content-Length'] != '186': sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers)) request.send_response(400) request.send_header('Content-Length', 0) request.end_headers() return content = request.rfile.read(186).decode('ascii') if content != """<CompleteMultipartUpload> <Part> <PartNumber>1</PartNumber><ETag>"first_etag"</ETag></Part> <Part> <PartNumber>2</PartNumber><ETag>"second_etag"</ETag></Part> </CompleteMultipartUpload> """: sys.stderr.write('Did not get expected content: %s\n' % content) request.send_response(400) request.send_header('Content-Length', 0) request.end_headers() return request.send_response(200) request.send_header('Content-Length', 0) request.end_headers() handler.add('POST', '/test_bucket/test/foo?uploadId=my_id', custom_method=method) with gdaltest.config_option('VSIS3_SIMULATE_THREADING', 'YES'): with webserver.install_http_handler(handler): assert gdal.Sync('/vsimem/test', '/vsis3/test_bucket', options=['NUM_THREADS=1', 
'CHUNK_SIZE=3'], callback=cbk, callback_data=tab) assert tab[0] == 1.0 gdal.RmdirRecursive('/vsimem/test') def test_vsis3_fake_sync_multithreaded_upload_chunk_size_failure(): if gdaltest.webserver_port == 0: pytest.skip() gdal.VSICurlClearCache() gdal.Mkdir('/vsimem/test', 0) gdal.FileFromMemBuffer('/vsimem/test/foo', 'foo\n') handler = webserver.SequentialHandler() handler.add('GET', '/test_bucket/?prefix=test%2F', 200) handler.add('GET', '/test_bucket/test', 404) handler.add('GET', '/test_bucket/?delimiter=%2F&max-keys=100&prefix=test%2F', 200) handler.add('GET', '/test_bucket/', 200) handler.add('GET', '/test_bucket/test/', 404) handler.add('PUT', '/test_bucket/test/', 200) def method(request): request.protocol_version = 'HTTP/1.1' response = '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>' request.send_response(200) request.send_header('Content-type', 'application/xml') request.send_header('Content-Length', len(response)) request.end_headers() request.wfile.write(response.encode('ascii')) handler.add('POST', '/test_bucket/test/foo?uploads', custom_method=method) def method(request): if request.headers['Content-Length'] != '3': sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers)) request.send_response(400) request.send_header('Content-Length', 0) request.end_headers() return request.send_response(200) request.send_header('ETag', '"first_etag"') request.send_header('Content-Length', 0) request.end_headers() handler.add('PUT', '/test_bucket/test/foo?partNumber=1&uploadId=my_id', 400) handler.add('DELETE', '/test_bucket/test/foo?uploadId=my_id', 204) with gdaltest.config_options({'VSIS3_SIMULATE_THREADING': 'YES', 'VSIS3_SYNC_MULTITHREADING': 'NO'}): with webserver.install_http_handler(handler): with gdaltest.error_handler(): assert not gdal.Sync('/vsimem/test', '/vsis3/test_bucket', options=['NUM_THREADS=1', 'CHUNK_SIZE=3']) gdal.RmdirRecursive('/vsimem/test') ############################################################################### # Test reading/writing metadata def test_vsis3_metadata(): if gdaltest.webserver_port == 0: pytest.skip() gdal.VSICurlClearCache() # Read HEADERS domain handler = webserver.SequentialHandler() handler.add('GET', '/test_metadata/foo.txt', 200, {'foo': 'bar'}) with webserver.install_http_handler(handler): md = gdal.GetFileMetadata('/vsis3/test_metadata/foo.txt', 'HEADERS') assert 'foo' in md and md['foo'] == 'bar' # Read TAGS domain handler = webserver.SequentialHandler() handler.add('GET', '/test_metadata/foo.txt?tagging', 200, {}, """<Tagging><TagSet><Tag><Key>foo</Key><Value>bar</Value></Tag></TagSet></Tagging>""") with webserver.install_http_handler(handler): md = gdal.GetFileMetadata('/vsis3/test_metadata/foo.txt', 'TAGS') assert 'foo' in md and md['foo'] == 'bar' # Write HEADERS domain handler = webserver.SequentialHandler() def method(request): if request.headers['foo'] != 'bar': sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers)) request.send_response(400) request.send_header('Content-Length', 0) request.end_headers() return request.send_response(200) request.end_headers() handler.add('PUT', '/test_metadata/foo.txt', custom_method=method) with webserver.install_http_handler(handler): assert gdal.SetFileMetadata('/vsis3/test_metadata/foo.txt', {'foo': 'bar'}, 'HEADERS') # Write TAGS domain handler = webserver.SequentialHandler() def method(request): request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii')) 
content = request.rfile.read(int(request.headers['Content-Length'])).decode('ascii') if content != """<?xml version="1.0" encoding="UTF-8"?> <Tagging xmlns="http://s3.amazonaws.com/doc/2006-03-01/"> <TagSet> <Tag> <Key>foo</Key> <Value>bar</Value> </Tag> </TagSet> </Tagging> """: sys.stderr.write('Did not get expected content: %s\n' % content) request.send_response(400) request.send_header('Content-Length', 0) request.end_headers() return request.send_response(200) request.send_header('Content-Length', 0) request.end_headers() handler.add('PUT', '/test_metadata/foo.txt?tagging', custom_method=method) with webserver.install_http_handler(handler): assert gdal.SetFileMetadata('/vsis3/test_metadata/foo.txt', {'foo': 'bar'}, 'TAGS') # Write TAGS domain (wiping tags) handler = webserver.SequentialHandler() handler.add('DELETE', '/test_metadata/foo.txt?tagging', 204) with webserver.install_http_handler(handler): assert gdal.SetFileMetadata('/vsis3/test_metadata/foo.txt', {}, 'TAGS') # Error case with gdaltest.error_handler(): assert gdal.GetFileMetadata('/vsis3/test_metadata/foo.txt', 'UNSUPPORTED') == {} # Error case with gdaltest.error_handler(): assert not gdal.SetFileMetadata('/vsis3/test_metadata/foo.txt', {}, 'UNSUPPORTED') ############################################################################### # Test that we take into account directory listing to avoid useless # requests def test_vsis3_no_useless_requests(): if gdaltest.webserver_port == 0: pytest.skip() gdal.VSICurlClearCache() handler = webserver.SequentialHandler() handler.add('GET', '/no_useless_requests/?delimiter=%2F', 200, {'Content-type': 'application/xml'}, """<?xml version="1.0" encoding="UTF-8"?> <ListBucketResult> <Prefix></Prefix> <Contents> </Contents> </ListBucketResult> """) with webserver.install_http_handler(handler): assert gdal.VSIFOpenL('/vsis3/no_useless_requests/foo.txt', 'rb') is None assert gdal.VSIFOpenL('/vsis3/no_useless_requests/bar.txt', 'rb') is None assert gdal.VSIStatL('/vsis3/no_useless_requests/baz.txt') is None ############################################################################### # Test w+ access def test_vsis3_random_write(): if gdaltest.webserver_port == 0: pytest.skip() gdal.VSICurlClearCache() with gdaltest.error_handler(): assert gdal.VSIFOpenL('/vsis3/random_write/test.bin', 'w+b') is None with gdaltest.config_option('CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE', 'YES'): f = gdal.VSIFOpenL('/vsis3/random_write/test.bin', 'w+b') assert f assert gdal.VSIFWriteL('foo', 3, 1, f) == 1 assert gdal.VSIFSeekL(f, 0, 0) == 0 assert gdal.VSIFReadL(3, 1, f).decode('ascii') == 'foo' assert gdal.VSIFEofL(f) == 0 assert gdal.VSIFTellL(f) == 3 handler = webserver.SequentialHandler() handler.add('PUT', '/random_write/test.bin', 200, {}, expected_body=b'foo') with webserver.install_http_handler(handler): assert gdal.VSIFCloseL(f) == 0 ############################################################################### # Test w+ access def test_vsis3_random_write_failure_1(): if gdaltest.webserver_port == 0: pytest.skip() gdal.VSICurlClearCache() with gdaltest.config_option('CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE', 'YES'): f = gdal.VSIFOpenL('/vsis3/random_write/test.bin', 'w+b') assert f handler = webserver.SequentialHandler() handler.add('PUT', '/random_write/test.bin', 400, {}) with webserver.install_http_handler(handler): with gdaltest.error_handler(): assert gdal.VSIFCloseL(f) != 0 ############################################################################### # Test w+ access def 
test_vsis3_random_write_failure_2(): if gdaltest.webserver_port == 0: pytest.skip() gdal.VSICurlClearCache() with gdaltest.config_option('CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE', 'YES'): with gdaltest.config_option('VSIS3_CHUNK_SIZE_BYTES', '1'): f = gdal.VSIFOpenL('/vsis3/random_write/test.bin', 'w+b') assert f assert gdal.VSIFWriteL('foo', 3, 1, f) == 1 handler = webserver.SequentialHandler() handler.add('POST', '/random_write/test.bin?uploads', 400, {}) with webserver.install_http_handler(handler): with gdaltest.error_handler(): assert gdal.VSIFCloseL(f) != 0 ############################################################################### # Test w+ access def test_vsis3_random_write_gtiff_create_copy(): if gdaltest.webserver_port == 0: pytest.skip() gdal.VSICurlClearCache() handler = webserver.SequentialHandler() handler.add('GET', '/random_write/test.tif', 404, {}) handler.add('GET', '/random_write/?delimiter=%2F&max-keys=100&prefix=test.tif%2F', 404, {}) handler.add('GET', '/random_write/?delimiter=%2F', 404, {}) src_ds = gdal.Open('data/byte.tif') with gdaltest.config_option('CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE', 'YES'): with webserver.install_http_handler(handler): ds = gdal.GetDriverByName('GTiff').CreateCopy('/vsis3/random_write/test.tif', src_ds) assert ds is not None handler = webserver.SequentialHandler() handler.add('PUT', '/random_write/test.tif', 200, {}) with webserver.install_http_handler(handler): ds = None ############################################################################### # Read credentials from simulated ~/.aws/credentials def test_vsis3_read_credentials_file(): if gdaltest.webserver_port == 0: pytest.skip() gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '') gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '') gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '/vsimem/aws_credentials') gdal.VSICurlClearCache() gdal.FileFromMemBuffer('/vsimem/aws_credentials', """ [unrelated] aws_access_key_id = foo aws_secret_access_key = bar [default] aws_access_key_id = AWS_ACCESS_KEY_ID aws_secret_access_key = AWS_SECRET_ACCESS_KEY [unrelated] aws_access_key_id = foo aws_secret_access_key = bar """) handler = webserver.SequentialHandler() handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method) with webserver.install_http_handler(handler): f = open_for_read('/vsis3/s3_fake_bucket/resource') assert f is not None data = gdal.VSIFReadL(1, 4, f).decode('ascii') gdal.VSIFCloseL(f) assert data == 'foo' gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '') gdal.Unlink('/vsimem/aws_credentials') ############################################################################### # Read credentials from simulated ~/.aws/config def test_vsis3_read_config_file(): if gdaltest.webserver_port == 0: pytest.skip() gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '') gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '') gdal.SetConfigOption('AWS_CONFIG_FILE', '/vsimem/aws_config') gdal.VSICurlClearCache() gdal.FileFromMemBuffer('/vsimem/aws_config', """ [unrelated] aws_access_key_id = foo aws_secret_access_key = bar [default] aws_access_key_id = AWS_ACCESS_KEY_ID aws_secret_access_key = AWS_SECRET_ACCESS_KEY region = us-east-1 [unrelated] aws_access_key_id = foo aws_secret_access_key = bar """) handler = webserver.SequentialHandler() handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method) with webserver.install_http_handler(handler): f = open_for_read('/vsis3/s3_fake_bucket/resource') assert f is not None data = gdal.VSIFReadL(1, 
4, f).decode('ascii') gdal.VSIFCloseL(f) assert data == 'foo' gdal.SetConfigOption('AWS_CONFIG_FILE', '') gdal.Unlink('/vsimem/aws_config') ############################################################################### # Read credentials from simulated ~/.aws/credentials and ~/.aws/config def test_vsis3_read_credentials_config_file(): if gdaltest.webserver_port == 0: pytest.skip() gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '') gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '') gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '/vsimem/aws_credentials') gdal.SetConfigOption('AWS_CONFIG_FILE', '/vsimem/aws_config') gdal.VSICurlClearCache() gdal.FileFromMemBuffer('/vsimem/aws_credentials', """ [unrelated] aws_access_key_id = foo aws_secret_access_key = bar [default] aws_access_key_id = AWS_ACCESS_KEY_ID aws_secret_access_key = AWS_SECRET_ACCESS_KEY [unrelated] aws_access_key_id = foo aws_secret_access_key = bar """) gdal.FileFromMemBuffer('/vsimem/aws_config', """ [unrelated] aws_access_key_id = foo aws_secret_access_key = bar [default] aws_access_key_id = AWS_ACCESS_KEY_ID aws_secret_access_key = AWS_SECRET_ACCESS_KEY region = us-east-1 [unrelated] aws_access_key_id = foo aws_secret_access_key = bar """) handler = webserver.SequentialHandler() handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method) with webserver.install_http_handler(handler): f = open_for_read('/vsis3/s3_fake_bucket/resource') assert f is not None data = gdal.VSIFReadL(1, 4, f).decode('ascii') gdal.VSIFCloseL(f) assert data == 'foo' gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '') gdal.Unlink('/vsimem/aws_credentials') gdal.SetConfigOption('AWS_CONFIG_FILE', '') gdal.Unlink('/vsimem/aws_config') ############################################################################### # Read credentials from simulated ~/.aws/credentials and ~/.aws/config with # a non default profile def test_vsis3_read_credentials_config_file_non_default_profile(tmpdir): if gdaltest.webserver_port == 0: pytest.skip() gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '') gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '') gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', None) gdal.SetConfigOption('AWS_CONFIG_FILE', None) gdal.SetConfigOption('AWS_PROFILE', 'myprofile') os_aws = tmpdir.mkdir(".aws") gdal.VSICurlClearCache() os_aws.join('credentials').write(""" [unrelated] aws_access_key_id = foo aws_secret_access_key = bar [myprofile] aws_access_key_id = AWS_ACCESS_KEY_ID aws_secret_access_key = AWS_SECRET_ACCESS_KEY [default] aws_access_key_id = foo aws_secret_access_key = bar """) os_aws.join('config').write(""" [unrelated] aws_access_key_id = foo aws_secret_access_key = bar [profile myprofile] region = us-east-1 [default] aws_access_key_id = foo aws_secret_access_key = bar """) handler = webserver.SequentialHandler() handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method) with webserver.install_http_handler(handler): with gdaltest.config_option( 'USERPROFILE' if sys.platform == 'win32' else 'HOME', str(tmpdir) ): f = open_for_read('/vsis3/s3_fake_bucket/resource') assert f is not None data = gdal.VSIFReadL(1, 4, f).decode('ascii') gdal.VSIFCloseL(f) assert data == 'foo' gdal.SetConfigOption('AWS_PROFILE', '') ############################################################################### # Read credentials from simulated ~/.aws/credentials and ~/.aws/config def test_vsis3_read_credentials_config_file_inconsistent(): if gdaltest.webserver_port == 0: pytest.skip() 
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '') gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '') gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '/vsimem/aws_credentials') gdal.SetConfigOption('AWS_CONFIG_FILE', '/vsimem/aws_config') gdal.VSICurlClearCache() gdal.FileFromMemBuffer('/vsimem/aws_credentials', """ [unrelated] aws_access_key_id = foo aws_secret_access_key = bar [default] aws_access_key_id = AWS_ACCESS_KEY_ID aws_secret_access_key = AWS_SECRET_ACCESS_KEY [unrelated] aws_access_key_id = foo aws_secret_access_key = bar """) gdal.FileFromMemBuffer('/vsimem/aws_config', """ [unrelated] aws_access_key_id = foo aws_secret_access_key = bar [default] aws_access_key_id = AWS_ACCESS_KEY_ID_inconsistent aws_secret_access_key = AWS_SECRET_ACCESS_KEY_inconsistent region = us-east-1 [unrelated] aws_access_key_id = foo aws_secret_access_key = bar """) gdal.ErrorReset() handler = webserver.SequentialHandler() handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method) with webserver.install_http_handler(handler): with gdaltest.error_handler(): f = open_for_read('/vsis3/s3_fake_bucket/resource') assert f is not None assert gdal.GetLastErrorMsg() != '' data = gdal.VSIFReadL(1, 4, f).decode('ascii') gdal.VSIFCloseL(f) assert data == 'foo' gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '') gdal.Unlink('/vsimem/aws_credentials') gdal.SetConfigOption('AWS_CONFIG_FILE', '') gdal.Unlink('/vsimem/aws_config') ############################################################################### # Read credentials from simulated EC2 instance def test_vsis3_read_credentials_ec2_imdsv2(): if gdaltest.webserver_port == 0: pytest.skip() if sys.platform not in ('linux', 'linux2', 'win32'): pytest.skip() gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '') gdal.SetConfigOption('AWS_CONFIG_FILE', '') gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '') gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '') gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', 'http://localhost:%d' % gdaltest.webserver_port) # Disable hypervisor related check to test if we are really on EC2 gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', 'NO') gdal.VSICurlClearCache() handler = webserver.SequentialHandler() handler.add('PUT', '/latest/api/token', 200, {}, 'mytoken', expected_headers={'X-aws-ec2-metadata-token-ttl-seconds': '10'}) handler.add('GET', '/latest/meta-data/iam/security-credentials/', 200, {}, 'myprofile', expected_headers={'X-aws-ec2-metadata-token': 'mytoken'}) handler.add('GET', '/latest/meta-data/iam/security-credentials/myprofile', 200, {}, """{ "AccessKeyId": "AWS_ACCESS_KEY_ID", "SecretAccessKey": "AWS_SECRET_ACCESS_KEY", "Expiration": "3000-01-01T00:00:00Z" }""", expected_headers={'X-aws-ec2-metadata-token': 'mytoken'}) handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method) with webserver.install_http_handler(handler): f = open_for_read('/vsis3/s3_fake_bucket/resource') assert f is not None data = gdal.VSIFReadL(1, 4, f).decode('ascii') gdal.VSIFCloseL(f) assert data == 'foo' # Set a fake URL to check that credentials re-use works gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', '') handler = webserver.SequentialHandler() handler.add('GET', '/s3_fake_bucket/bar', 200, {}, 'bar') with webserver.install_http_handler(handler): f = open_for_read('/vsis3/s3_fake_bucket/bar') assert f is not None data = gdal.VSIFReadL(1, 4, f).decode('ascii') gdal.VSIFCloseL(f) assert data == 'bar' gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', '') 
gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', None) ############################################################################### # Read credentials from simulated EC2 instance that only supports IMDSv1 def test_vsis3_read_credentials_ec2_imdsv1(): if gdaltest.webserver_port == 0: pytest.skip() if sys.platform not in ('linux', 'linux2', 'win32'): pytest.skip() gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '') gdal.SetConfigOption('AWS_CONFIG_FILE', '') gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '') gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '') gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', 'http://localhost:%d' % gdaltest.webserver_port) # Disable hypervisor related check to test if we are really on EC2 gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', 'NO') gdal.VSICurlClearCache() handler = webserver.SequentialHandler() handler.add('PUT', '/latest/api/token', 403, {}, expected_headers={'X-aws-ec2-metadata-token-ttl-seconds': '10'}) handler.add('GET', '/latest/meta-data/iam/security-credentials/', 200, {}, 'myprofile', unexpected_headers=['X-aws-ec2-metadata-token']) handler.add('GET', '/latest/meta-data/iam/security-credentials/myprofile', 200, {}, """{ "AccessKeyId": "AWS_ACCESS_KEY_ID", "SecretAccessKey": "AWS_SECRET_ACCESS_KEY", "Expiration": "3000-01-01T00:00:00Z" }""", unexpected_headers=['X-aws-ec2-metadata-token']) handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method) with webserver.install_http_handler(handler): f = open_for_read('/vsis3/s3_fake_bucket/resource') assert f is not None data = gdal.VSIFReadL(1, 4, f).decode('ascii') gdal.VSIFCloseL(f) assert data == 'foo' gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', '') gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', None) ############################################################################### # Read credentials from simulated EC2 instance with expiration of the # cached credentials def test_vsis3_read_credentials_ec2_expiration(): if gdaltest.webserver_port == 0: pytest.skip() if sys.platform not in ('linux', 'linux2', 'win32'): pytest.skip() gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '') gdal.SetConfigOption('AWS_CONFIG_FILE', '') gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '') gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '') gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', 'http://localhost:%d' % gdaltest.webserver_port) # Disable hypervisor related check to test if we are really on EC2 gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', 'NO') gdal.VSICurlClearCache() handler = webserver.SequentialHandler() handler.add('PUT', '/latest/api/token', 200, {}, 'mytoken', expected_headers={'X-aws-ec2-metadata-token-ttl-seconds': '10'}) handler.add('GET', '/latest/meta-data/iam/security-credentials/', 200, {}, 'myprofile', expected_headers={'X-aws-ec2-metadata-token': 'mytoken'}) handler.add('GET', '/latest/meta-data/iam/security-credentials/myprofile', 200, {}, """{ "AccessKeyId": "AWS_ACCESS_KEY_ID", "SecretAccessKey": "AWS_SECRET_ACCESS_KEY", "Expiration": "1970-01-01T00:00:00Z" }""", expected_headers={'X-aws-ec2-metadata-token': 'mytoken'}) handler.add('PUT', '/latest/api/token', 200, {}, 'mytoken2', expected_headers={'X-aws-ec2-metadata-token-ttl-seconds': '10'}) handler.add('GET', '/latest/meta-data/iam/security-credentials/myprofile', 200, {}, """{ "AccessKeyId": "AWS_ACCESS_KEY_ID", "SecretAccessKey": "AWS_SECRET_ACCESS_KEY", "Expiration": "1970-01-01T00:00:00Z" }""", expected_headers={'X-aws-ec2-metadata-token': 'mytoken2'}) handler.add('GET', 
'/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method) with webserver.install_http_handler(handler): f = open_for_read('/vsis3/s3_fake_bucket/resource') assert f is not None data = gdal.VSIFReadL(1, 4, f).decode('ascii') gdal.VSIFCloseL(f) assert data == 'foo' # Set a fake URL to demonstrate we try to re-fetch credentials gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', 'http://localhost:%d/invalid' % gdaltest.webserver_port) handler = webserver.SequentialHandler() handler.add('PUT', '/invalid/latest/api/token', 404) handler.add('GET', '/invalid/latest/meta-data/iam/security-credentials/myprofile', 404) with webserver.install_http_handler(handler): with gdaltest.error_handler(): f = open_for_read('/vsis3/s3_fake_bucket/bar') assert f is None gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', '') gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', None) ############################################################################### def test_vsis3_stop_webserver(): if gdaltest.webserver_port == 0: pytest.skip() # Clearcache needed to close all connections, since the Python server # can only handle one connection at a time gdal.VSICurlClearCache() webserver.server_stop(gdaltest.webserver_process, gdaltest.webserver_port) ############################################################################### # Nominal cases (require valid credentials) def test_vsis3_extra_1(): if not gdaltest.built_against_curl(): pytest.skip() credentials_filename = gdal.GetConfigOption('HOME', gdal.GetConfigOption('USERPROFILE', '')) + '/.aws/credentials' # Either a bucket name or bucket/filename s3_resource = gdal.GetConfigOption('S3_RESOURCE') if not os.path.exists(credentials_filename): if gdal.GetConfigOption('AWS_SECRET_ACCESS_KEY') is None: pytest.skip('Missing AWS_SECRET_ACCESS_KEY') elif gdal.GetConfigOption('AWS_ACCESS_KEY_ID') is None: pytest.skip('Missing AWS_ACCESS_KEY_ID') if s3_resource is None: pytest.skip('Missing S3_RESOURCE') if '/' not in s3_resource: path = '/vsis3/' + s3_resource statres = gdal.VSIStatL(path) assert statres is not None and stat.S_ISDIR(statres.mode), \ ('%s is not a valid bucket' % path) readdir = gdal.ReadDir(path) assert readdir is not None, 'ReadDir() should not return empty list' for filename in readdir: if filename != '.': subpath = path + '/' + filename assert gdal.VSIStatL(subpath) is not None, \ ('Stat(%s) should not return an error' % subpath) unique_id = 'vsis3_test' subpath = path + '/' + unique_id ret = gdal.Mkdir(subpath, 0) assert ret >= 0, ('Mkdir(%s) should not return an error' % subpath) readdir = gdal.ReadDir(path) assert unique_id in readdir, \ ('ReadDir(%s) should contain %s' % (path, unique_id)) ret = gdal.Mkdir(subpath, 0) assert ret != 0, ('Mkdir(%s) repeated should return an error' % subpath) ret = gdal.Rmdir(subpath) assert ret >= 0, ('Rmdir(%s) should not return an error' % subpath) readdir = gdal.ReadDir(path) assert unique_id not in readdir, \ ('ReadDir(%s) should not contain %s' % (path, unique_id)) ret = gdal.Rmdir(subpath) assert ret != 0, ('Rmdir(%s) repeated should return an error' % subpath) ret = gdal.Mkdir(subpath, 0) assert ret >= 0, ('Mkdir(%s) should not return an error' % subpath) f = gdal.VSIFOpenL(subpath + '/test.txt', 'wb') assert f is not None gdal.VSIFWriteL('hello', 1, 5, f) gdal.VSIFCloseL(f) ret = gdal.Rmdir(subpath) assert ret != 0, \ ('Rmdir(%s) on non empty directory should return an error' % subpath) f = gdal.VSIFOpenL(subpath + '/test.txt', 'rb') assert f is not None data = gdal.VSIFReadL(1, 5, 
f).decode('utf-8') assert data == 'hello' gdal.VSIFCloseL(f) assert gdal.Rename(subpath + '/test.txt', subpath + '/test2.txt') == 0 f = gdal.VSIFOpenL(subpath + '/test2.txt', 'rb') assert f is not None data = gdal.VSIFReadL(1, 5, f).decode('utf-8') assert data == 'hello' gdal.VSIFCloseL(f) ret = gdal.Unlink(subpath + '/test2.txt') assert ret >= 0, \ ('Unlink(%s) should not return an error' % (subpath + '/test2.txt')) ret = gdal.Rmdir(subpath) assert ret >= 0, ('Rmdir(%s) should not return an error' % subpath) return f = open_for_read('/vsis3/' + s3_resource) assert f is not None, ('cannot open %s' % ('/vsis3/' + s3_resource)) ret = gdal.VSIFReadL(1, 1, f) gdal.VSIFCloseL(f) assert len(ret) == 1 # Same with /vsis3_streaming/ f = open_for_read('/vsis3_streaming/' + s3_resource) assert f is not None ret = gdal.VSIFReadL(1, 1, f) gdal.VSIFCloseL(f) assert len(ret) == 1 if False: # pylint: disable=using-constant-test # we actually try to read at read() time and bSetError = false # Invalid bucket : "The specified bucket does not exist" gdal.ErrorReset() f = open_for_read('/vsis3/not_existing_bucket/foo') with gdaltest.error_handler(): gdal.VSIFReadL(1, 1, f) gdal.VSIFCloseL(f) assert gdal.VSIGetLastErrorMsg() != '' # Invalid resource gdal.ErrorReset() f = open_for_read('/vsis3_streaming/' + gdal.GetConfigOption('S3_RESOURCE') + '/invalid_resource.baz') assert f is None, gdal.VSIGetLastErrorMsg() # Test GetSignedURL() signed_url = gdal.GetSignedURL('/vsis3/' + s3_resource) f = open_for_read('/vsicurl_streaming/' + signed_url) assert f is not None ret = gdal.VSIFReadL(1, 1, f) gdal.VSIFCloseL(f) assert len(ret) == 1 ############################################################################### def test_vsis3_cleanup(): for var in gdaltest.aws_vars: gdal.SetConfigOption(var, gdaltest.aws_vars[var]) gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', None) gdal.SetConfigOption('AWS_CONFIG_FILE', None) gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', None)
40.974366
335
0.610749
3,765
0.026465
0
0
0
0
0
0
62,980
0.442701
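The test file above drives gdal.Sync() against a mocked S3 endpoint. Outside the test harness the same call is only a few lines; the sketch below exercises the TIMESTAMP strategy the way test_vsis3_sync_timestamp() does, with a hypothetical bucket name and the assumption that valid AWS credentials are configured for /vsis3/.

import sys
from osgeo import gdal

# Create a small in-memory source file, as the tests above do.
gdal.FileFromMemBuffer('/vsimem/testsync.txt', 'foo')

# SYNC_STRATEGY=TIMESTAMP copies only when the source is newer than the
# target, which is the behaviour the tests above assert.
ok = gdal.Sync('/vsimem/testsync.txt', '/vsis3/my-bucket/',   # bucket name is hypothetical
               options=['SYNC_STRATEGY=TIMESTAMP'])
sys.exit(0 if ok else 1)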
d9f92ab910680bac296e7b003e06e2747df83ea4
882
py
Python
day06/part1.py
bugra-yilmaz/adventofcode2021
136cb1d4fba42af4eea934a73714c93710c8741e
[ "MIT" ]
null
null
null
day06/part1.py
bugra-yilmaz/adventofcode2021
136cb1d4fba42af4eea934a73714c93710c8741e
[ "MIT" ]
null
null
null
day06/part1.py
bugra-yilmaz/adventofcode2021
136cb1d4fba42af4eea934a73714c93710c8741e
[ "MIT" ]
null
null
null
import os.path
from collections import Counter

import pytest

INPUT_TXT = os.path.join(os.path.dirname(__file__), 'input.txt')


def compute(s: str) -> int:
    lines = s.splitlines()
    numbers = Counter(int(f) for f in lines[0].split(","))
    for d in range(80):
        numbers2 = Counter({8: numbers[0], 6: numbers[0]})
        for k, v in numbers.items():
            if k >= 1:
                numbers2[k - 1] += v
        numbers = numbers2

    return sum(numbers.values())


INPUT_S = '''\
3,4,3,1,2
'''
EXPECTED = 5934


@pytest.mark.parametrize(
    ('input_s', 'expected'),
    (
        (INPUT_S, EXPECTED),
    ),
)
def test(input_s: str, expected: int) -> None:
    assert compute(input_s) == expected


def main() -> int:
    with open(INPUT_TXT, "r") as f:
        print(compute(f.read()))
    return 0


if __name__ == '__main__':
    raise SystemExit(main())
18.765957
64
0.580499
0
0
0
0
185
0.209751
0
0
64
0.072562
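compute() above models the lanternfish school as a Counter keyed by timer value rather than a per-fish list, so each simulated day costs O(9) work regardless of population size. A sketch of the same recurrence with the day count made a parameter (the 256-day case is part 2 of the puzzle); the function name is mine.

from collections import Counter

def simulate(s: str, days: int) -> int:
    timers = Counter(int(f) for f in s.strip().split(','))
    for _ in range(days):
        spawned = timers[0]                      # fish at 0 reset to 6 and each spawns an 8
        nxt = Counter({8: spawned, 6: spawned})
        for k, v in timers.items():
            if k >= 1:
                nxt[k - 1] += v
        timers = nxt
    return sum(timers.values())

assert simulate('3,4,3,1,2', 80) == 5934         # same expectation as the test above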
d9f9595b5ef66170be57096ea8261b3da13883ac
132
py
Python
functional_tests.py
gustavomazevedo/tbackup-client
eb2fdf75eff7abf17c9bce12920de793ba760f61
[ "MIT" ]
null
null
null
functional_tests.py
gustavomazevedo/tbackup-client
eb2fdf75eff7abf17c9bce12920de793ba760f61
[ "MIT" ]
null
null
null
functional_tests.py
gustavomazevedo/tbackup-client
eb2fdf75eff7abf17c9bce12920de793ba760f61
[ "MIT" ]
null
null
null
from selenium import webdriver

browser = webdriver.Firefox()
browser.get('http://localhost:8000')

assert 'Django' in browser.title
22
36
0.780303
0
0
0
0
0
0
0
0
31
0.234848
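The four-line smoke test above never closes the browser it opens, so a failed assertion leaves a Firefox window (and its driver process) behind. A sketch of the same check with explicit cleanup:

from selenium import webdriver

browser = webdriver.Firefox()
try:
    browser.get('http://localhost:8000')
    assert 'Django' in browser.title, 'unexpected title: %r' % browser.title
finally:
    browser.quit()   # always shut the browser down, pass or fail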
d9f9cd4e7a0b73e79eb71d2bdbfa755d69a9cc9d
597
py
Python
examples/first_char_last_column.py
clarkfitzg/sta141c
129704ba0952a4b80f9b093dcfa49f49f37b052d
[ "MIT" ]
24
2019-01-08T20:10:11.000Z
2021-11-26T12:18:58.000Z
examples/first_char_last_column.py
timilchene/sta141c-winter19
129704ba0952a4b80f9b093dcfa49f49f37b052d
[ "MIT" ]
1
2017-06-25T05:35:24.000Z
2017-06-25T05:35:24.000Z
examples/first_char_last_column.py
timilchene/sta141c-winter19
129704ba0952a4b80f9b093dcfa49f49f37b052d
[ "MIT" ]
22
2019-01-08T20:02:15.000Z
2021-12-16T23:27:56.000Z
#!/usr/bin/env python3
"""
For the last column, print only the first character.

Usage:

$ printf "100,200\n0,\n" | python3 first_char_last_column.py

Should print "100,2\n0,"
"""

import csv
from sys import stdin, stdout


def main():
    reader = csv.reader(stdin)
    writer = csv.writer(stdout)

    for row in reader:
        try:
            row[-1] = row[-1][0]
        except IndexError:
            # Python: Better to ask forgiveness than permission
            # Alternative: Look before you leap
            pass
        writer.writerow(row)


if __name__ == "__main__":
    main()
19.258065
64
0.606365
0
0
0
0
0
0
0
0
277
0.463987
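The except block above names the EAFP/LBYL trade-off in passing. For comparison, this is what the "look before you leap" form of the same loop would look like; behaviour is identical for empty rows and empty last fields.

import csv
from sys import stdin, stdout

def main():
    reader = csv.reader(stdin)
    writer = csv.writer(stdout)
    for row in reader:
        # LBYL: test for an empty row / empty last field up front
        # instead of catching the IndexError after the fact.
        if row and row[-1]:
            row[-1] = row[-1][0]
        writer.writerow(row)

if __name__ == "__main__":
    main()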
d9fb744315858b3e553e097f0866c6de49262adf
1,996
py
Python
env_ci.py
reloadware/stickybeak
8ac52a80849a3098fb6b2f47115970a734a73c14
[ "Apache-2.0" ]
null
null
null
env_ci.py
reloadware/stickybeak
8ac52a80849a3098fb6b2f47115970a734a73c14
[ "Apache-2.0" ]
null
null
null
env_ci.py
reloadware/stickybeak
8ac52a80849a3098fb6b2f47115970a734a73c14
[ "Apache-2.0" ]
1
2022-01-01T15:14:42.000Z
2022-01-01T15:14:42.000Z
from pathlib import Path

root = Path(__file__).parent.absolute()

import envo

envo.add_source_roots([root])

from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple

from envo import Env, Namespace, env_var, logger, run

from env_comm import StickybeakCommEnv as ParentEnv

p = Namespace("p")


class StickybeakCiEnv(ParentEnv):
    class Meta(ParentEnv.Meta):
        stage: str = "ci"
        emoji: str = "⚙"
        load_env_vars = True

    class Environ(ParentEnv.Environ):
        pypi_username: Optional[str] = env_var(raw=True)
        pypi_password: Optional[str] = env_var(raw=True)

    e: Environ

    def init(self) -> None:
        super().init()

    @p.command
    def bootstrap(self, test_apps=True) -> None:
        super().bootstrap(test_apps)

    @p.command
    def test(self) -> None:
        run("pytest --reruns 2 -v tests")

    @p.command
    def build(self) -> None:
        run("poetry build")

    @p.command
    def publish(self) -> None:
        run(f'poetry publish --username "{self.e.pypi_username}" --password "{self.e.pypi_password}"', verbose=False)

    @p.command
    def rstcheck(self) -> None:
        pass
        # run("rstcheck README.rst | tee ./workspace/rstcheck.txt")

    @p.command
    def flake(self) -> None:
        pass
        # run("flake8 . | tee ./workspace/flake8.txt")

    @p.command
    def check_black(self) -> None:
        run("black --check .")

    @p.command
    def check_isort(self) -> None:
        run("black --check .")

    @p.command
    def mypy(self) -> None:
        pass
        run("mypy .")

    @p.command
    def generate_version(self) -> None:
        import toml

        config = toml.load(str(self.meta.root / "pyproject.toml"))
        version: str = config["tool"]["poetry"]["version"]

        version_file = self.meta.root / "stickybeak/__version__.py"
        Path(version_file).touch()

        version_file.write_text(f'__version__ = "{version}"\n')


ThisEnv = StickybeakCiEnv
22.942529
117
0.613727
1,651
0.826326
0
0
1,223
0.612112
0
0
386
0.193193
d9fb745b63e853aa5e221b1f87db67c0723efc2d
394
py
Python
zmq_srv.py
iyedb/boost_asio_zeromq
63110c18540c8303ac29d574f25cba234a00a22d
[ "MIT" ]
4
2015-04-07T06:00:34.000Z
2019-09-10T01:45:41.000Z
zmq_srv.py
iyedb/boost_asio_zeromq
63110c18540c8303ac29d574f25cba234a00a22d
[ "MIT" ]
null
null
null
zmq_srv.py
iyedb/boost_asio_zeromq
63110c18540c8303ac29d574f25cba234a00a22d
[ "MIT" ]
3
2015-06-30T07:37:41.000Z
2019-09-10T01:45:47.000Z
from __future__ import print_function
import zmq
import time

ADDR = 'tcp://127.0.0.1:11155'

ctx = zmq.Context()
srv = ctx.socket(zmq.REP)
srv.bind(ADDR)
#srv.setsockopt(zmq.RCVTIMEO, 3000);

while True:
    try:
        msg = srv.recv()
    except Exception as e:
        print('zmq socket recv timed out:', e)
    else:
        print('client says: %s' % msg)
        srv.send('hi from server')
    time.sleep(2)
17.130435
41
0.659898
0
0
0
0
0
0
0
0
119
0.30203
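The REP server above blocks in recv() until a REQ peer talks to it. A minimal matching client, assuming the same address hard-coded in zmq_srv.py; the message text is illustrative.

import zmq

ctx = zmq.Context()
req = ctx.socket(zmq.REQ)
req.connect('tcp://127.0.0.1:11155')

req.send(b'hello from client')     # REQ sockets must strictly alternate send/recv
print('server says: %s' % req.recv().decode())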
d9fdf7b2da8d5e9203d4272f61f62e3af6000e66
10,408
py
Python
mypy/server/aststrip.py
mmaryada27/mypy
39103273d705fe45a55c4879779a0d5567f01876
[ "PSF-2.0" ]
null
null
null
mypy/server/aststrip.py
mmaryada27/mypy
39103273d705fe45a55c4879779a0d5567f01876
[ "PSF-2.0" ]
null
null
null
mypy/server/aststrip.py
mmaryada27/mypy
39103273d705fe45a55c4879779a0d5567f01876
[ "PSF-2.0" ]
null
null
null
"""Strip/reset AST in-place to match state after semantic analysis pass 1. Fine-grained incremental mode reruns semantic analysis (passes 2 and 3) and type checking for *existing* AST nodes (targets) when changes are propagated using fine-grained dependencies. AST nodes attributes are often changed during semantic analysis passes 2 and 3, and running semantic analysis again on those nodes would produce incorrect results, since these passes aren't idempotent. This pass resets AST nodes to reflect the state after semantic analysis pass 1, so that we can rerun semantic analysis. (The above is in contrast to behavior with modules that have source code changes, for which we reparse the entire module and reconstruct a fresh AST. No stripping is required in this case. Both modes of operation should have the same outcome.) Notes: * This is currently pretty fragile, as we must carefully undo whatever changes can be made in semantic analysis passes 2 and 3, including changes to symbol tables. * We reuse existing AST nodes because it makes it relatively straightforward to reprocess only a single target within a module efficiently. If there was a way to parse a single target within a file, in time proportional to the size of the target, we'd rather create fresh AST nodes than strip them. Alas, no such facility exists and building it is non-trivial. * Currently we don't actually reset all changes, but only those known to affect non-idempotent semantic analysis behavior. TODO: It would be more principled and less fragile to reset everything changed in semantic analysis pass 2 and later. * Reprocessing may recreate AST nodes (such as Var nodes, and TypeInfo nodes created with assignment statements) that will get different identities from the original AST. Thus running an AST merge is necessary after stripping, even though some identities are preserved. """ import contextlib from typing import Union, Iterator, Optional from mypy.nodes import ( Node, FuncDef, NameExpr, MemberExpr, RefExpr, MypyFile, FuncItem, ClassDef, AssignmentStmt, ImportFrom, Import, TypeInfo, SymbolTable, Var, CallExpr, Decorator, OverloadedFuncDef, SuperExpr, UNBOUND_IMPORTED, GDEF, MDEF, IndexExpr ) from mypy.traverser import TraverserVisitor def strip_target(node: Union[MypyFile, FuncItem, OverloadedFuncDef]) -> None: """Reset a fine-grained incremental target to state after semantic analysis pass 1. NOTE: Currently we opportunistically only reset changes that are known to otherwise cause trouble. """ visitor = NodeStripVisitor() if isinstance(node, MypyFile): visitor.strip_file_top_level(node) else: node.accept(visitor) class NodeStripVisitor(TraverserVisitor): def __init__(self) -> None: self.type = None # type: Optional[TypeInfo] self.names = None # type: Optional[SymbolTable] self.is_class_body = False # By default, process function definitions. If False, don't -- this is used for # processing module top levels. 
self.recurse_into_functions = True def strip_file_top_level(self, file_node: MypyFile) -> None: """Strip a module top-level (don't recursive into functions).""" self.names = file_node.names self.recurse_into_functions = False file_node.accept(self) def visit_class_def(self, node: ClassDef) -> None: """Strip class body and type info, but don't strip methods.""" node.info.type_vars = [] node.info.bases = [] node.info.abstract_attributes = [] node.info.mro = [] node.info.add_type_vars() node.info.tuple_type = None node.info.typeddict_type = None node.info._cache = set() node.info._cache_proper = set() node.base_type_exprs.extend(node.removed_base_type_exprs) node.removed_base_type_exprs = [] with self.enter_class(node.info): super().visit_class_def(node) def visit_func_def(self, node: FuncDef) -> None: if not self.recurse_into_functions: return node.expanded = [] node.type = node.unanalyzed_type with self.enter_method(node.info) if node.info else nothing(): super().visit_func_def(node) def visit_decorator(self, node: Decorator) -> None: node.var.type = None for expr in node.decorators: expr.accept(self) if self.recurse_into_functions: node.func.accept(self) def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> None: if not self.recurse_into_functions: return if node.impl: # Revert change made during semantic analysis pass 2. assert node.items[-1] is not node.impl node.items.append(node.impl) super().visit_overloaded_func_def(node) @contextlib.contextmanager def enter_class(self, info: TypeInfo) -> Iterator[None]: # TODO: Update and restore self.names old_type = self.type old_is_class_body = self.is_class_body self.type = info self.is_class_body = True yield self.type = old_type self.is_class_body = old_is_class_body @contextlib.contextmanager def enter_method(self, info: TypeInfo) -> Iterator[None]: # TODO: Update and restore self.names old_type = self.type old_is_class_body = self.is_class_body self.type = info self.is_class_body = False yield self.type = old_type self.is_class_body = old_is_class_body def visit_assignment_stmt(self, node: AssignmentStmt) -> None: node.type = node.unanalyzed_type if self.type and not self.is_class_body: # TODO: Handle multiple assignment if len(node.lvalues) == 1: lvalue = node.lvalues[0] if isinstance(lvalue, MemberExpr) and lvalue.is_new_def: # Remove defined attribute from the class symbol table. If is_new_def is # true for a MemberExpr, we know that it must be an assignment through # self, since only those can define new attributes. del self.type.names[lvalue.name] super().visit_assignment_stmt(node) def visit_import_from(self, node: ImportFrom) -> None: if node.assignments: node.assignments = [] else: if self.names: # Reset entries in the symbol table. This is necessary since # otherwise the semantic analyzer will think that the import # assigns to an existing name instead of defining a new one. for name, as_name in node.names: imported_name = as_name or name symnode = self.names[imported_name] symnode.kind = UNBOUND_IMPORTED symnode.node = None def visit_import(self, node: Import) -> None: if node.assignments: node.assignments = [] else: if self.names: # Reset entries in the symbol table. This is necessary since # otherwise the semantic analyzer will think that the import # assigns to an existing name instead of defining a new one. 
for name, as_name in node.ids: imported_name = as_name or name initial = imported_name.split('.')[0] symnode = self.names[initial] symnode.kind = UNBOUND_IMPORTED symnode.node = None def visit_name_expr(self, node: NameExpr) -> None: # Global assignments are processed in semantic analysis pass 1, and we # only want to strip changes made in passes 2 or later. if not (node.kind == GDEF and node.is_new_def): # Remove defined attributes so that they can recreated during semantic analysis. if node.kind == MDEF and node.is_new_def: self.strip_class_attr(node.name) self.strip_ref_expr(node) def visit_member_expr(self, node: MemberExpr) -> None: self.strip_ref_expr(node) # These need to cleared for member expressions but not for other RefExprs since # these can change based on changed in a base class. node.is_new_def = False node.is_inferred_def = False if self.is_duplicate_attribute_def(node): # This is marked as an instance variable definition but a base class # defines an attribute with the same name, and we can't have # multiple definitions for an attribute. Defer to the base class # definition. self.strip_class_attr(node.name) node.def_var = None super().visit_member_expr(node) def visit_index_expr(self, node: IndexExpr) -> None: node.analyzed = None # was a type alias super().visit_index_expr(node) def strip_class_attr(self, name: str) -> None: if self.type is not None: del self.type.names[name] def is_duplicate_attribute_def(self, node: MemberExpr) -> bool: if not node.is_inferred_def: return False assert self.type is not None, "Internal error: Member defined outside class" if node.name not in self.type.names: return False return any(info.get(node.name) is not None for info in self.type.mro[1:]) def strip_ref_expr(self, node: RefExpr) -> None: node.kind = None node.node = None node.fullname = None node.is_new_def = False node.is_inferred_def = False def visit_call_expr(self, node: CallExpr) -> None: node.analyzed = None super().visit_call_expr(node) def visit_super_expr(self, node: SuperExpr) -> None: node.info = None super().visit_super_expr(node) # TODO: handle more node types def is_self_member_ref(memberexpr: MemberExpr) -> bool: """Does memberexpr refer to an attribute of self?""" # TODO: Merge with is_self_member_ref in semanal.py. if not isinstance(memberexpr.expr, NameExpr): return False node = memberexpr.expr.node return isinstance(node, Var) and node.is_self @contextlib.contextmanager def nothing() -> Iterator[None]: yield
41.13834
95
0.662952
7,281
0.699558
698
0.067064
787
0.075615
0
0
3,859
0.370772
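aststrip.py operates on mypy's internal AST, so it cannot be demonstrated standalone, but the pattern NodeStripVisitor uses -- walk the tree and reset derived state in place so a later pass can recompute it -- can be illustrated with the standard library's ast module. A small analogy (not mypy code; ast.unparse needs Python 3.9+):

import ast

class AnnotationStripper(ast.NodeTransformer):
    """Walk the tree and strip derived-looking state: here, type annotations."""

    def visit_AnnAssign(self, node: ast.AnnAssign) -> ast.AST:
        if node.value is not None:
            # Keep the assignment itself, discard only the annotation.
            return ast.copy_location(
                ast.Assign(targets=[node.target], value=node.value), node)
        return node

tree = AnnotationStripper().visit(ast.parse('x: int = 1'))
print(ast.unparse(ast.fix_missing_locations(tree)))   # -> x = 1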
d9fe5aa1f8632d451d56260ea6fb9079bd975a31
475
py
Python
bsp/nrf5x/tools/sdk_dist.py
BreederBai/rt-thread
53ed0314982556dfa9c5db75d4f3e02485d16ab5
[ "Apache-2.0" ]
7,482
2015-01-01T09:23:08.000Z
2022-03-31T19:34:05.000Z
bsp/nrf5x/tools/sdk_dist.py
ArdaFu/rt-thread
eebb2561ec166e0016187c7b7998ada4f8212b3a
[ "Apache-2.0" ]
2,543
2015-01-09T02:01:34.000Z
2022-03-31T23:10:14.000Z
bsp/nrf5x/tools/sdk_dist.py
ArdaFu/rt-thread
eebb2561ec166e0016187c7b7998ada4f8212b3a
[ "Apache-2.0" ]
4,645
2015-01-06T07:05:31.000Z
2022-03-31T18:21:50.000Z
import os
import sys
import shutil

cwd_path = os.getcwd()
sys.path.append(os.path.join(os.path.dirname(cwd_path), 'rt-thread', 'tools'))


# BSP dist function
def dist_do_building(BSP_ROOT, dist_dir):
    from mkdist import bsp_copy_files
    import rtconfig

    library_dir = os.path.join(dist_dir, 'libraries')

    print("=> copy nrf52 bsp libraries")
    library_path = os.path.join(os.path.dirname(BSP_ROOT), 'libraries')

    bsp_copy_files(library_path, library_dir)
26.388889
78
0.734737
0
0
0
0
0
0
0
0
88
0.185263
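sdk_dist.py is a hook rather than a standalone script: rt-thread's mkdist machinery imports it and calls dist_do_building() while packaging a BSP. A sketch of that call, with placeholder paths and the assumption that it runs inside an rt-thread BSP tree where mkdist and rtconfig are importable:

import os
from sdk_dist import dist_do_building   # the hook defined above

bsp_root = os.getcwd()                              # placeholder: the BSP directory
dist_dir = os.path.join(bsp_root, 'dist', 'project')  # placeholder: the dist target
dist_do_building(bsp_root, dist_dir)   # copies ../libraries into <dist_dir>/libraries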
d9fe6882b9e62ad1b9764fdded272caab1b5cf79
9,991
py
Python
lib/spack/spack/multimethod.py
kkauder/spack
6ae8d5c380c1f42094b05d38be26b03650aafb39
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
2
2020-09-10T22:50:08.000Z
2021-01-12T22:18:54.000Z
lib/spack/spack/multimethod.py
kkauder/spack
6ae8d5c380c1f42094b05d38be26b03650aafb39
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
17
2019-03-21T15:54:00.000Z
2022-03-29T19:34:28.000Z
lib/spack/spack/multimethod.py
kkauder/spack
6ae8d5c380c1f42094b05d38be26b03650aafb39
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
2
2018-04-06T09:04:11.000Z
2020-01-24T12:52:12.000Z
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) """This module contains utilities for using multi-methods in spack. You can think of multi-methods like overloaded methods -- they're methods with the same name, and we need to select a version of the method based on some criteria. e.g., for overloaded methods, you would select a version of the method to call based on the types of its arguments. In spack, multi-methods are used to ease the life of package authors. They allow methods like install() (or other methods called by install()) to declare multiple versions to be called when the package is instantiated with different specs. e.g., if the package is built with OpenMPI on x86_64,, you might want to call a different install method than if it was built for mpich2 on BlueGene/Q. Likewise, you might want to do a different type of install for different versions of the package. Multi-methods provide a simple decorator-based syntax for this that avoids overly complicated rat nests of if statements. Obviously, depending on the scenario, regular old conditionals might be clearer, so package authors should use their judgement. """ import functools import inspect from llnl.util.lang import caller_locals import spack.architecture import spack.error from spack.spec import Spec class MultiMethodMeta(type): """This allows us to track the class's dict during instantiation.""" #: saved dictionary of attrs on the class being constructed _locals = None @classmethod def __prepare__(cls, name, bases, **kwargs): """Save the dictionary that will be used for the class namespace.""" MultiMethodMeta._locals = dict() return MultiMethodMeta._locals def __init__(cls, name, bases, attr_dict): """Clear out the cached locals dict once the class is built.""" MultiMethodMeta._locals = None super(MultiMethodMeta, cls).__init__(name, bases, attr_dict) class SpecMultiMethod(object): """This implements a multi-method for Spack specs. Packages are instantiated with a particular spec, and you may want to execute different versions of methods based on what the spec looks like. For example, you might want to call a different version of install() for one platform than you call on another. The SpecMultiMethod class implements a callable object that handles method dispatch. When it is called, it looks through registered methods and their associated specs, and it tries to find one that matches the package's spec. If it finds one (and only one), it will call that method. This is intended for use with decorators (see below). The decorator (see docs below) creates SpecMultiMethods and registers method versions with them. To register a method, you can do something like this: mm = SpecMultiMethod() mm.register("^chaos_5_x86_64_ib", some_method) The object registered needs to be a Spec or some string that will parse to be a valid spec. When the mm is actually called, it selects a version of the method to call based on the sys_type of the object it is called on. See the docs for decorators below for more details. 
""" def __init__(self, default=None): self.method_list = [] self.default = default if default: functools.update_wrapper(self, default) def register(self, spec, method): """Register a version of a method for a particular spec.""" self.method_list.append((spec, method)) if not hasattr(self, '__name__'): functools.update_wrapper(self, method) else: assert(self.__name__ == method.__name__) def __get__(self, obj, objtype): """This makes __call__ support instance methods.""" # Method_list is a list of tuples (constraint, method) # Here we are going to assume that we have at least one # element in the list. The first registered function # will be the one 'wrapped'. wrapped_method = self.method_list[0][1] # Call functools.wraps manually to get all the attributes # we need to be disguised as the wrapped_method func = functools.wraps(wrapped_method)( functools.partial(self.__call__, obj) ) return func def _get_method_by_spec(self, spec): """Find the method of this SpecMultiMethod object that satisfies the given spec, if one exists """ for condition, method in self.method_list: if spec.satisfies(condition): return method return self.default or None def __call__(self, package_self, *args, **kwargs): """Find the first method with a spec that matches the package's spec. If none is found, call the default or if there is none, then raise a NoSuchMethodError. """ spec_method = self._get_method_by_spec(package_self.spec) if spec_method: return spec_method(package_self, *args, **kwargs) # Unwrap the MRO of `package_self by hand. Note that we can't # use `super()` here, because using `super()` recursively # requires us to know the class of `package_self`, as well as # its superclasses for successive calls. We don't have that # information within `SpecMultiMethod`, because it is not # associated with the package class. for cls in inspect.getmro(package_self.__class__)[1:]: superself = cls.__dict__.get(self.__name__, None) if isinstance(superself, SpecMultiMethod): # Check parent multimethod for method for spec. superself_method = superself._get_method_by_spec( package_self.spec ) if superself_method: return superself_method(package_self, *args, **kwargs) elif superself: return superself(package_self, *args, **kwargs) raise NoSuchMethodError( type(package_self), self.__name__, package_self.spec, [m[0] for m in self.method_list] ) class when(object): """This annotation lets packages declare multiple versions of methods like install() that depend on the package's spec. For example: .. code-block:: python class SomePackage(Package): ... def install(self, prefix): # Do default install @when('target=x86_64:') def install(self, prefix): # This will be executed instead of the default install if # the package's target is in the x86_64 family. @when('target=ppc64:') def install(self, prefix): # This will be executed if the package's target is in # the ppc64 family This allows each package to have a default version of install() AND specialized versions for particular platforms. The version that is called depends on the architecutre of the instantiated package. Note that this works for methods other than install, as well. So, if you only have part of the install that is platform specific, you could do this: .. code-block:: python class SomePackage(Package): ... # virtual dependence on MPI. # could resolve to mpich, mpich2, OpenMPI depends_on('mpi') def setup(self): # do nothing in the default case pass @when('^openmpi') def setup(self): # do something special when this is built with OpenMPI for # its MPI implementations. 
def install(self, prefix): # Do common install stuff self.setup() # Do more common install stuff Note that the default version of decorated methods must *always* come first. Otherwise it will override all of the platform-specific versions. There's not much we can do to get around this because of the way decorators work. """ def __init__(self, condition): if isinstance(condition, bool): self.spec = Spec() if condition else None else: self.spec = Spec(condition) def __call__(self, method): # In Python 2, Get the first definition of the method in the # calling scope by looking at the caller's locals. In Python 3, # we handle this using MultiMethodMeta.__prepare__. if MultiMethodMeta._locals is None: MultiMethodMeta._locals = caller_locals() # Create a multimethod with this name if there is not one already original_method = MultiMethodMeta._locals.get(method.__name__) if not type(original_method) == SpecMultiMethod: original_method = SpecMultiMethod(original_method) if self.spec is not None: original_method.register(self.spec, method) return original_method class MultiMethodError(spack.error.SpackError): """Superclass for multimethod dispatch errors""" def __init__(self, message): super(MultiMethodError, self).__init__(message) class NoSuchMethodError(spack.error.SpackError): """Raised when we can't find a version of a multi-method.""" def __init__(self, cls, method_name, spec, possible_specs): super(NoSuchMethodError, self).__init__( "Package %s does not support %s called with %s. Options are: %s" % (cls.__name__, method_name, spec, ", ".join(str(s) for s in possible_specs)))
38.875486
77
0.651887
8,530
0.853768
0
0
218
0.02182
0
0
6,436
0.64418
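Note on the record above: the `@when` decorator dispatches a package method by matching the package's spec against registered constraints, falling back to the first (default) definition. A minimal sketch of the same predicate-dispatch idea in plain Python (hypothetical names, not Spack's actual API):

# Minimal sketch of predicate-based multimethod dispatch, mirroring the
# idea behind Spack's @when decorator. All names here are illustrative.
class multimethod:
    def __init__(self):
        self.cases = []          # (predicate, function) pairs
        self.default = None

    def register(self, predicate, func):
        self.cases.append((predicate, func))

    def __call__(self, obj, *args, **kwargs):
        for predicate, func in self.cases:
            if predicate(obj):
                return func(obj, *args, **kwargs)
        if self.default is None:
            raise TypeError("no matching method")
        return self.default(obj, *args, **kwargs)

install = multimethod()
install.default = lambda pkg: print("default install")
install.register(lambda pkg: pkg.target == "x86_64",
                 lambda pkg: print("x86_64 install"))

class Pkg:
    target = "x86_64"

install(Pkg())  # prints "x86_64 install"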
d9fe73cee8f0ad5d98f81eb365b256cba7970cbe
13,093
gyp
Python
third_party/protobuf/protobuf.gyp
meego-tablet-ux/meego-app-browser
0f4ef17bd4b399c9c990a2f6ca939099495c2b9c
[ "BSD-3-Clause" ]
1
2015-10-12T09:14:22.000Z
2015-10-12T09:14:22.000Z
third_party/protobuf/protobuf.gyp
meego-tablet-ux/meego-app-browser
0f4ef17bd4b399c9c990a2f6ca939099495c2b9c
[ "BSD-3-Clause" ]
null
null
null
third_party/protobuf/protobuf.gyp
meego-tablet-ux/meego-app-browser
0f4ef17bd4b399c9c990a2f6ca939099495c2b9c
[ "BSD-3-Clause" ]
1
2020-11-04T07:22:28.000Z
2020-11-04T07:22:28.000Z
# Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. { 'conditions': [ ['OS!="win"', { 'variables': { 'config_h_dir': '.', # crafted for gcc/linux. }, }, { # else, OS=="win" 'variables': { 'config_h_dir': 'vsprojects', # crafted for msvc. }, 'target_defaults': { 'msvs_disabled_warnings': [ 4018, # signed/unsigned mismatch in comparison 4244, # implicit conversion, possible loss of data 4355, # 'this' used in base member initializer list ], 'defines!': [ 'WIN32_LEAN_AND_MEAN', # Protobuf defines this itself. ], }, }] ], 'targets': [ # The "lite" lib is about 1/7th the size of the heavy lib, # but it doesn't support some of the more exotic features of # protobufs, like reflection. To generate C++ code that can link # against the lite version of the library, add the option line: # # option optimize_for = LITE_RUNTIME; # # to your .proto file. { 'target_name': 'protobuf_lite', 'type': '<(library)', 'toolsets': ['host', 'target'], 'sources': [ 'src/google/protobuf/stubs/common.h', 'src/google/protobuf/stubs/once.h', 'src/google/protobuf/extension_set.h', 'src/google/protobuf/generated_message_util.h', 'src/google/protobuf/message_lite.h', 'src/google/protobuf/repeated_field.h', 'src/google/protobuf/unknown_field_set.cc', 'src/google/protobuf/unknown_field_set.h', 'src/google/protobuf/wire_format_lite.h', 'src/google/protobuf/wire_format_lite_inl.h', 'src/google/protobuf/io/coded_stream.h', 'src/google/protobuf/io/zero_copy_stream.h', 'src/google/protobuf/io/zero_copy_stream_impl_lite.h', 'src/google/protobuf/stubs/common.cc', 'src/google/protobuf/stubs/once.cc', 'src/google/protobuf/stubs/hash.h', 'src/google/protobuf/stubs/map-util.h', 'src/google/protobuf/stubs/stl_util-inl.h', 'src/google/protobuf/extension_set.cc', 'src/google/protobuf/generated_message_util.cc', 'src/google/protobuf/message_lite.cc', 'src/google/protobuf/repeated_field.cc', 'src/google/protobuf/wire_format_lite.cc', 'src/google/protobuf/io/coded_stream.cc', 'src/google/protobuf/io/coded_stream_inl.h', 'src/google/protobuf/io/zero_copy_stream.cc', 'src/google/protobuf/io/zero_copy_stream_impl_lite.cc', '<(config_h_dir)/config.h', ], 'include_dirs': [ '<(config_h_dir)', 'src', ], # This macro must be defined to suppress the use of dynamic_cast<>, # which requires RTTI. 'defines': [ 'GOOGLE_PROTOBUF_NO_RTTI', ], 'direct_dependent_settings': { 'include_dirs': [ '<(config_h_dir)', 'src', ], 'defines': [ 'GOOGLE_PROTOBUF_NO_RTTI', ], }, }, # This is the full, heavy protobuf lib that's needed for c++ .proto's # that don't specify the LITE_RUNTIME option. The protocol # compiler itself (protoc) falls into that category. 
# # DO NOT LINK AGAINST THIS TARGET IN CHROME CODE --agl { 'target_name': 'protobuf_full_do_not_use', 'type': '<(library)', 'toolsets': ['host','target'], 'sources': [ 'src/google/protobuf/descriptor.h', 'src/google/protobuf/descriptor.pb.h', 'src/google/protobuf/descriptor_database.h', 'src/google/protobuf/dynamic_message.h', 'src/google/protobuf/generated_message_reflection.h', 'src/google/protobuf/message.h', 'src/google/protobuf/reflection_ops.h', 'src/google/protobuf/service.h', 'src/google/protobuf/text_format.h', 'src/google/protobuf/unknown_field_set.h', 'src/google/protobuf/wire_format.h', 'src/google/protobuf/io/gzip_stream.h', 'src/google/protobuf/io/printer.h', 'src/google/protobuf/io/tokenizer.h', 'src/google/protobuf/io/zero_copy_stream_impl.h', 'src/google/protobuf/compiler/code_generator.h', 'src/google/protobuf/compiler/command_line_interface.h', 'src/google/protobuf/compiler/importer.h', 'src/google/protobuf/compiler/parser.h', 'src/google/protobuf/stubs/strutil.cc', 'src/google/protobuf/stubs/strutil.h', 'src/google/protobuf/stubs/substitute.cc', 'src/google/protobuf/stubs/substitute.h', 'src/google/protobuf/stubs/structurally_valid.cc', 'src/google/protobuf/descriptor.cc', 'src/google/protobuf/descriptor.pb.cc', 'src/google/protobuf/descriptor_database.cc', 'src/google/protobuf/dynamic_message.cc', 'src/google/protobuf/extension_set_heavy.cc', 'src/google/protobuf/generated_message_reflection.cc', 'src/google/protobuf/message.cc', 'src/google/protobuf/reflection_ops.cc', 'src/google/protobuf/service.cc', 'src/google/protobuf/text_format.cc', 'src/google/protobuf/unknown_field_set.cc', 'src/google/protobuf/wire_format.cc', # This file pulls in zlib, but it's not actually used by protoc, so # instead of compiling zlib for the host, let's just exclude this. 
# 'src/src/google/protobuf/io/gzip_stream.cc', 'src/google/protobuf/io/printer.cc', 'src/google/protobuf/io/tokenizer.cc', 'src/google/protobuf/io/zero_copy_stream_impl.cc', 'src/google/protobuf/compiler/importer.cc', 'src/google/protobuf/compiler/parser.cc', ], 'dependencies': [ 'protobuf_lite', ], 'export_dependent_settings': [ 'protobuf_lite', ], }, { 'target_name': 'protoc', 'type': 'executable', 'toolsets': ['host'], 'sources': [ 'src/google/protobuf/compiler/code_generator.cc', 'src/google/protobuf/compiler/command_line_interface.cc', 'src/google/protobuf/compiler/plugin.cc', 'src/google/protobuf/compiler/plugin.pb.cc', 'src/google/protobuf/compiler/subprocess.cc', 'src/google/protobuf/compiler/subprocess.h', 'src/google/protobuf/compiler/zip_writer.cc', 'src/google/protobuf/compiler/zip_writer.h', 'src/google/protobuf/compiler/cpp/cpp_enum.cc', 'src/google/protobuf/compiler/cpp/cpp_enum.h', 'src/google/protobuf/compiler/cpp/cpp_enum_field.cc', 'src/google/protobuf/compiler/cpp/cpp_enum_field.h', 'src/google/protobuf/compiler/cpp/cpp_extension.cc', 'src/google/protobuf/compiler/cpp/cpp_extension.h', 'src/google/protobuf/compiler/cpp/cpp_field.cc', 'src/google/protobuf/compiler/cpp/cpp_field.h', 'src/google/protobuf/compiler/cpp/cpp_file.cc', 'src/google/protobuf/compiler/cpp/cpp_file.h', 'src/google/protobuf/compiler/cpp/cpp_generator.cc', 'src/google/protobuf/compiler/cpp/cpp_helpers.cc', 'src/google/protobuf/compiler/cpp/cpp_helpers.h', 'src/google/protobuf/compiler/cpp/cpp_message.cc', 'src/google/protobuf/compiler/cpp/cpp_message.h', 'src/google/protobuf/compiler/cpp/cpp_message_field.cc', 'src/google/protobuf/compiler/cpp/cpp_message_field.h', 'src/google/protobuf/compiler/cpp/cpp_primitive_field.cc', 'src/google/protobuf/compiler/cpp/cpp_primitive_field.h', 'src/google/protobuf/compiler/cpp/cpp_service.cc', 'src/google/protobuf/compiler/cpp/cpp_service.h', 'src/google/protobuf/compiler/cpp/cpp_string_field.cc', 'src/google/protobuf/compiler/cpp/cpp_string_field.h', 'src/google/protobuf/compiler/java/java_enum.cc', 'src/google/protobuf/compiler/java/java_enum.h', 'src/google/protobuf/compiler/java/java_enum_field.cc', 'src/google/protobuf/compiler/java/java_enum_field.h', 'src/google/protobuf/compiler/java/java_extension.cc', 'src/google/protobuf/compiler/java/java_extension.h', 'src/google/protobuf/compiler/java/java_field.cc', 'src/google/protobuf/compiler/java/java_field.h', 'src/google/protobuf/compiler/java/java_file.cc', 'src/google/protobuf/compiler/java/java_file.h', 'src/google/protobuf/compiler/java/java_generator.cc', 'src/google/protobuf/compiler/java/java_helpers.cc', 'src/google/protobuf/compiler/java/java_helpers.h', 'src/google/protobuf/compiler/java/java_message.cc', 'src/google/protobuf/compiler/java/java_message.h', 'src/google/protobuf/compiler/java/java_message_field.cc', 'src/google/protobuf/compiler/java/java_message_field.h', 'src/google/protobuf/compiler/java/java_primitive_field.cc', 'src/google/protobuf/compiler/java/java_primitive_field.h', 'src/google/protobuf/compiler/java/java_service.cc', 'src/google/protobuf/compiler/java/java_service.h', 'src/google/protobuf/compiler/java/java_string_field.cc', 'src/google/protobuf/compiler/java/java_string_field.h', 'src/google/protobuf/compiler/python/python_generator.cc', 'src/google/protobuf/compiler/main.cc', ], 'dependencies': [ 'protobuf_full_do_not_use', ], 'include_dirs': [ '<(config_h_dir)', 'src/src', ], }, { # Generate the python module needed by all protoc-generated Python code. 
'target_name': 'py_proto', 'type': 'none', 'copies': [ { 'destination': '<(PRODUCT_DIR)/pyproto/google/', 'files': [ # google/ module gets an empty __init__.py. '__init__.py', ], }, { 'destination': '<(PRODUCT_DIR)/pyproto/google/protobuf', 'files': [ 'python/google/protobuf/__init__.py', 'python/google/protobuf/descriptor.py', 'python/google/protobuf/message.py', 'python/google/protobuf/reflection.py', 'python/google/protobuf/service.py', 'python/google/protobuf/service_reflection.py', 'python/google/protobuf/text_format.py', # TODO(ncarter): protoc's python generator treats descriptor.proto # specially, but it's not possible to trigger the special treatment # unless you run protoc from ./src/src (the treatment is based # on the path to the .proto file matching a constant exactly). # I'm not sure how to convince gyp to execute a rule from a # different directory. Until this is resolved, use a copy of # descriptor_pb2.py that I manually generated. 'descriptor_pb2.py', ], }, { 'destination': '<(PRODUCT_DIR)/pyproto/google/protobuf/internal', 'files': [ 'python/google/protobuf/internal/__init__.py', 'python/google/protobuf/internal/api_implementation.py', 'python/google/protobuf/internal/containers.py', 'python/google/protobuf/internal/cpp_message.py', 'python/google/protobuf/internal/decoder.py', 'python/google/protobuf/internal/encoder.py', 'python/google/protobuf/internal/generator_test.py', 'python/google/protobuf/internal/message_listener.py', 'python/google/protobuf/internal/python_message.py', 'python/google/protobuf/internal/type_checkers.py', 'python/google/protobuf/internal/wire_format.py', ], }, ], # # We can't generate a proper descriptor_pb2.py -- see earlier comment. # 'rules': [ # { # 'rule_name': 'genproto', # 'extension': 'proto', # 'inputs': [ # '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)', # ], # 'variables': { # # The protoc compiler requires a proto_path argument with the # # directory containing the .proto file. # 'rule_input_relpath': 'src/google/protobuf', # }, # 'outputs': [ # '<(PRODUCT_DIR)/pyproto/google/protobuf/<(RULE_INPUT_ROOT)_pb2.py', # ], # 'action': [ # '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)', # '-I./src', # '-I.', # '--python_out=<(PRODUCT_DIR)/pyproto/google/protobuf', # 'google/protobuf/descriptor.proto', # ], # 'message': 'Generating Python code from <(RULE_INPUT_PATH)', # }, # ], # 'dependencies': [ # 'protoc#host', # ], # 'sources': [ # 'src/google/protobuf/descriptor.proto', # ], }, ], } # Local Variables: # tab-width:2 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=2 shiftwidth=2:
41.302839
81
0.621554
0
0
0
0
0
0
0
0
10,235
0.781715
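Note on the record above: the commented-out 'genproto' rule shows how protoc would be invoked to generate Python bindings. A hedged sketch of running the same command by hand from Python (output directory is illustrative; protoc must be on PATH and run from the checkout root, as the rule's comment explains):

import os
import subprocess

out_dir = "pyproto/google/protobuf"  # illustrative output directory
os.makedirs(out_dir, exist_ok=True)
subprocess.check_call([
    "protoc",
    "-I./src",
    "-I.",
    "--python_out=" + out_dir,
    "google/protobuf/descriptor.proto",
])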
d9ff0e5cd63921d7a1a7f3f682d268671ab38688
834
py
Python
main.py
Yash-s-Code-Camp/Python-Day-4
887c4e172905b2b0dea493a3c9c1f61e403556fc
[ "MIT" ]
null
null
null
main.py
Yash-s-Code-Camp/Python-Day-4
887c4e172905b2b0dea493a3c9c1f61e403556fc
[ "MIT" ]
null
null
null
main.py
Yash-s-Code-Camp/Python-Day-4
887c4e172905b2b0dea493a3c9c1f61e403556fc
[ "MIT" ]
null
null
null
# def mul(a):
#     return lambda b: b*a

# singler = mul(1)
# addition = lambda b: b*1
# doubler = mul(2)
# addition = lambda b: b*2
# tripler = mul(3)
# addition = lambda b: b*3

# print(doubler(7))  # 7*2 = 14
# print(tripler(7))  # 7*3 = 21
# print(singler(7))  # 7*1 = 7


class Student:
    def __init__(self, fname):
        self.fname = fname

    def greet(self, fname):
        return f"Hello, {fname}"


class BatchA(Student):
    def __init__(self, lname):
        self.lname = lname
        # Student.__init__(self, "Nikunj")
        super().__init__("Nikunj")

    def printName(self):
        return f"{self.fname} {self.lname}"


stud = BatchA("Thakor")
print(stud.printName())

# CSS colour notes, commented out: rgb() is not defined in Python and
# these lines would raise NameError at runtime.
# rgb(255, 255, 255)  # White
# rgb(255, 0, 0)      # Red
# rgb(0, 0, 0)        # Black
# rgb(0, 255, 255)    # Cyan
# rgb(255, 255, 0)    # Yellow

# #00ff00  // green
# #1e90ff  // dodgerblue
17.020408
47
0.603118
311
0.372902
0
0
0
0
0
0
437
0.523981
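Note on the record above: the commented block at the top of main.py demonstrates closures, where mul(a) returns a lambda that captures a. The same code, un-commented and runnable:

def mul(a):
    return lambda b: b * a

doubler = mul(2)
tripler = mul(3)
print(doubler(7))  # 14 -- the returned lambda remembers a=2
print(tripler(7))  # 21 -- this one remembers a=3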
8a00049d0a23118a6b45ced9a50bf455984aaa3c
8,974
py
Python
paperstream/create_diary.py
MarcoRosso/paperstream
f8d5485ea337334b036393f9566b74394b5dd234
[ "MIT" ]
null
null
null
paperstream/create_diary.py
MarcoRosso/paperstream
f8d5485ea337334b036393f9566b74394b5dd234
[ "MIT" ]
null
null
null
paperstream/create_diary.py
MarcoRosso/paperstream
f8d5485ea337334b036393f9566b74394b5dd234
[ "MIT" ]
null
null
null
""" Create diaries in A5 and A4 sizes based on PDF templates. Julio Vega """ import datetime import math import sys from io import BytesIO from pathlib import Path from PyPDF2 import PdfFileReader, PdfFileWriter from reportlab.lib.pagesizes import A5, A4 from reportlab.lib.utils import ImageReader from reportlab.pdfbase import pdfmetrics from reportlab.pdfbase.ttfonts import TTFError, TTFont from reportlab.pdfgen import canvas def resource_path(relative_path): """ Get absolute path to resource, works for dev and for PyInstaller """ base_path = getattr(sys, '_MEIPASS', Path(__file__).resolve().parent) return base_path / Path(relative_path) CORNER_DIR = resource_path("input/1_diaries_to_create/resources") LOGO_PATH = resource_path(CORNER_DIR / Path("logo.png")) DEFAULT_FONT = resource_path(CORNER_DIR / Path('FreeSansLocal.ttf')) CREATED_DIARIES_DIR = resource_path("output/created_diaries/") ############################################################# ############################################################# ############################################################# ##### Algorithm to convert A4 pages into an A5 booklet ###### ############################################################# ############################################################# ############################################################# ## Adapted from the work by Luke Plant, https://bitbucket.org/spookylukey/booklet-maker/src class Sheet(object): '''A4 Sheets''' def __init__(self): self.front = PrintPage() self.back = PrintPage() class PrintPage(object): '''A4 page with containers for A4 pages''' def __init__(self): self.left = PageContainer() self.right = PageContainer() class PageContainer(object): '''A5 containers''' def __init__(self): self.page = None def build_booklet(pages): ''' Build booklet ''' # Double sized page, with double-sided printing, fits 4 of the original. sheet_count = int(math.ceil(len(pages) / 4.0)) booklet = [Sheet() for i in range(0, sheet_count)] # Assign input pages to sheets # This is the core algo. To understand it: # * pick up 3 A4 sheets, landscape # * number the sheets from 1 to 3, starting with bottom one # * fold the stack in the middle to form an A5 booklet # * work out what order you need to use the front left, # front right, back left and back right sides. 
def containers(): '''Yields parts of the booklet in the order they should be used.''' for sheet in booklet: yield sheet.back.right yield sheet.front.left for sheet in reversed(booklet): yield sheet.front.right yield sheet.back.left for container, page in zip(containers(), pages): container.page = page return booklet def add_double_page(writer, page_size, print_page): ''' Adds a double page ''' width, height = page_size page = writer.insertBlankPage(width=width, height=height, index=writer.getNumPages()) # Merge the left page l_page = print_page.left.page if l_page is not None: page.mergePage(l_page) # Merge the right page with translation r_page = print_page.right.page if r_page is not None: page.mergeTranslatedPage(r_page, width / 2, 0) def convert_to_a5_booklet(input_file, blanks=0): '''Converts a PDF into a double sided A5 file to print as an A4 (two A5 pages per A4 page)''' # Create internal dir to save the a5 files a5_booklets_dir = CREATED_DIARIES_DIR Path.mkdir(a5_booklets_dir, parents=True, exist_ok=True) # Create the a5 booklet's name a5_booklet_name = Path(input_file).stem + "_as_a5_booklet" a5_booklet = a5_booklets_dir / Path("{}.pdf".format(a5_booklet_name)) reader = PdfFileReader(open(input_file, "rb")) pages = [reader.getPage(p) for p in range(0, reader.getNumPages())] for index in range(0, blanks): pages.insert(0, None) sheets = build_booklet(pages) writer = PdfFileWriter() firs_page = reader.getPage(0) input_width = firs_page.mediaBox.getWidth() output_width = input_width * 2 input_height = firs_page.mediaBox.getHeight() output_height = input_height page_size = (output_width, output_height) # We want to group fronts and backs together. for sheet in sheets: add_double_page(writer, page_size, sheet.back) add_double_page(writer, page_size, sheet.front) with open(a5_booklet, "wb") as a5_booklet_stream: writer.write(a5_booklet_stream) return a5_booklet ############################################################# ############################################################# ############################################################# ########## Create A4 paper diary ############ ############################################################# ############################################################# ############################################################# def create_diary_cover(participant_id, email, font): '''Create cover of the A5 diary''' packet = BytesIO() cover_canvas = canvas.Canvas(packet, pagesize=A4) width, height = A4 # Centering the logo or participant ID if Path.exists(LOGO_PATH): logo = ImageReader(LOGO_PATH) cover_canvas.drawImage(logo, x=(width * (1/6.0)), y=(height/4), width=width * (4/6.0), preserveAspectRatio=True, mask='auto') else: cover_canvas.setFont(font, 50) cover_canvas.drawCentredString(width/2, height/2, participant_id) # Lost legend if not (email is None or email == ""): cover_canvas.setFont(font, 15) cover_canvas.drawCentredString(width/2, 50, "If you find this document, please email " + email) cover_canvas.save() packet.seek(0) return PdfFileReader(packet).getPage(0) def create_diary_page(pdf_template, font, top_left_text, page_number, top_right_text): packet = BytesIO() diary_canvas = canvas.Canvas(packet, pagesize=A5) # Header diary_canvas.setFont(font, 11) #diary_canvas.drawRightString(378, 562, str(top_right_text)) diary_canvas.drawString(36.5, 562, top_left_text) # Corners corners = [(CORNER_DIR / Path("corner_ul.png"), 25, 553), (CORNER_DIR / Path("corner_ur.png"), 365, 553), (CORNER_DIR / Path("corner_bl.png"), 25, 15), (CORNER_DIR / 
Path("corner_br.png"), 365, 15)] for corner_path, x, y in corners: if corner_path.exists(): corner = ImageReader(corner_path) diary_canvas.drawImage(corner, x=x, y=y, mask='auto') # Footer #diary_canvas.setFont(font, 8) #diary_canvas.drawString(36.5, 24, str(page_number)) diary_canvas.save() # Merge template and additions (header, corners and footer) packet.seek(0) page_additions = PdfFileReader(packet).getPage(0) new_page = PdfFileReader(open(pdf_template, "rb")).getPage(0) new_page.mergePage(page_additions) new_page.scaleTo(A4[0], A4[1]) return new_page def create_a4_diary(pdf_template, pages, top_left_text, email=None, font='Arial'): """Creates an A4 document with [PAGES] from [STARTING_DATE]""" starting_date = parse_date(top_left_text) font = set_active_font(font) # Create output folder/file if not Path(pdf_template).exists(): raise ValueError("Template does not exist {}".format(pdf_template)) Path.mkdir(CREATED_DIARIES_DIR, parents=True, exist_ok=True) a4_document_name = Path(pdf_template).stem a4_document_path = CREATED_DIARIES_DIR / Path("{}_document.pdf".format(a4_document_name)) pdf_file = PdfFileWriter() # Cover pdf_file.addPage(create_diary_cover(a4_document_name, email, font)) pdf_file.addBlankPage() # Pages for page in range(1, pages+1): if starting_date is not None: top_left_text = starting_date.strftime('%A, %d %b %Y') starting_date += datetime.timedelta(days=1) new_page = create_diary_page(pdf_template, font, top_left_text,page, a4_document_name) pdf_file.addPage(new_page) # Backcover pdf_file.addBlankPage() # Save a4 document with open(a4_document_path, "wb") as output_stream: pdf_file.write(output_stream) return a4_document_path def set_active_font(font): """Register the font to use in header and footer of the diary""" try: pdfmetrics.registerFont(TTFont(font, font + '.ttf')) except TTFError: font = 'FreeSansLocal' pdfmetrics.registerFont(TTFont(font, DEFAULT_FONT)) return font def parse_date(s): try: return datetime.datetime.strptime(s, "%d/%m/%Y") except ValueError: return None
33.864151
97
0.613885
398
0.04435
1,008
0.112324
0
0
0
0
2,797
0.311678
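Note on the record above: build_booklet's comment explains the imposition order (per sheet: back-right then front-left; then, over the sheets in reverse: front-right then back-left). A standalone sketch that prints which input page lands in which slot, useful for checking the algorithm by hand:

import math

def booklet_order(n_pages):
    # Same container order as build_booklet's containers() generator.
    n_sheets = math.ceil(n_pages / 4)
    slots = []
    for s in range(n_sheets):
        slots += [(s, "back", "right"), (s, "front", "left")]
    for s in reversed(range(n_sheets)):
        slots += [(s, "front", "right"), (s, "back", "left")]
    return {slot: page for slot, page in zip(slots, range(1, n_pages + 1))}

for slot, page in sorted(booklet_order(8).items()):
    print(slot, "-> page", page)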
8a009f467895ff4a7817d2ca2bfbdacdd183cb58
2,459
py
Python
wextractor/extractors/csv_extractor.py
codeforamerica/w-drive-extractor
1c62bfff6fc21c4cce4a4409b76355ec4e07daae
[ "MIT" ]
3
2015-01-14T06:27:16.000Z
2015-02-26T23:39:39.000Z
wextractor/extractors/csv_extractor.py
codeforamerica/w-drive-extractor
1c62bfff6fc21c4cce4a4409b76355ec4e07daae
[ "MIT" ]
8
2015-01-15T17:50:30.000Z
2015-05-12T17:09:04.000Z
wextractor/extractors/csv_extractor.py
codeforamerica/w-drive-extractor
1c62bfff6fc21c4cce4a4409b76355ec4e07daae
[ "MIT" ]
4
2015-01-14T15:20:49.000Z
2021-04-16T10:45:22.000Z
#!/usr/bin/env python
import urllib2
import httplib
from urlparse import urlparse

import csv

from wextractor.extractors.extractor import Extractor


class CsvExtractor(Extractor):
    def __init__(self, target, header=None, dtypes=None, url=None):
        '''
        CsvExtractor initializes with an optional url flag that tells the
        extractor whether or not the resource is local or remote so that
        it can be loaded accordingly
        '''
        super(CsvExtractor, self).__init__(target, header, dtypes)
        if url is None:
            self.url = self.detect_url(target)
        elif type(url) != bool:
            raise TypeError('url kwarg must be of type bool')
        else:
            self.url = url

    def detect_url(self, target):
        # see: http://stackoverflow.com/questions/2924422/how-do-i-determine-if-a-web-page-exists-with-shell-scripting
        # and http://stackoverflow.com/questions/1140661/python-get-http-response-code-from-a-url
        # for additional information
        good_codes = [httplib.OK, httplib.FOUND, httplib.MOVED_PERMANENTLY]
        # check to see if we have a scheme in the url, and append one if not
        parsed_target = urlparse(target)
        if bool(parsed_target.scheme) is False:
            target = 'http://' + target
        host, path = urlparse(target)[1:3]
        try:
            conn = httplib.HTTPConnection(host)
            conn.request("HEAD", path)
            status = conn.getresponse().status
        except StandardError:
            status = None
        return status in good_codes

    def extract(self):
        if self.url:
            raw_data = urllib2.urlopen(self.target).read().decode('utf-8-sig').rstrip()
        else:
            with open(self.target, 'r') as f:
                raw_data = f.read().decode('utf-8-sig').rstrip()
        # standardize the file endings
        raw_data = raw_data.replace('\r\n', '\n').replace('\r', '\n')
        if self.header is None:
            # use first line if self.header not defined
            current_headers = raw_data.split('\n')[0].split(',')
            raw_data = '\n'.join(raw_data.split('\n')[1:])
        else:
            current_headers = self.header
        output = []
        reader = csv.reader(raw_data.splitlines(), delimiter=',')
        for row in reader:
            output.append(
                self.transform_row(current_headers, row)
            )
        return output
34.152778
118
0.604311
2,308
0.938593
0
0
0
0
0
0
696
0.283042
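Note on the record above: CsvExtractor.detect_url is Python 2 (httplib, urlparse, StandardError). A hedged Python 3 translation of the same HEAD-request check, not part of the w-drive-extractor project:

from http.client import HTTPConnection, OK, FOUND, MOVED_PERMANENTLY
from urllib.parse import urlparse

def detect_url(target):
    # Treat 200/301/302 as "the remote resource exists", as above.
    good_codes = (OK, FOUND, MOVED_PERMANENTLY)
    if not urlparse(target).scheme:
        target = 'http://' + target
    host, path = urlparse(target)[1:3]
    try:
        conn = HTTPConnection(host)
        conn.request("HEAD", path)
        status = conn.getresponse().status
    except Exception:
        status = None
    return status in good_codes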
8a01b2b39f8bda22480b43b79a5034c95f31f8f0
9,010
py
Python
pyscf/geomopt/berny_solver.py
r-peng/pyscf
9a14f9bcc63bc75f5939cb4d00eb47861d8d8989
[ "Apache-2.0" ]
2
2021-06-30T22:33:35.000Z
2021-11-22T18:02:36.000Z
pyscf/geomopt/berny_solver.py
r-peng/pyscf
9a14f9bcc63bc75f5939cb4d00eb47861d8d8989
[ "Apache-2.0" ]
null
null
null
pyscf/geomopt/berny_solver.py
r-peng/pyscf
9a14f9bcc63bc75f5939cb4d00eb47861d8d8989
[ "Apache-2.0" ]
2
2021-09-16T23:37:42.000Z
2021-10-14T23:00:39.000Z
#!/usr/bin/env python # Copyright 2014-2019 The PySCF Developers. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ''' Interface to geometry optimizer pyberny https://github.com/jhrmnn/pyberny ''' from __future__ import absolute_import import pkg_resources try: dist = pkg_resources.get_distribution('pyberny') except pkg_resources.DistributionNotFound: dist = None if dist is None or [int(x) for x in dist.version.split('.')] < [0, 6, 2]: msg = ('Geometry optimizer Pyberny not found or outdated. Install or update ' 'with:\n\n\tpip install -U pyberny') raise ImportError(msg) import time import numpy import logging from pyscf import lib from pyscf.geomopt.addons import (as_pyscf_method, dump_mol_geometry, symmetrize) from pyscf import __config__ from pyscf.grad.rhf import GradientsBasics from berny import Berny, geomlib, coords # Overwrite pyberny's atomic unit coords.angstrom = 1./lib.param.BOHR INCLUDE_GHOST = getattr(__config__, 'geomopt_berny_solver_optimize_include_ghost', True) ASSERT_CONV = getattr(__config__, 'geomopt_berny_solver_optimize_assert_convergence', True) def to_berny_geom(mol, include_ghost=INCLUDE_GHOST): atom_charges = mol.atom_charges() if include_ghost: # Symbol Ghost is not supported in current version of pyberny #species = [mol.atom_symbol(i) if z != 0 else 'Ghost' # for i,z in enumerate(atom_charges)] species = [mol.atom_symbol(i) if z != 0 else 'H' for i,z in enumerate(atom_charges)] coords = mol.atom_coords() * lib.param.BOHR else: atmlst = numpy.where(atom_charges != 0)[0] # Exclude ghost atoms species = [mol.atom_symbol(i) for i in atmlst] coords = mol.atom_coords()[atmlst] * lib.param.BOHR # geomlib.Geometry is available in the new version of pyberny solver. (issue #212) if getattr(geomlib, 'Geometry', None): return geomlib.Geometry(species, coords) else: return geomlib.Molecule(species, coords) def _geom_to_atom(mol, geom, include_ghost): coords = geom.coords if include_ghost: atom_coords = coords / lib.param.BOHR else: atmlst = numpy.where(mol.atom_charges() != 0)[0] atom_coords = mol.atom_coords() atom_coords[atmlst] = coords / lib.param.BOHR return atom_coords def to_berny_log(pyscf_log): '''Adapter to allow pyberny to use pyscf.logger ''' class PyscfHandler(logging.Handler): def emit(self, record): pyscf_log.info(record.getMessage()) log = logging.getLogger('{}.{}'.format(__name__, id(pyscf_log))) log.addHandler(PyscfHandler()) log.setLevel('INFO') return log def kernel(method, assert_convergence=ASSERT_CONV, include_ghost=INCLUDE_GHOST, callback=None, **kwargs): '''Optimize geometry with pyberny for the given method. To adjust the convergence threshold, parameters can be set in kwargs as below: .. 
code-block:: python conv_params = { # They are default settings 'gradientmax': 0.45e-3, # Eh/[Bohr|rad] 'gradientrms': 0.15e-3, # Eh/[Bohr|rad] 'stepmax': 1.8e-3, # [Bohr|rad] 'steprms': 1.2e-3, # [Bohr|rad] } from pyscf.geomopt import berny_solver opt = berny_solver.GeometryOptimizer(method) opt.params = conv_params opt.kernel() ''' t0 = time.clock(), time.time() mol = method.mol.copy() if 'log' in kwargs: log = lib.logger.new_logger(method, kwargs['log']) elif 'verbose' in kwargs: log = lib.logger.new_logger(method, kwargs['verbose']) else: log = lib.logger.new_logger(method) if isinstance(method, lib.GradScanner): g_scanner = method elif isinstance(method, GradientsBasics): g_scanner = method.as_scanner() elif getattr(method, 'nuc_grad_method', None): g_scanner = method.nuc_grad_method().as_scanner() else: raise NotImplementedError('Nuclear gradients of %s not available' % method) if not include_ghost: g_scanner.atmlst = numpy.where(method.mol.atom_charges() != 0)[0] # When symmetry is enabled, the molecule may be shifted or rotated to make # the z-axis be the main axis. The transformation can cause inconsistency # between the optimization steps. The transformation is muted by setting # an explict point group to the keyword mol.symmetry (see symmetry # detection code in Mole.build function). if mol.symmetry: mol.symmetry = mol.topgroup # temporary interface, taken from berny.py optimize function berny_log = to_berny_log(log) geom = to_berny_geom(mol, include_ghost) optimizer = Berny(geom, logger=berny_log, **kwargs) t1 = t0 e_last = 0 for cycle, geom in enumerate(optimizer): if log.verbose >= lib.logger.NOTE: log.note('\nGeometry optimization cycle %d', cycle+1) dump_mol_geometry(mol, geom.coords, log) if mol.symmetry: geom.coords = symmetrize(mol, geom.coords) mol.set_geom_(_geom_to_atom(mol, geom, include_ghost), unit='Bohr') energy, gradients = g_scanner(mol) log.note('cycle %d: E = %.12g dE = %g norm(grad) = %g', cycle+1, energy, energy - e_last, numpy.linalg.norm(gradients)) e_last = energy if callable(callback): callback(locals()) if assert_convergence and not g_scanner.converged: raise RuntimeError('Nuclear gradients of %s not converged' % method) optimizer.send((energy, gradients)) t1 = log.timer('geomoetry optimization cycle %d'%cycle, *t1) t0 = log.timer('geomoetry optimization', *t0) return optimizer._converged, mol def optimize(method, assert_convergence=ASSERT_CONV, include_ghost=INCLUDE_GHOST, callback=None, **kwargs): '''Optimize geometry with pyberny for the given method. To adjust the convergence threshold, parameters can be set in kwargs as below: .. code-block:: python conv_params = { # They are default settings 'gradientmax': 0.45e-3, # Eh/[Bohr|rad] 'gradientrms': 0.15e-3, # Eh/[Bohr|rad] 'stepmax': 1.8e-3, # [Bohr|rad] 'steprms': 1.2e-3, # [Bohr|rad] } from pyscf.geomopt import berny_solver newmol = berny_solver.optimize(method, **conv_params) ''' return kernel(method, assert_convergence, include_ghost, callback, **kwargs)[1] class GeometryOptimizer(lib.StreamObject): '''Optimize the molecular geometry for the input method. Note the method.mol will be changed after calling .kernel() method. 
''' def __init__(self, method): self.method = method self.callback = None self.params = {} self.converged = False self.max_cycle = 100 @property def mol(self): return self.method.mol @mol.setter def mol(self, x): self.method.mol = x def kernel(self, params=None): if params is not None: self.params.update(params) params = dict(self.params) params['maxsteps'] = self.max_cycle self.converged, self.mol = \ kernel(self.method, callback=self.callback, **params) return self.mol optimize = kernel del(INCLUDE_GHOST, ASSERT_CONV) if __name__ == '__main__': from pyscf import gto from pyscf import scf, dft, cc, mp mol = gto.M(atom=''' C 1.1879 -0.3829 0.0000 C 0.0000 0.5526 0.0000 O -1.1867 -0.2472 0.0000 H -1.9237 0.3850 0.0000 H 2.0985 0.2306 0.0000 H 1.1184 -1.0093 0.8869 H 1.1184 -1.0093 -0.8869 H -0.0227 1.1812 0.8852 H -0.0227 1.1812 -0.8852 ''', basis='3-21g') mf = scf.RHF(mol) conv_params = { 'gradientmax': 6e-3, # Eh/Bohr 'gradientrms': 2e-3, # Eh/Bohr 'stepmax': 2e-2, # Bohr 'steprms': 1.5e-2, # Bohr } mol1 = optimize(mf, **conv_params) print(mf.kernel() - -153.219208484874) print(scf.RHF(mol1).kernel() - -153.222680852335) mf = dft.RKS(mol) mf.xc = 'pbe,' mf.conv_tol = 1e-7 mol1 = optimize(mf) mymp2 = mp.MP2(scf.RHF(mol)) mol1 = optimize(mymp2) mycc = cc.CCSD(scf.RHF(mol)) mol1 = optimize(mycc)
34.787645
91
0.642619
944
0.104772
0
0
120
0.013319
0
0
3,658
0.405993
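Note on the record above: to_berny_log bridges two logging systems by subclassing logging.Handler and forwarding each record's message into pyscf's logger. The same pattern in a self-contained form (generic stdlib logging only, no pyscf):

import logging

class ForwardHandler(logging.Handler):
    def __init__(self, sink):
        super().__init__()
        self.sink = sink  # any callable that accepts a string

    def emit(self, record):
        self.sink(record.getMessage())

log = logging.getLogger("demo")
log.addHandler(ForwardHandler(print))
log.setLevel("INFO")
log.info("forwarded through the custom handler")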
8a01ccf4f5933cd1046863655e9835118928c6fc
1,838
py
Python
src/main/python/taf/foundation/api/ui/aut.py
WesleyPeng/uiXautomation
2d2c4d5a774ffda934d5615036a80c449bac930d
[ "Apache-2.0" ]
6
2017-09-19T15:05:47.000Z
2021-07-16T16:07:46.000Z
src/main/python/taf/foundation/api/ui/aut.py
WesleyPeng/uiXautomation
2d2c4d5a774ffda934d5615036a80c449bac930d
[ "Apache-2.0" ]
1
2018-06-02T18:45:51.000Z
2018-06-02T18:45:51.000Z
src/main/python/taf/foundation/api/ui/aut.py
WesleyPeng/uiXautomation
2d2c4d5a774ffda934d5615036a80c449bac930d
[ "Apache-2.0" ]
null
null
null
# Copyright (c) 2017-2018 {Flair Inc.} WESLEY PENG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from taf.foundation.utils import ConnectionCache


class AUT(object):
    cache = None
    current = None

    def __init__(
            self,
            name=None,
            identifier=None,
            **kwargs
    ):
        if not AUT.cache:
            AUT.cache = ConnectionCache(identifier)

        self.id = self.cache.register(
            self._create_instance(name, **kwargs),
            identifier
        )

        AUT.current = self

    @staticmethod
    def launch(app_location, **kwargs):
        raise NotImplementedError(
            'Launch application'
        )

    def activate(self):
        if self.id != self.cache.current_key:
            self.cache.current_key = self.id
            AUT.current = self

    def take_screenshot(self):
        self.activate()
        return self.get_screenshot_data()

    def close(self):
        self.cache.close(self.id)
        if not self.cache.current:
            AUT.cache = None
            AUT.current = None

    def get_screenshot_data(self):
        raise NotImplementedError(
            'Get screenshot data from AUT'
        )

    def _create_instance(self, name, **kwargs):
        raise NotImplementedError(
            'Create instance of AUT'
        )
25.887324
74
0.62568
1,192
0.648531
0
0
131
0.071273
0
0
654
0.355822
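Note on the record above: AUT is an abstract base; concrete drivers must supply _create_instance and get_screenshot_data. A hypothetical minimal subclass to show the contract (DummyAUT and its return values are made up for illustration, and the sketch assumes the taf package and the AUT class above are importable):

# Hypothetical subclass; not part of the uiXautomation project.
class DummyAUT(AUT):
    def _create_instance(self, name, **kwargs):
        # stand-in for launching or attaching to a real application
        return {"name": name, **kwargs}

    def get_screenshot_data(self):
        # stand-in for real screenshot bytes
        return b""

app = DummyAUT(name="calc", identifier="calc-1")
app.take_screenshot()  # activates this instance, then grabs (empty) bytes
app.close()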
8a02d8606a3a24d720ef5682953d80e75a8dcabc
1,758
py
Python
algo/vigenere.py
dkushche/Crypto
75919d6df2084aee1de76c9999ac4e361c4efd48
[ "MIT" ]
3
2020-05-07T22:03:48.000Z
2021-03-11T16:36:56.000Z
algo/vigenere.py
dkushche/Crypto
75919d6df2084aee1de76c9999ac4e361c4efd48
[ "MIT" ]
null
null
null
algo/vigenere.py
dkushche/Crypto
75919d6df2084aee1de76c9999ac4e361c4efd48
[ "MIT" ]
null
null
null
import crypto_tools
from itertools import cycle


def vigenere_little_doc():
    return "encrypt/decrypt using vigenere cypher"


def vigenere_full_doc():
    return """
    Advanced caesar: we change the dict on each char
    """


def vigenere_str_to_list(string, vigenere_dict):
    result = list()
    for char in string:
        try:
            result.append(vigenere_dict.index(char))
        except ValueError:
            # 'char' is the symbol that failed the lookup; the original
            # referenced undefined names (key[inx]) here.
            err_msg = f"There is no {char} in alphabet"
            raise ValueError(err_msg)
    return result


def vigenere_processing(data, key, lang, encrypt):
    vigenere_dict = crypto_tools.get_param_json_data("alphabets.json", lang)
    num_data = vigenere_str_to_list(data, vigenere_dict)
    num_key = vigenere_str_to_list(key, vigenere_dict)
    dict_size = len(vigenere_dict)
    num_key = cycle(num_key)
    if (encrypt == "encrypt"):
        num_result = [(a + b) % dict_size for a, b in zip(num_data, num_key)]
    else:
        num_result = [
            (a + dict_size - b) % dict_size for a, b in zip(num_data, num_key)
        ]
    result_str = ""
    for val in num_result:
        result_str += vigenere_dict[val]
    return result_str


@crypto_tools.file_manipulation()
def vigenere(data):
    lang = crypto_tools.cterm('input', 'Data language: ', 'ans')
    key = crypto_tools.cterm('input', 'Enter key(str): ', 'ans')
    encrypt = crypto_tools.cterm('input',
                                 'You want encrypt or decrypt: ', 'ans')
    if encrypt != "encrypt" and encrypt != "decrypt":
        raise ValueError("Incorrect action")
    data = crypto_tools.utf_decoder(data)
    return vigenere_processing(data, key, lang, encrypt)


vigenere.little_doc = vigenere_little_doc
vigenere.full_doc = vigenere_full_doc
30.310345
78
0.660978
0
0
0
0
496
0.282139
0
0
300
0.170648
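Note on the record above: the core of vigenere_processing is per-character modular addition of plaintext and key indices. A tiny self-contained demo over A-Z (the record itself loads its alphabet from alphabets.json):

from itertools import cycle

alpha = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
data, key = "ATTACK", "LEMON"
# encrypt: (plain + key) mod 26, with the key repeated via cycle()
enc = "".join(alpha[(alpha.index(a) + alpha.index(b)) % 26]
              for a, b in zip(data, cycle(key)))
# decrypt: (cipher - key) mod 26 undoes it
dec = "".join(alpha[(alpha.index(a) - alpha.index(b)) % 26]
              for a, b in zip(enc, cycle(key)))
print(enc)  # LXFOPV
print(dec)  # ATTACK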
8a03248b6fead646cb68e7a6a935435de664969c
14,492
py
Python
anaconda-mode/0.1.13/jedi-0.15.1-py3.7.egg/jedi/evaluate/base_context.py
space-scl/emacs.d
6285c38714023b72a023fe24cbcb5e4fcdcdb949
[ "Apache-2.0" ]
4
2019-07-26T11:32:22.000Z
2019-09-11T05:34:59.000Z
anaconda-mode/0.1.13/jedi-0.15.1-py3.7.egg/jedi/evaluate/base_context.py
space-scl/emacs.d
6285c38714023b72a023fe24cbcb5e4fcdcdb949
[ "Apache-2.0" ]
10
2020-05-11T20:29:28.000Z
2022-01-13T01:41:27.000Z
anaconda-mode/0.1.13/jedi-0.15.1-py3.7.egg/jedi/evaluate/base_context.py
space-scl/emacs.d
6285c38714023b72a023fe24cbcb5e4fcdcdb949
[ "Apache-2.0" ]
2
2019-08-28T14:57:54.000Z
2019-11-26T16:18:30.000Z
""" Contexts are the "values" that Python would return. However Contexts are at the same time also the "contexts" that a user is currently sitting in. A ContextSet is typically used to specify the return of a function or any other static analysis operation. In jedi there are always multiple returns and not just one. """ from functools import reduce from operator import add from parso.python.tree import ExprStmt, SyncCompFor from jedi import debug from jedi._compatibility import zip_longest, unicode from jedi.parser_utils import clean_scope_docstring from jedi.common import BaseContextSet, BaseContext from jedi.evaluate.helpers import SimpleGetItemNotFound from jedi.evaluate.utils import safe_property from jedi.evaluate.cache import evaluator_as_method_param_cache from jedi.cache import memoize_method _sentinel = object() class HelperContextMixin(object): def get_root_context(self): context = self while True: if context.parent_context is None: return context context = context.parent_context @classmethod @evaluator_as_method_param_cache() def create_cached(cls, *args, **kwargs): return cls(*args, **kwargs) def execute(self, arguments): return self.evaluator.execute(self, arguments=arguments) def execute_evaluated(self, *value_list): from jedi.evaluate.arguments import ValuesArguments arguments = ValuesArguments([ContextSet([value]) for value in value_list]) return self.evaluator.execute(self, arguments) def execute_annotation(self): return self.execute_evaluated() def gather_annotation_classes(self): return ContextSet([self]) def merge_types_of_iterate(self, contextualized_node=None, is_async=False): return ContextSet.from_sets( lazy_context.infer() for lazy_context in self.iterate(contextualized_node, is_async) ) def py__getattribute__(self, name_or_str, name_context=None, position=None, search_global=False, is_goto=False, analysis_errors=True): """ :param position: Position of the last statement -> tuple of line, column """ if name_context is None: name_context = self from jedi.evaluate import finder f = finder.NameFinder(self.evaluator, self, name_context, name_or_str, position, analysis_errors=analysis_errors) filters = f.get_filters(search_global) if is_goto: return f.filter_name(filters) return f.find(filters, attribute_lookup=not search_global) def py__await__(self): await_context_set = self.py__getattribute__(u"__await__") if not await_context_set: debug.warning('Tried to run __await__ on context %s', self) return await_context_set.execute_evaluated() def eval_node(self, node): return self.evaluator.eval_element(self, node) def create_context(self, node, node_is_context=False, node_is_object=False): return self.evaluator.create_context(self, node, node_is_context, node_is_object) def iterate(self, contextualized_node=None, is_async=False): debug.dbg('iterate %s', self) if is_async: from jedi.evaluate.lazy_context import LazyKnownContexts # TODO if no __aiter__ contexts are there, error should be: # TypeError: 'async for' requires an object with __aiter__ method, got int return iter([ LazyKnownContexts( self.py__getattribute__('__aiter__').execute_evaluated() .py__getattribute__('__anext__').execute_evaluated() .py__getattribute__('__await__').execute_evaluated() .py__stop_iteration_returns() ) # noqa ]) return self.py__iter__(contextualized_node) def is_sub_class_of(self, class_context): for cls in self.py__mro__(): if cls.is_same_class(class_context): return True return False def is_same_class(self, class2): # Class matching should prefer comparisons that are not this function. 
if type(class2).is_same_class != HelperContextMixin.is_same_class: return class2.is_same_class(self) return self == class2 class Context(HelperContextMixin, BaseContext): """ Should be defined, otherwise the API returns empty types. """ predefined_names = {} """ To be defined by subclasses. """ tree_node = None @property def api_type(self): # By default just lower name of the class. Can and should be # overwritten. return self.__class__.__name__.lower() def py__getitem__(self, index_context_set, contextualized_node): from jedi.evaluate import analysis # TODO this context is probably not right. analysis.add( contextualized_node.context, 'type-error-not-subscriptable', contextualized_node.node, message="TypeError: '%s' object is not subscriptable" % self ) return NO_CONTEXTS def py__iter__(self, contextualized_node=None): if contextualized_node is not None: from jedi.evaluate import analysis analysis.add( contextualized_node.context, 'type-error-not-iterable', contextualized_node.node, message="TypeError: '%s' object is not iterable" % self) return iter([]) def get_signatures(self): return [] def is_class(self): return False def is_instance(self): return False def is_function(self): return False def is_module(self): return False def is_namespace(self): return False def is_compiled(self): return False def is_bound_method(self): return False def py__bool__(self): """ Since Wrapper is a super class for classes, functions and modules, the return value will always be true. """ return True def py__doc__(self): try: self.tree_node.get_doc_node except AttributeError: return '' else: return clean_scope_docstring(self.tree_node) return None def get_safe_value(self, default=_sentinel): if default is _sentinel: raise ValueError("There exists no safe value for context %s" % self) return default def py__call__(self, arguments): debug.warning("no execution possible %s", self) return NO_CONTEXTS def py__stop_iteration_returns(self): debug.warning("Not possible to return the stop iterations of %s", self) return NO_CONTEXTS def get_qualified_names(self): # Returns Optional[Tuple[str, ...]] return None def is_stub(self): # The root context knows if it's a stub or not. return self.parent_context.is_stub() def iterate_contexts(contexts, contextualized_node=None, is_async=False): """ Calls `iterate`, on all contexts but ignores the ordering and just returns all contexts that the iterate functions yield. 
""" return ContextSet.from_sets( lazy_context.infer() for lazy_context in contexts.iterate(contextualized_node, is_async=is_async) ) class _ContextWrapperBase(HelperContextMixin): predefined_names = {} @safe_property def name(self): from jedi.evaluate.names import ContextName wrapped_name = self._wrapped_context.name if wrapped_name.tree_name is not None: return ContextName(self, wrapped_name.tree_name) else: from jedi.evaluate.compiled import CompiledContextName return CompiledContextName(self, wrapped_name.string_name) @classmethod @evaluator_as_method_param_cache() def create_cached(cls, evaluator, *args, **kwargs): return cls(*args, **kwargs) def __getattr__(self, name): assert name != '_wrapped_context', 'Problem with _get_wrapped_context' return getattr(self._wrapped_context, name) class LazyContextWrapper(_ContextWrapperBase): @safe_property @memoize_method def _wrapped_context(self): with debug.increase_indent_cm('Resolve lazy context wrapper'): return self._get_wrapped_context() def __repr__(self): return '<%s>' % (self.__class__.__name__) def _get_wrapped_context(self): raise NotImplementedError class ContextWrapper(_ContextWrapperBase): def __init__(self, wrapped_context): self._wrapped_context = wrapped_context def __repr__(self): return '%s(%s)' % (self.__class__.__name__, self._wrapped_context) class TreeContext(Context): def __init__(self, evaluator, parent_context, tree_node): super(TreeContext, self).__init__(evaluator, parent_context) self.predefined_names = {} self.tree_node = tree_node def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self.tree_node) class ContextualizedNode(object): def __init__(self, context, node): self.context = context self.node = node def get_root_context(self): return self.context.get_root_context() def infer(self): return self.context.eval_node(self.node) def __repr__(self): return '<%s: %s in %s>' % (self.__class__.__name__, self.node, self.context) class ContextualizedName(ContextualizedNode): # TODO merge with TreeNameDefinition?! @property def name(self): return self.node def assignment_indexes(self): """ Returns an array of tuple(int, node) of the indexes that are used in tuple assignments. For example if the name is ``y`` in the following code:: x, (y, z) = 2, '' would result in ``[(1, xyz_node), (0, yz_node)]``. When searching for b in the case ``a, *b, c = [...]`` it will return:: [(slice(1, -1), abc_node)] """ indexes = [] is_star_expr = False node = self.node.parent compare = self.node while node is not None: if node.type in ('testlist', 'testlist_comp', 'testlist_star_expr', 'exprlist'): for i, child in enumerate(node.children): if child == compare: index = int(i / 2) if is_star_expr: from_end = int((len(node.children) - i) / 2) index = slice(index, -from_end) indexes.insert(0, (index, node)) break else: raise LookupError("Couldn't find the assignment.") is_star_expr = False elif node.type == 'star_expr': is_star_expr = True elif isinstance(node, (ExprStmt, SyncCompFor)): break compare = node node = node.parent return indexes def _getitem(context, index_contexts, contextualized_node): from jedi.evaluate.context.iterable import Slice # The actual getitem call. 
simple_getitem = getattr(context, 'py__simple_getitem__', None) result = NO_CONTEXTS unused_contexts = set() for index_context in index_contexts: if simple_getitem is not None: index = index_context if isinstance(index_context, Slice): index = index.obj try: method = index.get_safe_value except AttributeError: pass else: index = method(default=None) if type(index) in (float, int, str, unicode, slice, bytes): try: result |= simple_getitem(index) continue except SimpleGetItemNotFound: pass unused_contexts.add(index_context) # The index was somehow not good enough or simply a wrong type. # Therefore we now iterate through all the contexts and just take # all results. if unused_contexts or not index_contexts: result |= context.py__getitem__( ContextSet(unused_contexts), contextualized_node ) debug.dbg('py__getitem__ result: %s', result) return result class ContextSet(BaseContextSet): def py__class__(self): return ContextSet(c.py__class__() for c in self._set) def iterate(self, contextualized_node=None, is_async=False): from jedi.evaluate.lazy_context import get_merged_lazy_context type_iters = [c.iterate(contextualized_node, is_async=is_async) for c in self._set] for lazy_contexts in zip_longest(*type_iters): yield get_merged_lazy_context( [l for l in lazy_contexts if l is not None] ) def execute(self, arguments): return ContextSet.from_sets(c.evaluator.execute(c, arguments) for c in self._set) def execute_evaluated(self, *args, **kwargs): return ContextSet.from_sets(c.execute_evaluated(*args, **kwargs) for c in self._set) def py__getattribute__(self, *args, **kwargs): if kwargs.get('is_goto'): return reduce(add, [c.py__getattribute__(*args, **kwargs) for c in self._set], []) return ContextSet.from_sets(c.py__getattribute__(*args, **kwargs) for c in self._set) def get_item(self, *args, **kwargs): return ContextSet.from_sets(_getitem(c, *args, **kwargs) for c in self._set) def try_merge(self, function_name): context_set = self.__class__([]) for c in self._set: try: method = getattr(c, function_name) except AttributeError: pass else: context_set |= method() return context_set def gather_annotation_classes(self): return ContextSet.from_sets([c.gather_annotation_classes() for c in self._set]) def get_signatures(self): return [sig for c in self._set for sig in c.get_signatures()] NO_CONTEXTS = ContextSet([]) def iterator_to_context_set(func): def wrapper(*args, **kwargs): return ContextSet(func(*args, **kwargs)) return wrapper
33.162471
94
0.637524
11,748
0.810654
395
0.027256
1,081
0.074593
0
0
2,454
0.169335
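Note on the record above: LazyContextWrapper defers building the wrapped object until first use and memoizes it (safe_property plus memoize_method), with __getattr__ delegating everything else. The same pattern re-cast with stdlib functools (Python 3.8+; a sketch of the idea, not jedi's implementation):

from functools import cached_property

class LazyWrapper:
    @cached_property
    def _wrapped(self):
        print("resolving...")  # runs exactly once
        return self._build()

    def _build(self):
        # subclasses would override this with the expensive construction
        return object()

    def __getattr__(self, name):
        # delegate unknown attributes to the lazily built object
        return getattr(self._wrapped, name)

w = LazyWrapper()
w._wrapped
w._wrapped  # "resolving..." was printed only once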
8a036923cf292987a326de518f02ae1d70e60da4
974
py
Python
kiwi_scp/commands/cmd_cmd.py
yavook/kiwi-scp
ca4263d913cfbdedc8b14334e3cad61c3b95f0a7
[ "MIT" ]
null
null
null
kiwi_scp/commands/cmd_cmd.py
yavook/kiwi-scp
ca4263d913cfbdedc8b14334e3cad61c3b95f0a7
[ "MIT" ]
null
null
null
kiwi_scp/commands/cmd_cmd.py
yavook/kiwi-scp
ca4263d913cfbdedc8b14334e3cad61c3b95f0a7
[ "MIT" ]
null
null
null
from typing import Tuple

import click

from .cmd import KiwiCommandType, KiwiCommand
from .decorators import kiwi_command
from ..executable import COMPOSE_EXE
from ..instance import Instance
from ..project import Project


@click.argument(
    "compose_args",
    metavar="[ARG]...",
    nargs=-1,
)
@click.argument(
    "compose_cmd",
    metavar="COMMAND",
)
@kiwi_command(
    short_help="Run docker-compose command",
    # ignore arguments looking like options
    # just pass everything down to docker-compose
    context_settings={"ignore_unknown_options": True},
)
class CmdCommand(KiwiCommand):
    """Run raw docker-compose command in a project"""

    type = KiwiCommandType.PROJECT
    enabled_only = True

    @classmethod
    def run_for_project(cls, instance: Instance, project: Project,
                        compose_cmd: str = None,
                        compose_args: Tuple[str] = None) -> None:
        COMPOSE_EXE.run([compose_cmd, *compose_args], **project.process_kwargs)
26.324324
91
0.708419
400
0.410678
0
0
749
0.768994
0
0
231
0.237166
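Note on the record above: ignore_unknown_options plus a nargs=-1 argument is how click forwards unparsed flags to a subprocess. A minimal standalone equivalent using plain click (assumes a docker-compose binary on PATH; not kiwi-scp's own decorator stack):

import subprocess

import click

@click.command(context_settings={"ignore_unknown_options": True})
@click.argument("compose_cmd")
@click.argument("compose_args", nargs=-1, type=click.UNPROCESSED)
def cmd(compose_cmd, compose_args):
    """Run a raw docker-compose command, passing flags through untouched."""
    subprocess.run(["docker-compose", compose_cmd, *compose_args])

if __name__ == "__main__":
    cmd()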
8a036ee66041ffdf97db3dd3911676a6d37fc888
4,339
py
Python
homework/Testing with Examples (Network)/impl_fail-add_relation-does_not_fail_when_person1_is_non_existent.py
rvprasad/software-testing-course
3803851dcf9f7bbd0f0b89fca6c9c5e3a48f22e0
[ "CC-BY-4.0" ]
11
2018-02-08T05:23:28.000Z
2021-05-24T13:23:56.000Z
homework/Testing with Examples (Network)/impl_fail-add_relation-does_not_fail_when_person1_is_non_existent.py
rvprasad/software-testing-course
3803851dcf9f7bbd0f0b89fca6c9c5e3a48f22e0
[ "CC-BY-4.0" ]
null
null
null
homework/Testing with Examples (Network)/impl_fail-add_relation-does_not_fail_when_person1_is_non_existent.py
rvprasad/software-testing-course
3803851dcf9f7bbd0f0b89fca6c9c5e3a48f22e0
[ "CC-BY-4.0" ]
2
2020-09-15T08:51:22.000Z
2021-01-26T12:07:18.000Z
class MyError(Exception):
    pass


class PropertyContainer(object):
    def __init__(self):
        self.props = {}

    def set_property(self, prop, value):
        self.props[prop] = value

    def get_property(self, prop):
        return self.props.get(prop)

    def has_property(self, prop):
        return prop in self.props


class Node(PropertyContainer):
    pass


class Edge(PropertyContainer):
    def __init__(self, node1, node2):
        super().__init__()
        self.node1 = node1
        self.node2 = node2


class Network(object):
    NAME_PROP = "name"      # NAME_PROP is an optional string property
    FRIEND_PROP = "friend"  # FRIEND_PROP is an optional boolean property

    def __init__(self):
        self.nodes = set()
        self.edges = set()

    def create_person(self):
        node = Node()
        self.nodes.add(node)
        return node

    # add prop to value; overwrite if prop exists
    def add_person_property(self, person, prop, value):
        # flag non-existent person
        if person not in self.nodes:
            raise RuntimeError("person does not exist")
        if prop == Network.NAME_PROP:
            # disallow non-string values for NAME_PROP property
            if not isinstance(value, str):
                raise TypeError(
                    "{0} is a string property".format(Network.NAME_PROP))
            # disallow multiple people to have the same name
            for p in self.nodes:
                if p.get_property(Network.NAME_PROP) == value and \
                        p is not person:
                    raise ValueError("{0} name already taken".format(value))
        person.set_property(prop, value)

    def add_relation(self, person1, person2):
        # flag non-existent persons
        if person1 not in self.nodes:
            # raise RuntimeError("person1 does not exist")
            person1 = self.create_person()
        if person2 not in self.nodes:
            raise RuntimeError("person2 does not exist")
        # flag existing edge
        for e in self.edges:
            if (e.node1 is person1 and e.node2 is person2) or \
                    (e.node1 is person2 and e.node2 is person1):
                raise ValueError("relation exists")
        self.edges.add(Edge(person1, person2))

    def add_relation_property(self, person1, person2, prop, value):
        # disallow non-boolean values for FRIEND_PROP property
        if prop == Network.FRIEND_PROP and not isinstance(value, bool):
            raise TypeError(
                "{0} is a boolean property".format(Network.FRIEND_PROP))
        for e in self.edges:
            if (e.node1 is person1 and e.node2 is person2) or \
                    (e.node1 is person2 and e.node2 is person1):
                e.set_property(prop, value)
                return
        # flag non-existent relation
        raise RuntimeError("Non-existent relation")

    # get a person with given name
    def get_person(self, name):
        # disallow non-string values for name
        if not isinstance(name, str):
            raise TypeError(
                "{0} is a string argument".format(Network.NAME_PROP))
        for n in self.nodes:
            if n.get_property(Network.NAME_PROP) == name:
                return n
        # flag non-existent person
        raise RuntimeError("No person named {0}".format(name))

    # get friends of friends of a person with given name
    def friends_of_friends(self, name):
        # disallow non-string values for name
        if not isinstance(name, str):
            raise TypeError(
                "{0} is a string argument".format(Network.NAME_PROP))
        # flag non-existent person
        person = self.get_person(name)
        visited = set([person])
        i = 0
        while i < 2:
            newly_visited = set()
            for p in (x for x in visited):
                for e in (x for x in self.edges
                          if x.get_property(Network.FRIEND_PROP) == True):
                    n1 = e.node1
                    n2 = e.node2
                    if n1 == p:
                        newly_visited.add(e.node2)
                    elif n2 == p:
                        newly_visited.add(e.node1)
            visited = newly_visited
            i += 1
        return list(visited)
34.991935
76
0.569717
4,326
0.997004
0
0
0
0
0
0
894
0.206038
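Note on the record above: the filename advertises the deliberate defect, namely that add_relation silently creates a missing person1 instead of raising. A usage sketch for the Network class above, assuming it is in scope:

net = Network()
alice, bob, carol = (net.create_person() for _ in range(3))
net.add_person_property(alice, Network.NAME_PROP, "Alice")
net.add_person_property(bob, Network.NAME_PROP, "Bob")
net.add_person_property(carol, Network.NAME_PROP, "Carol")
net.add_relation(alice, bob)
net.add_relation(bob, carol)
net.add_relation_property(alice, bob, Network.FRIEND_PROP, True)
net.add_relation_property(bob, carol, Network.FRIEND_PROP, True)
# two hops from Alice along friend edges: back to Alice, and on to Carol
names = [p.get_property(Network.NAME_PROP)
         for p in net.friends_of_friends("Alice")]
print(sorted(names))  # ['Alice', 'Carol']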
8a03ced3330b9102f19e53ae0f85a628054986d1
36
py
Python
tools/__init__.py
BranKein/Flask-template
3d8f43b3c44163e855c727de2a0dfe37d3b788f9
[ "MIT" ]
null
null
null
tools/__init__.py
BranKein/Flask-template
3d8f43b3c44163e855c727de2a0dfe37d3b788f9
[ "MIT" ]
null
null
null
tools/__init__.py
BranKein/Flask-template
3d8f43b3c44163e855c727de2a0dfe37d3b788f9
[ "MIT" ]
null
null
null
from . import ip

__all__ = ['ip']
7.2
16
0.583333
0
0
0
0
0
0
0
0
4
0.111111
8a040db174b4e066ad1fcf13a9fc64667e2a81e2
274
py
Python
leetCode/algorithms/easy/count_and_say.py
ferhatelmas/algo
a7149c7a605708bc01a5cd30bf5455644cefd04d
[ "WTFPL" ]
25
2015-01-21T16:39:18.000Z
2021-05-24T07:01:24.000Z
leetCode/algorithms/easy/count_and_say.py
gauravsingh58/algo
397859a53429e7a585e5f6964ad24146c6261326
[ "WTFPL" ]
2
2020-09-30T19:39:36.000Z
2020-10-01T17:15:16.000Z
leetCode/algorithms/easy/count_and_say.py
ferhatelmas/algo
a7149c7a605708bc01a5cd30bf5455644cefd04d
[ "WTFPL" ]
15
2015-01-21T16:39:27.000Z
2020-10-01T17:00:22.000Z
from itertools import groupby


class Solution:
    def countAndSay(self, n):
        def gen(s):
            return "".join(str(len(list(g))) + k for k, g in groupby(s))

        s, i = "1", 1
        while i < n:
            s = gen(s)
            i += 1
        return s
19.571429
72
0.463504
241
0.879562
0
0
0
0
0
0
5
0.018248
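Note on the record above: groupby collapses each run of equal digits into (length, digit), which is exactly one step of the look-and-say sequence. A standalone version with the first terms as a quick check:

from itertools import groupby

def count_and_say(n):
    s = "1"
    for _ in range(n - 1):
        # each run of k copies of digit d becomes the text "kd"
        s = "".join(str(len(list(g))) + k for k, g in groupby(s))
    return s

print([count_and_say(i) for i in range(1, 6)])
# ['1', '11', '21', '1211', '111221']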
8a045d9a56c4a8715b77c0b2cd2d5ff977fa98ed
609
py
Python
conf/feature_config.py
pupuwudi/nlp_xiaojiang
182ac4522b6012a52de6e1d0db7e6a47cb716e5b
[ "MIT" ]
null
null
null
conf/feature_config.py
pupuwudi/nlp_xiaojiang
182ac4522b6012a52de6e1d0db7e6a47cb716e5b
[ "MIT" ]
null
null
null
conf/feature_config.py
pupuwudi/nlp_xiaojiang
182ac4522b6012a52de6e1d0db7e6a47cb716e5b
[ "MIT" ]
2
2021-01-18T10:07:20.000Z
2022-01-12T10:09:47.000Z
# -*- coding: UTF-8 -*-
# !/usr/bin/python
# @time     :2019/5/10 9:13
# @author   :Mo
# @function :path of FeatureProject

import pathlib
import sys
import os

# base dir
projectdir = str(pathlib.Path(os.path.abspath(__file__)).parent.parent)
sys.path.append(projectdir)

# path of BERT model
model_dir = projectdir + '/Data/chinese_L-12_H-768_A-12'
config_name = model_dir + '/bert_config.json'
ckpt_name = model_dir + '/bert_model.ckpt'
vocab_file = model_dir + '/vocab.txt'

# GPU memory usage fraction
gpu_memory_fraction = 0.32

# by default, take the output of the second-to-last layer as the sentence vector
layer_indexes = [-2]

# maximum sequence length
max_seq_len = 32
22.555556
72
0.689655
0
0
0
0
0
0
0
0
328
0.494721
8a047a8d5dd4c7ba8745cc48738110ca5fef1d2f
813
py
Python
tests/test_prep_read.py
taruma/hidrokit
a96c4ba2235d58d2bbc905be44d1b413ed19b3d2
[ "MIT" ]
5
2019-07-15T13:35:52.000Z
2020-04-01T17:34:16.000Z
tests/test_prep_read.py
taruma/hidrokit
a96c4ba2235d58d2bbc905be44d1b413ed19b3d2
[ "MIT" ]
107
2019-01-03T02:12:26.000Z
2020-02-18T00:48:27.000Z
tests/test_prep_read.py
hidrokit/hidrokit
c8b949aa6a81981684a24e5dd1e498ec82cbe0ca
[ "MIT" ]
2
2020-06-17T00:08:32.000Z
2020-08-24T18:55:38.000Z
"""Test for .prep.read module """ from hidrokit.prep import read import numpy as np import pandas as pd A = pd.DataFrame( data=[ [1, 3, 4, np.nan, 2, np.nan], [np.nan, 2, 3, np.nan, 1, 4], [2, np.nan, 1, 3, 4, np.nan] ], columns=['A', 'B', 'C', 'D', 'E', 'F'] ) A_date = A.set_index(pd.date_range("20190617", "20190619")) res_A_number = {'A': [1], 'B': [2], 'C': [], 'D': [0, 1], 'E': [], 'F': [0, 2]} res_A_date = {'A': ['0618'], 'B': ['0619'], 'C': [], 'D': ['0617', '0618'], 'E': [], 'F': ['0617', '0619']} def test_read_number(): test = read.missing_row(A, date_index=False) assert test.items() == res_A_number.items() def test_read_date(): test = read.missing_row(A_date, date_format="%m%d") assert test.items() == res_A_date.items()
25.40625
79
0.526445
0
0
0
0
0
0
0
0
149
0.183272
8a047dbb3e81227c03ec206589ca325125601905
1,721
py
Python
app/blueprints/department_blueprint.py
Maxcutex/personal_ecommerce
be09fb20eae1b225523acde06f8e75effcc3676f
[ "MIT" ]
null
null
null
app/blueprints/department_blueprint.py
Maxcutex/personal_ecommerce
be09fb20eae1b225523acde06f8e75effcc3676f
[ "MIT" ]
2
2019-05-21T08:44:29.000Z
2021-04-30T20:46:08.000Z
app/blueprints/department_blueprint.py
Maxcutex/personal_ecommerce
be09fb20eae1b225523acde06f8e75effcc3676f
[ "MIT" ]
null
null
null
from flasgger import swag_from

from app.blueprints.base_blueprint import Blueprint, BaseBlueprint, request, Security, Auth
from app.controllers.department_controller import DepartmentController

url_prefix = '{}/departments'.format(BaseBlueprint.base_url_prefix)
department_blueprint = Blueprint('department', __name__, url_prefix=url_prefix)
department_controller = DepartmentController(request)


@department_blueprint.route('/', methods=['GET'])
@Auth.has_permission('view_department')
@swag_from('documentation/get_all_departments.yml')
def list_departments():
    return department_controller.list_departments()


@department_blueprint.route('/<int:department_id>', methods=['GET'])
@Auth.has_permission('view_department')
@swag_from('documentation/get_single_department.yml')
def get_department(department_id):
    return department_controller.get_department(department_id)


@department_blueprint.route('/', methods=['POST'])
@Auth.has_role('admin')
@Security.validator(['name|required:ifExists_Department_name', 'description|required'])
@swag_from('documentation/create_department.yml')
def create_department():
    return department_controller.create_department()


@department_blueprint.route('/<int:department_id>', methods=['DELETE'])
@Auth.has_role('admin')
@swag_from('documentation/delete_department.yml')
def delete_department(department_id):
    return department_controller.delete_department(department_id)


@department_blueprint.route('/<int:department_id>', methods=['PATCH'])
@Auth.has_role('admin')
@Security.validator(['name|optional', 'description|optional'])
@swag_from('documentation/update_department.yml')
def update_department(department_id):
    return department_controller.update_department(department_id)
41.97561
91
0.820453
0
0
0
0
1,312
0.762347
0
0
476
0.276583
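A hypothetical wiring sketch for the blueprint above; the real app factory lives elsewhere in the project, and the final routes compose the base prefix with '/departments':

from flask import Flask


def create_app():
    app = Flask(__name__)
    # department_blueprint already carries its url_prefix, so routes become
    # <base_url_prefix>/departments/ and
    # <base_url_prefix>/departments/<int:department_id>
    app.register_blueprint(department_blueprint)
    return app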
8a04b26d17a373b84c1afb19abef67f291bb970a
9,747
py
Python
src/train_DFCAN.py
ikecoglu/DL-SR
5e4c794f1434cd4a9b2b1aecf3738065b11bede1
[ "MIT" ]
46
2021-01-07T03:38:07.000Z
2022-03-24T19:11:23.000Z
src/train_DFCAN.py
ikecoglu/DL-SR
5e4c794f1434cd4a9b2b1aecf3738065b11bede1
[ "MIT" ]
7
2021-02-06T14:23:18.000Z
2022-02-13T04:08:45.000Z
src/train_DFCAN.py
ikecoglu/DL-SR
5e4c794f1434cd4a9b2b1aecf3738065b11bede1
[ "MIT" ]
16
2021-01-26T16:22:49.000Z
2022-02-26T03:21:08.000Z
import argparse
from keras import optimizers
import matplotlib.pyplot as plt
import numpy as np
import datetime
from keras.callbacks import TensorBoard
import glob
import os
import tensorflow as tf
from models import *
from utils.lr_controller import ReduceLROnPlateau
from utils.data_loader import data_loader, data_loader_multi_channel
from utils.utils import img_comp
from utils.loss import loss_mse_ssim

parser = argparse.ArgumentParser()
parser.add_argument("--gpu_id", type=int, default=1)
parser.add_argument("--gpu_memory_fraction", type=float, default=0.3)
parser.add_argument("--mixed_precision_training", type=int, default=1)
parser.add_argument("--data_dir", type=str, default="../dataset/train/F-actin")
parser.add_argument("--save_weights_dir", type=str, default="../trained_models")
parser.add_argument("--model_name", type=str, default="DFCAN")
parser.add_argument("--patch_height", type=int, default=128)
parser.add_argument("--patch_width", type=int, default=128)
parser.add_argument("--input_channels", type=int, default=9)
parser.add_argument("--scale_factor", type=int, default=2)
parser.add_argument("--norm_flag", type=int, default=1)
parser.add_argument("--iterations", type=int, default=1000000)
parser.add_argument("--sample_interval", type=int, default=1000)
parser.add_argument("--validate_interval", type=int, default=2000)
parser.add_argument("--validate_num", type=int, default=500)
parser.add_argument("--batch_size", type=int, default=4)
parser.add_argument("--start_lr", type=float, default=1e-4)
parser.add_argument("--lr_decay_factor", type=float, default=0.5)
parser.add_argument("--load_weights", type=int, default=0)
parser.add_argument("--optimizer_name", type=str, default="adam")

args = parser.parse_args()
gpu_id = str(args.gpu_id)
gpu_memory_fraction = args.gpu_memory_fraction
mixed_precision_training = str(args.mixed_precision_training)
data_dir = args.data_dir
save_weights_dir = args.save_weights_dir
validate_interval = args.validate_interval
batch_size = args.batch_size
start_lr = args.start_lr
lr_decay_factor = args.lr_decay_factor
patch_height = args.patch_height
patch_width = args.patch_width
input_channels = args.input_channels
scale_factor = args.scale_factor
norm_flag = args.norm_flag
validate_num = args.validate_num
iterations = args.iterations
load_weights = args.load_weights
optimizer_name = args.optimizer_name
model_name = args.model_name
sample_interval = args.sample_interval

os.environ["TF_ENABLE_AUTO_MIXED_PRECISION"] = mixed_precision_training
os.environ["CUDA_VISIBLE_DEVICES"] = gpu_id
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=gpu_memory_fraction)
tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))

data_name = data_dir.split('/')[-1]
if input_channels == 1:
    save_weights_name = model_name + '-SISR_' + data_name
    cur_data_loader = data_loader
    train_images_path = data_dir + '/training_wf/'
    validate_images_path = data_dir + '/validate_wf/'
else:
    save_weights_name = model_name + '-SIM_' + data_name
    cur_data_loader = data_loader_multi_channel
    train_images_path = data_dir + '/training/'
    validate_images_path = data_dir + '/validate/'
save_weights_path = save_weights_dir + '/' + save_weights_name + '/'
train_gt_path = data_dir + '/training_gt/'
validate_gt_path = data_dir + '/validate_gt/'
sample_path = save_weights_path + 'sampled_img/'

if not os.path.exists(save_weights_path):
    os.mkdir(save_weights_path)
if not os.path.exists(sample_path):
    os.mkdir(sample_path)

# --------------------------------------------------------------------------------
#                          select models and optimizer
# --------------------------------------------------------------------------------
modelFns = {'DFCAN': DFCAN16.DFCAN}
modelFN = modelFns[model_name]
optimizer_g = optimizers.adam(lr=start_lr, beta_1=0.9, beta_2=0.999)

# --------------------------------------------------------------------------------
#                             define combined model
# --------------------------------------------------------------------------------
g = modelFN((patch_height, patch_width, input_channels))
g.compile(loss=loss_mse_ssim, optimizer=optimizer_g)
lr_controller = ReduceLROnPlateau(model=g, factor=lr_decay_factor, patience=10,
                                  mode='min', min_delta=1e-4, cooldown=0,
                                  min_lr=start_lr * 0.1, verbose=1)

# --------------------------------------------------------------------------------
#                               about Tensorboard
# --------------------------------------------------------------------------------
log_path = save_weights_path + 'graph'
if not os.path.exists(log_path):
    os.mkdir(log_path)
callback = TensorBoard(log_path)
callback.set_model(g)
train_names = 'training_loss'
val_names = ['val_MSE', 'val_SSIM', 'val_PSNR', 'val_NRMSE']


def write_log(callback, names, logs, batch_no):
    summary = tf.Summary()
    summary_value = summary.value.add()
    summary_value.simple_value = logs
    summary_value.tag = names
    callback.writer.add_summary(summary, batch_no)
    callback.writer.flush()


# --------------------------------------------------------------------------------
#                             Sample and validate
# --------------------------------------------------------------------------------
def Validate(iter, sample=0):
    validate_path = glob.glob(validate_images_path + '*')
    validate_path.sort()
    if sample == 1:
        r, c = 3, 3
        mses, nrmses, psnrs, ssims = [], [], [], []
        img_show, gt_show, output_show = [], [], []
        validate_path = np.random.choice(validate_path, size=r)
        for path in validate_path:
            [img, gt] = cur_data_loader([path], validate_images_path,
                                        validate_gt_path, patch_height,
                                        patch_width, 1, norm_flag=norm_flag,
                                        scale=scale_factor)
            output = np.squeeze(g.predict(img))
            mses, nrmses, psnrs, ssims = img_comp(gt, output,
                                                  mses, nrmses, psnrs, ssims)
            img_show.append(np.squeeze(np.mean(img, 3)))
            gt_show.append(np.squeeze(gt))
            output_show.append(output)
        # show some examples
        fig, axs = plt.subplots(r, c)
        cnt = 0
        for row in range(r):
            axs[row, 1].set_title('MSE=%.4f, SSIM=%.4f, PSNR=%.4f'
                                  % (mses[row], ssims[row], psnrs[row]))
            for col, image in enumerate([img_show, output_show, gt_show]):
                axs[row, col].imshow(np.squeeze(image[row]))
                axs[row, col].axis('off')
                cnt += 1
        fig.savefig(sample_path + '%d.png' % iter)
        plt.close()
    else:
        if validate_num < validate_path.__len__():
            validate_path = validate_path[0:validate_num]
        mses, nrmses, psnrs, ssims = [], [], [], []
        for path in validate_path:
            [img, gt] = cur_data_loader([path], validate_images_path,
                                        validate_gt_path, patch_height,
                                        patch_width, 1, norm_flag=norm_flag,
                                        scale=scale_factor)
            output = np.squeeze(g.predict(img))
            mses, nrmses, psnrs, ssims = img_comp(gt, output,
                                                  mses, nrmses, psnrs, ssims)
        # if best, save weights.best
        g.save_weights(save_weights_path + 'weights.latest')
        if min(validate_nrmse) > np.mean(nrmses):
            g.save_weights(save_weights_path + 'weights.best')
        validate_nrmse.append(np.mean(nrmses))

        curlr = lr_controller.on_epoch_end(iter, np.mean(nrmses))
        write_log(callback, val_names[0], np.mean(mses), iter)
        write_log(callback, val_names[1], np.mean(ssims), iter)
        write_log(callback, val_names[2], np.mean(psnrs), iter)
        write_log(callback, val_names[3], np.mean(nrmses), iter)
        write_log(callback, 'lr', curlr, iter)


# --------------------------------------------------------------------------------
#                             if exist, load weights
# --------------------------------------------------------------------------------
if load_weights:
    if os.path.exists(save_weights_path + 'weights.best'):
        g.load_weights(save_weights_path + 'weights.best')
        print('Loading weights successfully: ' + save_weights_path + 'weights.best')
    elif os.path.exists(save_weights_path + 'weights.latest'):
        g.load_weights(save_weights_path + 'weights.latest')
        print('Loading weights successfully: ' + save_weights_path + 'weights.latest')

# --------------------------------------------------------------------------------
#                                    training
# --------------------------------------------------------------------------------
start_time = datetime.datetime.now()
loss_record = []
validate_nrmse = [np.Inf]
lr_controller.on_train_begin()
images_path = glob.glob(train_images_path + '/*')
for it in range(iterations):
    # ------------------------------------
    #             train generator
    # ------------------------------------
    input_g, gt_g = cur_data_loader(images_path, train_images_path,
                                    train_gt_path, patch_height, patch_width,
                                    batch_size, norm_flag=norm_flag,
                                    scale=scale_factor)
    loss_generator = g.train_on_batch(input_g, gt_g)
    loss_record.append(loss_generator)
    elapsed_time = datetime.datetime.now() - start_time
    print("%d epoch: time: %s, g_loss = %s" % (it + 1, elapsed_time, loss_generator))

    if (it + 1) % sample_interval == 0:
        images_path = glob.glob(train_images_path + '/*')
        Validate(it + 1, sample=1)

    if (it + 1) % validate_interval == 0:
        Validate(it + 1, sample=0)
        write_log(callback, train_names, np.mean(loss_record), it + 1)
        loss_record = []
45.125
109
0.612804
0
0
0
0
0
0
0
0
2,356
0.241715
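The checkpointing logic used in the training script above, distilled into a standalone helper. A sketch only: model stands for any Keras-style object with a save_weights method, and the best-so-far metric is tracked in a module-level variable as the original does:

best_nrmse = float('inf')


def checkpoint(model, nrmse, path):
    """Always refresh weights.latest; refresh weights.best on improvement."""
    global best_nrmse
    model.save_weights(path + 'weights.latest')
    if nrmse < best_nrmse:
        best_nrmse = nrmse
        model.save_weights(path + 'weights.best')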
8a04bef0858eef7458b1e38ddd409346a98cb2cc
2,635
py
Python
catalyst/exchange/live_graph_clock.py
erlendve/catalyst
463575bc23c0abd1287f8ec81c4377baabf2b8b8
[ "Apache-2.0" ]
null
null
null
catalyst/exchange/live_graph_clock.py
erlendve/catalyst
463575bc23c0abd1287f8ec81c4377baabf2b8b8
[ "Apache-2.0" ]
null
null
null
catalyst/exchange/live_graph_clock.py
erlendve/catalyst
463575bc23c0abd1287f8ec81c4377baabf2b8b8
[ "Apache-2.0" ]
null
null
null
import pandas as pd

from catalyst.constants import LOG_LEVEL
from catalyst.exchange.utils.stats_utils import prepare_stats
from catalyst.gens.sim_engine import (
    BAR,
    SESSION_START
)
from logbook import Logger

log = Logger('LiveGraphClock', level=LOG_LEVEL)


class LiveGraphClock(object):
    """Realtime clock for live trading.

    This class is a drop-in replacement for
    :class:`zipline.gens.sim_engine.MinuteSimulationClock`.

    This mixes the clock with a live graph.

    Notes
    -----
    This seemingly awkward approach allows us to run the program using a
    single thread. This is important because Matplotlib does not play nice
    with multi-threaded environments. Zipline probably does not either.

    Matplotlib has a pause() method which is a wrapper around time.sleep()
    used in the SimpleClock. The key difference is that users can still
    interact with the chart during the pause cycles. This is what enables us
    to keep a single thread. This is also why we are not using the 'animate'
    callback of Matplotlib. We need direct access to the __iter__ method in
    order to yield events to Zipline.

    The :param:`time_skew` parameter represents the time difference between
    the exchange and the live trading machine's clock. It's not used
    currently.
    """

    def __init__(self, sessions, context, callback=None,
                 time_skew=pd.Timedelta('0s')):
        self.sessions = sessions
        self.time_skew = time_skew
        self._last_emit = None
        self._before_trading_start_bar_yielded = True
        self.context = context
        self.callback = callback

    def __iter__(self):
        from matplotlib import pyplot as plt

        yield pd.Timestamp.utcnow(), SESSION_START

        while True:
            current_time = pd.Timestamp.utcnow()
            current_minute = current_time.floor('1T')

            if self._last_emit is None or current_minute > self._last_emit:
                log.debug('emitting minutely bar: {}'.format(current_minute))

                self._last_emit = current_minute
                yield current_minute, BAR

                recorded_cols = list(self.context.recorded_vars.keys())
                df, _ = prepare_stats(
                    self.context.frame_stats, recorded_cols=recorded_cols
                )
                self.callback(self.context, df)
            else:
                # I can't use the "animate" reactive approach here because
                # I need to yield from the main loop.

                # Workaround: https://stackoverflow.com/a/33050617/814633
                plt.pause(1)
35.133333
79
0.666414
2,365
0.897533
990
0.375712
0
0
0
0
1,214
0.460721
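A sketch of how the clock's generator protocol is meant to be consumed; the context and callback arguments are stand-ins supplied by the caller, and the break is only for illustration since the clock itself never stops:

def run(clock, max_bars=3):
    seen = 0
    for dt, event in clock:
        if event == SESSION_START:
            print('session started at', dt)
        elif event == BAR:
            seen += 1
            print('minutely bar at', dt)
            if seen >= max_bars:
                break  # the real consumer (zipline) drives this loop forever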
8a04ff873e3cd041bc9cad7f7fc7707f7c185cce
6,652
py
Python
invera/api/tests.py
LeoLeiva/todo-challenge
f6f24f53758eb4e425c91516bcab7af8cad66814
[ "MIT" ]
null
null
null
invera/api/tests.py
LeoLeiva/todo-challenge
f6f24f53758eb4e425c91516bcab7af8cad66814
[ "MIT" ]
null
null
null
invera/api/tests.py
LeoLeiva/todo-challenge
f6f24f53758eb4e425c91516bcab7af8cad66814
[ "MIT" ]
1
2021-01-10T20:19:42.000Z
2021-01-10T20:19:42.000Z
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import inspect

from task.models import InveraTask
from api.utils import send_test_csv_report
from django.contrib.auth.models import User
from rest_framework.test import APIClient, APITestCase
from rest_framework.reverse import reverse
from rest_framework import status

TEST_RESULTS = []
RECIPIENTS = ['[email protected]']


class TaskListTestCase(APITestCase):
    def setUp(self) -> None:
        self.user = User.objects.create_user(
            username='test_user', password='adminpass')
        self.other_user = User.objects.create_user(
            username='other_user', password='adminpass')
        self.task = InveraTask.objects.create(
            userTask=self.user, title='My Initial Task')
        self.client = APIClient()

    @classmethod
    def tearDownClass(cls):
        User.objects.filter(username__in=['test_user', 'other_user']).delete()

    def test_create_task_with_un_authenticate_user(self):
        """
        In this test case we exercise the Task Create API with an
        unauthenticated user.
        """
        response = self.client.post(
            reverse('api-task'), {'title': 'My Task 1'}, format='json')

        is_passed = response.status_code == status.HTTP_403_FORBIDDEN

        TEST_RESULTS.append({
            "result": "Passed" if is_passed else "Failed",
            "test_name": inspect.currentframe().f_code.co_name,
            "test_description": "El usuario no autenticado no puede agregar una tarea a la lista"
        })

        if is_passed:
            print("Resultado: Aprobado")
        else:
            print("Resultado: Fallido")
        print("Nombre del test: " + inspect.currentframe().f_code.co_name)
        print("Descripcion: El usuario no autenticado no puede agregar una tarea a la lista")
        print("-----------")

    def test_put_task_with_un_authenticate_user(self):
        """
        In this test case we exercise the Task PUT API with an
        unauthenticated user.
        """
        response = self.client.put(
            reverse('api-task'), {'title': 'My Task'}, format='json')

        is_passed = response.status_code == status.HTTP_403_FORBIDDEN

        TEST_RESULTS.append({
            "result": "Passed" if is_passed else "Failed",
            "test_name": inspect.currentframe().f_code.co_name,
            "test_description": "El usuario no autenticado no puede modificar una tarea"
        })

        if is_passed:
            print("Resultado: Aprobado")
        else:
            print("Resultado: Fallido")
        print("Nombre del test: " + inspect.currentframe().f_code.co_name)
        print("Descripcion: El usuario no autenticado no puede modificar una tarea")
        print("-----------")

    def test_put_task_with_authenticated_user(self):
        self.client.login(username='test_user', password='adminpass')
        response = self.client.put(
            reverse('api-task-detail', args=[str(self.task.idTask)]),
            {'title': 'My Task 2'}, format='json')

        is_passed = response.status_code == status.HTTP_200_OK

        TEST_RESULTS.append({
            "result": "Passed" if is_passed else "Failed",
            "test_name": inspect.currentframe().f_code.co_name,
            "test_description": "Usuario autenticado puede modificar una tarea suya"
        })

        if is_passed:
            print("Resultado: Aprobado")
        else:
            print("Resultado: Fallido")
        print("Nombre del test: " + inspect.currentframe().f_code.co_name)
        print("Descripcion: Usuario autenticado puede modificar una tarea suya")
        print("-----------")

    def test_get_other_user_task_detail(self):
        """
        In this test case we exercise the Task GET API, trying to fetch
        the detail of a task owned by one user while authenticated with a
        different user's credentials.
        """
        self.client.login(username='other_user', password='adminpass')
        response = self.client.get(
            reverse('api-task-detail', args=[str(self.task.idTask)]))

        is_passed = response.status_code == status.HTTP_404_NOT_FOUND
        # is_passed = response.status_code == status.HTTP_403_FORBIDDEN

        TEST_RESULTS.append({
            "result": "Passed" if is_passed else "Failed",
            "test_name": inspect.currentframe().f_code.co_name,
            "test_description": "Solo el propietario puede ver el detalle de la tarea"
        })

        if is_passed:
            print("Resultado: Aprobado")
        else:
            print("Resultado: Fallido")
        print("Nombre del test: " + inspect.currentframe().f_code.co_name)
        print("Descripcion: Solo el propietario puede ver el detalle de la tarea")
        print("-----------")

    def test_create_task_with_authenticated_user(self):
        self.client.login(username='test_user', password='adminpass')
        response = self.client.post(
            reverse('api-task'), {'title': 'My Task'}, format='json')

        is_passed = response.status_code == status.HTTP_201_CREATED

        TEST_RESULTS.append({
            "result": "Passed" if is_passed else "Failed",
            "test_name": inspect.currentframe().f_code.co_name,
            "test_description": "Usuario autenticado agrega tarea a la lista"
        })

        if is_passed:
            print("Resultado: Aprobado")
        else:
            print("Resultado: Fallido")
        print("Nombre del test: " + inspect.currentframe().f_code.co_name)
        print("Descripcion: Usuario autenticado agrega tarea a la lista")
        print("-----------")

    def test_get_task_detail(self):
        self.client.login(username='test_user', password='adminpass')
        response = self.client.get(
            reverse('api-task-detail', args=[str(self.task.idTask)]))

        is_passed = response.status_code == status.HTTP_200_OK

        TEST_RESULTS.append({
            "result": "Passed" if is_passed else "Failed",
            "test_name": inspect.currentframe().f_code.co_name,
            "test_description": "Usuario autenticado puede ver detalles de la tarea correctamente"
        })

        if is_passed:
            print("Resultado: Aprobado")
        else:
            print("Resultado: Fallido")
        print("Nombre del test: " + inspect.currentframe().f_code.co_name)
        print("Descripcion: Usuario autenticado puede ver detalles de la tarea correctamente")
        print("-----------")


class CSVReportTest(APITestCase):
    def test_send_csv(self):
        send_test_csv_report(
            test_results=TEST_RESULTS,
            recipients=RECIPIENTS
        )
37.370787
165
0.634245
6,253
0.940018
0
0
119
0.017889
0
0
2,382
0.358088
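Every test above repeats the same record-and-print block. A sketch of that block factored into one helper; f_back is used so the reported name is the calling test's, not the helper's:

def record_result(is_passed, description):
    test_name = inspect.currentframe().f_back.f_code.co_name
    TEST_RESULTS.append({
        "result": "Passed" if is_passed else "Failed",
        "test_name": test_name,
        "test_description": description,
    })
    print("Resultado: " + ("Aprobado" if is_passed else "Fallido"))
    print("Nombre del test: " + test_name)
    print("Descripcion: " + description)
    print("-----------")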
8a06be2dde291c66efbc5f80746f557a0f2cecaa
336
py
Python
experiments/seidel-2d/tmp_files/6745.py
LoopTilingBenchmark/benchmark
52a3d2e70216552a498fd91de02a2fa9cb62122c
[ "BSD-2-Clause" ]
null
null
null
experiments/seidel-2d/tmp_files/6745.py
LoopTilingBenchmark/benchmark
52a3d2e70216552a498fd91de02a2fa9cb62122c
[ "BSD-2-Clause" ]
null
null
null
experiments/seidel-2d/tmp_files/6745.py
LoopTilingBenchmark/benchmark
52a3d2e70216552a498fd91de02a2fa9cb62122c
[ "BSD-2-Clause" ]
null
null
null
from chill import *

source('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/polybench/polybench-code/stencils/seidel-2d/kernel.c')
destination('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/experiments/seidel-2d/tmp_files/6745.c')

procedure('kernel_seidel_2d')
loop(0)

known(' n > 2 ')

tile(0, 2, 16, 2)
tile(0, 4, 16, 4)
30.545455
118
0.764881
0
0
0
0
0
0
0
0
233
0.693452
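An illustration of what loop tiling does in general terms, since the chill script above only names the transformation. This is not a statement of chill's exact tile() argument semantics; it just shows a 2-D traversal blocked into 16x16 tiles for cache locality:

N = 64
TILE = 16
for ii in range(0, N, TILE):
    for jj in range(0, N, TILE):
        for i in range(ii, min(ii + TILE, N)):
            for j in range(jj, min(jj + TILE, N)):
                pass  # body of the seidel-2d stencil would go here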
8a06d974512def3c400fb25769c0185d59195602
1,405
py
Python
baymax/api.py
dmrz/baymax
60cca5ae2e7cb42e093747f91b809e34e6782fcd
[ "MIT" ]
34
2018-02-14T09:37:26.000Z
2021-02-13T10:06:54.000Z
baymax/api.py
Avishekbhattacharjee/baymax
487930c4f3021ff50504d371de09ff31e458c09f
[ "MIT" ]
1
2018-03-03T02:55:38.000Z
2018-03-17T21:57:15.000Z
baymax/api.py
Avishekbhattacharjee/baymax
487930c4f3021ff50504d371de09ff31e458c09f
[ "MIT" ]
7
2018-02-28T07:35:35.000Z
2022-01-26T11:54:40.000Z
import json

import aiohttp


async def request(url, payload=None, params=None, headers=None):
    headers = {'content-type': 'application/json', **(headers or {})}
    data = payload and json.dumps(payload)
    async with aiohttp.ClientSession() as client:
        async with client.post(
                url, data=data, params=params, headers=headers) as resp:
            # TODO: Check response status
            json_response = await resp.json()
            return json_response


async def get_updates(base_url, timeout, offset):
    params = {
        'timeout': timeout,
        'offset': offset
    }
    return await request(f'{base_url}/getUpdates', params=params)


async def send_message(base_url, chat_id, text, reply_markup=None):
    payload = {
        'chat_id': chat_id,
        'text': text
    }
    if reply_markup is not None:
        payload['reply_markup'] = reply_markup
    return await request(f'{base_url}/sendMessage', payload)


async def answer_callback_query(
        base_url, callback_query_id, text, show_alert,
        url=None, cache_time=None):
    payload = {
        'callback_query_id': callback_query_id,
        'text': text,
        'show_alert': show_alert
    }
    if url is not None:
        payload['url'] = url
    if cache_time is not None:
        payload['cache_time'] = cache_time
    return await request(f'{base_url}/answerCallbackQuery', payload)
29.270833
72
0.641993
0
0
0
0
0
0
1,365
0.97153
243
0.172954
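A minimal usage sketch for the async helpers above. The token and chat_id are placeholders, and the call performs a real HTTP request against the Telegram Bot API:

import asyncio


async def main():
    base_url = 'https://api.telegram.org/bot<TOKEN>'  # placeholder token
    await send_message(base_url, chat_id=123456, text='hello')
    updates = await get_updates(base_url, timeout=30, offset=0)
    print(updates)


asyncio.run(main())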
8a072b60d911bf4164d6e02341054f5f6f3f27f0
3,479
py
Python
nautobot_device_onboarding/tests/test_netdev_keeper.py
pszulczewski/nautobot-plugin-device-onboarding
9ddec52d7bcc751c4616bd7c1180ed2a1d31ff2c
[ "Apache-2.0" ]
13
2021-03-05T10:47:50.000Z
2022-03-18T19:07:09.000Z
nautobot_device_onboarding/tests/test_netdev_keeper.py
pszulczewski/nautobot-plugin-device-onboarding
9ddec52d7bcc751c4616bd7c1180ed2a1d31ff2c
[ "Apache-2.0" ]
18
2021-03-05T10:29:13.000Z
2022-03-08T13:10:38.000Z
nautobot_device_onboarding/tests/test_netdev_keeper.py
pszulczewski/nautobot-plugin-device-onboarding
9ddec52d7bcc751c4616bd7c1180ed2a1d31ff2c
[ "Apache-2.0" ]
14
2021-03-06T19:33:46.000Z
2022-03-28T16:31:38.000Z
"""Unit tests for nautobot_device_onboarding.netdev_keeper module and its classes. (c) 2020-2021 Network To Code Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from socket import gaierror from unittest import mock from django.test import TestCase from nautobot.dcim.models import Site, DeviceRole, Platform from nautobot_device_onboarding.exceptions import OnboardException from nautobot_device_onboarding.helpers import onboarding_task_fqdn_to_ip from nautobot_device_onboarding.models import OnboardingTask class NetdevKeeperTestCase(TestCase): """Test the NetdevKeeper Class.""" def setUp(self): """Create a superuser and token for API calls.""" self.site1 = Site.objects.create(name="USWEST", slug="uswest") self.device_role1 = DeviceRole.objects.create(name="Firewall", slug="firewall") self.platform1 = Platform.objects.create(name="JunOS", slug="junos", napalm_driver="junos") # self.platform2 = Platform.objects.create(name="Cisco NX-OS", slug="cisco-nx-os") self.onboarding_task4 = OnboardingTask.objects.create( ip_address="ntc123.local", site=self.site1, role=self.device_role1, platform=self.platform1 ) self.onboarding_task5 = OnboardingTask.objects.create( ip_address="bad.local", site=self.site1, role=self.device_role1, platform=self.platform1 ) self.onboarding_task7 = OnboardingTask.objects.create( ip_address="192.0.2.1/32", site=self.site1, role=self.device_role1, platform=self.platform1 ) @mock.patch("nautobot_device_onboarding.helpers.socket.gethostbyname") def test_check_ip(self, mock_get_hostbyname): """Check DNS to IP address.""" # Look up response value mock_get_hostbyname.return_value = "192.0.2.1" # FQDN -> IP onboarding_task_fqdn_to_ip(ot=self.onboarding_task4) # Run the check to change the IP address self.assertEqual(self.onboarding_task4.ip_address, "192.0.2.1") @mock.patch("nautobot_device_onboarding.helpers.socket.gethostbyname") def test_failed_check_ip(self, mock_get_hostbyname): """Check DNS to IP address failing.""" # Look up a failed response mock_get_hostbyname.side_effect = gaierror(8) # Check for bad.local raising an exception with self.assertRaises(OnboardException) as exc_info: onboarding_task_fqdn_to_ip(ot=self.onboarding_task5) self.assertEqual(exc_info.exception.message, "ERROR failed to complete DNS lookup: bad.local") self.assertEqual(exc_info.exception.reason, "fail-dns") # Check for exception with prefix address entered with self.assertRaises(OnboardException) as exc_info: onboarding_task_fqdn_to_ip(ot=self.onboarding_task7) self.assertEqual(exc_info.exception.reason, "fail-prefix") self.assertEqual(exc_info.exception.message, "ERROR appears a prefix was entered: 192.0.2.1/32")
44.602564
108
0.728658
2,486
0.714573
0
0
1,436
0.412762
0
0
1,417
0.407301
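A sketch of the behaviour the tests above exercise, assuming the helper resolves ot.ip_address in place and that OnboardException accepts reason and message keywords, as the assertions imply; the real code lives in nautobot_device_onboarding.helpers:

import socket


def fqdn_to_ip_sketch(ot):
    if "/" in ot.ip_address:
        raise OnboardException(
            reason="fail-prefix",
            message="ERROR appears a prefix was entered: %s" % ot.ip_address,
        )
    try:
        ot.ip_address = socket.gethostbyname(ot.ip_address)
    except socket.gaierror:
        raise OnboardException(
            reason="fail-dns",
            message="ERROR failed to complete DNS lookup: %s" % ot.ip_address,
        )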
8a076cdd50a9d69b52cffcb8dbe3df578f17d801
2,577
py
Python
superneurons/tools/img_val/main.py
Phaeton-lang/baselines
472c248047fbb55b5fa0e620758047b7f0a1d041
[ "MIT" ]
null
null
null
superneurons/tools/img_val/main.py
Phaeton-lang/baselines
472c248047fbb55b5fa0e620758047b7f0a1d041
[ "MIT" ]
null
null
null
superneurons/tools/img_val/main.py
Phaeton-lang/baselines
472c248047fbb55b5fa0e620758047b7f0a1d041
[ "MIT" ]
null
null
null
# Created by ay27 at 17/4/9
import os
import matplotlib.pyplot as plt
import struct
import numpy as np


def trans(row):
    return list(map(lambda x: np.uint8(x), row))


def read_image(filename):
    with open(filename, mode='rb') as file:
        n = file.read(8)
        n = struct.unpack("<Q", n)[0]
        c = file.read(8)
        c = struct.unpack("<Q", c)[0]
        h = file.read(8)
        h = struct.unpack("<Q", h)[0]
        w = file.read(8)
        w = struct.unpack("<Q", w)[0]
        print(n, c, h, w)

        for ii in range(n):
            r = trans(file.read(h * w))
            g = trans(file.read(h * w))
            b = trans(file.read(h * w))
            if ii == 100:
                break
        print(file.tell() == os.fstat(file.fileno()).st_size)
        img = np.array([r, g, b]).transpose(1, 0).reshape(h, w, c)
        print(img.shape)
        plt.imshow(img)
        plt.show()


def read_label(path, ground_truth=None):
    with open(path, 'rb') as file:
        n = file.read(8)
        n = struct.unpack("<Q", n)[0]
        c = file.read(8)
        c = struct.unpack("<Q", c)[0]
        h = file.read(8)
        h = struct.unpack("<Q", h)[0]
        w = file.read(8)
        w = struct.unpack("<Q", w)[0]
        print(n, c, h, w)

        label = []
        sets = set()
        while not (file.tell() == os.fstat(file.fileno()).st_size):
            ch = file.read(4)
            num = struct.unpack("<l", ch)[0]
            label.append(num)
            sets.add(num)
        # print(file.tell() == os.fstat(file.fileno()).st_size)
        print(label)
        print(len(label))
        # print(label[900],label[901], label[902], label[903], label[904])
        return label
        # if ground_truth:
        #     g = []
        #     with open(ground_truth) as file:
        #         for line in file:
        #             g.append(int(line.split(' ')[1]))
        #     np.testing.assert_array_equal(g, label)


if __name__ == '__main__':
    # read_image('../../data/ilsvrc2012/img.bin')
    # read_label('../../data/ilsvrc2012/label.bin', '../../data/ilsvrc2012/val.txt')
    # read_image('../../build/cifar100_train_image.bin')
    # read_label('../../build/cifar100_train_label.bin')
    read_image('../../build/val_data_8.bin')
    for i in range(10):
        read_label('../../build/val_label_%d.bin' % i)

    # labels = []
    # for i in range(10):
    #     labels.append(read_label('../../build/val_label_%d.bin' % i))
    #
    # ground = []
    # with open('../../build/shuffled_list') as file:
    #     ground.append()
28.01087
84
0.509895
0
0
0
0
0
0
0
0
861
0.334109
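The writer counterpart implied by read_image and read_label above (a sketch): the header is four little-endian uint64 fields (n, c, h, w), followed by the payload. Function names here are invented for illustration:

import struct


def write_header(f, n, c, h, w):
    # mirror of the four struct.unpack("<Q", ...) reads in the reader
    for v in (n, c, h, w):
        f.write(struct.pack("<Q", v))


def write_labels(path, labels):
    with open(path, 'wb') as f:
        write_header(f, len(labels), 1, 1, 1)
        for lab in labels:
            f.write(struct.pack("<l", lab))  # 4-byte signed ints, as read back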
8a0988ba1c9ee5db70eabfa7b9b35ad041f9c1f7
2,238
py
Python
pymatgen/analysis/tests/test_piezo.py
exenGT/pymatgen
a8ffb820ab8fc3f60251099e38c8888f45eae618
[ "MIT" ]
1
2021-11-02T21:10:11.000Z
2021-11-02T21:10:11.000Z
pymatgen/analysis/tests/test_piezo.py
exenGT/pymatgen
a8ffb820ab8fc3f60251099e38c8888f45eae618
[ "MIT" ]
5
2018-08-07T23:00:23.000Z
2021-01-05T22:46:23.000Z
pymatgen/analysis/tests/test_piezo.py
exenGT/pymatgen
a8ffb820ab8fc3f60251099e38c8888f45eae618
[ "MIT" ]
6
2019-04-26T18:50:41.000Z
2020-03-29T17:58:34.000Z
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.

"""
Test for the piezo tensor class
"""

__author__ = "Shyam Dwaraknath"
__version__ = "0.1"
__maintainer__ = "Shyam Dwaraknath"
__email__ = "[email protected]"
__status__ = "Development"
__date__ = "4/1/16"

import os
import unittest

import numpy as np

from pymatgen.analysis.piezo import PiezoTensor
from pymatgen.util.testing import PymatgenTest


class PiezoTest(PymatgenTest):
    def setUp(self):
        self.piezo_struc = self.get_structure("BaNiO3")
        self.voigt_matrix = np.array(
            [
                [0.0, 0.0, 0.0, 0.0, 0.03839, 0.0],
                [0.0, 0.0, 0.0, 0.03839, 0.0, 0.0],
                [6.89822, 6.89822, 27.46280, 0.0, 0.0, 0.0],
            ]
        )
        self.vasp_matrix = np.array(
            [
                [0.0, 0.0, 0.0, 0.0, 0.0, 0.03839],
                [0.0, 0.0, 0.0, 0.0, 0.03839, 0.0],
                [6.89822, 6.89822, 27.46280, 0.0, 0.0, 0.0],
            ]
        )
        self.full_tensor_array = [
            [[0.0, 0.0, 0.03839], [0.0, 0.0, 0.0], [0.03839, 0.0, 0.0]],
            [[0.0, 0.0, 0.0], [0.0, 0.0, 0.03839], [0.0, 0.03839, 0.0]],
            [[6.89822, 0.0, 0.0], [0.0, 6.89822, 0.0], [0.0, 0.0, 27.4628]],
        ]

    def test_new(self):
        pt = PiezoTensor(self.full_tensor_array)
        self.assertArrayAlmostEqual(pt, self.full_tensor_array)
        bad_dim_array = np.zeros((3, 3))
        self.assertRaises(ValueError, PiezoTensor, bad_dim_array)

    def test_from_voigt(self):
        bad_voigt = np.zeros((3, 7))
        pt = PiezoTensor.from_voigt(self.voigt_matrix)
        self.assertArrayEqual(pt, self.full_tensor_array)
        self.assertRaises(ValueError, PiezoTensor.from_voigt, bad_voigt)
        self.assertArrayEqual(self.voigt_matrix, pt.voigt)

    def test_from_vasp_voigt(self):
        bad_voigt = np.zeros((3, 7))
        pt = PiezoTensor.from_vasp_voigt(self.vasp_matrix)
        self.assertArrayEqual(pt, self.full_tensor_array)
        self.assertRaises(ValueError, PiezoTensor.from_voigt, bad_voigt)
        self.assertArrayEqual(self.voigt_matrix, pt.voigt)


if __name__ == "__main__":
    unittest.main()
31.521127
76
0.594281
1,743
0.77882
0
0
0
0
0
0
226
0.100983
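The fixtures above encode the standard 3x6 Voigt to 3x3x3 full-tensor expansion for a piezoelectric tensor. A sketch of that mapping, verified against the voigt_matrix and full_tensor_array values; pymatgen's Tensor machinery does this generically:

import numpy as np

# standard Voigt column -> (j, k) index pairs: xx, yy, zz, yz, xz, xy
VOIGT = [(0, 0), (1, 1), (2, 2), (1, 2), (0, 2), (0, 1)]


def voigt_to_full(v):
    d = np.zeros((3, 3, 3))
    for col, (j, k) in enumerate(VOIGT):
        d[:, j, k] = v[:, col]
        d[:, k, j] = v[:, col]  # symmetric in the last two indices
    return d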
8a0ab3edf5559c9bdaa844115d82ed95f2b065a1
63,787
py
Python
nova/virt/driver.py
larsbutler/nova
fb190f30a911658d8b0c4deaf43cbb8c9e35b672
[ "Apache-2.0" ]
null
null
null
nova/virt/driver.py
larsbutler/nova
fb190f30a911658d8b0c4deaf43cbb8c9e35b672
[ "Apache-2.0" ]
null
null
null
nova/virt/driver.py
larsbutler/nova
fb190f30a911658d8b0c4deaf43cbb8c9e35b672
[ "Apache-2.0" ]
null
null
null
# Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Driver base-classes: (Beginning of) the contract that compute drivers must follow, and shared types that support that contract """ import sys from oslo_log import log as logging from oslo_utils import importutils import nova.conf from nova.i18n import _, _LE, _LI from nova import utils from nova.virt import event as virtevent CONF = nova.conf.CONF LOG = logging.getLogger(__name__) def driver_dict_from_config(named_driver_config, *args, **kwargs): driver_registry = dict() for driver_str in named_driver_config: driver_type, _sep, driver = driver_str.partition('=') driver_class = importutils.import_class(driver) driver_registry[driver_type] = driver_class(*args, **kwargs) return driver_registry def get_block_device_info(instance, block_device_mapping): """Converts block device mappings for an instance to driver format. Virt drivers expect block device mapping to be presented in the format of a dict containing the following keys: - root_device_name: device name of the root disk - ephemerals: a (potentially empty) list of DriverEphemeralBlockDevice instances - swap: An instance of DriverSwapBlockDevice or None - block_device_mapping: a (potentially empty) list of DriverVolumeBlockDevice or any of it's more specialized subclasses. """ from nova.virt import block_device as virt_block_device block_device_info = { 'root_device_name': instance.root_device_name, 'ephemerals': virt_block_device.convert_ephemerals( block_device_mapping), 'block_device_mapping': virt_block_device.convert_all_volumes(*block_device_mapping) } swap_list = virt_block_device.convert_swap(block_device_mapping) block_device_info['swap'] = virt_block_device.get_swap(swap_list) return block_device_info def block_device_info_get_root(block_device_info): block_device_info = block_device_info or {} return block_device_info.get('root_device_name') def block_device_info_get_swap(block_device_info): block_device_info = block_device_info or {} return block_device_info.get('swap') or {'device_name': None, 'swap_size': 0} def swap_is_usable(swap): return swap and swap['device_name'] and swap['swap_size'] > 0 def block_device_info_get_ephemerals(block_device_info): block_device_info = block_device_info or {} ephemerals = block_device_info.get('ephemerals') or [] return ephemerals def block_device_info_get_mapping(block_device_info): block_device_info = block_device_info or {} block_device_mapping = block_device_info.get('block_device_mapping') or [] return block_device_mapping class ComputeDriver(object): """Base class for compute drivers. The interface to this class talks in terms of 'instances' (Amazon EC2 and internal Nova terminology), by which we mean 'running virtual machine' (XenAPI terminology) or domain (Xen or libvirt terminology). An instance has an ID, which is the identifier chosen by Nova to represent the instance further up the stack. This is unfortunately also called a 'name' elsewhere. 
As far as this layer is concerned, 'instance ID' and 'instance name' are synonyms. Note that the instance ID or name is not human-readable or customer-controlled -- it's an internal ID chosen by Nova. At the nova.virt layer, instances do not have human-readable names at all -- such things are only known higher up the stack. Most virtualization platforms will also have their own identity schemes, to uniquely identify a VM or domain. These IDs must stay internal to the platform-specific layer, and never escape the connection interface. The platform-specific layer is responsible for keeping track of which instance ID maps to which platform-specific ID, and vice versa. Some methods here take an instance of nova.compute.service.Instance. This is the data structure used by nova.compute to store details regarding an instance, and pass them into this layer. This layer is responsible for translating that generic data structure into terms that are specific to the virtualization platform. """ capabilities = { "has_imagecache": False, "supports_recreate": False, "supports_migrate_to_same_host": False, "supports_attach_interface": False, "supports_device_tagging": False, } def __init__(self, virtapi): self.virtapi = virtapi self._compute_event_callback = None def init_host(self, host): """Initialize anything that is necessary for the driver to function, including catching up with currently running VM's on the given host. """ # TODO(Vek): Need to pass context in for access to auth_token raise NotImplementedError() def cleanup_host(self, host): """Clean up anything that is necessary for the driver gracefully stop, including ending remote sessions. This is optional. """ pass def get_info(self, instance): """Get the current status of an instance, by name (not ID!) :param instance: nova.objects.instance.Instance object Returns a InstanceInfo object """ # TODO(Vek): Need to pass context in for access to auth_token raise NotImplementedError() def get_num_instances(self): """Return the total number of virtual machines. Return the number of virtual machines that the hypervisor knows about. .. note:: This implementation works for all drivers, but it is not particularly efficient. Maintainers of the virt drivers are encouraged to override this method with something more efficient. """ return len(self.list_instances()) def instance_exists(self, instance): """Checks existence of an instance on the host. :param instance: The instance to lookup Returns True if an instance with the supplied ID exists on the host, False otherwise. .. note:: This implementation works for all drivers, but it is not particularly efficient. Maintainers of the virt drivers are encouraged to override this method with something more efficient. """ try: return instance.uuid in self.list_instance_uuids() except NotImplementedError: return instance.name in self.list_instances() def estimate_instance_overhead(self, instance_info): """Estimate the virtualization overhead required to build an instance of the given flavor. Defaults to zero, drivers should override if per-instance overhead calculations are desired. :param instance_info: Instance/flavor to calculate overhead for. :returns: Dict of estimated overhead values. """ return {'memory_mb': 0, 'disk_gb': 0} def list_instances(self): """Return the names of all the instances known to the virtualization layer, as a list. 
""" # TODO(Vek): Need to pass context in for access to auth_token raise NotImplementedError() def list_instance_uuids(self): """Return the UUIDS of all the instances known to the virtualization layer, as a list. """ raise NotImplementedError() def rebuild(self, context, instance, image_meta, injected_files, admin_password, bdms, detach_block_devices, attach_block_devices, network_info=None, recreate=False, block_device_info=None, preserve_ephemeral=False): """Destroy and re-make this instance. A 'rebuild' effectively purges all existing data from the system and remakes the VM with given 'metadata' and 'personalities'. This base class method shuts down the VM, detaches all block devices, then spins up the new VM afterwards. It may be overridden by hypervisors that need to - e.g. for optimisations, or when the 'VM' is actually proxied and needs to be held across the shutdown + spin up steps. :param context: security context :param instance: nova.objects.instance.Instance This function should use the data there to guide the creation of the new instance. :param nova.objects.ImageMeta image_meta: The metadata of the image of the instance. :param injected_files: User files to inject into instance. :param admin_password: Administrator password to set in instance. :param bdms: block-device-mappings to use for rebuild :param detach_block_devices: function to detach block devices. See nova.compute.manager.ComputeManager:_rebuild_default_impl for usage. :param attach_block_devices: function to attach block devices. See nova.compute.manager.ComputeManager:_rebuild_default_impl for usage. :param network_info: instance network information :param recreate: True if the instance is being recreated on a new hypervisor - all the cleanup of old state is skipped. :param block_device_info: Information about block devices to be attached to the instance. :param preserve_ephemeral: True if the default ephemeral storage partition must be preserved on rebuild """ raise NotImplementedError() def spawn(self, context, instance, image_meta, injected_files, admin_password, network_info=None, block_device_info=None): """Create a new instance/VM/domain on the virtualization platform. Once this successfully completes, the instance should be running (power_state.RUNNING). If this fails, any partial instance should be completely cleaned up, and the virtualization platform should be in the state that it was before this call began. :param context: security context :param instance: nova.objects.instance.Instance This function should use the data there to guide the creation of the new instance. :param nova.objects.ImageMeta image_meta: The metadata of the image of the instance. :param injected_files: User files to inject into instance. :param admin_password: Administrator password to set in instance. :param network_info: instance network information :param block_device_info: Information about block devices to be attached to the instance. """ raise NotImplementedError() def destroy(self, context, instance, network_info, block_device_info=None, destroy_disks=True, migrate_data=None): """Destroy the specified instance from the Hypervisor. If the instance is not found (for example if networking failed), this function should still succeed. It's probably a good idea to log a warning in that case. :param context: security context :param instance: Instance object as returned by DB layer. :param network_info: instance network information :param block_device_info: Information about block devices that should be detached from the instance. 
:param destroy_disks: Indicates if disks should be destroyed :param migrate_data: implementation specific params """ raise NotImplementedError() def cleanup(self, context, instance, network_info, block_device_info=None, destroy_disks=True, migrate_data=None, destroy_vifs=True): """Cleanup the instance resources . Instance should have been destroyed from the Hypervisor before calling this method. :param context: security context :param instance: Instance object as returned by DB layer. :param network_info: instance network information :param block_device_info: Information about block devices that should be detached from the instance. :param destroy_disks: Indicates if disks should be destroyed :param migrate_data: implementation specific params """ raise NotImplementedError() def reboot(self, context, instance, network_info, reboot_type, block_device_info=None, bad_volumes_callback=None): """Reboot the specified instance. After this is called successfully, the instance's state goes back to power_state.RUNNING. The virtualization platform should ensure that the reboot action has completed successfully even in cases in which the underlying domain/vm is paused or halted/stopped. :param instance: nova.objects.instance.Instance :param network_info: instance network information :param reboot_type: Either a HARD or SOFT reboot :param block_device_info: Info pertaining to attached volumes :param bad_volumes_callback: Function to handle any bad volumes encountered """ raise NotImplementedError() def get_console_pool_info(self, console_type): # TODO(Vek): Need to pass context in for access to auth_token raise NotImplementedError() def get_console_output(self, context, instance): """Get console output for an instance :param context: security context :param instance: nova.objects.instance.Instance """ raise NotImplementedError() def get_vnc_console(self, context, instance): """Get connection info for a vnc console. :param context: security context :param instance: nova.objects.instance.Instance :returns an instance of console.type.ConsoleVNC """ raise NotImplementedError() def get_spice_console(self, context, instance): """Get connection info for a spice console. :param context: security context :param instance: nova.objects.instance.Instance :returns an instance of console.type.ConsoleSpice """ raise NotImplementedError() def get_rdp_console(self, context, instance): """Get connection info for a rdp console. :param context: security context :param instance: nova.objects.instance.Instance :returns an instance of console.type.ConsoleRDP """ raise NotImplementedError() def get_serial_console(self, context, instance): """Get connection info for a serial console. :param context: security context :param instance: nova.objects.instance.Instance :returns an instance of console.type.ConsoleSerial """ raise NotImplementedError() def get_mks_console(self, context, instance): """Get connection info for a MKS console. :param context: security context :param instance: nova.objects.instance.Instance :returns an instance of console.type.ConsoleMKS """ raise NotImplementedError() def get_diagnostics(self, instance): """Return diagnostics data about the given instance. :param nova.objects.instance.Instance instance: The instance to which the diagnostic data should be returned. 
:return: Has a big overlap to the return value of the newer interface :func:`get_instance_diagnostics` :rtype: dict """ # TODO(Vek): Need to pass context in for access to auth_token raise NotImplementedError() def get_instance_diagnostics(self, instance): """Return diagnostics data about the given instance. :param nova.objects.instance.Instance instance: The instance to which the diagnostic data should be returned. :return: Has a big overlap to the return value of the older interface :func:`get_diagnostics` :rtype: nova.virt.diagnostics.Diagnostics """ raise NotImplementedError() def get_all_bw_counters(self, instances): """Return bandwidth usage counters for each interface on each running VM. :param instances: nova.objects.instance.InstanceList """ raise NotImplementedError() def get_all_volume_usage(self, context, compute_host_bdms): """Return usage info for volumes attached to vms on a given host.- """ raise NotImplementedError() def get_host_ip_addr(self): """Retrieves the IP address of the dom0 """ # TODO(Vek): Need to pass context in for access to auth_token raise NotImplementedError() def attach_volume(self, context, connection_info, instance, mountpoint, disk_bus=None, device_type=None, encryption=None): """Attach the disk to the instance at mountpoint using info.""" raise NotImplementedError() def detach_volume(self, connection_info, instance, mountpoint, encryption=None): """Detach the disk attached to the instance.""" raise NotImplementedError() def swap_volume(self, old_connection_info, new_connection_info, instance, mountpoint, resize_to): """Replace the volume attached to the given `instance`. :param dict old_connection_info: The volume for this connection gets detached from the given `instance`. :param dict new_connection_info: The volume for this connection gets attached to the given 'instance'. :param nova.objects.instance.Instance instance: The instance whose volume gets replaced by another one. :param str mountpoint: The mountpoint in the instance where the volume for `old_connection_info` is attached to. :param int resize_to: If the new volume is larger than the old volume, it gets resized to the given size (in Gigabyte) of `resize_to`. :return: None """ raise NotImplementedError() def attach_interface(self, instance, image_meta, vif): """Use hotplug to add a network interface to a running instance. The counter action to this is :func:`detach_interface`. :param nova.objects.instance.Instance instance: The instance which will get an additional network interface. :param nova.objects.ImageMeta image_meta: The metadata of the image of the instance. :param nova.network.model.NetworkInfo vif: The object which has the information about the interface to attach. :raise nova.exception.NovaException: If the attach fails. :return: None """ raise NotImplementedError() def detach_interface(self, instance, vif): """Use hotunplug to remove a network interface from a running instance. The counter action to this is :func:`attach_interface`. :param nova.objects.instance.Instance instance: The instance which gets a network interface removed. :param nova.network.model.NetworkInfo vif: The object which has the information about the interface to detach. :raise nova.exception.NovaException: If the detach fails. 
:return: None """ raise NotImplementedError() def migrate_disk_and_power_off(self, context, instance, dest, flavor, network_info, block_device_info=None, timeout=0, retry_interval=0): """Transfers the disk of a running instance in multiple phases, turning off the instance before the end. :param nova.objects.instance.Instance instance: The instance whose disk should be migrated. :param str dest: The IP address of the destination host. :param nova.objects.flavor.Flavor flavor: The flavor of the instance whose disk get migrated. :param nova.network.model.NetworkInfo network_info: The network information of the given `instance`. :param dict block_device_info: Information about the block devices. :param int timeout: The time in seconds to wait for the guest OS to shutdown. :param int retry_interval: How often to signal guest while waiting for it to shutdown. :return: A list of disk information dicts in JSON format. :rtype: str """ raise NotImplementedError() def snapshot(self, context, instance, image_id, update_task_state): """Snapshots the specified instance. :param context: security context :param instance: nova.objects.instance.Instance :param image_id: Reference to a pre-created image that will hold the snapshot. """ raise NotImplementedError() def post_interrupted_snapshot_cleanup(self, context, instance): """Cleans up any resources left after an interrupted snapshot. :param context: security context :param instance: nova.objects.instance.Instance """ pass def finish_migration(self, context, migration, instance, disk_info, network_info, image_meta, resize_instance, block_device_info=None, power_on=True): """Completes a resize/migration. :param context: the context for the migration/resize :param migration: the migrate/resize information :param instance: nova.objects.instance.Instance being migrated/resized :param disk_info: the newly transferred disk information :param network_info: instance network information :param nova.objects.ImageMeta image_meta: The metadata of the image of the instance. :param resize_instance: True if the instance is being resized, False otherwise :param block_device_info: instance volume block device info :param power_on: True if the instance should be powered on, False otherwise """ raise NotImplementedError() def confirm_migration(self, migration, instance, network_info): """Confirms a resize/migration, destroying the source VM. :param instance: nova.objects.instance.Instance """ # TODO(Vek): Need to pass context in for access to auth_token raise NotImplementedError() def finish_revert_migration(self, context, instance, network_info, block_device_info=None, power_on=True): """Finish reverting a resize/migration. :param context: the context for the finish_revert_migration :param instance: nova.objects.instance.Instance being migrated/resized :param network_info: instance network information :param block_device_info: instance volume block device info :param power_on: True if the instance should be powered on, False otherwise """ raise NotImplementedError() def pause(self, instance): """Pause the given instance. A paused instance doesn't use CPU cycles of the host anymore. The state of the VM could be stored in the memory or storage space of the host, depending on the underlying hypervisor technology. A "stronger" version of `pause` is :func:'suspend'. The counter action for `pause` is :func:`unpause`. :param nova.objects.instance.Instance instance: The instance which should be paused. 
:return: None """ # TODO(Vek): Need to pass context in for access to auth_token raise NotImplementedError() def unpause(self, instance): """Unpause the given paused instance. The paused instance gets unpaused and will use CPU cycles of the host again. The counter action for 'unpause' is :func:`pause`. Depending on the underlying hypervisor technology, the guest has the same state as before the 'pause'. :param nova.objects.instance.Instance instance: The instance which should be unpaused. :return: None """ # TODO(Vek): Need to pass context in for access to auth_token raise NotImplementedError() def suspend(self, context, instance): """Suspend the specified instance. A suspended instance doesn't use CPU cycles or memory of the host anymore. The state of the instance could be persisted on the host and allocate storage space this way. A "softer" way of `suspend` is :func:`pause`. The counter action for `suspend` is :func:`resume`. :param nova.context.RequestContext context: The context for the suspend. :param nova.objects.instance.Instance instance: The instance to suspend. :return: None """ raise NotImplementedError() def resume(self, context, instance, network_info, block_device_info=None): """resume the specified suspended instance. The suspended instance gets resumed and will use CPU cycles and memory of the host again. The counter action for 'resume' is :func:`suspend`. Depending on the underlying hypervisor technology, the guest has the same state as before the 'suspend'. :param nova.context.RequestContext context: The context for the resume. :param nova.objects.instance.Instance instance: The suspended instance to resume. :param nova.network.model.NetworkInfo network_info: Necessary network information for the resume. :param dict block_device_info: Instance volume block device info. :return: None """ raise NotImplementedError() def resume_state_on_host_boot(self, context, instance, network_info, block_device_info=None): """resume guest state when a host is booted. :param instance: nova.objects.instance.Instance """ raise NotImplementedError() def rescue(self, context, instance, network_info, image_meta, rescue_password): """Rescue the specified instance. :param nova.context.RequestContext context: The context for the rescue. :param nova.objects.instance.Instance instance: The instance being rescued. :param nova.network.model.NetworkInfo network_info: Necessary network information for the resume. :param nova.objects.ImageMeta image_meta: The metadata of the image of the instance. :param rescue_password: new root password to set for rescue. """ raise NotImplementedError() def set_bootable(self, instance, is_bootable): """Set the ability to power on/off an instance. :param instance: nova.objects.instance.Instance """ raise NotImplementedError() def unrescue(self, instance, network_info): """Unrescue the specified instance. :param instance: nova.objects.instance.Instance """ # TODO(Vek): Need to pass context in for access to auth_token raise NotImplementedError() def power_off(self, instance, timeout=0, retry_interval=0): """Power off the specified instance. :param instance: nova.objects.instance.Instance :param timeout: time to wait for GuestOS to shutdown :param retry_interval: How often to signal guest while waiting for it to shutdown """ raise NotImplementedError() def power_on(self, context, instance, network_info, block_device_info=None): """Power on the specified instance. 
:param instance: nova.objects.instance.Instance """ raise NotImplementedError() def trigger_crash_dump(self, instance): """Trigger crash dump mechanism on the given instance. Stalling instances can be triggered to dump the crash data. How the guest OS reacts in details, depends on the configuration of it. :param nova.objects.instance.Instance instance: The instance where the crash dump should be triggered. :return: None """ raise NotImplementedError() def soft_delete(self, instance): """Soft delete the specified instance. A soft-deleted instance doesn't allocate any resources anymore, but is still available as a database entry. The counter action :func:`restore` uses the database entry to create a new instance based on that. :param nova.objects.instance.Instance instance: The instance to soft-delete. :return: None """ raise NotImplementedError() def restore(self, instance): """Restore the specified soft-deleted instance. The restored instance will be automatically booted. The counter action for `restore` is :func:`soft_delete`. :param nova.objects.instance.Instance instance: The soft-deleted instance which should be restored from the soft-deleted data. :return: None """ raise NotImplementedError() def get_available_resource(self, nodename): """Retrieve resource information. This method is called when nova-compute launches, and as part of a periodic task that records the results in the DB. :param nodename: node which the caller want to get resources from a driver that manages only one node can safely ignore this :returns: Dictionary describing resources """ raise NotImplementedError() def pre_live_migration(self, context, instance, block_device_info, network_info, disk_info, migrate_data=None): """Prepare an instance for live migration :param context: security context :param instance: nova.objects.instance.Instance object :param block_device_info: instance block device information :param network_info: instance network information :param disk_info: instance disk information :param migrate_data: a LiveMigrateData object """ raise NotImplementedError() def live_migration(self, context, instance, dest, post_method, recover_method, block_migration=False, migrate_data=None): """Live migration of an instance to another host. :param context: security context :param instance: nova.db.sqlalchemy.models.Instance object instance object that is migrated. :param dest: destination host :param post_method: post operation method. expected nova.compute.manager._post_live_migration. :param recover_method: recovery method when any exception occurs. expected nova.compute.manager._rollback_live_migration. :param block_migration: if true, migrate VM disk. :param migrate_data: a LiveMigrateData object """ raise NotImplementedError() def live_migration_force_complete(self, instance): """Force live migration to complete :param instance: Instance being live migrated """ raise NotImplementedError() def live_migration_abort(self, instance): """Abort an in-progress live migration. :param instance: instance that is live migrating """ raise NotImplementedError() def rollback_live_migration_at_destination(self, context, instance, network_info, block_device_info, destroy_disks=True, migrate_data=None): """Clean up destination node after a failed live migration. 
:param context: security context :param instance: instance object that was being migrated :param network_info: instance network information :param block_device_info: instance block device information :param destroy_disks: if true, destroy disks at destination during cleanup :param migrate_data: a LiveMigrateData object """ raise NotImplementedError() def post_live_migration(self, context, instance, block_device_info, migrate_data=None): """Post operation of live migration at source host. :param context: security context :instance: instance object that was migrated :block_device_info: instance block device information :param migrate_data: a LiveMigrateData object """ pass def post_live_migration_at_source(self, context, instance, network_info): """Unplug VIFs from networks at source. :param context: security context :param instance: instance object reference :param network_info: instance network information """ raise NotImplementedError(_("Hypervisor driver does not support " "post_live_migration_at_source method")) def post_live_migration_at_destination(self, context, instance, network_info, block_migration=False, block_device_info=None): """Post operation of live migration at destination host. :param context: security context :param instance: instance object that is migrated :param network_info: instance network information :param block_migration: if true, post operation of block_migration. """ raise NotImplementedError() def check_instance_shared_storage_local(self, context, instance): """Check if instance files located on shared storage. This runs check on the destination host, and then calls back to the source host to check the results. :param context: security context :param instance: nova.objects.instance.Instance object """ raise NotImplementedError() def check_instance_shared_storage_remote(self, context, data): """Check if instance files located on shared storage. :param context: security context :param data: result of check_instance_shared_storage_local """ raise NotImplementedError() def check_instance_shared_storage_cleanup(self, context, data): """Do cleanup on host after check_instance_shared_storage calls :param context: security context :param data: result of check_instance_shared_storage_local """ pass def check_can_live_migrate_destination(self, context, instance, src_compute_info, dst_compute_info, block_migration=False, disk_over_commit=False): """Check if it is possible to execute live migration. This runs checks on the destination host, and then calls back to the source host to check the results. :param context: security context :param instance: nova.db.sqlalchemy.models.Instance :param src_compute_info: Info about the sending machine :param dst_compute_info: Info about the receiving machine :param block_migration: if true, prepare for block migration :param disk_over_commit: if true, allow disk over commit :returns: a LiveMigrateData object (hypervisor-dependent) """ raise NotImplementedError() def cleanup_live_migration_destination_check(self, context, dest_check_data): """Do required cleanup on dest host after check_can_live_migrate calls :param context: security context :param dest_check_data: result of check_can_live_migrate_destination """ raise NotImplementedError() def check_can_live_migrate_source(self, context, instance, dest_check_data, block_device_info=None): """Check if it is possible to execute live migration. This checks if the live migration can succeed, based on the results from check_can_live_migrate_destination. 
:param context: security context :param instance: nova.db.sqlalchemy.models.Instance :param dest_check_data: result of check_can_live_migrate_destination :param block_device_info: result of _get_instance_block_device_info :returns: a LiveMigrateData object """ raise NotImplementedError() def get_instance_disk_info(self, instance, block_device_info=None): """Retrieve information about actual disk sizes of an instance. :param instance: nova.objects.Instance :param block_device_info: Optional; Can be used to filter out devices which are actually volumes. :return: json strings with below format:: "[{'path':'disk', 'type':'raw', 'virt_disk_size':'10737418240', 'backing_file':'backing_file', 'disk_size':'83886080' 'over_committed_disk_size':'10737418240'}, ...]" """ raise NotImplementedError() def refresh_security_group_rules(self, security_group_id): """This method is called after a change to security groups. All security groups and their associated rules live in the datastore, and calling this method should apply the updated rules to instances running the specified security group. An error should be raised if the operation cannot complete. """ # TODO(Vek): Need to pass context in for access to auth_token raise NotImplementedError() def refresh_instance_security_rules(self, instance): """Refresh security group rules Gets called when an instance gets added to or removed from the security group the instance is a member of or if the group gains or loses a rule. """ raise NotImplementedError() def reset_network(self, instance): """reset networking for specified instance.""" # TODO(Vek): Need to pass context in for access to auth_token pass def ensure_filtering_rules_for_instance(self, instance, network_info): """Setting up filtering rules and waiting for its completion. To migrate an instance, filtering rules to hypervisors and firewalls are inevitable on destination host. ( Waiting only for filtering rules to hypervisor, since filtering rules to firewall rules can be set faster). Concretely, the below method must be called. - setup_basic_filtering (for nova-basic, etc.) - prepare_instance_filter(for nova-instance-instance-xxx, etc.) to_xml may have to be called since it defines PROJNET, PROJMASK. but libvirt migrates those value through migrateToURI(), so , no need to be called. Don't use thread for this method since migration should not be started when setting-up filtering rules operations are not completed. :param instance: nova.objects.instance.Instance object """ # TODO(Vek): Need to pass context in for access to auth_token raise NotImplementedError() def filter_defer_apply_on(self): """Defer application of IPTables rules.""" pass def filter_defer_apply_off(self): """Turn off deferral of IPTables rules and apply the rules now.""" pass def unfilter_instance(self, instance, network_info): """Stop filtering instance.""" # TODO(Vek): Need to pass context in for access to auth_token raise NotImplementedError() def set_admin_password(self, instance, new_pass): """Set the root password on the specified instance. :param instance: nova.objects.instance.Instance :param new_pass: the new password """ raise NotImplementedError() def inject_file(self, instance, b64_path, b64_contents): """Writes a file on the specified instance. The first parameter is an instance of nova.compute.service.Instance, and so the instance is being specified as instance.name. 
        The second parameter is the base64-encoded path to which the file
        is to be written on the instance; the third is the contents of
        the file, also base64-encoded.

        NOTE(russellb) This method is deprecated and will be removed once it
        can be removed from nova.compute.manager.
        """
        # TODO(Vek): Need to pass context in for access to auth_token
        raise NotImplementedError()

    def change_instance_metadata(self, context, instance, diff):
        """Applies a diff to the instance metadata.

        This is an optional driver method which is used to publish changes
        to the instance's metadata to the hypervisor. If the hypervisor has
        no means of publishing the instance metadata to the instance, then
        this method should not be implemented.

        :param context: security context
        :param instance: nova.objects.instance.Instance
        """
        pass

    def inject_network_info(self, instance, nw_info):
        """inject network info for specified instance."""
        # TODO(Vek): Need to pass context in for access to auth_token
        pass

    def poll_rebooting_instances(self, timeout, instances):
        """Perform a reboot on all given 'instances'.

        Reboots the given `instances` which are longer in the rebooting state
        than `timeout` seconds.

        :param int timeout: The timeout (in seconds) for considering
            rebooting instances to be stuck.
        :param list instances: A list of nova.objects.instance.Instance
            objects that have been in rebooting state longer than the
            configured timeout.

        :return: None
        """
        # TODO(Vek): Need to pass context in for access to auth_token
        raise NotImplementedError()

    def host_power_action(self, action):
        """Reboots, shuts down or powers up the host.

        :param str action: The action the host should perform. The valid
            actions are: "startup", "shutdown" and "reboot".

        :return: The result of the power action
        :rtype: str
        """
        raise NotImplementedError()

    def host_maintenance_mode(self, host, mode):
        """Start/Stop host maintenance window.

        On start, it triggers the migration of all instances to other hosts.
        Consider the combination with :func:`set_host_enabled`.

        :param str host: The name of the host whose maintenance mode should
            be changed.
        :param bool mode: If `True`, go into maintenance mode. If `False`,
            leave the maintenance mode.

        :return: "on_maintenance" if switched to maintenance mode or
            "off_maintenance" if maintenance mode got left.
        :rtype: str
        """
        raise NotImplementedError()

    def set_host_enabled(self, enabled):
        """Sets the ability of this host to accept new instances.

        :param bool enabled: If this is `True`, the host will accept new
            instances. If it is `False`, the host won't accept new instances.

        :return: If the host can accept further instances, return "enabled";
            if further instances shouldn't be scheduled to this host, return
            "disabled".
        :rtype: str
        """
        # TODO(Vek): Need to pass context in for access to auth_token
        raise NotImplementedError()

    def get_host_uptime(self):
        """Returns the result of calling the Linux command `uptime` on this
        host.

        :return: A text which contains the uptime of this host since the
            last boot.
        :rtype: str
        """
        # TODO(Vek): Need to pass context in for access to auth_token
        raise NotImplementedError()

    def plug_vifs(self, instance, network_info):
        """Plug virtual interfaces (VIFs) into the given `instance` at
        instance boot time.

        The counter action is :func:`unplug_vifs`.

        :param nova.objects.instance.Instance instance: The instance which
            gets VIFs plugged.
        :param nova.network.model.NetworkInfo network_info: The object which
            contains information about the VIFs to plug.
:return: None """ # TODO(Vek): Need to pass context in for access to auth_token raise NotImplementedError() def unplug_vifs(self, instance, network_info): # NOTE(markus_z): 2015-08-18 # The compute manager doesn't use this interface, which seems odd # since the manager should be the controlling thing here. """Unplug virtual interfaces (VIFs) from networks. The counter action is :func:`plug_vifs`. :param nova.objects.instance.Instance instance: The instance which gets VIFs unplugged. :param nova.network.model.NetworkInfo network_info: The object which contains information about the VIFs to unplug. :return: None """ raise NotImplementedError() def get_host_cpu_stats(self): """Get the currently known host CPU stats. :returns: a dict containing the CPU stat info, eg: | {'kernel': kern, | 'idle': idle, | 'user': user, | 'iowait': wait, | 'frequency': freq}, where kern and user indicate the cumulative CPU time (nanoseconds) spent by kernel and user processes respectively, idle indicates the cumulative idle CPU time (nanoseconds), wait indicates the cumulative I/O wait CPU time (nanoseconds), since the host is booting up; freq indicates the current CPU frequency (MHz). All values are long integers. """ raise NotImplementedError() def block_stats(self, instance, disk_id): """Return performance counters associated with the given disk_id on the given instance. These are returned as [rd_req, rd_bytes, wr_req, wr_bytes, errs], where rd indicates read, wr indicates write, req is the total number of I/O requests made, bytes is the total number of bytes transferred, and errs is the number of requests held up due to a full pipeline. All counters are long integers. This method is optional. On some platforms (e.g. XenAPI) performance statistics can be retrieved directly in aggregate form, without Nova having to do the aggregation. On those platforms, this method is unused. Note that this function takes an instance ID. """ raise NotImplementedError() def deallocate_networks_on_reschedule(self, instance): """Does the driver want networks deallocated on reschedule?""" return False def macs_for_instance(self, instance): """What MAC addresses must this instance have? Some hypervisors (such as bare metal) cannot do freeform virtualization of MAC addresses. This method allows drivers to return a set of MAC addresses that the instance is to have. allocate_for_instance will take this into consideration when provisioning networking for the instance. Mapping of MAC addresses to actual networks (or permitting them to be freeform) is up to the network implementation layer. For instance, with openflow switches, fixed MAC addresses can still be virtualized onto any L2 domain, with arbitrary VLANs etc, but regular switches require pre-configured MAC->network mappings that will match the actual configuration. Most hypervisors can use the default implementation which returns None. Hypervisors with MAC limits should return a set of MAC addresses, which will be supplied to the allocate_for_instance call by the compute manager, and it is up to that call to ensure that all assigned network details are compatible with the set of MAC addresses. This is called during spawn_instance by the compute manager. :return: None, or a set of MAC ids (e.g. set(['12:34:56:78:90:ab'])). None means 'no constraints', a set means 'these and only these MAC addresses'. """ return None def dhcp_options_for_instance(self, instance): """Get DHCP options for this instance. 
Some hypervisors (such as bare metal) require that instances boot from the network, and manage their own TFTP service. This requires passing the appropriate options out to the DHCP service. Most hypervisors can use the default implementation which returns None. This is called during spawn_instance by the compute manager. Note that the format of the return value is specific to the Neutron client API. :return: None, or a set of DHCP options, eg: | [{'opt_name': 'bootfile-name', | 'opt_value': '/tftpboot/path/to/config'}, | {'opt_name': 'server-ip-address', | 'opt_value': '1.2.3.4'}, | {'opt_name': 'tftp-server', | 'opt_value': '1.2.3.4'} | ] """ return None def manage_image_cache(self, context, all_instances): """Manage the driver's local image cache. Some drivers chose to cache images for instances on disk. This method is an opportunity to do management of that cache which isn't directly related to other calls into the driver. The prime example is to clean the cache and remove images which are no longer of interest. :param all_instances: nova.objects.instance.InstanceList """ pass def add_to_aggregate(self, context, aggregate, host, **kwargs): """Add a compute host to an aggregate. The counter action to this is :func:`remove_from_aggregate` :param nova.context.RequestContext context: The security context. :param nova.objects.aggregate.Aggregate aggregate: The aggregate which should add the given `host` :param str host: The name of the host to add to the given `aggregate`. :param dict kwargs: A free-form thingy... :return: None """ # NOTE(jogo) Currently only used for XenAPI-Pool raise NotImplementedError() def remove_from_aggregate(self, context, aggregate, host, **kwargs): """Remove a compute host from an aggregate. The counter action to this is :func:`add_to_aggregate` :param nova.context.RequestContext context: The security context. :param nova.objects.aggregate.Aggregate aggregate: The aggregate which should remove the given `host` :param str host: The name of the host to remove from the given `aggregate`. :param dict kwargs: A free-form thingy... :return: None """ raise NotImplementedError() def undo_aggregate_operation(self, context, op, aggregate, host, set_error=True): """Undo for Resource Pools.""" raise NotImplementedError() def get_volume_connector(self, instance): """Get connector information for the instance for attaching to volumes. Connector information is a dictionary representing the ip of the machine that will be making the connection, the name of the iscsi initiator and the hostname of the machine as follows:: { 'ip': ip, 'initiator': initiator, 'host': hostname } """ raise NotImplementedError() def get_available_nodes(self, refresh=False): """Returns nodenames of all nodes managed by the compute service. This method is for multi compute-nodes support. If a driver supports multi compute-nodes, this method returns a list of nodenames managed by the service. Otherwise, this method should return [hypervisor_hostname]. """ raise NotImplementedError() def node_is_available(self, nodename): """Return whether this compute service manages a particular node.""" if nodename in self.get_available_nodes(): return True # Refresh and check again. return nodename in self.get_available_nodes(refresh=True) def get_per_instance_usage(self): """Get information about instance resource usage. :returns: dict of nova uuid => dict of usage info """ return {} def instance_on_disk(self, instance): """Checks access of instance files on the host. 
:param instance: nova.objects.instance.Instance to lookup Returns True if files of an instance with the supplied ID accessible on the host, False otherwise. .. note:: Used in rebuild for HA implementation and required for validation of access to instance shared disk files """ return False def register_event_listener(self, callback): """Register a callback to receive events. Register a callback to receive asynchronous event notifications from hypervisors. The callback will be invoked with a single parameter, which will be an instance of the nova.virt.event.Event class. """ self._compute_event_callback = callback def emit_event(self, event): """Dispatches an event to the compute manager. Invokes the event callback registered by the compute manager to dispatch the event. This must only be invoked from a green thread. """ if not self._compute_event_callback: LOG.debug("Discarding event %s", str(event)) return if not isinstance(event, virtevent.Event): raise ValueError( _("Event must be an instance of nova.virt.event.Event")) try: LOG.debug("Emitting event %s", str(event)) self._compute_event_callback(event) except Exception as ex: LOG.error(_LE("Exception dispatching event %(event)s: %(ex)s"), {'event': event, 'ex': ex}) def delete_instance_files(self, instance): """Delete any lingering instance files for an instance. :param instance: nova.objects.instance.Instance :returns: True if the instance was deleted from disk, False otherwise. """ return True @property def need_legacy_block_device_info(self): """Tell the caller if the driver requires legacy block device info. Tell the caller whether we expect the legacy format of block device info to be passed in to methods that expect it. """ return True def volume_snapshot_create(self, context, instance, volume_id, create_info): """Snapshots volumes attached to a specified instance. The counter action to this is :func:`volume_snapshot_delete` :param nova.context.RequestContext context: The security context. :param nova.objects.instance.Instance instance: The instance that has the volume attached :param uuid volume_id: Volume to be snapshotted :param create_info: The data needed for nova to be able to attach to the volume. This is the same data format returned by Cinder's initialize_connection() API call. In the case of doing a snapshot, it is the image file Cinder expects to be used as the active disk after the snapshot operation has completed. There may be other data included as well that is needed for creating the snapshot. """ raise NotImplementedError() def volume_snapshot_delete(self, context, instance, volume_id, snapshot_id, delete_info): """Deletes a snapshot of a volume attached to a specified instance. The counter action to this is :func:`volume_snapshot_create` :param nova.context.RequestContext context: The security context. :param nova.objects.instance.Instance instance: The instance that has the volume attached. :param uuid volume_id: Attached volume associated with the snapshot :param uuid snapshot_id: The snapshot to delete. :param dict delete_info: Volume backend technology specific data needed to be able to complete the snapshot. For example, in the case of qcow2 backed snapshots, this would include the file being merged, and the file being merged into (if appropriate). :return: None """ raise NotImplementedError() def default_root_device_name(self, instance, image_meta, root_bdm): """Provide a default root device name for the driver. :param nova.objects.instance.Instance instance: The instance to get the root device for. 
:param nova.objects.ImageMeta image_meta: The metadata of the image of the instance. :param nova.objects.BlockDeviceMapping root_bdm: The description of the root device. """ raise NotImplementedError() def default_device_names_for_instance(self, instance, root_device_name, *block_device_lists): """Default the missing device names in the block device mapping.""" raise NotImplementedError() def get_device_name_for_instance(self, instance, bdms, block_device_obj): """Get the next device name based on the block device mapping. :param instance: nova.objects.instance.Instance that volume is requesting a device name :param bdms: a nova.objects.BlockDeviceMappingList for the instance :param block_device_obj: A nova.objects.BlockDeviceMapping instance with all info about the requested block device. device_name does not need to be set, and should be decided by the driver implementation if not set. :returns: The chosen device name. """ raise NotImplementedError() def is_supported_fs_format(self, fs_type): """Check whether the file format is supported by this driver :param fs_type: the file system type to be checked, the validate values are defined at disk API module. """ # NOTE(jichenjc): Return False here so that every hypervisor # need to define their supported file system # type and implement this function at their # virt layer. return False def quiesce(self, context, instance, image_meta): """Quiesce the specified instance to prepare for snapshots. If the specified instance doesn't support quiescing, InstanceQuiesceNotSupported is raised. When it fails to quiesce by other errors (e.g. agent timeout), NovaException is raised. :param context: request context :param instance: nova.objects.instance.Instance to be quiesced :param nova.objects.ImageMeta image_meta: The metadata of the image of the instance. """ raise NotImplementedError() def unquiesce(self, context, instance, image_meta): """Unquiesce the specified instance after snapshots. If the specified instance doesn't support quiescing, InstanceQuiesceNotSupported is raised. When it fails to quiesce by other errors (e.g. agent timeout), NovaException is raised. :param context: request context :param instance: nova.objects.instance.Instance to be unquiesced :param nova.objects.ImageMeta image_meta: The metadata of the image of the instance. """ raise NotImplementedError() def network_binding_host_id(self, context, instance): """Get host ID to associate with network ports. :param context: request context :param instance: nova.objects.instance.Instance that the network ports will be associated with :returns: a string representing the host ID """ return instance.get('host') def load_compute_driver(virtapi, compute_driver=None): """Load a compute driver module. Load the compute driver module specified by the compute_driver configuration option or, if supplied, the driver name supplied as an argument. Compute drivers constructors take a VirtAPI object as their first object and this must be supplied. 
:param virtapi: a VirtAPI instance :param compute_driver: a compute driver name to override the config opt :returns: a ComputeDriver instance """ if not compute_driver: compute_driver = CONF.compute_driver if not compute_driver: LOG.error(_LE("Compute driver option required, but not specified")) sys.exit(1) LOG.info(_LI("Loading compute driver '%s'"), compute_driver) try: driver = importutils.import_object( 'nova.virt.%s' % compute_driver, virtapi) return utils.check_isinstance(driver, ComputeDriver) except ImportError: LOG.exception(_LE("Unable to load the virtualization driver")) sys.exit(1) def is_xenapi(): return CONF.compute_driver == 'xenapi.XenAPIDriver'
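# --- Editor's addition, not part of the original nova file: a self-contained
# sketch of the refresh-then-recheck pattern used by node_is_available()
# above. `TinyDriver` is hypothetical and exists only to illustrate the
# pattern; a real driver would query its hypervisor instead of a list.
class TinyDriver:
    def __init__(self):
        self._nodes = ['node-a']  # possibly stale cache of known nodes

    def get_available_nodes(self, refresh=False):
        if refresh:
            # Pretend to re-scan the hypervisor for newly appeared nodes.
            self._nodes = ['node-a', 'node-b']
        return self._nodes

    def node_is_available(self, nodename):
        # Cheap check against the cache first, then refresh once and retry,
        # mirroring ComputeDriver.node_is_available() above.
        if nodename in self.get_available_nodes():
            return True
        return nodename in self.get_available_nodes(refresh=True)

assert TinyDriver().node_is_available('node-b')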
39.205286
79
0.651246
59,183
0.927822
0
0
295
0.004625
0
0
46,855
0.734554
8a0afacd436c5c382b382e786080775c8a2d6bf7
5,581
py
Python
otp/chat/ChatInputNormal.py
P1ayerOne/src
3a4343e29f844fe95da7d51aaee7fb680d02bf72
[ "BSD-3-Clause" ]
null
null
null
otp/chat/ChatInputNormal.py
P1ayerOne/src
3a4343e29f844fe95da7d51aaee7fb680d02bf72
[ "BSD-3-Clause" ]
null
null
null
otp/chat/ChatInputNormal.py
P1ayerOne/src
3a4343e29f844fe95da7d51aaee7fb680d02bf72
[ "BSD-3-Clause" ]
null
null
null
from direct.showbase import DirectObject
from otp.otpbase import OTPGlobals
import sys
from direct.gui.DirectGui import *
from pandac.PandaModules import *
from otp.otpbase import OTPLocalizer


class ChatInputNormal(DirectObject.DirectObject):
    ExecNamespace = None

    def __init__(self, chatMgr):
        self.chatMgr = chatMgr
        self.normalPos = Vec3(-1.083, 0, 0.804)
        self.whisperPos = Vec3(0.0, 0, 0.71)
        self.whisperAvatarName = None
        self.whisperAvatarId = None
        self.toPlayer = 0
        wantHistory = 0
        if __dev__:
            wantHistory = 1
        self.wantHistory = base.config.GetBool('want-chat-history', wantHistory)
        self.history = ['']
        self.historySize = base.config.GetInt('chat-history-size', 10)
        self.historyIndex = 0
        return

    def typeCallback(self, extraArgs):
        messenger.send('enterNormalChat')

    def delete(self):
        self.ignore('arrow_up-up')
        self.ignore('arrow_down-up')
        self.chatFrame.destroy()
        del self.chatFrame
        del self.chatButton
        del self.cancelButton
        del self.chatEntry
        del self.whisperLabel
        del self.chatMgr

    def activateByData(self, whisperAvatarId=None, toPlayer=0):
        self.toPlayer = toPlayer
        self.whisperAvatarId = whisperAvatarId
        self.whisperAvatarName = base.talkAssistant.findName(self.whisperAvatarId, self.toPlayer)
        if self.whisperAvatarId:
            self.chatFrame.setPos(self.whisperPos)
            self.whisperLabel['text'] = OTPLocalizer.ChatInputWhisperLabel % self.whisperAvatarName
            self.whisperLabel.show()
        else:
            self.chatFrame.setPos(self.normalPos)
            self.whisperLabel.hide()
        self.chatEntry['focus'] = 1
        self.chatFrame.show()
        if self.wantHistory:
            self.accept('arrow_up-up', self.getPrevHistory)
            self.accept('arrow_down-up', self.getNextHistory)

    def deactivate(self):
        self.chatEntry.set('')
        self.chatEntry['focus'] = 0
        self.chatFrame.hide()
        self.whisperLabel.hide()
        base.win.closeIme()
        self.ignore('arrow_up-up')
        self.ignore('arrow_down-up')

    def checkForOverRide(self):
        return False

    def sendChat(self, text):
        if self.checkForOverRide():
            self.chatEntry.enterText('')
            return
        self.deactivate()
        self.chatMgr.fsm.request('mainMenu')
        if text:
            if self.toPlayer:
                if self.whisperAvatarId:
                    self.whisperAvatarName = None
                    self.whisperAvatarId = None
                    self.toPlayer = 0
            elif self.whisperAvatarId:
                self.chatMgr.sendWhisperString(text, self.whisperAvatarId)
                self.whisperAvatarName = None
                self.whisperAvatarId = None
            else:
                if self.chatMgr.execChat:
                    if text[0] == '>':
                        text = self.__execMessage(text[1:])
                        base.localAvatar.setChatAbsolute(text, CFSpeech | CFTimeout)
                        return
                base.talkAssistant.sendOpenTalk(text)
            if self.wantHistory:
                self.addToHistory(text)
        return

    def chatOverflow(self, overflowText):
        self.sendChat(self.chatEntry.get())

    def __execMessage(self, message):
        if not ChatInputNormal.ExecNamespace:
            ChatInputNormal.ExecNamespace = {}
            exec('from pandac.PandaModules import *', globals(), self.ExecNamespace)
            self.importExecNamespace()
        try:
            if not isClient():
                print('EXECWARNING ChatInputNormal eval: %s' % message)
                printStack()
            return str(eval(message, globals(), ChatInputNormal.ExecNamespace))
        except SyntaxError:
            try:
                if not isClient():
                    print('EXECWARNING ChatInputNormal exec: %s' % message)
                    printStack()
                exec(message, globals(), ChatInputNormal.ExecNamespace)
                return 'ok'
            except:
                exception = sys.exc_info()[0]
                extraInfo = sys.exc_info()[1]
                if extraInfo:
                    return str(extraInfo)
                else:
                    return str(exception)
        except:
            exception = sys.exc_info()[0]
            extraInfo = sys.exc_info()[1]
            if extraInfo:
                return str(extraInfo)
            else:
                return str(exception)

    def cancelButtonPressed(self):
        self.chatEntry.set('')
        self.chatMgr.fsm.request('mainMenu')

    def chatButtonPressed(self):
        self.sendChat(self.chatEntry.get())

    def importExecNamespace(self):
        pass

    def addToHistory(self, text):
        self.history = [text] + self.history[:self.historySize - 1]
        self.historyIndex = 0

    def getPrevHistory(self):
        self.chatEntry.set(self.history[self.historyIndex])
        self.historyIndex += 1
        self.historyIndex %= len(self.history)

    def getNextHistory(self):
        self.chatEntry.set(self.history[self.historyIndex])
        self.historyIndex -= 1
        self.historyIndex %= len(self.history)

    def setPos(self, posX, posY=None, posZ=None):
        # Compare against None explicitly so that legitimate 0.0 coordinates
        # are not treated as "argument omitted" (the original truthiness test
        # silently dropped them).
        if posX is not None and posY is not None and posZ is not None:
            self.chatFrame.setPos(posX, posY, posZ)
        else:
            self.chatFrame.setPos(posX)
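# --- Editor's addition, not part of the original Toontown file: a
# dependency-free sketch of the chat-history ring implemented by
# addToHistory/getPrevHistory above, so the index arithmetic can be exercised
# without Panda3D. `HistoryRing` is a hypothetical name.
class HistoryRing:
    def __init__(self, size=10):
        self.history = ['']
        self.size = size
        self.index = 0

    def add(self, text):
        # Newest entry first; the oldest entry falls off past `size`.
        self.history = [text] + self.history[:self.size - 1]
        self.index = 0

    def prev(self):
        item = self.history[self.index]
        self.index = (self.index + 1) % len(self.history)
        return item

ring = HistoryRing()
ring.add('hello')
ring.add('world')
assert ring.prev() == 'world' and ring.prev() == 'hello'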
34.450617
99
0.583766
5,386
0.96506
0
0
0
0
0
0
305
0.05465
8a0b53a65038120d7c635ea3a3f7ba3752ca109e
14,068
py
Python
train_text_summarizer.py
stevaras2/bert
1efaa300eb91dea85c40de5e1586e8d2c94b89bb
[ "Apache-2.0" ]
1
2019-11-28T10:03:09.000Z
2019-11-28T10:03:09.000Z
train_text_summarizer.py
stevaras2/bert
1efaa300eb91dea85c40de5e1586e8d2c94b89bb
[ "Apache-2.0" ]
null
null
null
train_text_summarizer.py
stevaras2/bert
1efaa300eb91dea85c40de5e1586e8d2c94b89bb
[ "Apache-2.0" ]
null
null
null
import argparse
import json
import numpy as np
import pandas as pd
import os
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report, f1_score
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras import backend as K
from keras.utils.vis_utils import plot_model
from sklearn.externals import joblib
import time


def f1(y_true, y_pred):
    def recall(y_true, y_pred):
        """Recall metric.

        Only computes a batch-wise average of recall.

        Computes the recall, a metric for multi-label classification of
        how many relevant items are selected.
        """
        true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
        possible_positives = K.sum(K.round(K.clip(y_true, 0, 1)))
        recall = true_positives / (possible_positives + K.epsilon())
        return recall

    def precision(y_true, y_pred):
        """Precision metric.

        Only computes a batch-wise average of precision.

        Computes the precision, a metric for multi-label classification of
        how many selected items are relevant.
        """
        true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
        predicted_positives = K.sum(K.round(K.clip(y_pred, 0, 1)))
        precision = true_positives / (predicted_positives + K.epsilon())
        return precision

    precision = precision(y_true, y_pred)
    recall = recall(y_true, y_pred)
    return 2 * ((precision * recall) / (precision + recall + K.epsilon()))


def get_embeddings(sentences_list, layer_json):
    '''
    :param sentences_list: the path of the sentences.txt
    :param layer_json: the path of the json file that contains the embeddings of the sentences
    :return: Dictionary with key each sentence of the sentences_list and as value its embedding
    '''
    sentences = dict()  # key: index of each line of sentences_list.txt, value: the sentence
    embeddings = dict()  # key: index of each sentence, value: its embedding
    sentence_emb = dict()  # key: sentence, value: its embedding

    with open(sentences_list, 'r') as file:
        for index, line in enumerate(file):
            sentences[index] = line.strip()

    with open(layer_json, 'r', encoding='utf-8') as f:
        for line in f:
            embeddings[json.loads(line)['linex_index']] = np.asarray(json.loads(line)['features'])

    for key, value in sentences.items():
        sentence_emb[value] = embeddings[key]

    return sentence_emb


def train_classifier(sentences_list, layer_json, dataset_csv, filename):
    '''
    :param sentences_list: the path of the sentences.txt
    :param layer_json: the path of the json file that contains the embeddings of the sentences
    :param dataset_csv: the path of the dataset
    :param filename: the path of the pickle file in which the model will be stored
    :return:
    '''
    dataset = pd.read_csv(dataset_csv)
    bert_dict = get_embeddings(sentences_list, layer_json)

    length = list()
    sentence_emb = list()
    previous_emb = list()
    next_list = list()
    section_list = list()
    label = list()
    errors = 0
    for row in dataset.iterrows():
        sentence = row[1][0].strip()
        previous = row[1][1].strip()
        nexts = row[1][2].strip()
        section = row[1][3].strip()
        if sentence in bert_dict:
            sentence_emb.append(bert_dict[sentence])
        else:
            sentence_emb.append(np.zeros(768))
            print(sentence)
            errors += 1
        if previous in bert_dict:
            previous_emb.append(bert_dict[previous])
        else:
            previous_emb.append(np.zeros(768))
        if nexts in bert_dict:
            next_list.append(bert_dict[nexts])
        else:
            next_list.append(np.zeros(768))
        if section in bert_dict:
            section_list.append(bert_dict[section])
        else:
            section_list.append(np.zeros(768))
        length.append(row[1][4])
        label.append(row[1][5])

    sentence_emb = np.asarray(sentence_emb)
    print(sentence_emb.shape)
    next_emb = np.asarray(next_list)
    print(next_emb.shape)
    previous_emb = np.asarray(previous_emb)
    print(previous_emb.shape)
    section_emb = np.asarray(section_list)
    print(sentence_emb.shape)
    length = np.asarray(length)
    print(length.shape)
    label = np.asarray(label)
    print(errors)

    features = np.concatenate([sentence_emb, previous_emb, next_emb, section_emb], axis=1)
    features = np.column_stack([features, length])  # np.append(features,length,axis=1)
    print(features.shape)

    X_train, X_val, y_train, y_val = train_test_split(features, label, test_size=0.33, random_state=42)
    log = LogisticRegression(random_state=0, solver='newton-cg', max_iter=1000, C=0.1)
    log.fit(X_train, y_train)
    # save the model
    _ = joblib.dump(log, filename, compress=9)

    predictions = log.predict(X_val)
    print("###########################################")
    print("Results using embeddings from the", layer_json, "file")
    print(classification_report(y_val, predictions))
    print("F1 score using Logistic Regression:", f1_score(y_val, predictions))
    print("###########################################")

    # train a DNN
    f1_results = list()
    for i in range(3):
        model = Sequential()
        model.add(Dense(64, activation='relu', trainable=True))
        model.add(Dense(128, activation='relu', trainable=True))
        model.add(Dropout(0.30))
        model.add(Dense(64, activation='relu', trainable=True))
        model.add(Dropout(0.25))
        model.add(Dense(64, activation='relu', trainable=True))
        model.add(Dropout(0.35))
        model.add(Dense(1, activation='sigmoid'))

        # compile network
        model.compile(loss='binary_crossentropy', optimizer='sgd', metrics=[f1])

        # fit network
        model.fit(X_train, y_train, epochs=100, batch_size=64)

        loss, f_1 = model.evaluate(X_val, y_val, verbose=1)
        print('\nTest F1: %f' % (f_1 * 100))
        f1_results.append(f_1)
        model = None

    print("###########################################")
    print("Results using embeddings from the", layer_json, "file")
    # evaluate
    print(np.mean(f1_results))
    print("###########################################")


def parameter_tuning_LR(sentences_list, layer_json, dataset_csv):
    '''
    :param sentences_list: the path of the sentences.txt
    :param layer_json: the path of the json file that contains the embeddings of the sentences
    :param dataset_csv: the path of the dataset
    :return:
    '''
    dataset = pd.read_csv(dataset_csv)
    bert_dict = get_embeddings(sentences_list, layer_json)

    length = list()
    sentence_emb = list()
    previous_emb = list()
    next_list = list()
    section_list = list()
    label = list()
    errors = 0
    for row in dataset.iterrows():
        sentence = row[1][0].strip()
        previous = row[1][1].strip()
        nexts = row[1][2].strip()
        section = row[1][3].strip()
        if sentence in bert_dict:
            sentence_emb.append(bert_dict[sentence])
        else:
            sentence_emb.append(np.zeros(768))
            print(sentence)
            errors += 1
        if previous in bert_dict:
            previous_emb.append(bert_dict[previous])
        else:
            previous_emb.append(np.zeros(768))
        if nexts in bert_dict:
            next_list.append(bert_dict[nexts])
        else:
            next_list.append(np.zeros(768))
        if section in bert_dict:
            section_list.append(bert_dict[section])
        else:
            section_list.append(np.zeros(768))
        length.append(row[1][4])
        label.append(row[1][5])

    sentence_emb = np.asarray(sentence_emb)
    print(sentence_emb.shape)
    next_emb = np.asarray(next_list)
    print(next_emb.shape)
    previous_emb = np.asarray(previous_emb)
    print(previous_emb.shape)
    section_emb = np.asarray(section_list)
    print(sentence_emb.shape)
    length = np.asarray(length)
    print(length.shape)
    label = np.asarray(label)
    print(errors)

    features = np.concatenate([sentence_emb, previous_emb, next_emb, section_emb], axis=1)
    features = np.column_stack([features, length])
    print(features.shape)

    X_train, X_val, y_train, y_val = train_test_split(features, label, test_size=0.33, random_state=42)

    C = [0.1, 1, 2, 5, 10]
    solver = ['newton-cg', 'saga', 'sag']
    best_params = dict()
    best_score = 0.0
    for c in C:
        for s in solver:
            start = time.time()
            log = LogisticRegression(random_state=0, solver=s, max_iter=1000, C=c)
            log.fit(X_train, y_train)
            predictions = log.predict(X_val)
            print("###########################################")
            print("LR with C =", c, 'and solver = ', s)
            print("Results using embeddings from the", layer_json, "file")
            print(classification_report(y_val, predictions))
            f1 = f1_score(y_val, predictions)
            if f1 > best_score:
                best_score = f1
                best_params['c'] = c
                best_params['solver'] = s
            print("F1 score using Logistic Regression:", f1)
            print("###########################################")
            end = time.time()
            running_time = end - start
            print("Running time:" + str(running_time))


def visualize_DNN(file_to_save):
    '''
    Save the DNN architecture to a png file. Better use the Visulize_DNN.ipynb notebook.
    :param file_to_save: the png file in which the architecture of the DNN will be saved.
    :return: None
    '''
    model = Sequential()
    model.add(Dense(64, activation='relu', trainable=True))
    model.add(Dense(128, activation='relu', trainable=True))
    model.add(Dropout(0.30))
    model.add(Dense(64, activation='relu', trainable=True))
    model.add(Dropout(0.25))
    model.add(Dense(64, activation='relu', trainable=True))
    model.add(Dropout(0.35))
    model.add(Dense(1, activation='sigmoid'))

    plot_model(model, to_file=file_to_save, show_shapes=True)


def save_model(sentences_list, layer_json, dataset_csv, pkl):
    dataset = pd.read_csv(dataset_csv)
    bert_dict = get_embeddings(sentences_list, layer_json)

    length = list()
    sentence_emb = list()
    previous_emb = list()
    next_list = list()
    section_list = list()
    label = list()
    errors = 0
    for row in dataset.iterrows():
        sentence = row[1][0].strip()
        previous = row[1][1].strip()
        nexts = row[1][2].strip()
        section = row[1][3].strip()
        if sentence in bert_dict:
            sentence_emb.append(bert_dict[sentence])
        else:
            sentence_emb.append(np.zeros(768))
            print(sentence)
            errors += 1
        if previous in bert_dict:
            previous_emb.append(bert_dict[previous])
        else:
            previous_emb.append(np.zeros(768))
        if nexts in bert_dict:
            next_list.append(bert_dict[nexts])
        else:
            next_list.append(np.zeros(768))
        if section in bert_dict:
            section_list.append(bert_dict[section])
        else:
            section_list.append(np.zeros(768))
        length.append(row[1][4])
        label.append(row[1][5])

    sentence_emb = np.asarray(sentence_emb)
    print(sentence_emb.shape)
    next_emb = np.asarray(next_list)
    print(next_emb.shape)
    previous_emb = np.asarray(previous_emb)
    print(previous_emb.shape)
    section_emb = np.asarray(section_list)
    print(sentence_emb.shape)
    length = np.asarray(length)
    print(length.shape)
    label = np.asarray(label)
    print(errors)

    features = np.concatenate([sentence_emb, previous_emb, next_emb, section_emb], axis=1)
    features = np.column_stack([features, length])
    print(features.shape)

    log = LogisticRegression(random_state=0, solver='saga', max_iter=1000, C=1)
    log.fit(features, label)
    _ = joblib.dump(log, pkl, compress=9)


if __name__ == '__main__':
    #save_model('sentences_list.txt','Fudan_output_layer_-1.json','train_sentences1.csv','summarizer1.pkl')
    ap = argparse.ArgumentParser()
    ap.add_argument("-s", "--sentences", required=True, help="sentences list")
    ap.add_argument("-o", "--output", required=True, help="output")
    ap.add_argument("-ts", "--train set", required=True, help="path to train set")
    ap.add_argument("-sp", "--summarizer path", required=True, help="path to save summarizer")
    args = vars(ap.parse_args())

    layer = train_classifier(args['sentences'], args['output'], args['train set'], args['summarizer path'])

    #layer_1 = train_classifier('sentences_list.txt', 'new_output_layer_-1.json', 'train_sentences1.csv','fine_tune_BERT_sentence_classification1.pkl')
    #layer_2 = train_classifier('sentences_list.txt','new_output_layer_-2.json','train_sentences1.csv','fine_tune_BERT_sentence_classification2.pkl')
    #layer_3 = train_classifier('sentences_list.txt','new_output_layer_-3.json','train_sentences1.csv','fine_tune_BERT_sentence_classification3.pkl')
    #layer_4 = train_classifier('sentences_list.txt','new_output_layer_-4.json','train_sentences1.csv','fine_tune_BERT_sentence_classification4.pkl')

    #tuning = parameter_tuning_LR('sentences_list.txt','new_output_layer_-1.json','train_sentences1.csv')

    #layer_1 = train_classifier('sentences_list.txt','output_layer_-1.json','train_sentences1.csv','fine_tune_BERT_sentence_classification.pkl')
    #layer_2 = train_classifier('sentences_list.txt','output_layer_-2.json','train_sentences1.csv','fine_tune_BERT_sentence_classification.pkl')
    #layer_3 = train_classifier('sentences_list.txt','output_layer_-3.json','train_sentences1.csv','fine_tune_BERT_sentence_classification.pkl')
    #layer_4 = train_classifier('sentences_list.txt','output_layer_-4.json','train_sentences1.csv','fine_tune_BERT_sentence_classification.pkl')
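# --- Editor's addition, not part of the original script: a quick standalone
# check that the batch-wise f1() metric above matches the textbook formula
# F1 = 2*P*R / (P + R) on hard 0/1 predictions (toy arrays, made up here).
import numpy as np
y_true = np.array([1, 0, 1, 1, 0, 1])
y_pred = np.array([1, 0, 0, 1, 1, 1])
tp = np.sum(y_true * y_pred)           # true positives = 3
precision = tp / np.sum(y_pred)        # 3/4
recall = tp / np.sum(y_true)           # 3/4
print(2 * precision * recall / (precision + recall))  # 0.75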
35.705584
151
0.649062
0
0
0
0
0
0
0
0
3,939
0.279997
8a0d48bd45e2a77d4024e66ae20d64213df72227
1,493
py
Python
src/test/python/apache/aurora/executor/test_status_manager.py
zmanji/incubator-aurora
9f594f1de6bbf46c74863dd3fc4d2708b7a974f2
[ "Apache-2.0" ]
null
null
null
src/test/python/apache/aurora/executor/test_status_manager.py
zmanji/incubator-aurora
9f594f1de6bbf46c74863dd3fc4d2708b7a974f2
[ "Apache-2.0" ]
null
null
null
src/test/python/apache/aurora/executor/test_status_manager.py
zmanji/incubator-aurora
9f594f1de6bbf46c74863dd3fc4d2708b7a974f2
[ "Apache-2.0" ]
null
null
null
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import time
from unittest import TestCase

import mock
from mesos.interface.mesos_pb2 import TaskState

from apache.aurora.executor.common.status_checker import StatusChecker
from apache.aurora.executor.status_manager import StatusManager


class FakeStatusChecker(StatusChecker):
  def __init__(self):
    self.call_count = 0

  @property
  def status(self):
    if self.call_count == 2:
      return TaskState.Value('TASK_KILLED')
    self.call_count += 1
    return None


class TestStatusManager(TestCase):
  def setUp(self):
    self.callback_called = False

  def test_run(self):
    checker = FakeStatusChecker()

    def callback(result):
      assert result == TaskState.Value('TASK_KILLED')
      self.callback_called = True

    mock_time = mock.Mock(spec=time)
    status_manager = StatusManager(checker, callback, mock_time)
    status_manager.run()
    assert mock_time.sleep.call_count == 2
    assert self.callback_called is True
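# --- Editor's addition, not part of the original test: the mock.Mock(spec=time)
# trick above replaces the real clock so the polling loop finishes instantly
# while sleep calls remain countable. A minimal standalone version of the same
# pattern (poll() is a hypothetical stand-in for StatusManager.run()):
import time
import mock  # unittest.mock behaves identically on Python 3

def poll(checker, clock=time):
  while checker() is None:
    clock.sleep(1)

results = iter([None, None, 'DONE'])
fake_clock = mock.Mock(spec=time)
poll(lambda: next(results), clock=fake_clock)
assert fake_clock.sleep.call_count == 2  # slept twice before 'DONE' arrived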
29.27451
74
0.750167
699
0.468185
0
0
143
0.09578
0
0
561
0.375754
8a0d98e91f0c9a170743b5f41866a399dbce8684
3,494
py
Python
Supplemental/A5. Collision estimation module/Con_est.py
wangqf1997/Human-injury-based-safety-decision-of-automated-vehicles
b104fdeb3d85e867f6b04c5ae7b5a197e705aeba
[ "CC-BY-4.0" ]
null
null
null
Supplemental/A5. Collision estimation module/Con_est.py
wangqf1997/Human-injury-based-safety-decision-of-automated-vehicles
b104fdeb3d85e867f6b04c5ae7b5a197e705aeba
[ "CC-BY-4.0" ]
null
null
null
Supplemental/A5. Collision estimation module/Con_est.py
wangqf1997/Human-injury-based-safety-decision-of-automated-vehicles
b104fdeb3d85e867f6b04c5ae7b5a197e705aeba
[ "CC-BY-4.0" ]
null
null
null
'''
-------------------------------------------------------------------------------------------------
This code accompanies the paper titled "Human injury-based safety decision of automated vehicles"
Author: Qingfan Wang, Qing Zhou, Miao Lin, Bingbing Nie
Corresponding author: Bingbing Nie ([email protected])
-------------------------------------------------------------------------------------------------
'''

import torch
import numpy as np
from torch import nn
from torch.nn.utils import weight_norm

__author__ = "Qingfan Wang"


def Collision_cond(veh_striking_list, V1_v, V2_v, delta_angle, veh_param):
    ''' Estimate the collision condition. '''
    (veh_l, veh_w, veh_cgf, veh_cgs, veh_k, veh_m) = veh_param

    delta_angle_2 = np.arccos(np.abs(np.cos(delta_angle)))
    if -1e-6 < delta_angle_2 < 1e-6:
        delta_angle_2 = 1e-6

    delta_v1_list = []
    delta_v2_list = []

    # Estimate the collision condition (delta-v) according to the principal impact direction.
    for veh_striking in veh_striking_list:
        if veh_striking[0] == 1:
            veh_ca = np.arctan(veh_cgf[0] / veh_cgs[0])
            veh_a2 = np.abs(veh_cgs[1] - veh_striking[3])
            veh_RDS = np.abs(V1_v * np.cos(delta_angle) - V2_v)
            veh_a1 = np.abs(np.sqrt(veh_cgf[0] ** 2 + veh_cgs[0] ** 2) * np.cos(veh_ca + delta_angle_2))
            if (veh_striking[1] + 1) in [16, 1, 2, 3, 17, 20, 21] and (veh_striking[2] + 1) in [16, 1, 2, 3, 17, 20, 21]:
                veh_e = 2 / veh_RDS
            else:
                veh_e = 0.5 / veh_RDS
        elif veh_striking[0] == 2:
            veh_ca = np.arctan(veh_cgf[0] / veh_cgs[0])
            veh_a2 = np.abs(veh_cgf[1] - veh_striking[3])
            veh_a1 = np.abs(np.sqrt(veh_cgf[0] ** 2 + veh_cgs[0] ** 2) * np.cos(delta_angle_2 - veh_ca + np.pi / 2))
            veh_RDS = V1_v * np.sin(delta_angle_2)
            veh_e = 1.5 / veh_RDS
        elif veh_striking[0] == 3:
            veh_ca = np.arctan(veh_cgf[1] / veh_cgs[1])
            veh_a1 = np.abs(veh_cgs[0] - veh_striking[3])
            veh_RDS = np.abs(V2_v * np.cos(delta_angle) - V1_v)
            veh_a2 = np.abs(np.sqrt(veh_cgf[1] ** 2 + veh_cgs[1] ** 2) * np.cos(veh_ca + delta_angle_2))
            if (veh_striking[1] + 1) in [16, 1, 2, 3, 17, 20, 21] and (veh_striking[2] + 1) in [16, 1, 2, 3, 17, 20, 21]:
                veh_e = 2 / veh_RDS
            else:
                veh_e = 0.5 / veh_RDS
        elif veh_striking[0] == 4:
            veh_ca = np.arctan(veh_cgf[1] / veh_cgs[1])
            veh_a1 = np.abs(veh_cgf[0] - veh_striking[3])
            veh_a2 = np.abs(np.sqrt(veh_cgf[1] ** 2 + veh_cgs[1] ** 2) * np.cos(delta_angle_2 - veh_ca + np.pi / 2))
            veh_RDS = V2_v * np.sin(delta_angle_2)
            veh_e = 1.5 / veh_RDS

        # Obtain delta-v based on the plane 2-DOF rigid-body collision model with momentum conservation.
        veh_y1 = veh_k[0] ** 2 / (veh_a1 ** 2 + veh_k[0] ** 2)
        veh_y2 = veh_k[1] ** 2 / (veh_a2 ** 2 + veh_k[1] ** 2)
        delta_v1 = (1 + veh_e) * veh_m[1] * veh_y1 * veh_y2 * veh_RDS / (veh_m[0] * veh_y1 + veh_m[1] * veh_y2)
        delta_v2 = (1 + veh_e) * veh_m[0] * veh_y1 * veh_y2 * veh_RDS / (veh_m[0] * veh_y1 + veh_m[1] * veh_y2)

        delta_v1_list.append(delta_v1)
        delta_v2_list.append(delta_v2)

    delta_v1_ = max(delta_v1_list)
    delta_v2_ = max(delta_v2_list)
    index = delta_v1_list.index(max(delta_v1_list))

    return delta_v1_, delta_v2_, index
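# --- Editor's addition, not part of the original module: a worked numeric
# example of the reduced-mass delta-v relation used above. All numbers below
# are made up for illustration. With y1 = y2 = 1 (a central impact) the
# expression collapses to the classic 1-D momentum-conservation result
#   delta_v1 = (1 + e) * m2 / (m1 + m2) * RDS.
import numpy as np
m1, m2 = 1500.0, 1200.0   # hypothetical vehicle masses [kg]
e, RDS = 0.3, 10.0        # restitution coefficient and closing speed [m/s]
y1 = y2 = 1.0             # moment-arm factors for a central impact
delta_v1 = (1 + e) * m2 * y1 * y2 * RDS / (m1 * y1 + m2 * y2)
delta_v2 = (1 + e) * m1 * y1 * y2 * RDS / (m1 * y1 + m2 * y2)
print(delta_v1, delta_v2)  # ~5.78 and ~7.22 m/s; m1*delta_v1 == m2*delta_v2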
43.135802
117
0.556955
0
0
0
0
0
0
0
0
654
0.187178
8a0ead4871ddc6b047237522f5f34d4d48742f52
11,790
py
Python
train/train.py
TontonTremblay/pixel-nerf
349b5f3f173cd76def05b6de8aa52c69a4f0c7fa
[ "BSD-2-Clause" ]
null
null
null
train/train.py
TontonTremblay/pixel-nerf
349b5f3f173cd76def05b6de8aa52c69a4f0c7fa
[ "BSD-2-Clause" ]
null
null
null
train/train.py
TontonTremblay/pixel-nerf
349b5f3f173cd76def05b6de8aa52c69a4f0c7fa
[ "BSD-2-Clause" ]
null
null
null
# Training to a set of multiple objects (e.g. ShapeNet or DTU)
# tensorboard logs available in logs/<expname>

import sys
import os

sys.path.insert(
    0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "src"))
)

import warnings
import trainlib
from model import make_model, loss
from render import NeRFRenderer
from data import get_split_dataset
import util
import numpy as np
import torch.nn.functional as F
import torch
from dotmap import DotMap


def extra_args(parser):
    parser.add_argument(
        "--batch_size", "-B", type=int, default=4, help="Object batch size ('SB')"
    )
    parser.add_argument(
        "--nviews",
        "-V",
        type=str,
        default="1",
        help="Number of source views (multiview); put multiple (space delim) to pick randomly per batch ('NV')",
    )
    parser.add_argument(
        "--freeze_enc",
        action="store_true",
        default=None,
        help="Freeze encoder weights and only train MLP",
    )
    parser.add_argument(
        "--no_bbox_step",
        type=int,
        default=100000,
        help="Step to stop using bbox sampling",
    )
    parser.add_argument(
        "--fixed_test",
        action="store_true",
        default=None,
        help="Freeze encoder weights and only train MLP",
    )
    return parser


args, conf = util.args.parse_args(extra_args, training=True, default_ray_batch_size=128)
device = util.get_cuda(args.gpu_id[0])

dset, val_dset, _ = get_split_dataset(args.dataset_format, args.datadir)
print(
    "dset z_near {}, z_far {}, lindisp {}".format(dset.z_near, dset.z_far, dset.lindisp)
)

net = make_model(conf["model"]).to(device=device)
net.stop_encoder_grad = args.freeze_enc
if args.freeze_enc:
    print("Encoder frozen")
    net.encoder.eval()

renderer = NeRFRenderer.from_conf(conf["renderer"], lindisp=dset.lindisp).to(
    device=device
)

# Parallelize
render_par = renderer.bind_parallel(net, args.gpu_id).eval()

nviews = list(map(int, args.nviews.split()))


class PixelNeRFTrainer(trainlib.Trainer):
    def __init__(self):
        super().__init__(net, dset, val_dset, args, conf["train"], device=device)
        self.renderer_state_path = "%s/%s/_renderer" % (
            self.args.checkpoints_path,
            self.args.name,
        )

        self.lambda_coarse = conf.get_float("loss.lambda_coarse")
        self.lambda_fine = conf.get_float("loss.lambda_fine", 1.0)
        print(
            "lambda coarse {} and fine {}".format(self.lambda_coarse, self.lambda_fine)
        )
        self.rgb_coarse_crit = loss.get_rgb_loss(conf["loss.rgb"], True)
        fine_loss_conf = conf["loss.rgb"]
        if "rgb_fine" in conf["loss"]:
            print("using fine loss")
            fine_loss_conf = conf["loss.rgb_fine"]
        self.rgb_fine_crit = loss.get_rgb_loss(fine_loss_conf, False)

        if args.resume:
            if os.path.exists(self.renderer_state_path):
                renderer.load_state_dict(
                    torch.load(self.renderer_state_path, map_location=device)
                )

        self.z_near = dset.z_near
        self.z_far = dset.z_far

        self.use_bbox = args.no_bbox_step > 0

    def post_batch(self, epoch, batch):
        renderer.sched_step(args.batch_size)

    def extra_save_state(self):
        torch.save(renderer.state_dict(), self.renderer_state_path)

    def calc_losses(self, data, is_train=True, global_step=0):
        if "images" not in data:
            return {}
        all_images = data["images"].to(device=device)  # (SB, NV, 3, H, W)

        SB, NV, _, H, W = all_images.shape
        all_poses = data["poses"].to(device=device)  # (SB, NV, 4, 4)
        all_bboxes = data.get("bbox")  # (SB, NV, 4)  cmin rmin cmax rmax
        all_focals = data["focal"]  # (SB)
        all_c = data.get("c")  # (SB)

        if self.use_bbox and global_step >= args.no_bbox_step:
            self.use_bbox = False
            print(">>> Stopped using bbox sampling @ iter", global_step)

        if not is_train or not self.use_bbox:
            all_bboxes = None

        all_rgb_gt = []
        all_rays = []

        curr_nviews = nviews[torch.randint(0, len(nviews), ()).item()]
        if curr_nviews == 1:
            image_ord = torch.randint(0, NV, (SB, 1))
        else:
            image_ord = torch.empty((SB, curr_nviews), dtype=torch.long)
        for obj_idx in range(SB):
            if all_bboxes is not None:
                bboxes = all_bboxes[obj_idx]
            images = all_images[obj_idx]  # (NV, 3, H, W)
            poses = all_poses[obj_idx]  # (NV, 4, 4)
            focal = all_focals[obj_idx]
            c = None
            if "c" in data:
                c = data["c"][obj_idx]
            if curr_nviews > 1:
                # Somewhat inefficient, don't know better way
                image_ord[obj_idx] = torch.from_numpy(
                    np.random.choice(NV, curr_nviews, replace=False)
                )
            images_0to1 = images * 0.5 + 0.5

            cam_rays = util.gen_rays(
                poses, W, H, focal, self.z_near, self.z_far, c=c
            )  # (NV, H, W, 8)
            rgb_gt_all = images_0to1
            rgb_gt_all = (
                rgb_gt_all.permute(0, 2, 3, 1).contiguous().reshape(-1, 3)
            )  # (NV, H, W, 3)

            if all_bboxes is not None:
                pix = util.bbox_sample(bboxes, args.ray_batch_size)
                pix_inds = pix[..., 0] * H * W + pix[..., 1] * W + pix[..., 2]
            else:
                pix_inds = torch.randint(0, NV * H * W, (args.ray_batch_size,))

            rgb_gt = rgb_gt_all[pix_inds]  # (ray_batch_size, 3)
            rays = cam_rays.view(-1, cam_rays.shape[-1])[pix_inds].to(
                device=device
            )  # (ray_batch_size, 8)

            all_rgb_gt.append(rgb_gt)
            all_rays.append(rays)

        all_rgb_gt = torch.stack(all_rgb_gt)  # (SB, ray_batch_size, 3)
        all_rays = torch.stack(all_rays)  # (SB, ray_batch_size, 8)

        image_ord = image_ord.to(device)
        src_images = util.batched_index_select_nd(
            all_images, image_ord
        )  # (SB, NS, 3, H, W)
        src_poses = util.batched_index_select_nd(all_poses, image_ord)  # (SB, NS, 4, 4)

        all_bboxes = all_poses = all_images = None

        net.encode(
            src_images,
            src_poses,
            all_focals.to(device=device),
            c=all_c.to(device=device) if all_c is not None else None,
        )

        render_dict = DotMap(render_par(all_rays, want_weights=True))
        coarse = render_dict.coarse
        fine = render_dict.fine
        using_fine = len(fine) > 0

        loss_dict = {}

        rgb_loss = self.rgb_coarse_crit(coarse.rgb, all_rgb_gt)
        # Fail fast with a real exception instead of the original bare
        # `raise()` (which only crashed via a TypeError) when the loss
        # diverges to NaN.
        if rgb_loss.isnan().any():
            raise RuntimeError("NaN encountered in coarse RGB loss")
        loss_dict["rc"] = rgb_loss.item() * self.lambda_coarse
        if using_fine:
            fine_loss = self.rgb_fine_crit(fine.rgb, all_rgb_gt)
            rgb_loss = rgb_loss * self.lambda_coarse + fine_loss * self.lambda_fine
            loss_dict["rf"] = fine_loss.item() * self.lambda_fine

        loss = rgb_loss
        if is_train:
            loss.backward()
        loss_dict["t"] = loss.item()

        return loss_dict

    def train_step(self, data, global_step):
        return self.calc_losses(data, is_train=True, global_step=global_step)

    def eval_step(self, data, global_step):
        renderer.eval()
        losses = self.calc_losses(data, is_train=False, global_step=global_step)
        renderer.train()
        return losses

    def vis_step(self, data, global_step, idx=None):
        if "images" not in data:
            return {}
        if idx is None:
            batch_idx = np.random.randint(0, data["images"].shape[0])
        else:
            print(idx)
            batch_idx = idx
        images = data["images"][batch_idx].to(device=device)  # (NV, 3, H, W)
        poses = data["poses"][batch_idx].to(device=device)  # (NV, 4, 4)
        focal = data["focal"][batch_idx : batch_idx + 1]  # (1)
        c = data.get("c")
        if c is not None:
            c = c[batch_idx : batch_idx + 1]  # (1)
        NV, _, H, W = images.shape
        cam_rays = util.gen_rays(
            poses, W, H, focal, self.z_near, self.z_far, c=c
        )  # (NV, H, W, 8)
        images_0to1 = images * 0.5 + 0.5  # (NV, 3, H, W)

        curr_nviews = nviews[torch.randint(0, len(nviews), (1,)).item()]
        views_src = np.sort(np.random.choice(NV, curr_nviews, replace=False))
        view_dest = np.random.randint(0, NV - curr_nviews)
        for vs in range(curr_nviews):
            view_dest += view_dest >= views_src[vs]
        views_src = torch.from_numpy(views_src)

        # set renderer net to eval mode
        renderer.eval()
        source_views = (
            images_0to1[views_src]
            .permute(0, 2, 3, 1)
            .cpu()
            .numpy()
            .reshape(-1, H, W, 3)
        )

        gt = images_0to1[view_dest].permute(1, 2, 0).cpu().numpy().reshape(H, W, 3)
        with torch.no_grad():
            test_rays = cam_rays[view_dest]  # (H, W, 8)
            test_images = images[views_src]  # (NS, 3, H, W)
            net.encode(
                test_images.unsqueeze(0),
                poses[views_src].unsqueeze(0),
                focal.to(device=device),
                c=c.to(device=device) if c is not None else None,
            )
            test_rays = test_rays.reshape(1, H * W, -1)
            render_dict = DotMap(render_par(test_rays, want_weights=True))
            coarse = render_dict.coarse
            fine = render_dict.fine

            using_fine = len(fine) > 0

            alpha_coarse_np = coarse.weights[0].sum(dim=-1).cpu().numpy().reshape(H, W)
            rgb_coarse_np = coarse.rgb[0].cpu().numpy().reshape(H, W, 3)
            depth_coarse_np = coarse.depth[0].cpu().numpy().reshape(H, W)

            if using_fine:
                alpha_fine_np = fine.weights[0].sum(dim=1).cpu().numpy().reshape(H, W)
                depth_fine_np = fine.depth[0].cpu().numpy().reshape(H, W)
                rgb_fine_np = fine.rgb[0].cpu().numpy().reshape(H, W, 3)

        print("c rgb min {} max {}".format(rgb_coarse_np.min(), rgb_coarse_np.max()))
        print(
            "c alpha min {}, max {}".format(
                alpha_coarse_np.min(), alpha_coarse_np.max()
            )
        )

        alpha_coarse_cmap = util.cmap(alpha_coarse_np) / 255
        depth_coarse_cmap = util.cmap(depth_coarse_np) / 255
        vis_list = [
            *source_views,
            gt,
            depth_coarse_cmap,
            rgb_coarse_np,
            alpha_coarse_cmap,
        ]

        vis_coarse = np.hstack(vis_list)
        vis = vis_coarse

        if using_fine:
            print("f rgb min {} max {}".format(rgb_fine_np.min(), rgb_fine_np.max()))
            print(
                "f alpha min {}, max {}".format(
                    alpha_fine_np.min(), alpha_fine_np.max()
                )
            )
            depth_fine_cmap = util.cmap(depth_fine_np) / 255
            alpha_fine_cmap = util.cmap(alpha_fine_np) / 255
            vis_list = [
                *source_views,
                gt,
                depth_fine_cmap,
                rgb_fine_np,
                alpha_fine_cmap,
            ]

            vis_fine = np.hstack(vis_list)
            vis = np.vstack((vis_coarse, vis_fine))
            rgb_psnr = rgb_fine_np
        else:
            rgb_psnr = rgb_coarse_np

        psnr = util.psnr(rgb_psnr, gt)
        vals = {"psnr": psnr}
        print("psnr", psnr)

        # set the renderer network back to train mode
        renderer.train()
        return vis, vals


trainer = PixelNeRFTrainer()
trainer.start()
33.976945
112
0.566073
9,738
0.825954
0
0
0
0
0
0
1,424
0.12078
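The ray-sampling step in the train script above gathers ground-truth pixels by flattening (view, row, col) coordinates into one index: pix[..., 0] * H * W + pix[..., 1] * W + pix[..., 2]. A minimal sketch of that indexing trick; the tensor shapes here are illustrative assumptions, not values from the record:

import torch

# Hypothetical sizes for illustration only.
NV, H, W = 3, 4, 5
cam_rays = torch.arange(NV * H * W * 8, dtype=torch.float32).reshape(NV, H, W, 8)

# Pick (view, row, col) triples, then flatten to a single index,
# exactly as calc_losses does: view * H * W + row * W + col.
pix = torch.tensor([[0, 1, 2], [2, 3, 4]])
pix_inds = pix[:, 0] * H * W + pix[:, 1] * W + pix[:, 2]

rays = cam_rays.view(-1, 8)[pix_inds]  # (2, 8)
assert torch.equal(rays[0], cam_rays[0, 1, 2])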
8a10a1ae5c36176cfdd1c3ad55656efe8325a99f
20,351
py
Python
napari/_qt/dialogs/qt_plugin_dialog.py
kne42/napari
d61d0be0ef8ea622dd3d6acd270c0529816c11ec
[ "BSD-3-Clause" ]
null
null
null
napari/_qt/dialogs/qt_plugin_dialog.py
kne42/napari
d61d0be0ef8ea622dd3d6acd270c0529816c11ec
[ "BSD-3-Clause" ]
null
null
null
napari/_qt/dialogs/qt_plugin_dialog.py
kne42/napari
d61d0be0ef8ea622dd3d6acd270c0529816c11ec
[ "BSD-3-Clause" ]
null
null
null
import os
import sys
from pathlib import Path
from typing import Sequence

from napari_plugin_engine.dist import standard_metadata
from napari_plugin_engine.exceptions import PluginError
from qtpy.QtCore import QEvent, QProcess, QProcessEnvironment, QSize, Qt, Slot
from qtpy.QtGui import QFont, QMovie
from qtpy.QtWidgets import (
    QCheckBox,
    QDialog,
    QFrame,
    QHBoxLayout,
    QLabel,
    QLineEdit,
    QListWidget,
    QListWidgetItem,
    QPushButton,
    QSizePolicy,
    QSplitter,
    QTextEdit,
    QVBoxLayout,
    QWidget,
)

import napari.resources

from ...plugins import plugin_manager
from ...plugins.pypi import (
    ProjectInfo,
    iter_napari_plugin_info,
    normalized_name,
)
from ...utils._appdirs import user_plugin_dir, user_site_packages
from ...utils.misc import parse_version, running_as_bundled_app
from ...utils.translations import trans
from ..qthreading import create_worker
from ..widgets.qt_eliding_label import ElidingLabel
from ..widgets.qt_plugin_sorter import QtPluginSorter
from .qt_plugin_report import QtPluginErrReporter

# TODO: add error icon and handle pip install errors
# TODO: add queue to handle clicks when already processing


class Installer:
    def __init__(self, output_widget: QTextEdit = None):
        from ...plugins import plugin_manager

        # create install process
        self._output_widget = None
        self.process = QProcess()
        self.process.setProgram(sys.executable)
        self.process.setProcessChannelMode(QProcess.MergedChannels)
        self.process.readyReadStandardOutput.connect(self._on_stdout_ready)
        # setup process path
        env = QProcessEnvironment()
        combined_paths = os.pathsep.join(
            [user_site_packages(), env.systemEnvironment().value("PYTHONPATH")]
        )
        env.insert("PYTHONPATH", combined_paths)
        # use path of parent process
        env.insert(
            "PATH", QProcessEnvironment.systemEnvironment().value("PATH")
        )
        self.process.setProcessEnvironment(env)
        self.process.finished.connect(lambda: plugin_manager.discover())
        self.process.finished.connect(lambda: plugin_manager.prune())
        self.set_output_widget(output_widget)

    def set_output_widget(self, output_widget: QTextEdit):
        if output_widget:
            self._output_widget = output_widget
            self.process.setParent(output_widget)

    def _on_stdout_ready(self):
        if self._output_widget:
            text = self.process.readAllStandardOutput().data().decode()
            self._output_widget.append(text)

    def install(self, pkg_list: Sequence[str]):
        cmd = ['-m', 'pip', 'install', '--upgrade']
        if running_as_bundled_app() and sys.platform.startswith('linux'):
            cmd += [
                '--no-warn-script-location',
                '--prefix',
                user_plugin_dir(),
            ]
        self.process.setArguments(cmd + list(pkg_list))
        if self._output_widget:
            self._output_widget.clear()
        self.process.start()

    def uninstall(self, pkg_list: Sequence[str]):
        args = ['-m', 'pip', 'uninstall', '-y']
        self.process.setArguments(args + list(pkg_list))
        if self._output_widget:
            self._output_widget.clear()
        self.process.start()
        for pkg in pkg_list:
            plugin_manager.unregister(pkg)


class PluginListItem(QFrame):
    def __init__(
        self,
        package_name: str,
        version: str = '',
        url: str = '',
        summary: str = '',
        author: str = '',
        license: str = "UNKNOWN",
        *,
        plugin_name: str = None,
        parent: QWidget = None,
        enabled: bool = True,
    ):
        super().__init__(parent)
        self.setup_ui(enabled)
        if plugin_name:
            self.plugin_name.setText(plugin_name)
            self.package_name.setText(f"{package_name} {version}")
            self.summary.setText(summary)
            self.package_author.setText(author)
            self.action_button.setText(trans._("uninstall"))
            self.action_button.setObjectName("remove_button")
            self.enabled_checkbox.setChecked(enabled)
            if PluginError.get(plugin_name=plugin_name):

                def _show_error():
                    rep = QtPluginErrReporter(
                        parent=self._get_dialog(), initial_plugin=plugin_name
                    )
                    rep.setWindowFlags(Qt.Sheet)
                    close = QPushButton(trans._("close"), rep)
                    rep.layout.addWidget(close)
                    rep.plugin_combo.hide()
                    close.clicked.connect(rep.close)
                    rep.open()

                self.error_indicator.clicked.connect(_show_error)
                self.error_indicator.show()
                self.summary.setIndent(18)
            else:
                self.summary.setIndent(38)
        else:
            self.plugin_name.setText(package_name)
            self.package_name.setText(version)
            self.summary.setText(summary)
            self.package_author.setText(author)
            self.action_button.setText(trans._("install"))
            self.enabled_checkbox.hide()

    def _get_dialog(self) -> QDialog:
        p = self.parent()
        while not isinstance(p, QDialog) and p.parent():
            p = p.parent()
        return p

    def setup_ui(self, enabled=True):
        self.v_lay = QVBoxLayout(self)
        self.v_lay.setContentsMargins(-1, 6, -1, 6)
        self.v_lay.setSpacing(0)
        self.row1 = QHBoxLayout()
        self.row1.setSpacing(6)
        self.enabled_checkbox = QCheckBox(self)
        self.enabled_checkbox.setChecked(enabled)
        self.enabled_checkbox.stateChanged.connect(self._on_enabled_checkbox)
        self.enabled_checkbox.setToolTip(trans._("enable/disable"))
        sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(
            self.enabled_checkbox.sizePolicy().hasHeightForWidth()
        )
        self.enabled_checkbox.setSizePolicy(sizePolicy)
        self.enabled_checkbox.setMinimumSize(QSize(20, 0))
        self.enabled_checkbox.setText("")
        self.row1.addWidget(self.enabled_checkbox)
        self.plugin_name = QLabel(self)
        sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Minimum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(
            self.plugin_name.sizePolicy().hasHeightForWidth()
        )
        self.plugin_name.setSizePolicy(sizePolicy)
        font15 = QFont()
        font15.setPointSize(15)
        self.plugin_name.setFont(font15)
        self.row1.addWidget(self.plugin_name)
        self.package_name = QLabel(self)
        self.package_name.setAlignment(
            Qt.AlignRight | Qt.AlignTrailing | Qt.AlignVCenter
        )
        self.row1.addWidget(self.package_name)
        self.action_button = QPushButton(self)
        sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(
            self.action_button.sizePolicy().hasHeightForWidth()
        )
        self.action_button.setSizePolicy(sizePolicy)
        self.row1.addWidget(self.action_button)
        self.v_lay.addLayout(self.row1)
        self.row2 = QHBoxLayout()
        self.error_indicator = QPushButton()
        self.error_indicator.setObjectName("warning_icon")
        self.error_indicator.setCursor(Qt.PointingHandCursor)
        self.error_indicator.hide()
        self.row2.addWidget(self.error_indicator)
        self.row2.setContentsMargins(-1, 4, 0, -1)
        self.summary = ElidingLabel(parent=self)
        sizePolicy = QSizePolicy(
            QSizePolicy.MinimumExpanding, QSizePolicy.Preferred
        )
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(
            self.summary.sizePolicy().hasHeightForWidth()
        )
        self.summary.setSizePolicy(sizePolicy)
        self.summary.setObjectName("small_text")
        self.row2.addWidget(self.summary)
        self.package_author = QLabel(self)
        sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(
            self.package_author.sizePolicy().hasHeightForWidth()
        )
        self.package_author.setSizePolicy(sizePolicy)
        self.package_author.setObjectName("small_text")
        self.row2.addWidget(self.package_author)
        self.v_lay.addLayout(self.row2)

    def _on_enabled_checkbox(self, state: int):
        """Called with `state` when checkbox is clicked."""
        plugin_manager.set_blocked(self.plugin_name.text(), not state)


class QPluginList(QListWidget):
    def __init__(self, parent: QWidget, installer: Installer):
        super().__init__(parent)
        self.installer = installer
        self.setSortingEnabled(True)

    @Slot(ProjectInfo)
    def addItem(
        self, project_info: ProjectInfo, plugin_name=None, enabled=True
    ):
        # don't add duplicates
        if (
            self.findItems(project_info.name, Qt.MatchFixedString)
            and not plugin_name
        ):
            return

        # including summary here for sake of filtering below.
        searchable_text = project_info.name + " " + project_info.summary
        item = QListWidgetItem(searchable_text, parent=self)
        item.version = project_info.version
        super().addItem(item)

        widg = PluginListItem(
            *project_info,
            parent=self,
            plugin_name=plugin_name,
            enabled=enabled,
        )
        method = getattr(
            self.installer, 'uninstall' if plugin_name else 'install'
        )
        widg.action_button.clicked.connect(lambda: method([project_info.name]))
        item.setSizeHint(widg.sizeHint())
        self.setItemWidget(item, widg)

    @Slot(ProjectInfo)
    def tag_outdated(self, project_info: ProjectInfo):
        for item in self.findItems(project_info.name, Qt.MatchFixedString):
            current = item.version
            latest = project_info.version
            if parse_version(current) >= parse_version(latest):
                continue
            if hasattr(item, 'outdated'):
                # already tagged it
                continue

            item.outdated = True
            widg = self.itemWidget(item)
            update_btn = QPushButton(
                trans._("update (v{latest})", latest=latest), widg
            )
            update_btn.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
            update_btn.clicked.connect(
                lambda: self.installer.install([item.text()])
            )
            widg.row1.insertWidget(3, update_btn)

    def filter(self, text: str):
        """Filter items to those containing `text`."""
        shown = self.findItems(text, Qt.MatchContains)
        for i in range(self.count()):
            item = self.item(i)
            item.setHidden(item not in shown)


class QtPluginDialog(QDialog):
    def __init__(self, parent=None):
        super().__init__(parent)
        self.installer = Installer()
        self.setup_ui()
        self.installer.set_output_widget(self.stdout_text)
        self.installer.process.started.connect(self._on_installer_start)
        self.installer.process.finished.connect(self._on_installer_done)
        self.refresh()

    def _on_installer_start(self):
        self.show_status_btn.setChecked(True)
        self.working_indicator.show()
        self.process_error_indicator.hide()

    def _on_installer_done(self, exit_code, exit_status):
        self.working_indicator.hide()
        if exit_code:
            self.process_error_indicator.show()
        else:
            self.show_status_btn.setChecked(False)
        self.refresh()
        self.plugin_sorter.refresh()

    def refresh(self):
        self.installed_list.clear()
        self.available_list.clear()

        # fetch installed
        from ...plugins import plugin_manager

        plugin_manager.discover()  # since they might not be loaded yet

        already_installed = set()

        for plugin_name, mod_name, distname in plugin_manager.iter_available():
            # not showing these in the plugin dialog
            if plugin_name in ('napari_plugin_engine',):
                continue
            if distname:
                already_installed.add(distname)
                meta = standard_metadata(distname)
            else:
                meta = {}
            self.installed_list.addItem(
                ProjectInfo(
                    normalized_name(distname or ''),
                    meta.get('version', ''),
                    meta.get('url', ''),
                    meta.get('summary', ''),
                    meta.get('author', ''),
                    meta.get('license', ''),
                ),
                plugin_name=plugin_name,
                enabled=plugin_name in plugin_manager.plugins,
            )
        # self.v_splitter.setSizes([70 * self.installed_list.count(), 10, 10])

        # fetch available plugins
        self.worker = create_worker(iter_napari_plugin_info)

        def _handle_yield(project_info):
            if project_info.name in already_installed:
                self.installed_list.tag_outdated(project_info)
            else:
                self.available_list.addItem(project_info)

        self.worker.yielded.connect(_handle_yield)
        self.worker.finished.connect(self.working_indicator.hide)
        self.worker.finished.connect(self._update_count_in_label)
        self.worker.start()

    def setup_ui(self):
        self.resize(1080, 640)
        vlay_1 = QVBoxLayout(self)
        self.h_splitter = QSplitter(self)
        vlay_1.addWidget(self.h_splitter)
        self.h_splitter.setOrientation(Qt.Horizontal)
        self.v_splitter = QSplitter(self.h_splitter)
        self.v_splitter.setOrientation(Qt.Vertical)
        self.v_splitter.setMinimumWidth(500)
        self.plugin_sorter = QtPluginSorter(parent=self.h_splitter)
        self.plugin_sorter.layout().setContentsMargins(2, 0, 0, 0)
        self.plugin_sorter.hide()

        installed = QWidget(self.v_splitter)
        lay = QVBoxLayout(installed)
        lay.setContentsMargins(0, 2, 0, 2)
        self.installed_label = QLabel(trans._("Installed Plugins"))
        self.installed_filter = QLineEdit()
        self.installed_filter.setPlaceholderText("search...")
        self.installed_filter.setMaximumWidth(350)
        self.installed_filter.setClearButtonEnabled(True)
        mid_layout = QHBoxLayout()
        mid_layout.addWidget(self.installed_label)
        mid_layout.addWidget(self.installed_filter)
        mid_layout.addStretch()
        lay.addLayout(mid_layout)
        self.installed_list = QPluginList(installed, self.installer)
        self.installed_filter.textChanged.connect(self.installed_list.filter)
        lay.addWidget(self.installed_list)

        uninstalled = QWidget(self.v_splitter)
        lay = QVBoxLayout(uninstalled)
        lay.setContentsMargins(0, 2, 0, 2)
        self.avail_label = QLabel(trans._("Available Plugins"))
        self.avail_filter = QLineEdit()
        self.avail_filter.setPlaceholderText("search...")
        self.avail_filter.setMaximumWidth(350)
        self.avail_filter.setClearButtonEnabled(True)
        mid_layout = QHBoxLayout()
        mid_layout.addWidget(self.avail_label)
        mid_layout.addWidget(self.avail_filter)
        mid_layout.addStretch()
        lay.addLayout(mid_layout)
        self.available_list = QPluginList(uninstalled, self.installer)
        self.avail_filter.textChanged.connect(self.available_list.filter)
        lay.addWidget(self.available_list)

        self.stdout_text = QTextEdit(self.v_splitter)
        self.stdout_text.setReadOnly(True)
        self.stdout_text.setObjectName("pip_install_status")
        self.stdout_text.hide()

        buttonBox = QHBoxLayout()
        self.working_indicator = QLabel(trans._("loading ..."), self)
        sp = self.working_indicator.sizePolicy()
        sp.setRetainSizeWhenHidden(True)
        self.working_indicator.setSizePolicy(sp)
        self.process_error_indicator = QLabel(self)
        self.process_error_indicator.setObjectName("error_label")
        self.process_error_indicator.hide()
        load_gif = str(Path(napari.resources.__file__).parent / "loading.gif")
        mov = QMovie(load_gif)
        mov.setScaledSize(QSize(18, 18))
        self.working_indicator.setMovie(mov)
        mov.start()
        self.direct_entry_edit = QLineEdit(self)
        self.direct_entry_edit.installEventFilter(self)
        self.direct_entry_edit.setPlaceholderText(
            trans._('install by name/url, or drop file...')
        )
        self.direct_entry_btn = QPushButton(trans._("Install"), self)
        self.direct_entry_btn.clicked.connect(self._install_packages)
        self.show_status_btn = QPushButton(trans._("Show Status"), self)
        self.show_status_btn.setFixedWidth(100)
        self.show_sorter_btn = QPushButton(trans._("<< Show Sorter"), self)
        self.close_btn = QPushButton(trans._("Close"), self)
        self.close_btn.clicked.connect(self.accept)
        buttonBox.addWidget(self.show_status_btn)
        buttonBox.addWidget(self.working_indicator)
        buttonBox.addWidget(self.direct_entry_edit)
        buttonBox.addWidget(self.direct_entry_btn)
        buttonBox.addWidget(self.process_error_indicator)
        buttonBox.addSpacing(60)
        buttonBox.addWidget(self.show_sorter_btn)
        buttonBox.addWidget(self.close_btn)
        buttonBox.setContentsMargins(0, 0, 4, 0)
        vlay_1.addLayout(buttonBox)

        self.show_status_btn.setCheckable(True)
        self.show_status_btn.setChecked(False)
        self.show_status_btn.toggled.connect(self._toggle_status)

        self.show_sorter_btn.setCheckable(True)
        self.show_sorter_btn.setChecked(False)
        self.show_sorter_btn.toggled.connect(self._toggle_sorter)

        self.v_splitter.setStretchFactor(1, 2)
        self.h_splitter.setStretchFactor(0, 2)
        self.avail_filter.setFocus()

    def _update_count_in_label(self):
        count = self.available_list.count()
        self.avail_label.setText(
            trans._("Available Plugins ({count})", count=count)
        )

    def eventFilter(self, watched, event):
        if event.type() == QEvent.DragEnter:
            # we need to accept this event explicitly to be able
            # to receive QDropEvents!
            event.accept()
        if event.type() == QEvent.Drop:
            md = event.mimeData()
            if md.hasUrls():
                files = [url.toLocalFile() for url in md.urls()]
                self.direct_entry_edit.setText(files[0])
                return True
        return super().eventFilter(watched, event)

    def _toggle_sorter(self, show):
        if show:
            self.show_sorter_btn.setText(trans._(">> Hide Sorter"))
            self.plugin_sorter.show()
        else:
            self.show_sorter_btn.setText(trans._("<< Show Sorter"))
            self.plugin_sorter.hide()

    def _toggle_status(self, show):
        if show:
            self.show_status_btn.setText(trans._("Hide Status"))
            self.stdout_text.show()
        else:
            self.show_status_btn.setText(trans._("Show Status"))
            self.stdout_text.hide()

    def _install_packages(self, packages: Sequence[str] = ()):
        if not packages:
            _packages = self.direct_entry_edit.text()
            if os.path.exists(_packages):
                packages = [_packages]
            else:
                packages = _packages.split()
            self.direct_entry_edit.clear()
        if packages:
            self.installer.install(packages)


if __name__ == "__main__":
    from qtpy.QtWidgets import QApplication

    app = QApplication([])
    w = QtPluginDialog()
    w.show()
    app.exec_()
37.617375
79
0.640214
18,993
0.933271
0
0
1,838
0.090315
0
0
1,339
0.065795
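The Installer in the record above shells out to "<python> -m pip" in a QProcess rather than importing pip in-process. A stripped-down sketch of that pattern, run synchronously here for simplicity (the real dialog stays asynchronous via signals, and "--version" stands in for an actual install so the sketch is side-effect free):

import sys
from qtpy.QtCore import QProcess

proc = QProcess()
proc.setProgram(sys.executable)                      # same interpreter, like Installer
proc.setProcessChannelMode(QProcess.MergedChannels)  # merge stdout/stderr into one stream
proc.setArguments(['-m', 'pip', '--version'])
proc.start()
proc.waitForFinished()
print(proc.readAllStandardOutput().data().decode(), end="")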
8a112375ff4d16de8957c825f7c7971fdb15e0cc
1,179
py
Python
hata/discord/webhook/utils.py
WizzyBots/hata
f6991afc0bebf7dad932888a536f4d010f8663c7
[ "0BSD" ]
1
2022-03-02T03:59:57.000Z
2022-03-02T03:59:57.000Z
hata/discord/webhook/utils.py
m0nk3ybraindead/hata
f87ed3d7009eeae31d6ea158772efd33775c7b1c
[ "0BSD" ]
1
2022-02-08T16:54:39.000Z
2022-02-08T16:54:39.000Z
hata/discord/webhook/utils.py
WizzyBots/hata
f6991afc0bebf7dad932888a536f4d010f8663c7
[ "0BSD" ]
null
null
null
__all__ = ('create_partial_webhook_from_id', )

from scarletio import export

from ..core import USERS

from .preinstanced import WebhookType
from .webhook import Webhook


@export
def create_partial_webhook_from_id(webhook_id, token, *, type_=WebhookType.bot, channel_id=0):
    """
    Creates a partial webhook from the given parameters. If the webhook with the given `webhook_id` already
    exists, then returns that instead.
    
    Parameters
    ----------
    webhook_id : `int`
        The identifier number of the webhook.
    token : `str`
        The token of the webhook.
    type_ : ``WebhookType`` = `WebhookType.bot`, Optional (Keyword only)
        The webhook's type. Defaults to `WebhookType.bot`.
    channel_id : `int` = `0`, Optional (Keyword only)
        The webhook's channel's identifier. Defaults to `0`.
    
    Returns
    -------
    webhook : ``Webhook``
    """
    try:
        webhook = USERS[webhook_id]
    except KeyError:
        webhook = Webhook._create_empty(webhook_id)
        webhook.channel_id = channel_id
        webhook.type = type_
        
        USERS[webhook_id] = webhook
    
    webhook.token = token
    return webhook
27.418605
115
0.653096
0
0
0
0
1,005
0.852417
0
0
648
0.549618
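The function above is a get-or-create against the USERS cache: one-time fields (channel, type) are only set on a freshly created webhook, while the token is refreshed on every call. The same pattern in isolation; all names here are hypothetical stand-ins, not hata API:

CACHE = {}

class Entry:
    def __init__(self, entity_id):
        self.entity_id = entity_id
        self.token = None

def get_or_create(entity_id, token):
    try:
        entry = CACHE[entity_id]
    except KeyError:
        entry = Entry(entity_id)      # one-time setup happens only on creation
        CACHE[entity_id] = entry
    entry.token = token               # always refreshed, like webhook.token
    return entry

assert get_or_create(1, "a") is get_or_create(1, "b")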
8a1292fe9e365e4f3b12243aeeeb62b3fcd34222
1,067
py
Python
MIT/600.1x - Introduction to Computer Science and Programming Using Python/Unit 4/Problem Set 4/get_word_score.py
henriqueumeda/-Python-study
28e93a377afa4732037a29eb74d4bc7c9e24b62f
[ "MIT" ]
null
null
null
MIT/600.1x - Introduction to Computer Science and Programming Using Python/Unit 4/Problem Set 4/get_word_score.py
henriqueumeda/-Python-study
28e93a377afa4732037a29eb74d4bc7c9e24b62f
[ "MIT" ]
null
null
null
MIT/600.1x - Introduction to Computer Science and Programming Using Python/Unit 4/Problem Set 4/get_word_score.py
henriqueumeda/-Python-study
28e93a377afa4732037a29eb74d4bc7c9e24b62f
[ "MIT" ]
null
null
null
SCRABBLE_LETTER_VALUES = {
    'a': 1, 'b': 3, 'c': 3, 'd': 2, 'e': 1, 'f': 4, 'g': 2, 'h': 4, 'i': 1,
    'j': 8, 'k': 5, 'l': 1, 'm': 3, 'n': 1, 'o': 1, 'p': 3, 'q': 10, 'r': 1,
    's': 1, 't': 1, 'u': 1, 'v': 4, 'w': 4, 'x': 8, 'y': 4, 'z': 10
}


def getWordScore(word, n):
    """
    Returns the score for a word. Assumes the word is a valid word.

    The score for a word is the sum of the points for letters in the
    word, multiplied by the length of the word, PLUS 50 points if all n
    letters are used on the first turn.

    Letters are scored as in Scrabble; A is worth 1, B is worth 3, C is
    worth 3, D is worth 2, E is worth 1, and so on (see SCRABBLE_LETTER_VALUES)

    word: string (lowercase letters)
    n: integer (HAND_SIZE; i.e., hand size required for additional points)
    returns: int >= 0
    """
    total_points = 0
    for letter in word:
        total_points += SCRABBLE_LETTER_VALUES[letter]
    total_points *= len(word)
    if len(word) == n:
        total_points += 50
    return total_points


print(getWordScore('waybill', 7))
35.566667
115
0.585754
0
0
0
0
0
0
0
0
636
0.596064
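A quick check of the scoring rule against the call in the record: 'waybill' has letter points w(4) + a(1) + y(4) + b(3) + i(1) + l(1) + l(1) = 15, times length 7 gives 105, plus the 50-point bonus since all 7 hand letters are used, for 155:

letters = [4, 1, 4, 3, 1, 1, 1]   # w, a, y, b, i, l, l
assert sum(letters) == 15
assert sum(letters) * 7 == 105
assert sum(letters) * 7 + 50 == 155   # matches getWordScore('waybill', 7)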
8a13575cd76b03c2660c0f973dca2598509c1205
34,179
py
Python
sdk/lusid/models/lusid_instrument.py
rizwansaeed/lusid-sdk-python-preview
52d092d6d4099b8526f0318f3fe1ddc0b943da6a
[ "MIT" ]
null
null
null
sdk/lusid/models/lusid_instrument.py
rizwansaeed/lusid-sdk-python-preview
52d092d6d4099b8526f0318f3fe1ddc0b943da6a
[ "MIT" ]
null
null
null
sdk/lusid/models/lusid_instrument.py
rizwansaeed/lusid-sdk-python-preview
52d092d6d4099b8526f0318f3fe1ddc0b943da6a
[ "MIT" ]
null
null
null
# coding: utf-8 """ LUSID API # Introduction This page documents the [LUSID APIs](https://www.lusid.com/api/swagger), which allows authorised clients to query and update their data within the LUSID platform. SDKs to interact with the LUSID APIs are available in the following languages : * [C#](https://github.com/finbourne/lusid-sdk-csharp) * [Java](https://github.com/finbourne/lusid-sdk-java) * [JavaScript](https://github.com/finbourne/lusid-sdk-js) * [Python](https://github.com/finbourne/lusid-sdk-python) # Data Model The LUSID API has a relatively lightweight but extremely powerful data model. One of the goals of LUSID was not to enforce on clients a single rigid data model but rather to provide a flexible foundation onto which clients can map their own data models. The core entities in LUSID provide a minimal structure and set of relationships, and the data model can be extended using Properties. The LUSID data model is exposed through the LUSID APIs. The APIs provide access to both business objects and the meta data used to configure the systems behaviours. The key business entities are: - * **Portfolios** A portfolio is a container for transactions and holdings (a **Transaction Portfolio**) or constituents (a **Reference Portfolio**). * **Derived Portfolios**. Derived Portfolios allow Portfolios to be created based on other Portfolios, by overriding or adding specific items. * **Holdings** A Holding is a quantity of an Instrument or a balance of cash within a Portfolio. Holdings can only be adjusted via Transactions. * **Transactions** A Transaction is an economic event that occurs in a Portfolio, causing its holdings to change. * **Corporate Actions** A corporate action is a market event which occurs to an Instrument and thus applies to all portfolios which holding the instrument. Examples are stock splits or mergers. * **Constituents** A constituent is a record in a Reference Portfolio containing an Instrument and an associated weight. * **Instruments** An instrument represents a currency, tradable instrument or OTC contract that is attached to a transaction and a holding. * **Properties** All major entities allow additional user defined properties to be associated with them. For example, a Portfolio manager may be associated with a portfolio. Meta data includes: - * **Transaction Types** Transactions are booked with a specific transaction type. The types are client defined and are used to map the Transaction to a series of movements which update the portfolio holdings. * **Properties Types** Types of user defined properties used within the system. ## Scope All data in LUSID is segregated at the client level. Entities in LUSID are identifiable by a unique code. Every entity lives within a logical data partition known as a Scope. Scope is an identity namespace allowing two entities with the same unique code to co-exist within individual address spaces. For example, prices for equities from different vendors may be uploaded into different scopes such as `client/vendor1` and `client/vendor2`. A portfolio may then be valued using either of the price sources by referencing the appropriate scope. LUSID Clients cannot access scopes of other clients. ## Instruments LUSID has its own built-in instrument master which you can use to master your own instrument universe. Every instrument must be created with one or more unique market identifiers, such as [FIGI](https://openfigi.com/). For any non-listed instruments (eg OTCs), you can upload an instrument against a custom ID of your choosing. 
In addition, LUSID will allocate each instrument a unique 'LUSID instrument identifier'. The LUSID instrument identifier is what is used when uploading transactions, holdings, prices, etc. The API exposes an `instrument/lookup` endpoint which can be used to lookup these LUSID identifiers using their market identifiers. Cash can be referenced using the ISO currency code prefixed with \"`CCY_`\" e.g. `CCY_GBP` ## Instrument Data Instrument data can be uploaded to the system using the [Instrument Properties](#tag/InstrumentProperties) endpoint. | Field|Type|Description | | ---|---|--- | | Key|propertykey|The key of the property. This takes the format {domain}/{scope}/{code} e.g. 'Instrument/system/Name' or 'Transaction/strategy/quantsignal'. | | Value|string|The value of the property. | | EffectiveFrom|datetimeoffset|The effective datetime from which the property is valid. | | EffectiveUntil|datetimeoffset|The effective datetime until which the property is valid. If not supplied this will be valid indefinitely, potentially overwriting values with EffectiveFrom's in the future. | ## Transaction Portfolios Portfolios are the top-level entity containers within LUSID, containing transactions, corporate actions and holdings. The transactions build up the portfolio holdings on which valuations, analytics profit & loss and risk can be calculated. Properties can be associated with Portfolios to add in additional data. Portfolio properties can be changed over time, for example to allow a Portfolio Manager to be linked with a Portfolio. Additionally, portfolios can be securitised and held by other portfolios, allowing LUSID to perform \"drill-through\" into underlying fund holdings ### Derived Portfolios LUSID also allows for a portfolio to be composed of another portfolio via derived portfolios. A derived portfolio can contain its own transactions and also inherits any transactions from its parent portfolio. Any changes made to the parent portfolio are automatically reflected in derived portfolio. Derived portfolios in conjunction with scopes are a powerful construct. For example, to do pre-trade what-if analysis, a derived portfolio could be created a new namespace linked to the underlying live (parent) portfolio. Analysis can then be undertaken on the derived portfolio without affecting the live portfolio. ### Transactions A transaction represents an economic activity against a Portfolio. Transactions are processed according to a configuration. This will tell the LUSID engine how to interpret the transaction and correctly update the holdings. LUSID comes with a set of transaction types you can use out of the box, or you can configure your own set(s) of transactions. For more details see the [LUSID Getting Started Guide for transaction configuration.](https://support.lusid.com/configuring-transaction-types) | Field|Type|Description | | ---|---|--- | | TransactionId|string|The unique identifier for the transaction. | | Type|string|The type of the transaction e.g. 'Buy', 'Sell'. The transaction type should have been pre-configured via the System Configuration API endpoint. If it hasn't been pre-configured the transaction will still be updated or inserted however you will be unable to generate the resultant holdings for the portfolio that contains this transaction as LUSID does not know how to process it. | | InstrumentIdentifiers|map|A set of instrument identifiers to use to resolve the transaction to a unique instrument. | | TransactionDate|dateorcutlabel|The date of the transaction. 
| | SettlementDate|dateorcutlabel|The settlement date of the transaction. | | Units|decimal|The number of units transacted in the associated instrument. | | TransactionPrice|transactionprice|The price for each unit of the transacted instrument in the transaction currency. | | TotalConsideration|currencyandamount|The total value of the transaction in the settlement currency. | | ExchangeRate|decimal|The exchange rate between the transaction and settlement currency. For example if the transaction currency is in USD and the settlement currency is in GBP this this the USD/GBP rate. | | TransactionCurrency|currency|The transaction currency. | | Properties|map|Set of unique transaction properties and associated values to store with the transaction. Each property must be from the 'Transaction' domain. | | CounterpartyId|string|The identifier for the counterparty of the transaction. | | Source|string|The source of the transaction. This is used to look up the appropriate transaction group set in the transaction type configuration. | From these fields, the following values can be calculated * **Transaction value in Transaction currency**: TotalConsideration / ExchangeRate * **Transaction value in Portfolio currency**: Transaction value in Transaction currency * TradeToPortfolioRate #### Example Transactions ##### A Common Purchase Example Three example transactions are shown in the table below. They represent a purchase of USD denominated IBM shares within a Sterling denominated portfolio. * The first two transactions are for separate buy and fx trades * Buying 500 IBM shares for $71,480.00 * A spot foreign exchange conversion to fund the IBM purchase. (Buy $71,480.00 for &#163;54,846.60) * The third transaction is an alternate version of the above trades. Buying 500 IBM shares and settling directly in Sterling. | Column | Buy Trade | Fx Trade | Buy Trade with foreign Settlement | | ----- | ----- | ----- | ----- | | TransactionId | FBN00001 | FBN00002 | FBN00003 | | Type | Buy | FxBuy | Buy | | InstrumentIdentifiers | { \"figi\", \"BBG000BLNNH6\" } | { \"CCY\", \"CCY_USD\" } | { \"figi\", \"BBG000BLNNH6\" } | | TransactionDate | 2018-08-02 | 2018-08-02 | 2018-08-02 | | SettlementDate | 2018-08-06 | 2018-08-06 | 2018-08-06 | | Units | 500 | 71480 | 500 | | TransactionPrice | 142.96 | 1 | 142.96 | | TradeCurrency | USD | USD | USD | | ExchangeRate | 1 | 0.7673 | 0.7673 | | TotalConsideration.Amount | 71480.00 | 54846.60 | 54846.60 | | TotalConsideration.Currency | USD | GBP | GBP | | Trade/default/TradeToPortfolioRate&ast; | 0.7673 | 0.7673 | 0.7673 | [&ast; This is a property field] ##### A Forward FX Example LUSID has a flexible transaction modelling system, meaning there are a number of different ways of modelling forward fx trades. The default LUSID transaction types are FwdFxBuy and FwdFxSell. Using these transaction types, LUSID will generate two holdings for each Forward FX trade, one for each currency in the trade. 
An example Forward Fx trade to sell GBP for USD in a JPY-denominated portfolio is shown below: | Column | Forward 'Sell' Trade | Notes | | ----- | ----- | ---- | | TransactionId | FBN00004 | | | Type | FwdFxSell | | | InstrumentIdentifiers | { \"Instrument/default/Currency\", \"GBP\" } | | | TransactionDate | 2018-08-02 | | | SettlementDate | 2019-02-06 | Six month forward | | Units | 10000.00 | Units of GBP | | TransactionPrice | 1 | | | TradeCurrency | GBP | Currency being sold | | ExchangeRate | 1.3142 | Agreed rate between GBP and USD | | TotalConsideration.Amount | 13142.00 | Amount in the settlement currency, USD | | TotalConsideration.Currency | USD | Settlement currency | | Trade/default/TradeToPortfolioRate | 142.88 | Rate between trade currency, GBP and portfolio base currency, JPY | Please note that exactly the same economic behaviour could be modelled using the FwdFxBuy Transaction Type with the amounts and rates reversed. ### Holdings A holding represents a position in an instrument or cash on a given date. | Field|Type|Description | | ---|---|--- | | InstrumentUid|string|The unqiue Lusid Instrument Id (LUID) of the instrument that the holding is in. | | SubHoldingKeys|map|The sub-holding properties which identify the holding. Each property will be from the 'Transaction' domain. These are configured when a transaction portfolio is created. | | Properties|map|The properties which have been requested to be decorated onto the holding. These will be from the 'Instrument' or 'Holding' domain. | | HoldingType|string|The type of the holding e.g. Position, Balance, CashCommitment, Receivable, ForwardFX etc. | | Units|decimal|The total number of units of the holding. | | SettledUnits|decimal|The total number of settled units of the holding. | | Cost|currencyandamount|The total cost of the holding in the transaction currency. | | CostPortfolioCcy|currencyandamount|The total cost of the holding in the portfolio currency. | | Transaction|transaction|The transaction associated with an unsettled holding. | ## Corporate Actions Corporate actions are represented within LUSID in terms of a set of instrument-specific 'transitions'. These transitions are used to specify the participants of the corporate action, and the effect that the corporate action will have on holdings in those participants. ### Corporate Action | Field|Type|Description | | ---|---|--- | | CorporateActionCode|code|The unique identifier of this corporate action | | Description|string| | | AnnouncementDate|datetimeoffset|The announcement date of the corporate action | | ExDate|datetimeoffset|The ex date of the corporate action | | RecordDate|datetimeoffset|The record date of the corporate action | | PaymentDate|datetimeoffset|The payment date of the corporate action | | Transitions|corporateactiontransition[]|The transitions that result from this corporate action | ### Transition | Field|Type|Description | | ---|---|--- | | InputTransition|corporateactiontransitioncomponent|Indicating the basis of the corporate action - which security and how many units | | OutputTransitions|corporateactiontransitioncomponent[]|What will be generated relative to the input transition | ### Example Corporate Action Transitions #### A Dividend Action Transition In this example, for each share of IBM, 0.20 units (or 20 pence) of GBP are generated. 
| Column | Input Transition | Output Transition | | ----- | ----- | ----- | | Instrument Identifiers | { \"figi\" : \"BBG000BLNNH6\" } | { \"ccy\" : \"CCY_GBP\" } | | Units Factor | 1 | 0.20 | | Cost Factor | 1 | 0 | #### A Split Action Transition In this example, for each share of IBM, we end up with 2 units (2 shares) of IBM, with total value unchanged. | Column | Input Transition | Output Transition | | ----- | ----- | ----- | | Instrument Identifiers | { \"figi\" : \"BBG000BLNNH6\" } | { \"figi\" : \"BBG000BLNNH6\" } | | Units Factor | 1 | 2 | | Cost Factor | 1 | 1 | #### A Spinoff Action Transition In this example, for each share of IBM, we end up with 1 unit (1 share) of IBM and 3 units (3 shares) of Celestica, with 85% of the value remaining on the IBM share, and 5% in each Celestica share (15% total). | Column | Input Transition | Output Transition 1 | Output Transition 2 | | ----- | ----- | ----- | ----- | | Instrument Identifiers | { \"figi\" : \"BBG000BLNNH6\" } | { \"figi\" : \"BBG000BLNNH6\" } | { \"figi\" : \"BBG000HBGRF3\" } | | Units Factor | 1 | 1 | 3 | | Cost Factor | 1 | 0.85 | 0.15 | ## Reference Portfolios Reference portfolios are portfolios that contain constituents with weights. They are designed to represent entities such as indices and benchmarks. ### Constituents | Field|Type|Description | | ---|---|--- | | InstrumentIdentifiers|map|Unique instrument identifiers | | InstrumentUid|string|LUSID's internal unique instrument identifier, resolved from the instrument identifiers | | Currency|decimal| | | Weight|decimal| | | FloatingWeight|decimal| | ## Portfolio Groups Portfolio groups allow the construction of a hierarchy from portfolios and groups. Portfolio operations on the group are executed on an aggregated set of portfolios in the hierarchy. For example: * Global Portfolios _(group)_ * APAC _(group)_ * Hong Kong _(portfolio)_ * Japan _(portfolio)_ * Europe _(group)_ * France _(portfolio)_ * Germany _(portfolio)_ * UK _(portfolio)_ In this example **Global Portfolios** is a group that consists of an aggregate of **Hong Kong**, **Japan**, **France**, **Germany** and **UK** portfolios. ## Properties Properties are key-value pairs that can be applied to any entity within a domain (where a domain is `trade`, `portfolio`, `security` etc). Properties must be defined before use with a `PropertyDefinition` and can then subsequently be added to entities. ## Schema A detailed description of the entities used by the API and parameters for endpoints which take a JSON document can be retrieved via the `schema` endpoint. ## Meta data The following headers are returned on all responses from LUSID | Name | Purpose | | --- | --- | | lusid-meta-duration | Duration of the request | | lusid-meta-success | Whether or not LUSID considered the request to be successful | | lusid-meta-requestId | The unique identifier for the request | | lusid-schema-url | Url of the schema for the data being returned | | lusid-property-schema-url | Url of the schema for any properties | # Error Codes | Code|Name|Description | | ---|---|--- | | <a name=\"-10\">-10</a>|Server Configuration Error| | | <a name=\"-1\">-1</a>|Unknown error|An unexpected error was encountered on our side. 
| | <a name=\"102\">102</a>|Version Not Found| | | <a name=\"103\">103</a>|Api Rate Limit Violation| | | <a name=\"104\">104</a>|Instrument Not Found| | | <a name=\"105\">105</a>|Property Not Found| | | <a name=\"106\">106</a>|Portfolio Recursion Depth| | | <a name=\"108\">108</a>|Group Not Found| | | <a name=\"109\">109</a>|Portfolio Not Found| | | <a name=\"110\">110</a>|Property Schema Not Found| | | <a name=\"111\">111</a>|Portfolio Ancestry Not Found| | | <a name=\"112\">112</a>|Portfolio With Id Already Exists| | | <a name=\"113\">113</a>|Orphaned Portfolio| | | <a name=\"119\">119</a>|Missing Base Claims| | | <a name=\"121\">121</a>|Property Not Defined| | | <a name=\"122\">122</a>|Cannot Delete System Property| | | <a name=\"123\">123</a>|Cannot Modify Immutable Property Field| | | <a name=\"124\">124</a>|Property Already Exists| | | <a name=\"125\">125</a>|Invalid Property Life Time| | | <a name=\"126\">126</a>|Property Constraint Style Excludes Properties| | | <a name=\"127\">127</a>|Cannot Modify Default Data Type| | | <a name=\"128\">128</a>|Group Already Exists| | | <a name=\"129\">129</a>|No Such Data Type| | | <a name=\"130\">130</a>|Undefined Value For Data Type| | | <a name=\"131\">131</a>|Unsupported Value Type Defined On Data Type| | | <a name=\"132\">132</a>|Validation Error| | | <a name=\"133\">133</a>|Loop Detected In Group Hierarchy| | | <a name=\"134\">134</a>|Undefined Acceptable Values| | | <a name=\"135\">135</a>|Sub Group Already Exists| | | <a name=\"138\">138</a>|Price Source Not Found| | | <a name=\"139\">139</a>|Analytic Store Not Found| | | <a name=\"141\">141</a>|Analytic Store Already Exists| | | <a name=\"143\">143</a>|Client Instrument Already Exists| | | <a name=\"144\">144</a>|Duplicate In Parameter Set| | | <a name=\"147\">147</a>|Results Not Found| | | <a name=\"148\">148</a>|Order Field Not In Result Set| | | <a name=\"149\">149</a>|Operation Failed| | | <a name=\"150\">150</a>|Elastic Search Error| | | <a name=\"151\">151</a>|Invalid Parameter Value| | | <a name=\"153\">153</a>|Command Processing Failure| | | <a name=\"154\">154</a>|Entity State Construction Failure| | | <a name=\"155\">155</a>|Entity Timeline Does Not Exist| | | <a name=\"156\">156</a>|Concurrency Conflict Failure| | | <a name=\"157\">157</a>|Invalid Request| | | <a name=\"158\">158</a>|Event Publish Unknown| | | <a name=\"159\">159</a>|Event Query Failure| | | <a name=\"160\">160</a>|Blob Did Not Exist| | | <a name=\"162\">162</a>|Sub System Request Failure| | | <a name=\"163\">163</a>|Sub System Configuration Failure| | | <a name=\"165\">165</a>|Failed To Delete| | | <a name=\"166\">166</a>|Upsert Client Instrument Failure| | | <a name=\"167\">167</a>|Illegal As At Interval| | | <a name=\"168\">168</a>|Illegal Bitemporal Query| | | <a name=\"169\">169</a>|Invalid Alternate Id| | | <a name=\"170\">170</a>|Cannot Add Source Portfolio Property Explicitly| | | <a name=\"171\">171</a>|Entity Already Exists In Group| | | <a name=\"173\">173</a>|Entity With Id Already Exists| | | <a name=\"174\">174</a>|Derived Portfolio Details Do Not Exist| | | <a name=\"176\">176</a>|Portfolio With Name Already Exists| | | <a name=\"177\">177</a>|Invalid Transactions| | | <a name=\"178\">178</a>|Reference Portfolio Not Found| | | <a name=\"179\">179</a>|Duplicate Id| | | <a name=\"180\">180</a>|Command Retrieval Failure| | | <a name=\"181\">181</a>|Data Filter Application Failure| | | <a name=\"182\">182</a>|Search Failed| | | <a name=\"183\">183</a>|Movements Engine Configuration Key Failure| | | 
<a name=\"184\">184</a>|Fx Rate Source Not Found| | | <a name=\"185\">185</a>|Accrual Source Not Found| | | <a name=\"186\">186</a>|Access Denied| | | <a name=\"187\">187</a>|Invalid Identity Token| | | <a name=\"188\">188</a>|Invalid Request Headers| | | <a name=\"189\">189</a>|Price Not Found| | | <a name=\"190\">190</a>|Invalid Sub Holding Keys Provided| | | <a name=\"191\">191</a>|Duplicate Sub Holding Keys Provided| | | <a name=\"192\">192</a>|Cut Definition Not Found| | | <a name=\"193\">193</a>|Cut Definition Invalid| | | <a name=\"194\">194</a>|Time Variant Property Deletion Date Unspecified| | | <a name=\"195\">195</a>|Perpetual Property Deletion Date Specified| | | <a name=\"196\">196</a>|Time Variant Property Upsert Date Unspecified| | | <a name=\"197\">197</a>|Perpetual Property Upsert Date Specified| | | <a name=\"200\">200</a>|Invalid Unit For Data Type| | | <a name=\"201\">201</a>|Invalid Type For Data Type| | | <a name=\"202\">202</a>|Invalid Value For Data Type| | | <a name=\"203\">203</a>|Unit Not Defined For Data Type| | | <a name=\"204\">204</a>|Units Not Supported On Data Type| | | <a name=\"205\">205</a>|Cannot Specify Units On Data Type| | | <a name=\"206\">206</a>|Unit Schema Inconsistent With Data Type| | | <a name=\"207\">207</a>|Unit Definition Not Specified| | | <a name=\"208\">208</a>|Duplicate Unit Definitions Specified| | | <a name=\"209\">209</a>|Invalid Units Definition| | | <a name=\"210\">210</a>|Invalid Instrument Identifier Unit| | | <a name=\"211\">211</a>|Holdings Adjustment Does Not Exist| | | <a name=\"212\">212</a>|Could Not Build Excel Url| | | <a name=\"213\">213</a>|Could Not Get Excel Version| | | <a name=\"214\">214</a>|Instrument By Code Not Found| | | <a name=\"215\">215</a>|Entity Schema Does Not Exist| | | <a name=\"216\">216</a>|Feature Not Supported On Portfolio Type| | | <a name=\"217\">217</a>|Quote Not Found| | | <a name=\"218\">218</a>|Invalid Quote Identifier| | | <a name=\"219\">219</a>|Invalid Metric For Data Type| | | <a name=\"220\">220</a>|Invalid Instrument Definition| | | <a name=\"221\">221</a>|Instrument Upsert Failure| | | <a name=\"222\">222</a>|Reference Portfolio Request Not Supported| | | <a name=\"223\">223</a>|Transaction Portfolio Request Not Supported| | | <a name=\"224\">224</a>|Invalid Property Value Assignment| | | <a name=\"230\">230</a>|Transaction Type Not Found| | | <a name=\"231\">231</a>|Transaction Type Duplication| | | <a name=\"232\">232</a>|Portfolio Does Not Exist At Given Date| | | <a name=\"233\">233</a>|Query Parser Failure| | | <a name=\"234\">234</a>|Duplicate Constituent| | | <a name=\"235\">235</a>|Unresolved Instrument Constituent| | | <a name=\"236\">236</a>|Unresolved Instrument In Transition| | | <a name=\"237\">237</a>|Missing Side Definitions| | | <a name=\"299\">299</a>|Invalid Recipe| | | <a name=\"300\">300</a>|Missing Recipe| | | <a name=\"301\">301</a>|Dependencies| | | <a name=\"304\">304</a>|Portfolio Preprocess Failure| | | <a name=\"310\">310</a>|Valuation Engine Failure| | | <a name=\"311\">311</a>|Task Factory Failure| | | <a name=\"312\">312</a>|Task Evaluation Failure| | | <a name=\"313\">313</a>|Task Generation Failure| | | <a name=\"314\">314</a>|Engine Configuration Failure| | | <a name=\"315\">315</a>|Model Specification Failure| | | <a name=\"320\">320</a>|Market Data Key Failure| | | <a name=\"321\">321</a>|Market Resolver Failure| | | <a name=\"322\">322</a>|Market Data Failure| | | <a name=\"330\">330</a>|Curve Failure| | | <a name=\"331\">331</a>|Volatility Surface 
Failure| | | <a name=\"332\">332</a>|Volatility Cube Failure| | | <a name=\"350\">350</a>|Instrument Failure| | | <a name=\"351\">351</a>|Cash Flows Failure| | | <a name=\"352\">352</a>|Reference Data Failure| | | <a name=\"360\">360</a>|Aggregation Failure| | | <a name=\"361\">361</a>|Aggregation Measure Failure| | | <a name=\"370\">370</a>|Result Retrieval Failure| | | <a name=\"371\">371</a>|Result Processing Failure| | | <a name=\"372\">372</a>|Vendor Result Processing Failure| | | <a name=\"373\">373</a>|Vendor Result Mapping Failure| | | <a name=\"374\">374</a>|Vendor Library Unauthorised| | | <a name=\"375\">375</a>|Vendor Connectivity Error| | | <a name=\"376\">376</a>|Vendor Interface Error| | | <a name=\"377\">377</a>|Vendor Pricing Failure| | | <a name=\"378\">378</a>|Vendor Translation Failure| | | <a name=\"379\">379</a>|Vendor Key Mapping Failure| | | <a name=\"380\">380</a>|Vendor Reflection Failure| | | <a name=\"390\">390</a>|Attempt To Upsert Duplicate Quotes| | | <a name=\"391\">391</a>|Corporate Action Source Does Not Exist| | | <a name=\"392\">392</a>|Corporate Action Source Already Exists| | | <a name=\"393\">393</a>|Instrument Identifier Already In Use| | | <a name=\"394\">394</a>|Properties Not Found| | | <a name=\"395\">395</a>|Batch Operation Aborted| | | <a name=\"400\">400</a>|Invalid Iso4217 Currency Code| | | <a name=\"401\">401</a>|Cannot Assign Instrument Identifier To Currency| | | <a name=\"402\">402</a>|Cannot Assign Currency Identifier To Non Currency| | | <a name=\"403\">403</a>|Currency Instrument Cannot Be Deleted| | | <a name=\"404\">404</a>|Currency Instrument Cannot Have Economic Definition| | | <a name=\"405\">405</a>|Currency Instrument Cannot Have Lookthrough Portfolio| | | <a name=\"406\">406</a>|Cannot Create Currency Instrument With Multiple Identifiers| | | <a name=\"407\">407</a>|Specified Currency Is Undefined| | | <a name=\"410\">410</a>|Index Does Not Exist| | | <a name=\"411\">411</a>|Sort Field Does Not Exist| | | <a name=\"413\">413</a>|Negative Pagination Parameters| | | <a name=\"414\">414</a>|Invalid Search Syntax| | | <a name=\"415\">415</a>|Filter Execution Timeout| | | <a name=\"420\">420</a>|Side Definition Inconsistent| | | <a name=\"450\">450</a>|Invalid Quote Access Metadata Rule| | | <a name=\"451\">451</a>|Access Metadata Not Found| | | <a name=\"452\">452</a>|Invalid Access Metadata Identifier| | | <a name=\"460\">460</a>|Standard Resource Not Found| | | <a name=\"461\">461</a>|Standard Resource Conflict| | | <a name=\"462\">462</a>|Calendar Not Found| | | <a name=\"463\">463</a>|Date In A Calendar Not Found| | | <a name=\"464\">464</a>|Invalid Date Source Data| | | <a name=\"465\">465</a>|Invalid Timezone| | | <a name=\"601\">601</a>|Person Identifier Already In Use| | | <a name=\"602\">602</a>|Person Not Found| | | <a name=\"603\">603</a>|Cannot Set Identifier| | | <a name=\"617\">617</a>|Invalid Recipe Specification In Request| | | <a name=\"618\">618</a>|Inline Recipe Deserialisation Failure| | | <a name=\"619\">619</a>|Identifier Types Not Set For Entity| | | <a name=\"620\">620</a>|Cannot Delete All Client Defined Identifiers| | | <a name=\"650\">650</a>|The Order requested was not found.| | | <a name=\"654\">654</a>|The Allocation requested was not found.| | | <a name=\"655\">655</a>|Cannot build the fx forward target with the given holdings.| | | <a name=\"656\">656</a>|Group does not contain expected entities.| | | <a name=\"667\">667</a>|Relation definition already exists| | | <a name=\"673\">673</a>|Missing 
entitlements for entities in Group| |  | <a name=\"674\">674</a>|Next Best Action not found| |  | <a name=\"676\">676</a>|Relation definition not defined| |  | <a name=\"677\">677</a>|Invalid entity identifier for relation| |  | <a name=\"681\">681</a>|Sorting by specified field not supported|One or more of the provided fields to order by were either invalid or not supported. |  | <a name=\"682\">682</a>|Too many fields to sort by|The number of fields to sort the data by exceeds the number allowed by the endpoint |  | <a name=\"684\">684</a>|Sequence Not Found| |  | <a name=\"685\">685</a>|Sequence Already Exists| |  | <a name=\"686\">686</a>|Non-cycling sequence has been exhausted| |  | <a name=\"687\">687</a>|Legal Entity Identifier Already In Use| |  | <a name=\"688\">688</a>|Legal Entity Not Found| |  | <a name=\"689\">689</a>|The supplied pagination token is invalid| |  | <a name=\"690\">690</a>|Property Type Is Not Supported| |  | <a name=\"691\">691</a>|Multiple Tax-lots For Currency Type Is Not Supported| |   # noqa: E501

    The version of the OpenAPI document: 0.11.2275
    Contact: [email protected]
    Generated by: https://openapi-generator.tech
"""


import pprint
import re  # noqa: F401

import six


class LusidInstrument(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
      required_map (dict): The key is attribute name
                           and the value is whether it is 'required' or 'optional'.
    """
    openapi_types = {
        'instrument_type': 'str'
    }

    attribute_map = {
        'instrument_type': 'instrumentType'
    }

    required_map = {
        'instrument_type': 'required'
    }

    discriminator_value_class_map = {
        'EquityOption': 'EquityOption',
        'InstrumentLeg': 'InstrumentLeg',
        'InterestRateSwaption': 'InterestRateSwaption',
        'FxForward': 'FxForward',
        'InterestRateSwap': 'InterestRateSwap',
        'ExoticInstrument': 'ExoticInstrument',
        'FxOption': 'FxOption',
        'Bond': 'Bond',
        'TermDeposit': 'TermDeposit',
        'CreditDefaultSwap': 'CreditDefaultSwap',
        'Future': 'Future'
    }

    def __init__(self, instrument_type=None):  # noqa: E501
        """
        LusidInstrument - a model defined in OpenAPI

        :param instrument_type:  The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashflowLeg, Unknown, TermDeposit (required)
        :type instrument_type: str
        """  # noqa: E501
        self._instrument_type = None
        self.discriminator = 'instrument_type'

        self.instrument_type = instrument_type

    @property
    def instrument_type(self):
        """Gets the instrument_type of this LusidInstrument.  # noqa: E501

        The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashflowLeg, Unknown, TermDeposit  # noqa: E501

        :return: The instrument_type of this LusidInstrument.  # noqa: E501
        :rtype: str
        """
        return self._instrument_type

    @instrument_type.setter
    def instrument_type(self, instrument_type):
        """Sets the instrument_type of this LusidInstrument.

        The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashflowLeg, Unknown, TermDeposit  # noqa: E501

        :param instrument_type: The instrument_type of this LusidInstrument.  # noqa: E501
        :type: str
        """
        if instrument_type is None:
            raise ValueError("Invalid value for `instrument_type`, must not be `None`")  # noqa: E501
        allowed_values = ["QuotedSecurity", "InterestRateSwap", "FxForward", "Future", "ExoticInstrument", "FxOption", "CreditDefaultSwap", "InterestRateSwaption", "Bond", "EquityOption", "FixedLeg", "FloatingLeg", "BespokeCashflowLeg", "Unknown", "TermDeposit"]  # noqa: E501
        if instrument_type not in allowed_values:
            raise ValueError(
                "Invalid value for `instrument_type` ({0}), must be one of {1}"  # noqa: E501
                .format(instrument_type, allowed_values)
            )

        self._instrument_type = instrument_type

    def get_real_child_model(self, data):
        """Returns the real base class specified by the discriminator"""
        discriminator_key = self.attribute_map[self.discriminator]
        discriminator_value = data[discriminator_key]
        return self.discriminator_value_class_map.get(discriminator_value)

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, LusidInstrument):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
221.941558
28,647
0.692179
5,304
0.155183
0
0
1,735
0.050762
0
0
31,720
0.928055
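get_real_child_model in the record above is the standard OpenAPI discriminator lookup: the JSON key named by attribute_map[discriminator] selects the concrete model class. A minimal standalone sketch of that lookup, using plain dicts rather than the generated SDK:

discriminator_value_class_map = {'Bond': 'Bond', 'FxForward': 'FxForward'}
attribute_map = {'instrument_type': 'instrumentType'}
discriminator = 'instrument_type'

def real_child_model(data):
    # Read the discriminator's JSON key, then map its value to a class name.
    key = attribute_map[discriminator]
    return discriminator_value_class_map.get(data[key])

assert real_child_model({'instrumentType': 'Bond'}) == 'Bond'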
8a13a931088f76e07468fa49084284d44b5cf0eb
936
py
Python
autolatex-master/exemplos_codigo/certificados/certificados.py
luizgui05/autolatex.
366eb3d88b7e60c119737f958e35cce99e8775e9
[ "MIT" ]
null
null
null
autolatex-master/exemplos_codigo/certificados/certificados.py
luizgui05/autolatex.
366eb3d88b7e60c119737f958e35cce99e8775e9
[ "MIT" ]
null
null
null
autolatex-master/exemplos_codigo/certificados/certificados.py
luizgui05/autolatex.
366eb3d88b7e60c119737f958e35cce99e8775e9
[ "MIT" ]
null
null
null
import os
import sys
import sqlite3

con = None
filename = 'certificado'

# Open the database to read the names.
try:
    con = sqlite3.connect('math.db')
    cur = con.cursor()
    cur.execute('select * from math')
    data = cur.fetchall()
except sqlite3.Error as e:
    # Python 3 syntax; the original used the Python 2 "except Error, e" / print statement forms.
    print("Error %s:" % e.args[0])
    sys.exit(1)
finally:
    if con:
        con.close()

# Generate one certificate per name.
for row in data:
    f = open(filename + '.tex', 'r+')
    old = f.readlines()
    # If the template already starts with a \def line, replace it.
    if old[0][1:4] == 'def':
        offset = 1
    else:
        offset = 0
    f.seek(0)
    f.write('\\def\\name {' + row[0] + '}\n')
    f.writelines(old[offset:])
    f.close()

    # Compile the LaTeX file.
    try:
        os.system('pdflatex ' + filename + '.tex')
        os.system('mv ' + filename + '.pdf ' + filename + '_' + row[0].replace(' ', '_') + '.pdf')
        # os.system('xdg-open ' + filename + '.pdf &')
    except OSError:
        print('LaTeX not installed.')
20.8
85
0.569444
0
0
0
0
0
0
0
0
295
0.315171
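The script above personalises a LaTeX template by prepending a \def\name {...} line and dropping any previous one. The same header swap in isolation, on an in-memory template (the template contents here are hypothetical):

def inject_name(lines, name):
    # Detect an existing \def line the same way the script does: lines[0][1:4] == 'def'.
    offset = 1 if lines and lines[0][1:4] == 'def' else 0
    return ['\\def\\name {' + name + '}\n'] + lines[offset:]

template = ['\\documentclass{article}\n', '\\begin{document}\\name\\end{document}\n']
once = inject_name(template, 'Alice')
twice = inject_name(once, 'Bob')
assert twice[0] == '\\def\\name {Bob}\n' and len(twice) == 3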
8a13fa0bd9273ba0ef6fc5a2231a5c8269835d8e
280
py
Python
nanoepiseg/main_list_chunks.py
snajder-r/nanoepiseg
2fe36a82e5b899330da5db6559eb45fe12cad37c
[ "MIT" ]
null
null
null
nanoepiseg/main_list_chunks.py
snajder-r/nanoepiseg
2fe36a82e5b899330da5db6559eb45fe12cad37c
[ "MIT" ]
null
null
null
nanoepiseg/main_list_chunks.py
snajder-r/nanoepiseg
2fe36a82e5b899330da5db6559eb45fe12cad37c
[ "MIT" ]
null
null
null
from pathlib import Path

from meth5.meth5 import MetH5File


def main(m5file: Path, chunk_size: int, quiet: bool):
    with MetH5File(m5file, "r", chunk_size=chunk_size) as f:
        for chrom in f.get_chromosomes():
            print(f"{chrom}: {f[chrom].get_number_of_chunks()}")
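The `main` above only needs a MetH5 file path, so a direct call is enough to list chunk counts per chromosome. A sketch, assuming the `meth5` package is installed and `sample.m5` (a hypothetical file name) exists:

from pathlib import Path

# chunk_size mirrors the MetH5 storage chunking; 50000 is just an example value.
main(Path("sample.m5"), chunk_size=50000, quiet=False)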
31.111111
64
0.696429
0
0
0
0
0
0
0
0
48
0.171429
8a14e512e0f7f79c5bcbfd4af00b8cc29f035958
6,376
py
Python
qscatv2/make_seasonal_images.py
tmilliman/sir_to_netcdf
d4641cdc5a9e92a55c0edb2dc6cd8c0e2da6f1fa
[ "MIT" ]
null
null
null
qscatv2/make_seasonal_images.py
tmilliman/sir_to_netcdf
d4641cdc5a9e92a55c0edb2dc6cd8c0e2da6f1fa
[ "MIT" ]
null
null
null
qscatv2/make_seasonal_images.py
tmilliman/sir_to_netcdf
d4641cdc5a9e92a55c0edb2dc6cd8c0e2da6f1fa
[ "MIT" ]
null
null
null
#!/usr/bin/env python

# script to make seasonal means and stddev images of 4-day sig0
# values.

import os
import sys
import glob
import numpy as np
import sirpy2 as sp2
import argparse
from osgeo import gdal

DATADIR = "./"
NODATA_VALUE = -9999.0

Q2M = {
    "JAS": list(range(7, 10)),
    "OND": list(range(10, 13)),
    "JFM": list(range(1, 4)),
    "AMJ": list(range(4, 7)),
}

# this allows GDAL to throw Python Exceptions
gdal.UseExceptions()


def db2pr(dbvalue):
    pr = 10 ** (dbvalue / 10.0)
    return pr


if __name__ == "__main__":

    # set up arguments
    parser = argparse.ArgumentParser(
        "script to make quarterly " + "means and stdevs of qscat dB values"
    )

    parser.add_argument(
        "-v",
        "--verbose",
        help="increase output verbosity",
        action="store_true",
        default=False,
    )

    parser.add_argument(
        "-q",
        "--quarter",
        nargs="?",
        choices=("JAS", "OND", "JFM", "AMJ"),
        default="JAS",
        const="JAS",
        help="Quarter for aggregation. Default=JAS",
    )

    parser.add_argument("region", help="BYU region string (e.g. SAm, NAm, Ama, etc.)")

    parser.add_argument(
        "year", type=int, help="Year e.g. 1999 (qscat data start in 1999)"
    )

    args = parser.parse_args()
    verbose = args.verbose
    year = args.year
    quarter = args.quarter

    # region list (LAEA regions only)
    valid_region_list = [
        "Grn",
        "Ala",
        "CAm",
        "NAm",
        "SAm",
        "NAf",
        "SAf",
        "Sib",
        "Eur",
        "SAs",
        "ChJ",
        "Ind",
        "Aus",
        "Ber",
    ]

    region = args.region
    try:
        region_index = valid_region_list.index(region)
    except Exception:
        sys.stderr.write("Region not valid.\n")
        sys.stderr.write("Valid regions are:\n")
        sys.stderr.write("{}\n".format(valid_region_list))
        sys.exit(1)

    if verbose:
        print("region: {}".format(region))
        print("year: {}".format(year))
        print("quarter: {}".format(quarter))

    # set data dir
    indir = os.path.join(DATADIR, "geotiffs", region, str(year))
    outdir = indir

    if year == 1999:
        year2 = 99
    else:
        year2 = "{:02d}".format(year - 2000)

    monthlist = Q2M[quarter]

    # make a list of files for this year
    filepatt = "quev-a-{}{}-*.tif".format(region, year2)
    globpatt = os.path.join(indir, filepatt)
    if verbose:
        print("glob pattern: {}".format(globpatt))
    filelist = glob.glob(globpatt)

    qlist = []
    for filepath in filelist:
        fn = os.path.basename(filepath)
        if verbose:
            print(fn)
        fn_dt = sp2.fn2dt(fn, date_flag="center")
        iyear = fn_dt.year
        imonth = fn_dt.month
        iday = fn_dt.day
        if imonth in monthlist:
            qlist.append(fn)
            if verbose:
                print("{}: {}-{}-{}".format(fn, iyear, imonth, iday))

    print("{}-{}: {}".format(year, quarter, qlist))

    if len(qlist) == 0:
        warnmsg = "No images found for this quarter.\n"
        sys.stdout.write(warnmsg)
        sys.exit(0)

    # loop over images for this quarter
    db_quarter = []
    for i, image in enumerate(qlist):
        a_imgpath = os.path.join(indir, image)
        try:
            a_ds = gdal.Open(a_imgpath)
        except Exception:
            print("Unable to open {}".format(a_imgpath))
            sys.exit(1)

        try:
            srcband = a_ds.GetRasterBand(1)
        except Exception:
            print("Band ({}) not found".format(1))
            sys.exit(1)

        a_data = srcband.ReadAsArray()
        a_mask = a_data == NODATA_VALUE

        # if this is the first image get projection and geotransform
        if i == 0:
            prj = a_ds.GetProjection()
            gt = a_ds.GetGeoTransform()
            ny, nx = a_data.shape

        db_data = a_data
        db_masked = np.ma.MaskedArray(db_data, a_mask)

        # add image to db_quarter list
        db_quarter.append(db_masked)

        # close datasets
        a_ds = None

    # stack list into array and find mean and std
    dbarray = np.ma.stack(db_quarter, axis=2)
    dbmean = np.ma.mean(dbarray, axis=2)
    dbstd = np.ma.std(dbarray, axis=2)
    print(dbmean.shape)

    # finally, save as a geotiff
    output_format = "GTiff"
    driver = gdal.GetDriverByName(output_format)
    dst_filename = "{}-quev-mean-db-{}-{}.tif"
    dst_filename = dst_filename.format(region, year, quarter)
    dst_dir = os.path.join(DATADIR, "geotiffs", region, str(year))
    dst_path = os.path.join(dst_dir, dst_filename)
    if verbose:
        print("Output file for sig0 means: {}".format(dst_path))

    dst_ds = driver.Create(dst_path, nx, ny, 1, gdal.GDT_Float32)
    dst_data = np.ma.filled(dbmean, fill_value=NODATA_VALUE)
    dst_ds.GetRasterBand(1).WriteArray(dst_data)
    dst_ds.GetRasterBand(1).SetNoDataValue(NODATA_VALUE)
    print("gt: {}".format(gt))
    dst_ds.SetGeoTransform(gt)
    dst_ds.SetProjection(prj)
    dst_ds = None

    dbmean_min = dbmean.min()
    dbmean_max = dbmean.max()
    dbmean_median = np.ma.median(dbmean)
    print("Quarterly ({}) Mean Stats".format(quarter))
    print("    Min: {}".format(dbmean_min))
    print("    Max: {}".format(dbmean_max))
    print("    Median: {}".format(dbmean_median))

    # repeat for standard deviation
    output_format = "GTiff"
    driver = gdal.GetDriverByName(output_format)
    dst_filename = "{}-quev-std-db-{}-{}.tif".format(region, year, quarter)
    dst_dir = os.path.join(DATADIR, "geotiffs", region, str(year))
    dst_path = os.path.join(dst_dir, dst_filename)
    if verbose:
        print("Output file: {}".format(dst_path))

    dst_ds = driver.Create(dst_path, nx, ny, 1, gdal.GDT_Float32)
    dst_data = np.ma.filled(dbstd, fill_value=NODATA_VALUE)
    dst_ds.GetRasterBand(1).WriteArray(dst_data)
    dst_ds.GetRasterBand(1).SetNoDataValue(NODATA_VALUE)
    print("gt: {}".format(gt))
    dst_ds.SetGeoTransform(gt)
    dst_ds.SetProjection(prj)
    dst_ds = None

    dbstd_min = dbstd.min()
    dbstd_max = dbstd.max()
    dbstd_median = np.ma.median(dbstd)
    print("Quarterly ({}) Stdev Stats".format(quarter))
    print("    Min: {}".format(dbstd_min))
    print("    Max: {}".format(dbstd_max))
    print("    Median: {}".format(dbstd_median))
27.601732
86
0.592848
0
0
0
0
0
0
0
0
1,423
0.223181
8a15ab57e7398ab067062419a83d15fd9bf34d36
434
py
Python
ex062.py
noahbarros/Python-Exercises
fafda898473bc984280e201ed11d8ad76cc8624a
[ "MIT" ]
1
2021-07-13T21:41:00.000Z
2021-07-13T21:41:00.000Z
ex062.py
noahbarros/Python-Exercises
fafda898473bc984280e201ed11d8ad76cc8624a
[ "MIT" ]
null
null
null
ex062.py
noahbarros/Python-Exercises
fafda898473bc984280e201ed11d8ad76cc8624a
[ "MIT" ]
null
null
null
primeiro = int(input('Digite o primeiro termo da PA: '))
razão = int(input('Digite a razão da PA: '))
termo = primeiro
cont = 1
total = 0
mais = 10
while mais != 0:
    total += mais
    while cont <= total:
        print(f'{termo} ', end='')
        termo += razão
        cont += 1
    print('Pausa')
    mais = int(input('Quantos termos você quer usar a mais? '))
print(f'a progressão foi finalizada com {total} termos mostrados')
27.125
66
0.612903
0
0
0
0
0
0
0
0
179
0.407745
8a16b528f332e28d501ffe602ae57113af02e27c
3,720
py
Python
arxml_data_extractor/handler/object_handler.py
Brokdar/ArxmlDataExtractor
2853112cbd4d001418b11ccb99f1db268347dfab
[ "MIT" ]
16
2020-08-16T09:13:35.000Z
2022-03-17T13:39:26.000Z
arxml_data_extractor/handler/object_handler.py
Brokdar/ArxmlDataExtractor
2853112cbd4d001418b11ccb99f1db268347dfab
[ "MIT" ]
null
null
null
arxml_data_extractor/handler/object_handler.py
Brokdar/ArxmlDataExtractor
2853112cbd4d001418b11ccb99f1db268347dfab
[ "MIT" ]
2
2020-10-14T10:54:37.000Z
2021-07-06T01:30:44.000Z
from lxml.etree import Element, QName
from typing import Union, List, Any
from tqdm import tqdm
import logging

from arxml_data_extractor.handler import value_handler
from arxml_data_extractor.handler.path_handler import PathHandler
from arxml_data_extractor.asr.asr_parser import AsrParser
from arxml_data_extractor.query.data_query import DataQuery
from arxml_data_extractor.query.data_object import DataObject
from arxml_data_extractor.query.data_value import DataValue


class ObjectHandler():

    def __init__(self, parser: AsrParser):
        self.logger = logging.getLogger()
        self.path_handler = PathHandler(parser)

    def handle(self, data_object: DataObject, node: Element = None) -> Union[list, dict]:
        is_not_root = True
        if node is None:
            is_not_root = False
            node = self.path_handler.parser.root

        if is_not_root:
            self.logger.info(f'ObjectHandler - handle DataObject(\'{data_object.name}\')')
        else:
            self.logger.info(f'ObjectHandler - [root] handle DataObject(\'{data_object.name}\')')

        values = []
        elements = self.path_handler.elements_by_path(data_object.path, node)
        for element in tqdm(
                elements,
                desc=f'Handle DataObject(\'{data_object.name}\')',
                disable=is_not_root,
                bar_format="{desc:<70}{percentage:3.0f}% |{bar:70}| {n_fmt:>4}/{total_fmt}"):
            if element is not None:
                self.logger.info(
                    f'ObjectHandler - element found: \'{QName(element).localname}\' at line {element.sourceline - 1}'
                )
                values.append(self.__handle_values(data_object.values, element))

        if not values:
            self.logger.warning(
                f'ObjectHandler - no values found for DataObject(\'{data_object.name}\')')
        else:
            self.logger.info(
                f'ObjectHandler - values found for DataObject(\'{data_object.name}\'): {len(values)}'
            )

        return values[0] if len(values) == 1 else values

    def __handle_values(self, values: List[Union[DataValue, DataObject]], node: Element) -> dict:
        results = {}
        for value in values:
            if isinstance(value, DataObject):
                results[value.name] = self.handle(value, node)
            elif isinstance(value, DataValue):
                results[value.name] = self.__handle_value(value.query, node)
                if results[value.name] is None:
                    self.logger.info(
                        f'ObjectHandler - no value found for DataValue(\'{value.name}\')')
                else:
                    self.logger.info(
                        f'ObjectHandler - value found: DataValue(\'{value.name}\') = \'{results[value.name]}\''
                    )
            else:
                error = f'ObjectHandler - invalid value type ({type(value)}). Value must be of type DataObject or DataValue'
                self.logger.error(error)
                raise TypeError(error)

        return results

    def __handle_value(self, query: DataQuery, node: Element) -> Any:
        if isinstance(query.path, DataQuery.XPath):
            if query.path.is_reference:
                element = self.path_handler.element_by_inline_ref(query.path, node)
            else:
                element = self.path_handler.element_by_xpath(query.path.xpath, node)
        else:
            # DataQuery.Reference isn't allowed on DataValue
            return None

        if element is None:
            return None

        return value_handler.handle(query, element)
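A rough usage sketch of the handler above. The `DataObject`/`DataValue`/`DataQuery` constructor signatures and the ARXML path here are assumptions for illustration, not taken from the record; consult the real query classes before relying on them:

# Assumed constructors and file name -- illustrative only.
queries = DataObject('pdu', '/AR-PACKAGES//I-SIGNAL-I-PDU', [
    DataValue('name', DataQuery(DataQuery.XPath('SHORT-NAME'))),
])
handler = ObjectHandler(AsrParser('example.arxml'))  # 'example.arxml' is hypothetical
result = handler.handle(queries)                     # one dict, or a list of dicts per match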
42.272727
125
0.595968
3,230
0.86828
0
0
0
0
0
0
791
0.212634
8a19876a956cc7df8eee4ce39d6fc5531c4cfc7c
3,401
py
Python
src/api/datamanage/pro/lifecycle/data_trace/data_set_create.py
Chromico/bk-base
be822d9bbee544a958bed4831348185a75604791
[ "MIT" ]
84
2021-06-30T06:20:23.000Z
2022-03-22T03:05:49.000Z
src/api/datamanage/pro/lifecycle/data_trace/data_set_create.py
Chromico/bk-base
be822d9bbee544a958bed4831348185a75604791
[ "MIT" ]
7
2021-06-30T06:21:16.000Z
2022-03-29T07:36:13.000Z
src/api/datamanage/pro/lifecycle/data_trace/data_set_create.py
Chromico/bk-base
be822d9bbee544a958bed4831348185a75604791
[ "MIT" ]
40
2021-06-30T06:21:26.000Z
2022-03-29T12:42:26.000Z
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-BASE 蓝鲸基础平台 available.

Copyright (C) 2021 THL A29 Limited, a Tencent company.  All rights reserved.

BK-BASE 蓝鲸基础平台 is licensed under the MIT License.

License for BK-BASE 蓝鲸基础平台:
--------------------------------------------------------------------

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from copy import deepcopy

from datamanage.pro import exceptions as dm_pro_errors
from datamanage.utils.api import MetaApi
from datamanage.pro.utils.time import utc_to_local, str_to_datetime
from datamanage.pro.lifecycle.models_dict import (
    DATASET_CREATE_MAPPINGS,
    DATASET_CREATE_EVENT_INFO_DICT,
    DataTraceShowType,
    ComplexSearchBackendType,
    DataTraceFinishStatus,
)


def get_dataset_create_info(dataset_id, dataset_type):
    """Fetch the creation-related information for a dataset's data trace

    :param dataset_id: dataset id
    :param dataset_type: dataset type

    :return: creation-related information
    :rtype: list
    """
    # 1) Fetch the creation info from Dgraph
    data_set_create_info_statement = """
    {
        get_dataset_create_info(func: eq(%s, "%s")){created_by created_at}
    }
    """ % (
        DATASET_CREATE_MAPPINGS[dataset_type]['data_set_pk'],
        dataset_id,
    )
    query_result = MetaApi.complex_search(
        {"backend_type": ComplexSearchBackendType.DGRAPH.value, "statement": data_set_create_info_statement}, raw=True
    )
    create_info_ret = query_result['data']['data']['get_dataset_create_info']
    if not (isinstance(create_info_ret, list) and create_info_ret):
        raise dm_pro_errors.GetDataSetCreateInfoError(message_kv={'dataset_id': dataset_id})

    # 2) Build the formatted creation info
    create_trace_dict = deepcopy(DATASET_CREATE_EVENT_INFO_DICT)
    create_trace_dict.update(
        {
            "sub_type": dataset_type,
            "sub_type_alias": DATASET_CREATE_MAPPINGS[dataset_type]['data_set_create_alias'],
            "description": DATASET_CREATE_MAPPINGS[dataset_type]['data_set_create_alias'],
            "created_at": utc_to_local(create_info_ret[0]['created_at']),
            "created_by": create_info_ret[0]['created_by'],
            "show_type": DataTraceShowType.DISPLAY.value,
            "datetime": str_to_datetime(utc_to_local(create_info_ret[0]['created_at'])),
            "status": DataTraceFinishStatus.STATUS,
            "status_alias": DataTraceFinishStatus.STATUS_ALIAS,
        }
    )
    return [create_trace_dict]
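For illustration, this is what the Dgraph statement above renders to once the `%s` placeholders are filled in; the primary-key field and dataset id below are made-up stand-ins for values that really come from `DATASET_CREATE_MAPPINGS`:

# Hypothetical pk field and dataset id -- the real ones come from the mappings dict.
statement = """
{
    get_dataset_create_info(func: eq(%s, "%s")){created_by created_at}
}
""" % ("ResultTable.result_table_id", "591_example_table")
print(statement)  # a complete Dgraph query selecting created_by / created_at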
44.168831
118
0.728021
0
0
0
0
0
0
0
0
2,039
0.576151
8a1ac6639a0c1cc52dd72036d5f1b225c44d7b2f
4,363
py
Python
vectors2.py
shivam13verma/judge-embeddings
9b861319a1240529d25c15799952e32dde2e894e
[ "MIT" ]
null
null
null
vectors2.py
shivam13verma/judge-embeddings
9b861319a1240529d25c15799952e32dde2e894e
[ "MIT" ]
null
null
null
vectors2.py
shivam13verma/judge-embeddings
9b861319a1240529d25c15799952e32dde2e894e
[ "MIT" ]
null
null
null
import locale
import glob
import os
import os.path
import requests
import tarfile
import sys
import re
import gensim
from gensim.models.doc2vec import TaggedDocument
from collections import namedtuple
from gensim.models import Doc2Vec
import gensim.models.doc2vec
from collections import OrderedDict
import multiprocessing
from gensim.test.test_doc2vec import ConcatenatedDoc2Vec
import pickle

reload(sys)
sys.setdefaultencoding("utf-8")

#dirname = '/scratch/ap4608/judge_data'
#locale.setlocale(locale.LC_ALL, 'C')
#
#
## Convert text to lower-case and strip punctuation/symbols from words
#def normalize_text(text):
#    norm_text = text.lower()
#
#    # Replace breaks with spaces
#    norm_text = norm_text.replace('<br />', ' ')
#
#    # Pad punctuation with spaces on both sides
#    for char in ['.', '"', ',', '(', ')', '!', '?', ';', ':']:
#        norm_text = norm_text.replace(char, ' ' + char + ' ')
#
#    return norm_text
#
#
## Concat and normalize test/train data
#folders = os.listdir(dirname)
#alldata = ''
#
#for fol in folders:
#    temp = ''
#    output = fol.replace('/', '-') + '.txt'
#
#    # Is there a better pattern to use?
#    txt_files = glob.glob('/'.join([dirname, fol, '*.txt']))
#
#    for txt in txt_files:
#        with open(txt, 'r') as t:
#            control_chars = [chr(0x85)]
#            t_clean = t.read()
#
#            t_clean = t_clean.replace('\n', ' ')
#            t_clean = re.sub(r'[^\x00-\x7F]+',' ', t_clean)
#
#            for c in control_chars:
#                t_clean = t_clean.replace(c, ' ')
#
#            temp += t_clean
#
#        temp += "\n"
#
#    temp_norm = normalize_text(temp)
#
#    if len(temp_norm) == 1:
#        continue
#
#    with open('/'.join([dirname, output]), 'w') as n:
#        n.write(temp_norm)
#
#    alldata += temp_norm
#
#with open('/'.join([dirname, 'alldata-id.txt']), 'w') as f:
#    for idx, line in enumerate(alldata.splitlines()):
#        num_line = "_*{0} {1}\n".format(idx, line)
#        f.write(num_line)
#
#SentimentDocument = namedtuple('SentimentDocument', 'words tags split sentiment')
#
#alldocs = []  # will hold all docs in original order
#with open(os.path.join(dirname, 'alldata-id.txt')) as alldata:
#    for line_no, line in enumerate(alldata):
#        tokens = gensim.utils.to_unicode(line).split()
#        words = tokens[1:]
#        tags = [line_no]  # `tags = [tokens[0]]` would also work at extra memory cost
#        split = ['train','test','extra','extra'][line_no//25000]  # 25k train, 25k test, 25k extra
#        sentiment = [1.0, 0.0, 1.0, 0.0, None, None, None, None][line_no//12500]  # [12.5K pos, 12.5K neg]*2 then unknown
#        alldocs.append(SentimentDocument(words, tags, split, sentiment))
#
#train_docs = [doc for doc in alldocs if doc.split == 'train']
#test_docs = [doc for doc in alldocs if doc.split == 'test']
#doc_list = alldocs[:]  # for reshuffling per pass
#
#cores = multiprocessing.cpu_count()
#assert gensim.models.doc2vec.FAST_VERSION > -1, "this will be painfully slow otherwise"
#
#simple_models = [
#    # PV-DM w/concatenation - window=5 (both sides) approximates paper's 10-word total window size
#    Doc2Vec(dm=1, dm_concat=1, size=100, window=5, negative=5, hs=0, min_count=2, workers=cores),
#    # PV-DBOW
#    Doc2Vec(dm=0, size=100, negative=5, hs=0, min_count=2, workers=cores),
#    # PV-DM w/average
#    Doc2Vec(dm=1, dm_mean=1, size=100, window=10, negative=5, hs=0, min_count=2, workers=cores),
#]
#
## speed setup by sharing results of 1st model's vocabulary scan
#simple_models[0].build_vocab(alldocs)  # PV-DM/concat requires one special NULL word so it serves as template
#for model in simple_models[1:]:
#    model.reset_from(simple_models[0])
#
#models_by_name = OrderedDict((str(model), model) for model in simple_models)
#
#models_by_name['dbow+dmm'] = ConcatenatedDoc2Vec([simple_models[1], simple_models[2]])
#models_by_name['dbow+dmc'] = ConcatenatedDoc2Vec([simple_models[1], simple_models[0]])
#
## Create a document vector list and save it
#doc_vec_list = [x.docvecs for x in simple_models]

docvecs = pickle.load(open('docvecs.p', 'rb'))

print len(docvecs)
print len(docvecs[0])
print docvecs[0]

for i,x in enumerate(docvecs):
    with open('docvecs_'+str(i)+'.txt','w') as f:
        for vec in x:
            f.write(vec)
            f.write("\n")

# pickle.dump(models_by_name, open('model.p', 'wb'))
31.388489
121
0.651845
0
0
0
0
0
0
0
0
3,628
0.831538
8a1c71c22813d34b18261a3c040c83b4a288d938
1,272
py
Python
caravan_search_engine/test/test_task.py
crest-cassia/caravan
0a8e606e31d2d36a9379bdc00fafe55cf9144da6
[ "MIT" ]
4
2017-12-27T06:04:46.000Z
2018-04-27T04:07:49.000Z
caravan_search_engine/test/test_task.py
crest-cassia/caravan
0a8e606e31d2d36a9379bdc00fafe55cf9144da6
[ "MIT" ]
null
null
null
caravan_search_engine/test/test_task.py
crest-cassia/caravan
0a8e606e31d2d36a9379bdc00fafe55cf9144da6
[ "MIT" ]
null
null
null
import unittest

from caravan.task import Task
from caravan.tables import Tables


class TestRun(unittest.TestCase):

    def setUp(self):
        self.t = Tables.get()
        self.t.clear()

    def test_task(self):
        t = Task(1234, "echo hello world")
        self.assertEqual(t.id(), 1234)
        self.assertEqual(t.is_finished(), False)
        self.assertEqual(t.command(), "echo hello world")
        t._store_result([1.0, 2.0, 3.0], 0, 3, 111, 222)
        self.assertTrue(t.is_finished())
        self.assertEqual(t.rc(), 0)
        self.assertEqual(t.rank(), 3)
        self.assertEqual(t.start_at(), 111)
        self.assertEqual(t.finish_at(), 222)

    def test_create(self):
        for i in range(10):
            t = Task.create("echo %d" % i)
            self.assertEqual(t.id(), i)
            self.assertEqual(t.is_finished(), False)
        self.assertEqual(len(Task.all()), 10)

    def test_all(self):
        tasks = [Task.create("echo %d" % i) for i in range(10)]
        self.assertEqual(Task.all(), tasks)

    def test_find(self):
        tasks = [Task.create("echo %d" % i) for i in range(10)]
        self.assertEqual(Task.find(5).id(), 5)
        self.assertEqual(Task.find(5), tasks[5])


if __name__ == '__main__':
    unittest.main()
30.285714
63
0.589623
1,140
0.896226
0
0
0
0
0
0
73
0.05739
8a1cd65b30b7bbba4f6241ea55e68759c3f56fc4
15,868
py
Python
splash/render_options.py
tashidexiaoL/splashnew
2bbb886bae8fa88c30a4460f41ca940c4b010287
[ "BSD-3-Clause" ]
3,612
2015-01-04T07:22:20.000Z
2022-03-31T07:12:19.000Z
splash/render_options.py
tashidexiaoL/splashnew
2bbb886bae8fa88c30a4460f41ca940c4b010287
[ "BSD-3-Clause" ]
983
2015-01-01T17:54:49.000Z
2022-03-29T05:05:53.000Z
splash/render_options.py
tashidexiaoL/splashnew
2bbb886bae8fa88c30a4460f41ca940c4b010287
[ "BSD-3-Clause" ]
570
2015-01-06T17:48:46.000Z
2022-03-31T12:35:32.000Z
# -*- coding: utf-8 -*-
import os
import json

from splash import defaults
from splash.utils import to_bytes, path_join_secure
from splash.errors import BadOption


class RenderOptions(object):
    """
    Options that control how to render a response.
    """

    _REQUIRED = object()

    def __init__(self, data, max_timeout):
        self.data = data
        self.max_timeout = max_timeout

    @classmethod
    def raise_error(cls, argument, description, type='bad_argument', **kwargs):
        params = {
            'type': type,
            'argument': argument,
            'description': description
        }
        params.update(kwargs)
        raise BadOption(params)

    @classmethod
    def fromrequest(cls, request, max_timeout):
        """
        Initialize options from a Twisted Request.
        """

        # 1. GET / POST data
        data = {key.decode('utf-8'): values[0].decode('utf-8')
                for key, values in request.args.items()}

        if request.method == b'POST':
            content_type = request.getHeader(b'content-type')
            if content_type:
                request.content.seek(0)

                # 2. application/json POST data
                if b'application/json' in content_type:
                    try:
                        content = request.content.read().decode('utf-8')
                        data.update(json.loads(content))
                    except ValueError as e:
                        raise BadOption({
                            'type': 'invalid_json',
                            'description': "Can't decode JSON",
                            'message': str(e),
                        })

                # 3. js_source from application/javascript POST requests
                if b'application/javascript' in content_type:
                    data['js_source'] = request.content.read().decode('utf-8')
                request.content.seek(0)

        data['uid'] = id(request)
        return cls(data, max_timeout)

    def get_expired_args(self, cache):
        """
        Return a list of argument names from load_args
        which can't be loaded
        """
        return cache.get_missing(self.get_load_args().items())

    def save_args_to_cache(self, cache):
        """
        Process save_args and put all values to cache.
        Return a list of (name, key) pairs.
        """
        save_args = self.get_save_args()
        save_values = [self.data.get(name) for name in save_args]
        keys = cache.add_many(save_values)
        return list(zip(save_args, keys))

    def load_cached_args(self, cache):
        load_args = self.get_load_args()
        for name, key in (load_args or {}).items():
            self.data[name] = cache[key]

    def get(self, name, default=_REQUIRED, type=str, range=None):
        value = self.data.get(name)
        if value is not None:
            if type is not None:
                try:
                    value = type(value)
                except ValueError:
                    msg = "Argument %r has a wrong type" % (name,)
                    self.raise_error(name, msg, required_type=type.__name__)
            if range is not None and not (range[0] <= value <= range[1]):
                self.raise_error(name, 'Argument is out of the allowed range',
                                 min=range[0], max=range[1], value=value)
            return value
        elif default is self._REQUIRED:
            self.raise_error(name, 'Required argument is missing: %s' % name,
                             type='argument_required')
        else:
            return default

    def _get_bool(self, name, default=_REQUIRED):
        return self.get(name, default, type=int, range=(0, 1))

    def _get_url(self, name, default=_REQUIRED):
        url = self.get(name, default, type=None)
        if isinstance(url, bytes):
            url = url.decode('utf8')
        return url

    def get_uid(self):
        return self.get('uid')

    def get_url(self):
        return self._get_url("url")

    def get_baseurl(self):
        return self._get_url("baseurl", default=None)

    def get_wait(self):
        return self.get("wait", defaults.WAIT_TIME, type=float,
                        range=(0, self.get_timeout()))

    def get_timeout(self):
        default = min(self.max_timeout, defaults.TIMEOUT)
        return self.get("timeout", default, type=float,
                        range=(0, self.max_timeout))

    def get_resource_timeout(self):
        return self.get("resource_timeout", defaults.RESOURCE_TIMEOUT,
                        type=float, range=(0, 1e6))

    def get_response_body(self):
        return self._get_bool("response_body", defaults.RESPONSE_BODY_ENABLED)

    def get_request_body(self):
        return self._get_bool("request_body", defaults.REQUEST_BODY_ENABLED)

    def get_images(self):
        return self._get_bool("images", defaults.AUTOLOAD_IMAGES)

    def get_proxy(self):
        return self.get("proxy", default=None)

    def get_js_source(self):
        return self.get("js_source", default=None)

    def get_width(self):
        return self.get("width", None, type=int,
                        range=(1, defaults.MAX_WIDTH))

    def get_height(self):
        return self.get("height", None, type=int,
                        range=(1, defaults.MAX_HEIGTH))

    def get_scale_method(self):
        scale_method = self.get("scale_method", defaults.IMAGE_SCALE_METHOD)
        allowed_scale_methods = ['raster', 'vector']
        if scale_method not in allowed_scale_methods:
            self.raise_error(
                argument='scale_method',
                description="Invalid 'scale_method': %s" % scale_method,
                allowed=allowed_scale_methods,
                received=scale_method,
            )
        return scale_method

    def get_quality(self):
        return self.get("quality", defaults.JPEG_QUALITY, type=int,
                        range=(0, 100))

    def get_http_method(self):
        method = self.get("http_method", "GET")
        if method.upper() not in ["POST", "GET"]:
            self.raise_error("http_method",
                             "Unsupported HTTP method {}".format(method))
        return method

    def get_body(self):
        body = self.get("body", None, to_bytes)
        method = self.get("http_method", "GET").upper()
        if method == 'GET' and body:
            self.raise_error("body", "GET request should not have a body")
        return body

    def get_render_all(self, wait=None):
        result = self._get_bool("render_all", False)
        if result == 1 and wait == 0:
            self.raise_error("render_all",
                             "Pass non-zero 'wait' to render full webpage")
        return result

    def get_lua_source(self):
        return self.get("lua_source")

    def get_js_profile(self, js_profiles_path):
        js_profile = self.get("js", default=None)
        if not js_profile:
            return js_profile

        if js_profiles_path is None:
            self.raise_error('js',
                             'Javascript profiles are not enabled on server')

        try:
            profile_dir = path_join_secure(js_profiles_path, js_profile)
        except ValueError as e:
            # security check fails
            print(e)
            self.raise_error('js', 'Javascript profile does not exist')

        if not os.path.isdir(profile_dir):
            self.raise_error('js', 'Javascript profile does not exist')

        return profile_dir

    def get_headers(self):
        headers = self.get("headers", default=None, type=None)

        if headers is None:
            return headers

        if not isinstance(headers, (list, tuple, dict)):
            self.raise_error(
                argument='headers',
                description="'headers' must be either a JSON array of "
                            "(name, value) pairs or a JSON object"
            )

        if isinstance(headers, (list, tuple)):
            for el in headers:
                string_only = all(isinstance(e, str) for e in el)
                if not (isinstance(el, (list, tuple)) and
                        len(el) == 2 and string_only):
                    self.raise_error(
                        argument='headers',
                        description="'headers' must be either a JSON array of "
                                    "(name, value) pairs or a JSON object"
                    )

        return headers

    def get_save_args(self):
        save_args = self.get("save_args", default=None, type=None)
        if save_args is None:
            return []

        if isinstance(save_args, str):
            # comma-separated string
            save_args = save_args.split(',')

        if not isinstance(save_args, list):
            self.raise_error(
                argument="save_args",
                description="'save_args' should be either a comma-separated "
                            "string or a JSON array with argument names",
            )

        # JSON array
        if not all(isinstance(a, str) for a in save_args):
            self.raise_error(
                argument="save_args",
                description="'save_args' should be a list of strings",
            )
        return save_args

    def get_load_args(self):
        load_args = self.get("load_args", default=None, type=None)
        if load_args is None:
            return {}

        if isinstance(load_args, str):
            try:
                load_args = dict(
                    kv.split("=", 1) for kv in load_args.split(';')
                )
            except ValueError:
                self.raise_error(
                    argument="load_args",
                    description="'load_args' string value is not a "
                                "semicolon-separated list of name=hash pairs"
                )

        if not isinstance(load_args, dict):
            self.raise_error(
                argument="load_args",
                description="'load_args' should be either a JSON object with "
                            "argument hashes or a semicolon-separated list "
                            "of name=hash pairs"
            )

        return load_args

    def get_viewport(self, wait=None):
        viewport = self.get("viewport", defaults.VIEWPORT_SIZE)

        if viewport == 'full':
            if wait == 0:
                self.raise_error("viewport",
                                 "Pass non-zero 'wait' to render full webpage")
        else:
            try:
                validate_size_str(viewport)
            except ValueError as e:
                self.raise_error("viewport", str(e))
        return viewport

    def get_filters(self, pool=None, adblock_rules=None):
        filter_names = self.get('filters', '')
        filter_names = [f for f in filter_names.split(',') if f]

        if pool is None and adblock_rules is None:
            # skip validation
            return filter_names

        if not filter_names:
            return filter_names

        if pool is not None:
            adblock_rules = pool.network_manager_factory.adblock_rules
            if adblock_rules is None:
                self.raise_error(
                    "filters",
                    "Invalid filter names: %s" % (filter_names,)
                )

        if adblock_rules is not None:
            unknown_filters = adblock_rules.get_unknown_filters(filter_names)
            if unknown_filters:
                self.raise_error(
                    "filters",
                    "Invalid filter names: %s" % (unknown_filters,)
                )

        return filter_names

    def get_allowed_domains(self):
        allowed_domains = self.get("allowed_domains", default=None)
        if allowed_domains is not None:
            return allowed_domains.split(',')

    def get_allowed_content_types(self):
        content_types = self.get("allowed_content_types", default=['*'])
        if isinstance(content_types, str):
            content_types = list(filter(None, content_types.split(',')))
        return content_types

    def get_forbidden_content_types(self):
        content_types = self.get("forbidden_content_types", default=[])
        if isinstance(content_types, str):
            content_types = list(filter(None, content_types.split(',')))
        return content_types

    def get_html5_media(self):
        return self._get_bool("html5_media", defaults.HTML5_MEDIA_ENABLED)

    def get_engine(self, browser_engines_enabled=None):
        engine = self.get("engine", default="webkit", type=str)
        if engine not in {"webkit", "chromium"}:
            self.raise_error("engine", "Unknown render engine {}".format(engine))
        if browser_engines_enabled is not None:
            if engine not in browser_engines_enabled:
                self.raise_error("engine", "Disabled render engine {}".format(engine))
        return engine

    def get_http2(self):
        engine = self.get_engine()
        if self.get_engine() == "webkit":
            default = defaults.WEBKIT_HTTP2_ENABLED
        else:
            assert engine == 'chromium'
            default = defaults.CHROMIUM_HTTP2_ENABLED
        return self._get_bool("http2", default)

    def get_common_params(self, js_profiles_path):
        wait = self.get_wait()
        return {
            'url': self.get_url(),
            'baseurl': self.get_baseurl(),
            'wait': wait,
            'resource_timeout': self.get_resource_timeout(),
            'viewport': self.get_viewport(wait),
            'render_all': self.get_render_all(wait),
            'images': self.get_images(),
            'headers': self.get_headers(),
            'proxy': self.get_proxy(),
            'js_profile': self.get_js_profile(js_profiles_path),
            'js_source': self.get_js_source(),
            'http_method': self.get_http_method(),
            'body': self.get_body(),
            'html5_media': self.get_html5_media(),
            'http2': self.get_http2(),
            # 'lua': self.get_lua(),
        }

    def get_image_params(self):
        return {
            'width': self.get_width(),
            'height': self.get_height(),
            'scale_method': self.get_scale_method()
        }

    def get_png_params(self):
        return self.get_image_params()

    def get_jpeg_params(self):
        params = {'quality': self.get_quality()}
        params.update(self.get_image_params())
        return params

    def get_include_params(self):
        return dict(
            html=self._get_bool("html", defaults.DO_HTML),
            iframes=self._get_bool("iframes", defaults.DO_IFRAMES),
            png=self._get_bool("png", defaults.DO_PNG),
            jpeg=self._get_bool("jpeg", defaults.DO_JPEG),
            script=self._get_bool("script", defaults.SHOW_SCRIPT),
            console=self._get_bool("console", defaults.SHOW_CONSOLE),
            history=self._get_bool("history", defaults.SHOW_HISTORY),
            har=self._get_bool("har", defaults.SHOW_HAR),
        )


def validate_size_str(size_str):
    """
    Validate size string in WxH format.

    Can be used to validate both viewport and window size strings.
    Does not special-case ``'full'`` viewport.
    Raises ``ValueError`` if anything goes wrong.

    :param size_str: string to validate
    """
    max_width = defaults.VIEWPORT_MAX_WIDTH
    max_heigth = defaults.VIEWPORT_MAX_HEIGTH
    max_area = defaults.VIEWPORT_MAX_AREA
    try:
        w, h = map(int, size_str.split('x'))
    except ValueError:
        raise ValueError("Invalid viewport format: %s" % size_str)
    else:
        if not ((0 < w <= max_width) and (0 < h <= max_heigth) and
                (w * h < max_area)):
            raise ValueError("Viewport (%dx%d, area=%d) is out of range (%dx%d, area=%d)" %
                             (w, h, w * h, max_width, max_heigth, max_area))
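`validate_size_str` is self-contained apart from the `defaults` limits, so its behavior is easy to probe from within a splash checkout (sizes below are sample inputs):

validate_size_str('1024x768')   # returns None: within the configured limits
try:
    validate_size_str('full')   # not special-cased here, so parsing fails
except ValueError as e:
    print(e)                    # "Invalid viewport format: full"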
35.578475
91
0.568188
14,839
0.935153
0
0
1,600
0.100832
0
0
3,035
0.191265
8a1d8b11d101fed7641300b2c4ef25ddc8a61c8b
362
py
Python
syntax/func.py
sangumee/Opentutorials-Webn-Python
9f813f8f342ea99ffee6e31f363f175fa023c489
[ "MIT" ]
null
null
null
syntax/func.py
sangumee/Opentutorials-Webn-Python
9f813f8f342ea99ffee6e31f363f175fa023c489
[ "MIT" ]
null
null
null
syntax/func.py
sangumee/Opentutorials-Webn-Python
9f813f8f342ea99ffee6e31f363f175fa023c489
[ "MIT" ]
null
null
null
# code....
a = 1
b = 2
c = 3
s = a+b+c
r = s/3
print(r)

# code....
'''
def average():
    a=1
    b=2
    c=3
    s=a+b+c
    r=s/3
    print(r)
average()
'''

'''
#input
#parameter
#argument
def average(a,b,c):
    s=a+b+c
    r=s/3
    print(r)
average(10,20,30)
'''

def average(a, b, c):
    s = a+b+c
    r = s/3
    return r

print(average(10, 20, 30))
9.05
26
0.466851
0
0
0
0
0
0
0
0
219
0.604972
8a1dc389d59f49c155580d9fe0bb5e5e94a7281e
1,718
py
Python
tools/evolution/codingSnps_filter.py
ramezrawas/galaxy-1
c03748dd49c060a68d07bce56eae33e0ba154414
[ "CC-BY-3.0" ]
1
2019-11-03T11:45:43.000Z
2019-11-03T11:45:43.000Z
tools/evolution/codingSnps_filter.py
ramezrawas/galaxy-1
c03748dd49c060a68d07bce56eae33e0ba154414
[ "CC-BY-3.0" ]
7
2016-12-07T22:19:37.000Z
2019-01-30T15:04:26.000Z
tools/evolution/codingSnps_filter.py
ramezrawas/galaxy-1
c03748dd49c060a68d07bce56eae33e0ba154414
[ "CC-BY-3.0" ]
null
null
null
#!/usr/bin/env python

# runs after the job (and after the default post-filter)

from galaxy.tools.parameters import DataToolParameter

# Older py compatibility
try:
    set()
except:
    from sets import Set as set


def validate_input( trans, error_map, param_values, page_param_map ):
    dbkeys = set()
    data_param_names = set()
    data_params = 0
    for name, param in page_param_map.items():
        if isinstance( param, DataToolParameter ):
            # for each dataset parameter
            if param_values.get(name, None) is not None:
                dbkeys.add( param_values[name].dbkey )
                data_params += 1
                # check meta data
                try:
                    param = param_values[name]
                    int( param.metadata.startCol )
                    int( param.metadata.endCol )
                    int( param.metadata.chromCol )
                    if param.metadata.strandCol is not None:
                        int( param.metadata.strandCol )
                except:
                    error_msg = ("The attributes of this dataset are not properly set. "
                                 "Click the pencil icon in the history item to set the chrom, start, end and strand columns.")
                    error_map[name] = error_msg
            data_param_names.add( name )
    if len( dbkeys ) > 1:
        for name in data_param_names:
            error_map[name] = "All datasets must belong to same genomic build, " \
                "this dataset is linked to build '%s'" % param_values[name].dbkey
    if data_params != len(data_param_names):
        for name in data_param_names:
            error_map[name] = "A dataset of the appropriate type is required"
40.904762
117
0.586147
0
0
0
0
0
0
0
0
428
0.249127
8a1e3dc4bc93e35762cbfc644a38e3db21861cda
5,290
py
Python
qa/rpc-tests/listtransactions.py
DeftNerd/bitcoinclassic
afff0155e0dd528145818c43f259743f54966d95
[ "MIT" ]
8
2016-03-31T18:47:31.000Z
2021-09-30T05:42:32.000Z
qa/rpc-tests/listtransactions.py
DeftNerd/bitcoinclassic
afff0155e0dd528145818c43f259743f54966d95
[ "MIT" ]
1
2017-10-06T08:55:30.000Z
2017-10-06T08:55:30.000Z
qa/rpc-tests/listtransactions.py
DeftNerd/bitcoinclassic
afff0155e0dd528145818c43f259743f54966d95
[ "MIT" ]
2
2020-02-03T03:38:10.000Z
2021-09-30T05:42:36.000Z
#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# Exercise the listtransactions API

from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *


def check_array_result(object_array, to_match, expected):
    """
    Pass in array of JSON objects, a dictionary with key/value pairs
    to match against, and another dictionary with expected key/value
    pairs.
    """
    num_matched = 0
    for item in object_array:
        all_match = True
        for key,value in to_match.items():
            if item[key] != value:
                all_match = False
        if not all_match:
            continue
        for key,value in expected.items():
            if item[key] != value:
                raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
        num_matched = num_matched+1
    if num_matched == 0:
        raise AssertionError("No objects matched %s"%(str(to_match)))


class ListTransactionsTest(BitcoinTestFramework):

    def run_test(self):
        # Simple send, 0 to 1:
        txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
        self.sync_all()
        check_array_result(self.nodes[0].listtransactions(),
                           {"txid":txid},
                           {"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":0})
        check_array_result(self.nodes[1].listtransactions(),
                           {"txid":txid},
                           {"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":0})

        # mine a block, confirmations should change:
        self.nodes[0].generate(1)
        self.sync_all()
        check_array_result(self.nodes[0].listtransactions(),
                           {"txid":txid},
                           {"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":1})
        check_array_result(self.nodes[1].listtransactions(),
                           {"txid":txid},
                           {"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":1})

        # send-to-self:
        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
        check_array_result(self.nodes[0].listtransactions(),
                           {"txid":txid, "category":"send"},
                           {"amount":Decimal("-0.2")})
        check_array_result(self.nodes[0].listtransactions(),
                           {"txid":txid, "category":"receive"},
                           {"amount":Decimal("0.2")})

        # sendmany from node1: twice to self, twice to node2:
        send_to = { self.nodes[0].getnewaddress() : 0.11,
                    self.nodes[1].getnewaddress() : 0.22,
                    self.nodes[0].getaccountaddress("from1") : 0.33,
                    self.nodes[1].getaccountaddress("toself") : 0.44 }
        txid = self.nodes[1].sendmany("", send_to)
        self.sync_all()
        check_array_result(self.nodes[1].listtransactions(),
                           {"category":"send","amount":Decimal("-0.11")},
                           {"txid":txid} )
        check_array_result(self.nodes[0].listtransactions(),
                           {"category":"receive","amount":Decimal("0.11")},
                           {"txid":txid} )
        check_array_result(self.nodes[1].listtransactions(),
                           {"category":"send","amount":Decimal("-0.22")},
                           {"txid":txid} )
        check_array_result(self.nodes[1].listtransactions(),
                           {"category":"receive","amount":Decimal("0.22")},
                           {"txid":txid} )
        check_array_result(self.nodes[1].listtransactions(),
                           {"category":"send","amount":Decimal("-0.33")},
                           {"txid":txid} )
        check_array_result(self.nodes[0].listtransactions(),
                           {"category":"receive","amount":Decimal("0.33")},
                           {"txid":txid, "account" : "from1"} )
        check_array_result(self.nodes[1].listtransactions(),
                           {"category":"send","amount":Decimal("-0.44")},
                           {"txid":txid, "account" : ""} )
        check_array_result(self.nodes[1].listtransactions(),
                           {"category":"receive","amount":Decimal("0.44")},
                           {"txid":txid, "account" : "toself"} )

        multisig = self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
        self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True)
        txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
        self.nodes[1].generate(1)
        self.sync_all()
        assert(len(self.nodes[0].listtransactions("watchonly", 100, 0, False)) == 0)
        check_array_result(self.nodes[0].listtransactions("watchonly", 100, 0, True),
                           {"category":"receive","amount":Decimal("0.1")},
                           {"txid":txid, "account" : "watchonly"} )


if __name__ == '__main__':
    ListTransactionsTest().main()
48.53211
105
0.542722
4,110
0.776938
0
0
0
0
0
0
1,398
0.264272
8a1ef1c625b2d34cef5abbf769654ee6310e0334
25,352
py
Python
salt/modules/mount.py
aletourneau/salt
d7013a2f64eb4b79592220d76274bc5dde609e08
[ "Apache-2.0" ]
null
null
null
salt/modules/mount.py
aletourneau/salt
d7013a2f64eb4b79592220d76274bc5dde609e08
[ "Apache-2.0" ]
null
null
null
salt/modules/mount.py
aletourneau/salt
d7013a2f64eb4b79592220d76274bc5dde609e08
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*-
'''
Salt module to manage unix mounts and the fstab file
'''
from __future__ import absolute_import

# Import python libs
import os
import re
import logging

# Import salt libs
import salt.utils
from salt._compat import string_types
from salt.utils import which as _which
from salt.exceptions import CommandNotFoundError, CommandExecutionError

# Set up logger
log = logging.getLogger(__name__)

# Define the module's virtual name
__virtualname__ = 'mount'


def __virtual__():
    '''
    Only load on POSIX-like systems
    '''
    # Disable on Windows, a specific file module exists:
    if salt.utils.is_windows():
        return False
    return True


def _list_mounts():
    ret = {}
    if __grains__['os'] in ['MacOS', 'Darwin']:
        mounts = __salt__['cmd.run_stdout']('mount')
    else:
        mounts = __salt__['cmd.run_stdout']('mount -l')

    for line in mounts.split('\n'):
        comps = re.sub(r"\s+", " ", line).split()
        ret[comps[2]] = comps[0]
    return ret


def _active_mountinfo(ret):
    _list = _list_mounts()
    filename = '/proc/self/mountinfo'
    if not os.access(filename, os.R_OK):
        msg = 'File not readable {0}'
        raise CommandExecutionError(msg.format(filename))

    blkid_info = __salt__['disk.blkid']()

    with salt.utils.fopen(filename) as ifile:
        for line in ifile:
            comps = line.split()
            device = comps[2].split(':')
            device_name = comps[8]
            device_uuid = None
            if device_name:
                device_uuid = blkid_info.get(device_name, {}).get('UUID')
                device_uuid = device_uuid and device_uuid.lower()
            ret[comps[4]] = {'mountid': comps[0],
                             'parentid': comps[1],
                             'major': device[0],
                             'minor': device[1],
                             'root': comps[3],
                             'opts': comps[5].split(','),
                             'fstype': comps[7],
                             'device': device_name,
                             'alt_device': _list.get(comps[4], None),
                             'superopts': comps[9].split(','),
                             'device_uuid': device_uuid}
    return ret


def _active_mounts(ret):
    '''
    List active mounts on Linux systems
    '''
    _list = _list_mounts()
    filename = '/proc/self/mounts'
    if not os.access(filename, os.R_OK):
        msg = 'File not readable {0}'
        raise CommandExecutionError(msg.format(filename))

    with salt.utils.fopen(filename) as ifile:
        for line in ifile:
            comps = line.split()
            ret[comps[1]] = {'device': comps[0],
                             'alt_device': _list.get(comps[1], None),
                             'fstype': comps[2],
                             'opts': comps[3].split(',')}
    return ret


def _active_mounts_freebsd(ret):
    '''
    List active mounts on FreeBSD systems
    '''
    for line in __salt__['cmd.run_stdout']('mount -p').split('\n'):
        comps = re.sub(r"\s+", " ", line).split()
        ret[comps[1]] = {'device': comps[0],
                         'fstype': comps[2],
                         'opts': comps[3].split(',')}
    return ret


def _active_mounts_solaris(ret):
    '''
    List active mounts on Solaris systems
    '''
    for line in __salt__['cmd.run_stdout']('mount -v').split('\n'):
        comps = re.sub(r"\s+", " ", line).split()
        ret[comps[2]] = {'device': comps[0],
                         'fstype': comps[4],
                         'opts': comps[5].split('/')}
    return ret


def _active_mounts_openbsd(ret):
    '''
    List active mounts on OpenBSD systems
    '''
    for line in __salt__['cmd.run_stdout']('mount -v').split('\n'):
        comps = re.sub(r"\s+", " ", line).split()
        nod = __salt__['cmd.run_stdout']('ls -l {0}'.format(comps[0]))
        nod = ' '.join(nod.split()).split(" ")
        parens = re.findall(r'\((.*?)\)', line, re.DOTALL)
        ret[comps[3]] = {'device': comps[0],
                         'fstype': comps[5],
                         'opts': parens[1].split(", "),
                         'major': str(nod[4].strip(",")),
                         'minor': str(nod[5]),
                         'device_uuid': parens[0]}
    return ret


def _active_mounts_darwin(ret):
    '''
    List active mounts on Mac OS systems
    '''
    for line in __salt__['cmd.run_stdout']('mount').split('\n'):
        comps = re.sub(r"\s+", " ", line).split()
        parens = re.findall(r'\((.*?)\)', line, re.DOTALL)[0].split(", ")
        ret[comps[2]] = {'device': comps[0],
                         'fstype': parens[0],
                         'opts': parens[1:]}
    return ret


def active(extended=False):
    '''
    List the active mounts.

    CLI Example:

    .. code-block:: bash

        salt '*' mount.active
    '''
    ret = {}
    if __grains__['os'] == 'FreeBSD':
        _active_mounts_freebsd(ret)
    elif __grains__['os'] == 'Solaris':
        _active_mounts_solaris(ret)
    elif __grains__['os'] == 'OpenBSD':
        _active_mounts_openbsd(ret)
    elif __grains__['os'] in ['MacOS', 'Darwin']:
        _active_mounts_darwin(ret)
    else:
        if extended:
            try:
                _active_mountinfo(ret)
            except CommandExecutionError:
                _active_mounts(ret)
        else:
            _active_mounts(ret)
    return ret


def fstab(config='/etc/fstab'):
    '''
    List the contents of the fstab

    CLI Example:

    .. code-block:: bash

        salt '*' mount.fstab
    '''
    ret = {}
    if not os.path.isfile(config):
        return ret
    with salt.utils.fopen(config) as ifile:
        for line in ifile:
            if line.startswith('#'):
                # Commented
                continue
            if not line.strip():
                # Blank line
                continue
            comps = line.split()
            if len(comps) != 6:
                # Invalid entry
                continue
            ret[comps[1]] = {'device': comps[0],
                             'fstype': comps[2],
                             'opts': comps[3].split(','),
                             'dump': comps[4],
                             'pass': comps[5]}
    return ret


def rm_fstab(name, device, config='/etc/fstab'):
    '''
    Remove the mount point from the fstab

    CLI Example:

    .. code-block:: bash

        salt '*' mount.rm_fstab /mnt/foo
    '''
    contents = fstab(config)
    if name not in contents:
        return True
    # The entry is present, get rid of it
    lines = []
    try:
        with salt.utils.fopen(config, 'r') as ifile:
            for line in ifile:
                if line.startswith('#'):
                    # Commented
                    lines.append(line)
                    continue
                if not line.strip():
                    # Blank line
                    lines.append(line)
                    continue
                comps = line.split()
                if len(comps) != 6:
                    # Invalid entry
                    lines.append(line)
                    continue
                comps = line.split()
                if device:
                    if comps[1] == name and comps[0] == device:
                        continue
                else:
                    if comps[1] == name:
                        continue
                lines.append(line)
    except (IOError, OSError) as exc:
        msg = "Couldn't read from {0}: {1}"
        raise CommandExecutionError(msg.format(config, str(exc)))

    try:
        with salt.utils.fopen(config, 'w+') as ofile:
            ofile.writelines(lines)
    except (IOError, OSError) as exc:
        msg = "Couldn't write to {0}: {1}"
        raise CommandExecutionError(msg.format(config, str(exc)))
    return True


def set_fstab(
        name,
        device,
        fstype,
        opts='defaults',
        dump=0,
        pass_num=0,
        config='/etc/fstab',
        test=False,
        **kwargs):
    '''
    Verify that this mount is represented in the fstab, change the mount
    to match the data passed, or add the mount if it is not present.

    CLI Example:

    .. code-block:: bash

        salt '*' mount.set_fstab /mnt/foo /dev/sdz1 ext4
    '''
    # Fix the opts type if it is a list
    if isinstance(opts, list):
        opts = ','.join(opts)
    lines = []
    change = False
    present = False

    if not os.path.isfile(config):
        raise CommandExecutionError('Bad config file "{0}"'.format(config))

    try:
        with salt.utils.fopen(config, 'r') as ifile:
            for line in ifile:
                if line.startswith('#'):
                    # Commented
                    lines.append(line)
                    continue
                if not line.strip():
                    # Blank line
                    lines.append(line)
                    continue
                comps = line.split()
                if len(comps) != 6:
                    # Invalid entry
                    lines.append(line)
                    continue
                if comps[1] == name or comps[0] == device:
                    # check to see if there are changes
                    # and fix them if there are any
                    present = True
                    if comps[0] != device:
                        change = True
                        comps[0] = device
                    if comps[1] != name:
                        change = True
                        comps[1] = name
                    if comps[2] != fstype:
                        change = True
                        comps[2] = fstype
                    if comps[3] != opts:
                        change = True
                        comps[3] = opts
                    if comps[4] != str(dump):
                        change = True
                        comps[4] = str(dump)
                    if comps[5] != str(pass_num):
                        change = True
                        comps[5] = str(pass_num)
                    if change:
                        log.debug(
                            'fstab entry for mount point {0} needs to be '
                            'updated'.format(name)
                        )
                        newline = (
                            '{0}\t\t{1}\t{2}\t{3}\t{4} {5}\n'.format(
                                device, name, fstype, opts, dump, pass_num
                            )
                        )
                        lines.append(newline)
                else:
                    lines.append(line)
    except (IOError, OSError) as exc:
        msg = 'Couldn\'t read from {0}: {1}'
        raise CommandExecutionError(msg.format(config, str(exc)))

    if change:
        if not salt.utils.test_mode(test=test, **kwargs):
            try:
                with salt.utils.fopen(config, 'w+') as ofile:
                    # The line was changed, commit it!
                    ofile.writelines(lines)
            except (IOError, OSError):
                msg = 'File not writable {0}'
                raise CommandExecutionError(msg.format(config))
        return 'change'

    if not change:
        if present:
            # The right entry is already here
            return 'present'
        else:
            if not salt.utils.test_mode(test=test, **kwargs):
                # The entry is new, add it to the end of the fstab
                newline = '{0}\t\t{1}\t{2}\t{3}\t{4} {5}\n'.format(device,
                                                                   name,
                                                                   fstype,
                                                                   opts,
                                                                   dump,
                                                                   pass_num)
                lines.append(newline)
                try:
                    with salt.utils.fopen(config, 'w+') as ofile:
                        # The line was changed, commit it!
                        ofile.writelines(lines)
                except (IOError, OSError):
                    raise CommandExecutionError(
                        'File not writable {0}'.format(
                            config
                        )
                    )
    return 'new'


def rm_automaster(name, device, config='/etc/auto_salt'):
    '''
    Remove the mount point from the auto_master

    CLI Example:

    .. code-block:: bash

        salt '*' mount.rm_automaster /mnt/foo
    '''
    contents = automaster(config)
    if name not in contents:
        return True
    # The entry is present, get rid of it
    lines = []
    try:
        with salt.utils.fopen(config, 'r') as ifile:
            for line in ifile:
                if line.startswith('#'):
                    # Commented
                    lines.append(line)
                    continue
                if not line.strip():
                    # Blank line
                    lines.append(line)
                    continue
                comps = line.split()
                if len(comps) != 3:
                    # Invalid entry
                    lines.append(line)
                    continue
                comps = line.split()
                prefix = "/.."
                name_chk = comps[0].replace(prefix, "")
                device_fmt = comps[2].split(":")
                if device:
                    if name_chk == name and device_fmt[1] == device:
                        continue
                else:
                    if name_chk == name:
                        continue
                lines.append(line)
    except (IOError, OSError) as exc:
        msg = "Couldn't read from {0}: {1}"
        raise CommandExecutionError(msg.format(config, str(exc)))

    try:
        with salt.utils.fopen(config, 'w+') as ofile:
            ofile.writelines(lines)
    except (IOError, OSError) as exc:
        msg = "Couldn't write to {0}: {1}"
        raise CommandExecutionError(msg.format(config, str(exc)))

    # Update automount
    __salt__['cmd.run']('automount -cv')
    return True


def set_automaster(
        name,
        device,
        fstype,
        opts='',
        config='/etc/auto_salt',
        test=False,
        **kwargs):
    '''
    Verify that this mount is represented in the auto_salt, change the mount
    to match the data passed, or add the mount if it is not present.

    CLI Example:

    .. code-block:: bash

        salt '*' mount.set_automaster /mnt/foo /dev/sdz1 ext4
    '''
    # Fix the opts type if it is a list
    if isinstance(opts, list):
        opts = ','.join(opts)
    lines = []
    change = False
    present = False
    automaster_file = "/etc/auto_master"

    if not os.path.isfile(config):
        __salt__['file.touch'](config)
        __salt__['file.append'](automaster_file, "/-\t\t\t{0}".format(config))

    name = "/..{0}".format(name)
    device_fmt = "{0}:{1}".format(fstype, device)
    type_opts = "-fstype={0},{1}".format(fstype, opts)

    if fstype == 'smbfs':
        device_fmt = device_fmt.replace(fstype, "")

    try:
        with salt.utils.fopen(config, 'r') as ifile:
            for line in ifile:
                if line.startswith('#'):
                    # Commented
                    lines.append(line)
                    continue
                if not line.strip():
                    # Blank line
                    lines.append(line)
                    continue
                comps = line.split()
                if len(comps) != 3:
                    # Invalid entry
                    lines.append(line)
                    continue
                if comps[0] == name or comps[2] == device_fmt:
                    # check to see if there are changes
                    # and fix them if there are any
                    present = True
                    if comps[0] != name:
                        change = True
                        comps[0] = name
                    if comps[1] != type_opts:
                        change = True
                        comps[1] = type_opts
                    if comps[2] != device_fmt:
                        change = True
                        comps[2] = device_fmt
                    if change:
                        log.debug(
                            'auto_master entry for mount point {0} needs to be '
                            'updated'.format(name)
                        )
                        newline = (
                            '{0}\t{1}\t{2}\n'.format(
                                name, type_opts, device_fmt)
                        )
                        lines.append(newline)
                else:
                    lines.append(line)
    except (IOError, OSError) as exc:
        msg = 'Couldn\'t read from {0}: {1}'
        raise CommandExecutionError(msg.format(config, str(exc)))

    if change:
        if not salt.utils.test_mode(test=test, **kwargs):
            try:
                with salt.utils.fopen(config, 'w+') as ofile:
                    # The line was changed, commit it!
                    ofile.writelines(lines)
            except (IOError, OSError):
                msg = 'File not writable {0}'
                raise CommandExecutionError(msg.format(config))
        return 'change'

    if not change:
        if present:
            # The right entry is already here
            return 'present'
        else:
            if not salt.utils.test_mode(test=test, **kwargs):
                # The entry is new, add it to the end of the fstab
                newline = (
                    '{0}\t{1}\t{2}\n'.format(
                        name, type_opts, device_fmt)
                )
                lines.append(newline)
                try:
                    with salt.utils.fopen(config, 'w+') as ofile:
                        # The line was changed, commit it!
                        ofile.writelines(lines)
                except (IOError, OSError):
                    raise CommandExecutionError(
                        'File not writable {0}'.format(
                            config
                        )
                    )
    return 'new'


def automaster(config='/etc/auto_salt'):
    '''
    List the contents of the fstab

    CLI Example:

    .. code-block:: bash

        salt '*' mount.fstab
    '''
    ret = {}
    if not os.path.isfile(config):
        return ret
    with salt.utils.fopen(config) as ifile:
        for line in ifile:
            if line.startswith('#'):
                # Commented
                continue
            if not line.strip():
                # Blank line
                continue
            comps = line.split()
            if len(comps) != 3:
                # Invalid entry
                continue
            prefix = "/.."
            name = comps[0].replace(prefix, "")
            device_fmt = comps[2].split(":")
            opts = comps[1].split(',')

            ret[name] = {'device': device_fmt[1],
                         'fstype': opts[0],
                         'opts': opts[1:]}
    return ret


def mount(name, device, mkmnt=False, fstype='', opts='defaults', user=None):
    '''
    Mount a device

    CLI Example:

    .. code-block:: bash

        salt '*' mount.mount /mnt/foo /dev/sdz1 True
    '''
    # Darwin doesn't expect defaults when mounting without other options
    if 'defaults' in opts and __grains__['os'] in ['MacOS', 'Darwin']:
        opts = None

    if isinstance(opts, string_types):
        opts = opts.split(',')

    if not os.path.exists(name) and mkmnt:
        __salt__['file.mkdir'](name=name, user=user)

    args = ''
    if opts is not None:
        lopts = ','.join(opts)
        args = '-o {0}'.format(lopts)
    if fstype:
        args += ' -t {0}'.format(fstype)
    cmd = 'mount {0} {1} {2} '.format(args, device, name)
    out = __salt__['cmd.run_all'](cmd, runas=user)
    if out['retcode']:
        return out['stderr']
    return True


def remount(name, device, mkmnt=False, fstype='', opts='defaults', user=None):
    '''
    Attempt to remount a device, if the device is not already mounted, mount
    is called

    CLI Example:

    .. code-block:: bash

        salt '*' mount.remount /mnt/foo /dev/sdz1 True
    '''
    force_mount = False
    if __grains__['os'] in ['MacOS', 'Darwin']:
        if opts == 'defaults':
            opts = 'noowners'
        if fstype == 'smbfs':
            force_mount = True

    if isinstance(opts, string_types):
        opts = opts.split(',')
    mnts = active()
    if name in mnts:
        # The mount point is mounted, attempt to remount it with the given data
        if 'remount' not in opts and __grains__['os'] not in ['OpenBSD', 'MacOS', 'Darwin']:
            opts.append('remount')
        if force_mount:
            # We need to force the mount but first we should unmount
            umount(name, device, user=user)
        lopts = ','.join(opts)
        args = '-o {0}'.format(lopts)
        if fstype:
            args += ' -t {0}'.format(fstype)
        if __grains__['os'] not in ['OpenBSD', 'MacOS', 'Darwin'] or force_mount:
            cmd = 'mount {0} {1} {2} '.format(args, device, name)
        else:
            cmd = 'mount -u {0} {1} {2} '.format(args, device, name)
        out = __salt__['cmd.run_all'](cmd, runas=user)
        if out['retcode']:
            return out['stderr']
        return True
    # Mount a filesystem that isn't already
    return mount(name, device, mkmnt, fstype, opts, user=user)


def umount(name, device=None, user=None):
    '''
    Attempt to unmount a device by specifying the directory it is mounted on

    CLI Example:

    .. code-block:: bash

        salt '*' mount.umount /mnt/foo

    .. versionadded:: Lithium

        salt '*' mount.umount /mnt/foo /dev/xvdc1
    '''
    mnts = active()
    if name not in mnts:
        return "{0} does not have anything mounted".format(name)

    if not device:
        cmd = 'umount {0}'.format(name)
    else:
        cmd = 'umount {0}'.format(device)
    out = __salt__['cmd.run_all'](cmd, runas=user)
    if out['retcode']:
        return out['stderr']
    return True


def is_fuse_exec(cmd):
    '''
    Returns true if the command passed is a fuse mountable application.

    CLI Example:

    .. code-block:: bash

        salt '*' mount.is_fuse_exec sshfs
    '''
    cmd_path = _which(cmd)

    # No point in running ldd on a command that doesn't exist
    if not cmd_path:
        return False
    elif not _which('ldd'):
        raise CommandNotFoundError('ldd')

    out = __salt__['cmd.run']('ldd {0}'.format(cmd_path))
    return 'libfuse' in out


def swaps():
    '''
    Return a dict containing information on active swap

    CLI Example:

    .. code-block:: bash

        salt '*' mount.swaps
    '''
    ret = {}
    if __grains__['os'] != 'OpenBSD':
        with salt.utils.fopen('/proc/swaps') as fp_:
            for line in fp_:
                if line.startswith('Filename'):
                    continue
                comps = line.split()
                ret[comps[0]] = {'type': comps[1],
                                 'size': comps[2],
                                 'used': comps[3],
                                 'priority': comps[4]}
    else:
        for line in __salt__['cmd.run_stdout']('swapctl -kl').splitlines():
            if line.startswith(('Device', 'Total')):
                continue
            swap_type = "file"
            comps = line.split()
            if comps[0].startswith('/dev/'):
                swap_type = "partition"
            ret[comps[0]] = {'type': swap_type,
                             'size': comps[1],
                             'used': comps[2],
                             'priority': comps[5]}
    return ret


def swapon(name, priority=None):
    '''
    Activate a swap disk

    CLI Example:

    .. code-block:: bash

        salt '*' mount.swapon /root/swapfile
    '''
    ret = {}
    on_ = swaps()
    if name in on_:
        ret['stats'] = on_[name]
        ret['new'] = False
        return ret
    cmd = 'swapon {0}'.format(name)
    if priority:
        cmd += ' -p {0}'.format(priority)
    __salt__['cmd.run'](cmd)
    on_ = swaps()
    if name in on_:
        ret['stats'] = on_[name]
        ret['new'] = True
        return ret
    return ret


def swapoff(name):
    '''
    Deactivate a named swap mount

    CLI Example:

    .. code-block:: bash

        salt '*' mount.swapoff /root/swapfile
    '''
    on_ = swaps()
    if name in on_:
        if __grains__['os'] != 'OpenBSD':
            __salt__['cmd.run']('swapoff {0}'.format(name))
        else:
            __salt__['cmd.run']('swapctl -d {0}'.format(name))
        on_ = swaps()
        if name in on_:
            return False
        return True
    return None


def is_mounted(name):
    '''
    .. versionadded:: 2014.7.0

    Provide information if the path is mounted

    CLI Example:

    .. code-block:: bash

        salt '*' mount.is_mounted /mnt/share
    '''
    active_ = active()
    if name in active_:
        return True
    else:
        return False
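The `fstab()` parser in this record keys its result dict by mount point after a plain whitespace split. The same six-field mapping, replayed on a single sample line (the line is illustrative, not from the record):

line = "/dev/sda1 / ext4 rw,relatime 0 1"
comps = line.split()
entry = {comps[1]: {'device': comps[0], 'fstype': comps[2],
                    'opts': comps[3].split(','),
                    'dump': comps[4], 'pass': comps[5]}}
print(entry['/']['opts'])   # ['rw', 'relatime']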
30.109264
92
0.473651
0
0
0
0
0
0
0
0
6,552
0.258441
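The swaps() function in the record above turns the fixed-column rows of /proc/swaps into a dict keyed by device. A minimal standalone sketch of that parsing, with sample input inlined (the sample rows are invented for illustration; on a real Linux host you would read /proc/swaps itself):

# Standalone sketch of the /proc/swaps parsing used by swaps() above.
SAMPLE = """Filename                Type        Size    Used    Priority
/dev/sda2               partition   4194300 12345   -2
/swapfile               file        1048572 0       -3
"""

def parse_swaps(text):
    ret = {}
    for line in text.splitlines():
        if line.startswith('Filename'):
            continue  # skip the header row
        comps = line.split()
        ret[comps[0]] = {'type': comps[1],
                         'size': comps[2],
                         'used': comps[3],
                         'priority': comps[4]}
    return ret

print(parse_swaps(SAMPLE))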
8a1f6ceee24cfa74cb693e71048a38117f2ad54b
907
py
Python
base/admin.py
ExpertOfNone/expert_of_none
9ff4e4279a570712766546122c014c754f753485
[ "MIT" ]
null
null
null
base/admin.py
ExpertOfNone/expert_of_none
9ff4e4279a570712766546122c014c754f753485
[ "MIT" ]
null
null
null
base/admin.py
ExpertOfNone/expert_of_none
9ff4e4279a570712766546122c014c754f753485
[ "MIT" ]
null
null
null
from django.contrib import admin

from base.models import Topic, Photo


class EONBaseAdmin(admin.ModelAdmin):

    def get_changeform_initial_data(self, request):
        initial = super().get_changeform_initial_data(request)
        if 'add' in request.META['PATH_INFO']:
            initial['created_by'] = request.user
            initial['modified_by'] = request.user
        return initial

    def save_model(self, request, obj, form, change):
        if not obj.created_by:
            obj.created_by = request.user
        return super().save_model(request, obj, form, change)


class TopicAdmin(EONBaseAdmin):

    list_display = [
        'name', 'parent_topic', 'top_level', 'modified_by', 'modified',
        'created_by', 'created',
    ]


class PhotoAdmin(EONBaseAdmin):
    # TODO Add Proper List Display
    pass


admin.site.register(Topic, TopicAdmin)
admin.site.register(Photo, PhotoAdmin)
21.093023
96
0.675854
747
0.823594
0
0
0
0
0
0
146
0.16097
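A hedged sketch of how the EONBaseAdmin pattern above could be reused for another audited model; `Article` and its app path are assumptions for illustration, not part of the record, and the snippet presupposes a configured Django project:

from django.contrib import admin

from base.admin import EONBaseAdmin
# `Article` is a hypothetical model with the same created_by/modified_by
# audit fields that EONBaseAdmin expects; swap in a real model.
from articles.models import Article


class ArticleAdmin(EONBaseAdmin):
    # The auto-filled created_by/modified_by behaviour is inherited from
    # EONBaseAdmin; only the columns shown in the changelist differ.
    list_display = ['title', 'modified_by', 'modified', 'created_by', 'created']


admin.site.register(Article, ArticleAdmin)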
8a1ff54283494d2484e0c67f1c0d0ff7dcc46387
1,165
py
Python
met/metadataparser/models/entity_type.py
z1digitalstudio/met
7840e7520bb4c3cb0328d5988468eefe6639f950
[ "BSD-2-Clause" ]
11
2016-06-30T13:20:39.000Z
2021-01-14T20:53:15.000Z
met/metadataparser/models/entity_type.py
z1digitalstudio/met
7840e7520bb4c3cb0328d5988468eefe6639f950
[ "BSD-2-Clause" ]
50
2016-02-03T14:49:06.000Z
2022-02-24T01:35:50.000Z
met/metadataparser/models/entity_type.py
z1digitalstudio/met
7840e7520bb4c3cb0328d5988468eefe6639f950
[ "BSD-2-Clause" ]
9
2016-04-22T19:24:36.000Z
2022-01-11T10:30:54.000Z
#################################################################
# MET v2 Metadata Explorer Tool
#
# This Software is Open Source. See License: https://github.com/TERENA/met/blob/master/LICENSE.md
# Copyright (c) 2012, TERENA. All rights reserved.
#
# This Software is based on MET v1 developed for TERENA by Yaco Sistemas, http://www.yaco.es/
# MET v2 was developed for TERENA by Tamim Ziai, DAASI International GmbH, http://www.daasi.de
# Current version of MET has been revised for performance improvements by Andrea Biancini,
# Consortium GARR, http://www.garr.it
##########################################################################

from django.db import models
from django.utils.translation import ugettext_lazy as _


class EntityType(models.Model):
    """
    Model describing the type of an entity.
    """

    name = models.CharField(blank=False, max_length=20, unique=True,
                            verbose_name=_(u'Name'), db_index=True)
    xmlname = models.CharField(blank=False, max_length=20, unique=True,
                               verbose_name=_(u'Name in XML'), db_index=True)

    def __unicode__(self):
        return self.name
38.833333
97
0.614592
433
0.371674
0
0
0
0
0
0
708
0.607725
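A short usage sketch for the EntityType model above; the name/xmlname values are illustrative SAML-style strings, not taken from the record, and a configured Django environment is assumed:

from met.metadataparser.models.entity_type import EntityType

# Idempotently ensure a type row exists; the values below are invented
# examples, not from the original file.
idp, created = EntityType.objects.get_or_create(
    name='IDP', xmlname='IDPSSODescriptor')

# The unique, indexed `name` column makes exact lookups cheap.
print(EntityType.objects.get(name='IDP').name)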
8a2004bf04417c6b520430e6ac9ec351a3c37f83
9,312
py
Python
wxpy/bot.py
daimajia/wxpy
2b56fb67b9ccb072538fd778a27a8fef8d9c93e6
[ "MIT" ]
34
2017-03-01T06:32:04.000Z
2021-11-16T12:48:46.000Z
wxpy/bot.py
daimajia/wxpy
2b56fb67b9ccb072538fd778a27a8fef8d9c93e6
[ "MIT" ]
null
null
null
wxpy/bot.py
daimajia/wxpy
2b56fb67b9ccb072538fd778a27a8fef8d9c93e6
[ "MIT" ]
17
2017-03-01T08:41:22.000Z
2021-09-16T06:25:43.000Z
import traceback
from pprint import pformat
from threading import Thread

import itchat
import logging

from wxpy.chat import Chat
from wxpy.chats import Chats
from wxpy.friend import Friend
from wxpy.group import Group
from wxpy.message import MessageConfigs, Messages, Message, MessageConfig
from wxpy.mp import MP
from wxpy.response import ResponseError
from wxpy.user import User
from wxpy.utils.constants import SYSTEM
from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list

logger = logging.getLogger('wxpy')


class Robot(object):
    """
    Robot object for logging in to and operating a WeChat account;
    covers most features of Web WeChat.
    """

    def __init__(
            self, save_path=None, console_qr=False, qr_path=None,
            qr_callback=None, login_callback=None, logout_callback=None
    ):
        """
        :param save_path:
            | File path for saving or loading the login status, e.g. 'wxpy.pkl';
              if empty, no attempt is made to load a saved status.
            | With this set, the login status can be reloaded within a short
              time, avoiding repeated QR scanning; when it expires, a fresh
              login is requested.
        :param console_qr: show the login QR code in the terminal (requires the Pillow module)
        :param qr_path: path for saving the QR code image
        :param qr_callback: callback invoked when the QR code is obtained; receives: uuid, status, qrcode
        :param login_callback: callback on login; receives the same parameters as above
        :param logout_callback: callback on logout; receives the same parameters as above
        """
        self.core = itchat.Core()
        itchat.instanceList.append(self)

        self.core.auto_login(
            hotReload=bool(save_path), statusStorageDir=save_path,
            enableCmdQR=console_qr, picDir=qr_path, qrCallback=qr_callback,
            loginCallback=login_callback, exitCallback=logout_callback
        )

        self.message_configs = MessageConfigs(self)
        self.messages = Messages(robot=self)

        self.file_helper = Chat(wrap_user_name('filehelper'))
        self.file_helper.robot = self
        self.file_helper.nick_name = '文件传输助手'

        self.self = Chat(self.core.loginInfo['User'])
        self.self.robot = self

        self.save_path = save_path

    def __repr__(self):
        return '<{}: {}>'.format(self.__class__.__name__, self.self.name)

    @handle_response()
    def logout(self):
        """
        Log out of the current account
        """
        return self.core.logout()

    @property
    def alive(self):
        """
        Current login status

        :return: True if logged in, otherwise False
        """
        return self.core.alive

    @alive.setter
    def alive(self, value):
        self.core.alive = value

    def dump_login_status(self, save_path=None):
        return self.core.dump_login_status(save_path or self.save_path)

    # chats

    def except_self(self, chats_or_dicts):
        """
        Exclude self from a collection of chat objects or a list of user dicts

        :param chats_or_dicts: collection of chat objects or list of user dicts
        :return: the list with self excluded
        """
        return list(filter(lambda x: get_user_name(x) != self.self.user_name, chats_or_dicts))

    def chats(self, update=False):
        """
        Get all chat objects

        :param update: whether to refresh the data
        :return: collection of chat objects
        """
        return Chats(self.friends(update) + self.groups(update) + self.mps(update), self)

    def friends(self, update=False):
        """
        Get all friends

        :param update: whether to refresh the data
        :return: collection of chat objects
        """

        @handle_response(Friend)
        def do():
            return self.core.get_friends(update=update)

        ret = do()
        ret.source = self

        return ret

    @handle_response(Group)
    def groups(self, update=False, contact_only=False):
        """
        Get all group chats

        :param update: whether to refresh the data
        :param contact_only: whether to restrict to group chats saved as contacts
        :return: collection of group chats
        """
        return self.core.get_chatrooms(update=update, contactOnly=contact_only)

    @handle_response(MP)
    def mps(self, update=False):
        """
        Get all official accounts

        :param update: whether to refresh the data
        :return: collection of chat objects
        """
        return self.core.get_mps(update=update)

    @handle_response(User)
    def user_details(self, user_or_users, chunk_size=50):
        """
        Get detailed information (region, gender, signature, etc.) for one
        user or a batch of users; not usable for group-chat members

        :param user_or_users: one or more user objects or user_names
        :param chunk_size: batch size per request, currently 50
        :return: detailed information for the user or users
        """

        def chunks():
            total = ensure_list(user_or_users)
            for i in range(0, len(total), chunk_size):
                yield total[i:i + chunk_size]

        @handle_response()
        def process_one_chunk(_chunk):
            return self.core.update_friend(userName=get_user_name(_chunk))

        if isinstance(user_or_users, (list, tuple)):
            ret = list()
            for chunk in chunks():
                chunk_ret = process_one_chunk(chunk)
                if isinstance(chunk_ret, list):
                    ret += chunk_ret
                else:
                    ret.append(chunk_ret)
            return ret
        else:
            return process_one_chunk(user_or_users)

    def search(self, name=None, **attributes):
        """
        Search among chat objects of all types

        :param name: name (can be a nickname, remark name, etc.)
        :param attributes: attribute key-value pairs; keys can be sex,
            province, city, etc. For example: province='广东'
        :return: collection of matching chat objects
        """
        return self.chats().search(name, **attributes)

    # add / create

    @handle_response()
    def add_friend(self, user, verify_content=''):
        """
        Add a user as a friend

        :param user: user object or user name
        :param verify_content: verification message
        """
        return self.core.add_friend(
            userName=get_user_name(user),
            status=2,
            verifyContent=verify_content,
            autoUpdate=True
        )

    @handle_response()
    def accept_friend(self, user, verify_content=''):
        """
        Accept a user as a friend

        :param user: user object or user name
        :param verify_content: verification message
        """
        # Todo: verify that this friend API works, and return the new friend directly on acceptance
        return self.core.add_friend(
            userName=get_user_name(user),
            status=3,
            verifyContent=verify_content,
            autoUpdate=True
        )

    def create_group(self, users, topic=None):
        """
        Create a new group chat

        :param users: list of users
        :param topic: group name
        :return: a new group chat object if creation succeeds
        """

        @handle_response()
        def request():
            return self.core.create_chatroom(
                memberList=wrap_user_name(users),
                topic=topic or ''
            )

        ret = request()
        user_name = ret.get('ChatRoomName')
        if user_name:
            return Group(self.core.update_chatroom(userName=user_name))
        else:
            raise ResponseError('Failed to create group:\n{}'.format(pformat(ret)))

    # messages

    def _process_message(self, msg):
        """
        Process a received message
        """
        if not self.alive:
            return

        func, run_async = self.message_configs.get_func(msg)

        if not func:
            return

        def process():
            # noinspection PyBroadException
            try:
                ret = func(msg)
                if ret is not None:
                    if isinstance(ret, (tuple, list)):
                        self.core.send(
                            msg=str(ret[0]),
                            toUserName=msg.chat.user_name,
                            mediaId=ret[1]
                        )
                    else:
                        self.core.send(
                            msg=str(ret),
                            toUserName=msg.chat.user_name
                        )
            except:
                logger.warning(
                    'An error occurred in registered function, '
                    'use `Robot().start(debug=True)` to show detailed information')
                logger.debug(traceback.format_exc())

        if run_async:
            Thread(target=process).start()
        else:
            process()

    def register(
            self, chats=None, msg_types=None,
            except_self=True, run_async=True, enabled=True
    ):
        """
        Decorator: register a message configuration

        :param chats: one or a list of chat objects or chat types; matches all chats when empty
        :param msg_types: one or a list of message types; matches all types (except SYSTEM) when empty
        :param except_self: exclude messages sent by yourself from your phone
        :param run_async: run the configured function asynchronously, improving responsiveness
        :param enabled: default enabled state of this configuration; can be toggled dynamically later
        """

        def register(func):
            self.message_configs.append(MessageConfig(
                robot=self, func=func, chats=chats, msg_types=msg_types,
                except_self=except_self, run_async=run_async, enabled=enabled
            ))

            return func

        return register

    def start(self, block=True):
        """
        Start listening for and processing messages

        :param block: whether to block the thread; when False, runs in a new thread
        """

        def listen():
            logger.info('{} Auto-reply started.'.format(self))
            try:
                while self.alive:
                    msg = Message(self.core.msgList.get(), self)
                    if msg.type is not SYSTEM:
                        self.messages.append(msg)
                        self._process_message(msg)
            except KeyboardInterrupt:
                logger.info('KeyboardInterrupt received, ending...')
                self.alive = False
                if self.core.useHotReload:
                    self.dump_login_status()
                logger.info('Bye.')

        if block:
            listen()
        else:
            t = Thread(target=listen, daemon=True)
            t.start()
27.22807
94
0.560997
10,171
0.94861
1,122
0.104645
3,286
0.306473
0
0
3,921
0.365697
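A hedged usage sketch for the Robot class above, wiring a reply handler through the register() decorator; it needs an interactive WeChat QR login, so it is illustrative rather than unattended-runnable:

from wxpy.bot import Robot

# Logging in shows a QR code to scan with the WeChat mobile app; the login
# status is cached in wxpy.pkl so re-runs can skip the scan.
bot = Robot(save_path='wxpy.pkl')


@bot.register(except_self=True, run_async=True)
def reply(msg):
    # Whatever the handler returns is sent back to the originating chat.
    return 'Got your message: {}'.format(msg)


# Block the main thread and dispatch incoming messages to `reply`.
bot.start(block=True)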
8a2014bc56418a4e4967160efe3f9656c573b77f
1,432
py
Python
glue/__init__.py
HPLegion/glue
1843787ccb4de852dfe103ff58473da13faccf5f
[ "BSD-3-Clause" ]
550
2015-01-08T13:51:06.000Z
2022-03-31T11:54:47.000Z
glue/__init__.py
HPLegion/glue
1843787ccb4de852dfe103ff58473da13faccf5f
[ "BSD-3-Clause" ]
1,362
2015-01-03T19:15:52.000Z
2022-03-30T13:23:11.000Z
glue/__init__.py
HPLegion/glue
1843787ccb4de852dfe103ff58473da13faccf5f
[ "BSD-3-Clause" ]
142
2015-01-08T13:08:00.000Z
2022-03-18T13:25:57.000Z
# Set up configuration variables

__all__ = ['custom_viewer', 'qglue', 'test']

import os
import sys

from pkg_resources import get_distribution, DistributionNotFound

try:
    __version__ = get_distribution('glue-core').version
except DistributionNotFound:
    __version__ = 'undefined'

from ._mpl_backend import MatplotlibBackendSetter
sys.meta_path.append(MatplotlibBackendSetter())

from glue.viewers.custom.helper import custom_viewer

# Load user's configuration file
from .config import load_configuration
env = load_configuration()

from .qglue import qglue

from .main import load_plugins  # noqa


def test(no_optional_skip=False):
    from pytest import main
    root = os.path.abspath(os.path.dirname(__file__))
    args = [root, '-x']
    if no_optional_skip:
        args.append('--no-optional-skip')
    return main(args=args)


from glue._settings_helpers import load_settings
load_settings()


# In PyQt 5.5+, PyQt overrides the default exception catching and fatally
# crashes the Qt application without printing out any details about the error.
# Below we revert the exception hook to the original Python one. Note that we
# can't just do sys.excepthook = sys.__excepthook__ otherwise PyQt will detect
# the default excepthook is in place and override it.
def handle_exception(exc_type, exc_value, exc_traceback):
    sys.__excepthook__(exc_type, exc_value, exc_traceback)


sys.excepthook = handle_exception
26.036364
78
0.775838
0
0
0
0
0
0
0
0
503
0.351257
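A standalone sketch of the excepthook trick from the record above: the revert goes through a wrapper function because, as the original comment explains, assigning sys.__excepthook__ directly would be detected and re-overridden by PyQt:

import sys


def handle_exception(exc_type, exc_value, exc_traceback):
    # Delegate to the interpreter's original hook.
    sys.__excepthook__(exc_type, exc_value, exc_traceback)


sys.excepthook = handle_exception

# The hook is functionally the default one, but is a distinct object, so a
# framework comparing `sys.excepthook is sys.__excepthook__` sees False.
assert sys.excepthook is not sys.__excepthook__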
8a2036147565ecfe3e374843c7669120715a456c
93
py
Python
run.py
pran01/AlgoVision
40e85f3c55266f43ee103dfa0852a63af306a8d4
[ "MIT" ]
33
2020-10-05T01:04:55.000Z
2021-06-24T01:52:31.000Z
run.py
learning-zones/AlgoVision
9261e00ecb2540d8bb950d47d670bb6b2c69db0f
[ "MIT" ]
14
2020-10-07T03:15:12.000Z
2021-01-15T11:53:29.000Z
run.py
learning-zones/AlgoVision
9261e00ecb2540d8bb950d47d670bb6b2c69db0f
[ "MIT" ]
9
2020-10-05T07:16:45.000Z
2021-03-01T15:44:31.000Z
from algovision import app

if __name__ == "__main__":
    app.run(debug=True, host='0.0.0.0')
18.6
38
0.688172
0
0
0
0
0
0
0
0
19
0.204301
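run.py above assumes an `app` object exposed by the algovision package; a minimal, hypothetical algovision/__init__.py that would satisfy the import (the `app.run(debug=..., host=...)` call signature suggests Flask; the real AlgoVision package certainly registers more than this):

# Hypothetical algovision/__init__.py: a minimal Flask app object that
# run.py's `from algovision import app` would resolve.
from flask import Flask

app = Flask(__name__)


@app.route('/')
def index():
    return 'AlgoVision'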
8a206c0ba5cec93f4c2890bee22ea35305190260
1,477
py
Python
readthedocs/settings/proxito/base.py
rffontenelle/readthedocs.org
a7a9072215551156b9ddc22280cc085944eaa4b0
[ "MIT" ]
null
null
null
readthedocs/settings/proxito/base.py
rffontenelle/readthedocs.org
a7a9072215551156b9ddc22280cc085944eaa4b0
[ "MIT" ]
null
null
null
readthedocs/settings/proxito/base.py
rffontenelle/readthedocs.org
a7a9072215551156b9ddc22280cc085944eaa4b0
[ "MIT" ]
null
null
null
""" Base settings for Proxito Some of these settings will eventually be backported into the main settings file, but currently we have them to be able to run the site with the old middleware for a staged rollout of the proxito code. """ class CommunityProxitoSettingsMixin: ROOT_URLCONF = 'readthedocs.proxito.urls' USE_SUBDOMAIN = True SECURE_REFERRER_POLICY = "no-referrer-when-downgrade" # Allow cookies from cross-site requests on subdomains for now. # As 'Lax' breaks when the page is embedded in an iframe. SESSION_COOKIE_SAMESITE = None @property def DATABASES(self): # This keeps connections to the DB alive, # which reduces latency with connecting to postgres dbs = getattr(super(), 'DATABASES', {}) for db in dbs: dbs[db]['CONN_MAX_AGE'] = 86400 return dbs @property def MIDDLEWARE(self): # noqa # Use our new middleware instead of the old one classes = super().MIDDLEWARE classes = list(classes) classes.append('readthedocs.proxito.middleware.ProxitoMiddleware') middleware_to_remove = ( 'csp.middleware.CSPMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) for mw in middleware_to_remove: if mw in classes: classes.remove(mw) else: log.warning('Failed to remove middleware: %s', mw) return classes
31.425532
81
0.65606
1,237
0.837508
0
0
893
0.604604
0
0
749
0.507109
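A runnable toy of the mixin mechanic used above: a @property in the mixin extends MIDDLEWARE from super() through cooperative inheritance, which is why the mixin must come first in the bases of the concrete settings class (all class names here are invented for the demo):

class BaseSettings:
    @property
    def MIDDLEWARE(self):
        return ('security', 'sessions')


class ProxitoMixin:
    @property
    def MIDDLEWARE(self):
        # Start from whatever the next class in the MRO provides, then extend.
        classes = list(super().MIDDLEWARE)
        classes.append('proxito')
        return classes


class Settings(ProxitoMixin, BaseSettings):
    pass


print(Settings().MIDDLEWARE)  # ['security', 'sessions', 'proxito']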
8a20872ac762ad5db9d06e05df401ef72a6b24c6
69,998
py
Python
model_selection/tests/test_search.py
jessica-tu/jupyter
917e02bc29e0fa06bd8adb25fe5388ac381ec829
[ "PSF-2.0", "Apache-2.0", "BSD-3-Clause-No-Nuclear-License-2014", "MIT", "ECL-2.0", "BSD-3-Clause" ]
null
null
null
model_selection/tests/test_search.py
jessica-tu/jupyter
917e02bc29e0fa06bd8adb25fe5388ac381ec829
[ "PSF-2.0", "Apache-2.0", "BSD-3-Clause-No-Nuclear-License-2014", "MIT", "ECL-2.0", "BSD-3-Clause" ]
null
null
null
model_selection/tests/test_search.py
jessica-tu/jupyter
917e02bc29e0fa06bd8adb25fe5388ac381ec829
[ "PSF-2.0", "Apache-2.0", "BSD-3-Clause-No-Nuclear-License-2014", "MIT", "ECL-2.0", "BSD-3-Clause" ]
null
null
null
"""Test the search module""" from collections.abc import Iterable, Sized from io import StringIO from itertools import chain, product from functools import partial import pickle import sys from types import GeneratorType import re import numpy as np import scipy.sparse as sp import pytest from sklearn.utils.fixes import sp_version from sklearn.utils._testing import assert_raises from sklearn.utils._testing import assert_warns from sklearn.utils._testing import assert_warns_message from sklearn.utils._testing import assert_raise_message from sklearn.utils._testing import assert_array_equal from sklearn.utils._testing import assert_array_almost_equal from sklearn.utils._testing import assert_allclose from sklearn.utils._testing import assert_almost_equal from sklearn.utils._testing import ignore_warnings from sklearn.utils._mocking import CheckingClassifier, MockDataFrame from scipy.stats import bernoulli, expon, uniform from sklearn.base import BaseEstimator, ClassifierMixin from sklearn.base import clone from sklearn.exceptions import NotFittedError from sklearn.datasets import make_classification from sklearn.datasets import make_blobs from sklearn.datasets import make_multilabel_classification from sklearn.model_selection import fit_grid_point from sklearn.model_selection import train_test_split from sklearn.model_selection import KFold from sklearn.model_selection import StratifiedKFold from sklearn.model_selection import StratifiedShuffleSplit from sklearn.model_selection import LeaveOneGroupOut from sklearn.model_selection import LeavePGroupsOut from sklearn.model_selection import GroupKFold from sklearn.model_selection import GroupShuffleSplit from sklearn.model_selection import GridSearchCV from sklearn.model_selection import RandomizedSearchCV from sklearn.model_selection import ParameterGrid from sklearn.model_selection import ParameterSampler from sklearn.model_selection._search import BaseSearchCV from sklearn.model_selection._validation import FitFailedWarning from sklearn.svm import LinearSVC, SVC from sklearn.tree import DecisionTreeRegressor from sklearn.tree import DecisionTreeClassifier from sklearn.cluster import KMeans from sklearn.neighbors import KernelDensity from sklearn.neighbors import KNeighborsClassifier from sklearn.metrics import f1_score from sklearn.metrics import recall_score from sklearn.metrics import accuracy_score from sklearn.metrics import make_scorer from sklearn.metrics import roc_auc_score from sklearn.metrics.pairwise import euclidean_distances from sklearn.impute import SimpleImputer from sklearn.pipeline import Pipeline from sklearn.linear_model import Ridge, SGDClassifier, LinearRegression from sklearn.experimental import enable_hist_gradient_boosting # noqa from sklearn.ensemble import HistGradientBoostingClassifier from sklearn.model_selection.tests.common import OneTimeSplitter # Neither of the following two estimators inherit from BaseEstimator, # to test hyperparameter search on user-defined classifiers. class MockClassifier: """Dummy classifier to test the parameter search algorithms""" def __init__(self, foo_param=0): self.foo_param = foo_param def fit(self, X, Y): assert len(X) == len(Y) self.classes_ = np.unique(Y) return self def predict(self, T): return T.shape[0] def transform(self, X): return X + self.foo_param def inverse_transform(self, X): return X - self.foo_param predict_proba = predict predict_log_proba = predict decision_function = predict def score(self, X=None, Y=None): if self.foo_param > 1: score = 1. else: score = 0. 
return score def get_params(self, deep=False): return {'foo_param': self.foo_param} def set_params(self, **params): self.foo_param = params['foo_param'] return self class LinearSVCNoScore(LinearSVC): """An LinearSVC classifier that has no score method.""" @property def score(self): raise AttributeError X = np.array([[-1, -1], [-2, -1], [1, 1], [2, 1]]) y = np.array([1, 1, 2, 2]) def assert_grid_iter_equals_getitem(grid): assert list(grid) == [grid[i] for i in range(len(grid))] @pytest.mark.parametrize("klass", [ParameterGrid, partial(ParameterSampler, n_iter=10)]) @pytest.mark.parametrize( "input, error_type, error_message", [(0, TypeError, r'Parameter .* is not a dict or a list \(0\)'), ([{'foo': [0]}, 0], TypeError, r'Parameter .* is not a dict \(0\)'), ({'foo': 0}, TypeError, "Parameter.* value is not iterable .*" r"\(key='foo', value=0\)")] ) def test_validate_parameter_input(klass, input, error_type, error_message): with pytest.raises(error_type, match=error_message): klass(input) def test_parameter_grid(): # Test basic properties of ParameterGrid. params1 = {"foo": [1, 2, 3]} grid1 = ParameterGrid(params1) assert isinstance(grid1, Iterable) assert isinstance(grid1, Sized) assert len(grid1) == 3 assert_grid_iter_equals_getitem(grid1) params2 = {"foo": [4, 2], "bar": ["ham", "spam", "eggs"]} grid2 = ParameterGrid(params2) assert len(grid2) == 6 # loop to assert we can iterate over the grid multiple times for i in range(2): # tuple + chain transforms {"a": 1, "b": 2} to ("a", 1, "b", 2) points = set(tuple(chain(*(sorted(p.items())))) for p in grid2) assert (points == set(("bar", x, "foo", y) for x, y in product(params2["bar"], params2["foo"]))) assert_grid_iter_equals_getitem(grid2) # Special case: empty grid (useful to get default estimator settings) empty = ParameterGrid({}) assert len(empty) == 1 assert list(empty) == [{}] assert_grid_iter_equals_getitem(empty) assert_raises(IndexError, lambda: empty[1]) has_empty = ParameterGrid([{'C': [1, 10]}, {}, {'C': [.5]}]) assert len(has_empty) == 4 assert list(has_empty) == [{'C': 1}, {'C': 10}, {}, {'C': .5}] assert_grid_iter_equals_getitem(has_empty) def test_grid_search(): # Test that the best estimator contains the right value for foo_param clf = MockClassifier() grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}, cv=3, verbose=3) # make sure it selects the smallest parameter in case of ties old_stdout = sys.stdout sys.stdout = StringIO() grid_search.fit(X, y) sys.stdout = old_stdout assert grid_search.best_estimator_.foo_param == 2 assert_array_equal(grid_search.cv_results_["param_foo_param"].data, [1, 2, 3]) # Smoke test the score etc: grid_search.score(X, y) grid_search.predict_proba(X) grid_search.decision_function(X) grid_search.transform(X) # Test exception handling on scoring grid_search.scoring = 'sklearn' assert_raises(ValueError, grid_search.fit, X, y) def test_grid_search_pipeline_steps(): # check that parameters that are estimators are cloned before fitting pipe = Pipeline([('regressor', LinearRegression())]) param_grid = {'regressor': [LinearRegression(), Ridge()]} grid_search = GridSearchCV(pipe, param_grid, cv=2) grid_search.fit(X, y) regressor_results = grid_search.cv_results_['param_regressor'] assert isinstance(regressor_results[0], LinearRegression) assert isinstance(regressor_results[1], Ridge) assert not hasattr(regressor_results[0], 'coef_') assert not hasattr(regressor_results[1], 'coef_') assert regressor_results[0] is not grid_search.best_estimator_ assert regressor_results[1] is not 
grid_search.best_estimator_ # check that we didn't modify the parameter grid that was passed assert not hasattr(param_grid['regressor'][0], 'coef_') assert not hasattr(param_grid['regressor'][1], 'coef_') @pytest.mark.parametrize("SearchCV", [GridSearchCV, RandomizedSearchCV]) def test_SearchCV_with_fit_params(SearchCV): X = np.arange(100).reshape(10, 10) y = np.array([0] * 5 + [1] * 5) clf = CheckingClassifier(expected_fit_params=['spam', 'eggs']) searcher = SearchCV( clf, {'foo_param': [1, 2, 3]}, cv=2, error_score="raise" ) # The CheckingClassifier generates an assertion error if # a parameter is missing or has length != len(X). err_msg = r"Expected fit parameter\(s\) \['eggs'\] not seen." with pytest.raises(AssertionError, match=err_msg): searcher.fit(X, y, spam=np.ones(10)) err_msg = "Fit parameter spam has length 1; expected" with pytest.raises(AssertionError, match=err_msg): searcher.fit(X, y, spam=np.ones(1), eggs=np.zeros(10)) searcher.fit(X, y, spam=np.ones(10), eggs=np.zeros(10)) @ignore_warnings def test_grid_search_no_score(): # Test grid-search on classifier that has no score function. clf = LinearSVC(random_state=0) X, y = make_blobs(random_state=0, centers=2) Cs = [.1, 1, 10] clf_no_score = LinearSVCNoScore(random_state=0) grid_search = GridSearchCV(clf, {'C': Cs}, scoring='accuracy') grid_search.fit(X, y) grid_search_no_score = GridSearchCV(clf_no_score, {'C': Cs}, scoring='accuracy') # smoketest grid search grid_search_no_score.fit(X, y) # check that best params are equal assert grid_search_no_score.best_params_ == grid_search.best_params_ # check that we can call score and that it gives the correct result assert grid_search.score(X, y) == grid_search_no_score.score(X, y) # giving no scoring function raises an error grid_search_no_score = GridSearchCV(clf_no_score, {'C': Cs}) assert_raise_message(TypeError, "no scoring", grid_search_no_score.fit, [[1]]) def test_grid_search_score_method(): X, y = make_classification(n_samples=100, n_classes=2, flip_y=.2, random_state=0) clf = LinearSVC(random_state=0) grid = {'C': [.1]} search_no_scoring = GridSearchCV(clf, grid, scoring=None).fit(X, y) search_accuracy = GridSearchCV(clf, grid, scoring='accuracy').fit(X, y) search_no_score_method_auc = GridSearchCV(LinearSVCNoScore(), grid, scoring='roc_auc' ).fit(X, y) search_auc = GridSearchCV(clf, grid, scoring='roc_auc').fit(X, y) # Check warning only occurs in situation where behavior changed: # estimator requires score method to compete with scoring parameter score_no_scoring = search_no_scoring.score(X, y) score_accuracy = search_accuracy.score(X, y) score_no_score_auc = search_no_score_method_auc.score(X, y) score_auc = search_auc.score(X, y) # ensure the test is sane assert score_auc < 1.0 assert score_accuracy < 1.0 assert score_auc != score_accuracy assert_almost_equal(score_accuracy, score_no_scoring) assert_almost_equal(score_auc, score_no_score_auc) def test_grid_search_groups(): # Check if ValueError (when groups is None) propagates to GridSearchCV # And also check if groups is correctly passed to the cv object rng = np.random.RandomState(0) X, y = make_classification(n_samples=15, n_classes=2, random_state=0) groups = rng.randint(0, 3, 15) clf = LinearSVC(random_state=0) grid = {'C': [1]} group_cvs = [LeaveOneGroupOut(), LeavePGroupsOut(2), GroupKFold(n_splits=3), GroupShuffleSplit()] for cv in group_cvs: gs = GridSearchCV(clf, grid, cv=cv) assert_raise_message(ValueError, "The 'groups' parameter should not be None.", gs.fit, X, y) gs.fit(X, y, groups=groups) non_group_cvs 
= [StratifiedKFold(), StratifiedShuffleSplit()] for cv in non_group_cvs: gs = GridSearchCV(clf, grid, cv=cv) # Should not raise an error gs.fit(X, y) def test_classes__property(): # Test that classes_ property matches best_estimator_.classes_ X = np.arange(100).reshape(10, 10) y = np.array([0] * 5 + [1] * 5) Cs = [.1, 1, 10] grid_search = GridSearchCV(LinearSVC(random_state=0), {'C': Cs}) grid_search.fit(X, y) assert_array_equal(grid_search.best_estimator_.classes_, grid_search.classes_) # Test that regressors do not have a classes_ attribute grid_search = GridSearchCV(Ridge(), {'alpha': [1.0, 2.0]}) grid_search.fit(X, y) assert not hasattr(grid_search, 'classes_') # Test that the grid searcher has no classes_ attribute before it's fit grid_search = GridSearchCV(LinearSVC(random_state=0), {'C': Cs}) assert not hasattr(grid_search, 'classes_') # Test that the grid searcher has no classes_ attribute without a refit grid_search = GridSearchCV(LinearSVC(random_state=0), {'C': Cs}, refit=False) grid_search.fit(X, y) assert not hasattr(grid_search, 'classes_') def test_trivial_cv_results_attr(): # Test search over a "grid" with only one point. clf = MockClassifier() grid_search = GridSearchCV(clf, {'foo_param': [1]}, cv=3) grid_search.fit(X, y) assert hasattr(grid_search, "cv_results_") random_search = RandomizedSearchCV(clf, {'foo_param': [0]}, n_iter=1, cv=3) random_search.fit(X, y) assert hasattr(grid_search, "cv_results_") def test_no_refit(): # Test that GSCV can be used for model selection alone without refitting clf = MockClassifier() for scoring in [None, ['accuracy', 'precision']]: grid_search = GridSearchCV( clf, {'foo_param': [1, 2, 3]}, refit=False, cv=3 ) grid_search.fit(X, y) assert not hasattr(grid_search, "best_estimator_") and \ hasattr(grid_search, "best_index_") and \ hasattr(grid_search, "best_params_") # Make sure the functions predict/transform etc raise meaningful # error messages for fn_name in ('predict', 'predict_proba', 'predict_log_proba', 'transform', 'inverse_transform'): assert_raise_message(NotFittedError, ('refit=False. 
%s is available only after ' 'refitting on the best parameters' % fn_name), getattr(grid_search, fn_name), X) # Test that an invalid refit param raises appropriate error messages for refit in ["", 5, True, 'recall', 'accuracy']: assert_raise_message(ValueError, "For multi-metric scoring, the " "parameter refit must be set to a scorer key", GridSearchCV(clf, {}, refit=refit, scoring={'acc': 'accuracy', 'prec': 'precision'} ).fit, X, y) def test_grid_search_error(): # Test that grid search will capture errors on data with different length X_, y_ = make_classification(n_samples=200, n_features=100, random_state=0) clf = LinearSVC() cv = GridSearchCV(clf, {'C': [0.1, 1.0]}) assert_raises(ValueError, cv.fit, X_[:180], y_) def test_grid_search_one_grid_point(): X_, y_ = make_classification(n_samples=200, n_features=100, random_state=0) param_dict = {"C": [1.0], "kernel": ["rbf"], "gamma": [0.1]} clf = SVC(gamma='auto') cv = GridSearchCV(clf, param_dict) cv.fit(X_, y_) clf = SVC(C=1.0, kernel="rbf", gamma=0.1) clf.fit(X_, y_) assert_array_equal(clf.dual_coef_, cv.best_estimator_.dual_coef_) def test_grid_search_when_param_grid_includes_range(): # Test that the best estimator contains the right value for foo_param clf = MockClassifier() grid_search = None grid_search = GridSearchCV(clf, {'foo_param': range(1, 4)}, cv=3) grid_search.fit(X, y) assert grid_search.best_estimator_.foo_param == 2 def test_grid_search_bad_param_grid(): param_dict = {"C": 1} clf = SVC(gamma='auto') assert_raise_message( ValueError, "Parameter grid for parameter (C) needs to" " be a list or numpy array, but got (<class 'int'>)." " Single values need to be wrapped in a list" " with one element.", GridSearchCV, clf, param_dict) param_dict = {"C": []} clf = SVC() assert_raise_message( ValueError, "Parameter values for parameter (C) need to be a non-empty sequence.", GridSearchCV, clf, param_dict) param_dict = {"C": "1,2,3"} clf = SVC(gamma='auto') assert_raise_message( ValueError, "Parameter grid for parameter (C) needs to" " be a list or numpy array, but got (<class 'str'>)." 
" Single values need to be wrapped in a list" " with one element.", GridSearchCV, clf, param_dict) param_dict = {"C": np.ones((3, 2))} clf = SVC() assert_raises(ValueError, GridSearchCV, clf, param_dict) def test_grid_search_sparse(): # Test that grid search works with both dense and sparse matrices X_, y_ = make_classification(n_samples=200, n_features=100, random_state=0) clf = LinearSVC() cv = GridSearchCV(clf, {'C': [0.1, 1.0]}) cv.fit(X_[:180], y_[:180]) y_pred = cv.predict(X_[180:]) C = cv.best_estimator_.C X_ = sp.csr_matrix(X_) clf = LinearSVC() cv = GridSearchCV(clf, {'C': [0.1, 1.0]}) cv.fit(X_[:180].tocoo(), y_[:180]) y_pred2 = cv.predict(X_[180:]) C2 = cv.best_estimator_.C assert np.mean(y_pred == y_pred2) >= .9 assert C == C2 def test_grid_search_sparse_scoring(): X_, y_ = make_classification(n_samples=200, n_features=100, random_state=0) clf = LinearSVC() cv = GridSearchCV(clf, {'C': [0.1, 1.0]}, scoring="f1") cv.fit(X_[:180], y_[:180]) y_pred = cv.predict(X_[180:]) C = cv.best_estimator_.C X_ = sp.csr_matrix(X_) clf = LinearSVC() cv = GridSearchCV(clf, {'C': [0.1, 1.0]}, scoring="f1") cv.fit(X_[:180], y_[:180]) y_pred2 = cv.predict(X_[180:]) C2 = cv.best_estimator_.C assert_array_equal(y_pred, y_pred2) assert C == C2 # Smoke test the score # np.testing.assert_allclose(f1_score(cv.predict(X_[:180]), y[:180]), # cv.score(X_[:180], y[:180])) # test loss where greater is worse def f1_loss(y_true_, y_pred_): return -f1_score(y_true_, y_pred_) F1Loss = make_scorer(f1_loss, greater_is_better=False) cv = GridSearchCV(clf, {'C': [0.1, 1.0]}, scoring=F1Loss) cv.fit(X_[:180], y_[:180]) y_pred3 = cv.predict(X_[180:]) C3 = cv.best_estimator_.C assert C == C3 assert_array_equal(y_pred, y_pred3) def test_grid_search_precomputed_kernel(): # Test that grid search works when the input features are given in the # form of a precomputed kernel matrix X_, y_ = make_classification(n_samples=200, n_features=100, random_state=0) # compute the training kernel matrix corresponding to the linear kernel K_train = np.dot(X_[:180], X_[:180].T) y_train = y_[:180] clf = SVC(kernel='precomputed') cv = GridSearchCV(clf, {'C': [0.1, 1.0]}) cv.fit(K_train, y_train) assert cv.best_score_ >= 0 # compute the test kernel matrix K_test = np.dot(X_[180:], X_[:180].T) y_test = y_[180:] y_pred = cv.predict(K_test) assert np.mean(y_pred == y_test) >= 0 # test error is raised when the precomputed kernel is not array-like # or sparse assert_raises(ValueError, cv.fit, K_train.tolist(), y_train) def test_grid_search_precomputed_kernel_error_nonsquare(): # Test that grid search returns an error with a non-square precomputed # training kernel matrix K_train = np.zeros((10, 20)) y_train = np.ones((10, )) clf = SVC(kernel='precomputed') cv = GridSearchCV(clf, {'C': [0.1, 1.0]}) assert_raises(ValueError, cv.fit, K_train, y_train) class BrokenClassifier(BaseEstimator): """Broken classifier that cannot be fit twice""" def __init__(self, parameter=None): self.parameter = parameter def fit(self, X, y): assert not hasattr(self, 'has_been_fit_') self.has_been_fit_ = True def predict(self, X): return np.zeros(X.shape[0]) @ignore_warnings def test_refit(): # Regression test for bug in refitting # Simulates re-fitting a broken estimator; this used to break with # sparse SVMs. 
X = np.arange(100).reshape(10, 10) y = np.array([0] * 5 + [1] * 5) clf = GridSearchCV(BrokenClassifier(), [{'parameter': [0, 1]}], scoring="precision", refit=True) clf.fit(X, y) def test_refit_callable(): """ Test refit=callable, which adds flexibility in identifying the "best" estimator. """ def refit_callable(cv_results): """ A dummy function tests `refit=callable` interface. Return the index of a model that has the least `mean_test_score`. """ # Fit a dummy clf with `refit=True` to get a list of keys in # clf.cv_results_. X, y = make_classification(n_samples=100, n_features=4, random_state=42) clf = GridSearchCV(LinearSVC(random_state=42), {'C': [0.01, 0.1, 1]}, scoring='precision', refit=True) clf.fit(X, y) # Ensure that `best_index_ != 0` for this dummy clf assert clf.best_index_ != 0 # Assert every key matches those in `cv_results` for key in clf.cv_results_.keys(): assert key in cv_results return cv_results['mean_test_score'].argmin() X, y = make_classification(n_samples=100, n_features=4, random_state=42) clf = GridSearchCV(LinearSVC(random_state=42), {'C': [0.01, 0.1, 1]}, scoring='precision', refit=refit_callable) clf.fit(X, y) assert clf.best_index_ == 0 # Ensure `best_score_` is disabled when using `refit=callable` assert not hasattr(clf, 'best_score_') def test_refit_callable_invalid_type(): """ Test implementation catches the errors when 'best_index_' returns an invalid result. """ def refit_callable_invalid_type(cv_results): """ A dummy function tests when returned 'best_index_' is not integer. """ return None X, y = make_classification(n_samples=100, n_features=4, random_state=42) clf = GridSearchCV(LinearSVC(random_state=42), {'C': [0.1, 1]}, scoring='precision', refit=refit_callable_invalid_type) with pytest.raises(TypeError, match='best_index_ returned is not an integer'): clf.fit(X, y) @pytest.mark.parametrize('out_bound_value', [-1, 2]) @pytest.mark.parametrize('search_cv', [RandomizedSearchCV, GridSearchCV]) def test_refit_callable_out_bound(out_bound_value, search_cv): """ Test implementation catches the errors when 'best_index_' returns an out of bound result. """ def refit_callable_out_bound(cv_results): """ A dummy function tests when returned 'best_index_' is out of bounds. """ return out_bound_value X, y = make_classification(n_samples=100, n_features=4, random_state=42) clf = search_cv(LinearSVC(random_state=42), {'C': [0.1, 1]}, scoring='precision', refit=refit_callable_out_bound) with pytest.raises(IndexError, match='best_index_ index out of range'): clf.fit(X, y) def test_refit_callable_multi_metric(): """ Test refit=callable in multiple metric evaluation setting """ def refit_callable(cv_results): """ A dummy function tests `refit=callable` interface. Return the index of a model that has the least `mean_test_prec`. 
""" assert 'mean_test_prec' in cv_results return cv_results['mean_test_prec'].argmin() X, y = make_classification(n_samples=100, n_features=4, random_state=42) scoring = {'Accuracy': make_scorer(accuracy_score), 'prec': 'precision'} clf = GridSearchCV(LinearSVC(random_state=42), {'C': [0.01, 0.1, 1]}, scoring=scoring, refit=refit_callable) clf.fit(X, y) assert clf.best_index_ == 0 # Ensure `best_score_` is disabled when using `refit=callable` assert not hasattr(clf, 'best_score_') def test_gridsearch_nd(): # Pass X as list in GridSearchCV X_4d = np.arange(10 * 5 * 3 * 2).reshape(10, 5, 3, 2) y_3d = np.arange(10 * 7 * 11).reshape(10, 7, 11) check_X = lambda x: x.shape[1:] == (5, 3, 2) check_y = lambda x: x.shape[1:] == (7, 11) clf = CheckingClassifier( check_X=check_X, check_y=check_y, methods_to_check=["fit"], ) grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}) grid_search.fit(X_4d, y_3d).score(X, y) assert hasattr(grid_search, "cv_results_") def test_X_as_list(): # Pass X as list in GridSearchCV X = np.arange(100).reshape(10, 10) y = np.array([0] * 5 + [1] * 5) clf = CheckingClassifier( check_X=lambda x: isinstance(x, list), methods_to_check=["fit"], ) cv = KFold(n_splits=3) grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}, cv=cv) grid_search.fit(X.tolist(), y).score(X, y) assert hasattr(grid_search, "cv_results_") def test_y_as_list(): # Pass y as list in GridSearchCV X = np.arange(100).reshape(10, 10) y = np.array([0] * 5 + [1] * 5) clf = CheckingClassifier( check_y=lambda x: isinstance(x, list), methods_to_check=["fit"], ) cv = KFold(n_splits=3) grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}, cv=cv) grid_search.fit(X, y.tolist()).score(X, y) assert hasattr(grid_search, "cv_results_") @ignore_warnings def test_pandas_input(): # check cross_val_score doesn't destroy pandas dataframe types = [(MockDataFrame, MockDataFrame)] try: from pandas import Series, DataFrame types.append((DataFrame, Series)) except ImportError: pass X = np.arange(100).reshape(10, 10) y = np.array([0] * 5 + [1] * 5) for InputFeatureType, TargetType in types: # X dataframe, y series X_df, y_ser = InputFeatureType(X), TargetType(y) def check_df(x): return isinstance(x, InputFeatureType) def check_series(x): return isinstance(x, TargetType) clf = CheckingClassifier(check_X=check_df, check_y=check_series) grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}) grid_search.fit(X_df, y_ser).score(X_df, y_ser) grid_search.predict(X_df) assert hasattr(grid_search, "cv_results_") def test_unsupervised_grid_search(): # test grid-search with unsupervised estimator X, y = make_blobs(n_samples=50, random_state=0) km = KMeans(random_state=0, init="random", n_init=1) # Multi-metric evaluation unsupervised scoring = ['adjusted_rand_score', 'fowlkes_mallows_score'] for refit in ['adjusted_rand_score', 'fowlkes_mallows_score']: grid_search = GridSearchCV(km, param_grid=dict(n_clusters=[2, 3, 4]), scoring=scoring, refit=refit) grid_search.fit(X, y) # Both ARI and FMS can find the right number :) assert grid_search.best_params_["n_clusters"] == 3 # Single metric evaluation unsupervised grid_search = GridSearchCV(km, param_grid=dict(n_clusters=[2, 3, 4]), scoring='fowlkes_mallows_score') grid_search.fit(X, y) assert grid_search.best_params_["n_clusters"] == 3 # Now without a score, and without y grid_search = GridSearchCV(km, param_grid=dict(n_clusters=[2, 3, 4])) grid_search.fit(X) assert grid_search.best_params_["n_clusters"] == 4 def test_gridsearch_no_predict(): # test grid-search with an estimator 
without predict. # slight duplication of a test from KDE def custom_scoring(estimator, X): return 42 if estimator.bandwidth == .1 else 0 X, _ = make_blobs(cluster_std=.1, random_state=1, centers=[[0, 1], [1, 0], [0, 0]]) search = GridSearchCV(KernelDensity(), param_grid=dict(bandwidth=[.01, .1, 1]), scoring=custom_scoring) search.fit(X) assert search.best_params_['bandwidth'] == .1 assert search.best_score_ == 42 def test_param_sampler(): # test basic properties of param sampler param_distributions = {"kernel": ["rbf", "linear"], "C": uniform(0, 1)} sampler = ParameterSampler(param_distributions=param_distributions, n_iter=10, random_state=0) samples = [x for x in sampler] assert len(samples) == 10 for sample in samples: assert sample["kernel"] in ["rbf", "linear"] assert 0 <= sample["C"] <= 1 # test that repeated calls yield identical parameters param_distributions = {"C": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]} sampler = ParameterSampler(param_distributions=param_distributions, n_iter=3, random_state=0) assert [x for x in sampler] == [x for x in sampler] if sp_version >= (0, 16): param_distributions = {"C": uniform(0, 1)} sampler = ParameterSampler(param_distributions=param_distributions, n_iter=10, random_state=0) assert [x for x in sampler] == [x for x in sampler] def check_cv_results_array_types(search, param_keys, score_keys): # Check if the search `cv_results`'s array are of correct types cv_results = search.cv_results_ assert all(isinstance(cv_results[param], np.ma.MaskedArray) for param in param_keys) assert all(cv_results[key].dtype == object for key in param_keys) assert not any(isinstance(cv_results[key], np.ma.MaskedArray) for key in score_keys) assert all(cv_results[key].dtype == np.float64 for key in score_keys if not key.startswith('rank')) scorer_keys = search.scorer_.keys() if search.multimetric_ else ['score'] for key in scorer_keys: assert cv_results['rank_test_%s' % key].dtype == np.int32 def check_cv_results_keys(cv_results, param_keys, score_keys, n_cand): # Test the search.cv_results_ contains all the required results assert_array_equal(sorted(cv_results.keys()), sorted(param_keys + score_keys + ('params',))) assert all(cv_results[key].shape == (n_cand,) for key in param_keys + score_keys) def test_grid_search_cv_results(): X, y = make_classification(n_samples=50, n_features=4, random_state=42) n_splits = 3 n_grid_points = 6 params = [dict(kernel=['rbf', ], C=[1, 10], gamma=[0.1, 1]), dict(kernel=['poly', ], degree=[1, 2])] param_keys = ('param_C', 'param_degree', 'param_gamma', 'param_kernel') score_keys = ('mean_test_score', 'mean_train_score', 'rank_test_score', 'split0_test_score', 'split1_test_score', 'split2_test_score', 'split0_train_score', 'split1_train_score', 'split2_train_score', 'std_test_score', 'std_train_score', 'mean_fit_time', 'std_fit_time', 'mean_score_time', 'std_score_time') n_candidates = n_grid_points search = GridSearchCV(SVC(), cv=n_splits, param_grid=params, return_train_score=True) search.fit(X, y) cv_results = search.cv_results_ # Check if score and timing are reasonable assert all(cv_results['rank_test_score'] >= 1) assert (all(cv_results[k] >= 0) for k in score_keys if k != 'rank_test_score') assert (all(cv_results[k] <= 1) for k in score_keys if 'time' not in k and k != 'rank_test_score') # Check cv_results structure check_cv_results_array_types(search, param_keys, score_keys) check_cv_results_keys(cv_results, param_keys, score_keys, n_candidates) # Check masking cv_results = search.cv_results_ n_candidates = 
len(search.cv_results_['params']) assert all((cv_results['param_C'].mask[i] and cv_results['param_gamma'].mask[i] and not cv_results['param_degree'].mask[i]) for i in range(n_candidates) if cv_results['param_kernel'][i] == 'linear') assert all((not cv_results['param_C'].mask[i] and not cv_results['param_gamma'].mask[i] and cv_results['param_degree'].mask[i]) for i in range(n_candidates) if cv_results['param_kernel'][i] == 'rbf') def test_random_search_cv_results(): X, y = make_classification(n_samples=50, n_features=4, random_state=42) n_splits = 3 n_search_iter = 30 params = [{'kernel': ['rbf'], 'C': expon(scale=10), 'gamma': expon(scale=0.1)}, {'kernel': ['poly'], 'degree': [2, 3]}] param_keys = ('param_C', 'param_degree', 'param_gamma', 'param_kernel') score_keys = ('mean_test_score', 'mean_train_score', 'rank_test_score', 'split0_test_score', 'split1_test_score', 'split2_test_score', 'split0_train_score', 'split1_train_score', 'split2_train_score', 'std_test_score', 'std_train_score', 'mean_fit_time', 'std_fit_time', 'mean_score_time', 'std_score_time') n_cand = n_search_iter search = RandomizedSearchCV(SVC(), n_iter=n_search_iter, cv=n_splits, param_distributions=params, return_train_score=True) search.fit(X, y) cv_results = search.cv_results_ # Check results structure check_cv_results_array_types(search, param_keys, score_keys) check_cv_results_keys(cv_results, param_keys, score_keys, n_cand) n_candidates = len(search.cv_results_['params']) assert all((cv_results['param_C'].mask[i] and cv_results['param_gamma'].mask[i] and not cv_results['param_degree'].mask[i]) for i in range(n_candidates) if cv_results['param_kernel'][i] == 'linear') assert all((not cv_results['param_C'].mask[i] and not cv_results['param_gamma'].mask[i] and cv_results['param_degree'].mask[i]) for i in range(n_candidates) if cv_results['param_kernel'][i] == 'rbf') @pytest.mark.parametrize( "SearchCV, specialized_params", [(GridSearchCV, {'param_grid': {'C': [1, 10]}}), (RandomizedSearchCV, {'param_distributions': {'C': [1, 10]}, 'n_iter': 2})] ) def test_search_default_iid(SearchCV, specialized_params): # Test the IID parameter TODO: Clearly this test does something else??? # noise-free simple 2d-data X, y = make_blobs(centers=[[0, 0], [1, 0], [0, 1], [1, 1]], random_state=0, cluster_std=0.1, shuffle=False, n_samples=80) # split dataset into two folds that are not iid # first one contains data of all 4 blobs, second only from two. mask = np.ones(X.shape[0], dtype=np.bool) mask[np.where(y == 1)[0][::2]] = 0 mask[np.where(y == 2)[0][::2]] = 0 # this leads to perfect classification on one fold and a score of 1/3 on # the other # create "cv" for splits cv = [[mask, ~mask], [~mask, mask]] common_params = {'estimator': SVC(), 'cv': cv, 'return_train_score': True} search = SearchCV(**common_params, **specialized_params) search.fit(X, y) test_cv_scores = np.array( [search.cv_results_['split%d_test_score' % s][0] for s in range(search.n_splits_)] ) test_mean = search.cv_results_['mean_test_score'][0] test_std = search.cv_results_['std_test_score'][0] train_cv_scores = np.array( [search.cv_results_['split%d_train_score' % s][0] for s in range(search.n_splits_)] ) train_mean = search.cv_results_['mean_train_score'][0] train_std = search.cv_results_['std_train_score'][0] assert search.cv_results_['param_C'][0] == 1 # scores are the same as above assert_allclose(test_cv_scores, [1, 1. 
/ 3.]) assert_allclose(train_cv_scores, [1, 1]) # Unweighted mean/std is used assert test_mean == pytest.approx(np.mean(test_cv_scores)) assert test_std == pytest.approx(np.std(test_cv_scores)) # For the train scores, we do not take a weighted mean irrespective of # i.i.d. or not assert train_mean == pytest.approx(1) assert train_std == pytest.approx(0) def test_grid_search_cv_results_multimetric(): X, y = make_classification(n_samples=50, n_features=4, random_state=42) n_splits = 3 params = [dict(kernel=['rbf', ], C=[1, 10], gamma=[0.1, 1]), dict(kernel=['poly', ], degree=[1, 2])] grid_searches = [] for scoring in ({'accuracy': make_scorer(accuracy_score), 'recall': make_scorer(recall_score)}, 'accuracy', 'recall'): grid_search = GridSearchCV(SVC(), cv=n_splits, param_grid=params, scoring=scoring, refit=False) grid_search.fit(X, y) grid_searches.append(grid_search) compare_cv_results_multimetric_with_single(*grid_searches) def test_random_search_cv_results_multimetric(): X, y = make_classification(n_samples=50, n_features=4, random_state=42) n_splits = 3 n_search_iter = 30 # Scipy 0.12's stats dists do not accept seed, hence we use param grid params = dict(C=np.logspace(-4, 1, 3), gamma=np.logspace(-5, 0, 3, base=0.1)) for refit in (True, False): random_searches = [] for scoring in (('accuracy', 'recall'), 'accuracy', 'recall'): # If True, for multi-metric pass refit='accuracy' if refit: probability = True refit = 'accuracy' if isinstance(scoring, tuple) else refit else: probability = False clf = SVC(probability=probability, random_state=42) random_search = RandomizedSearchCV(clf, n_iter=n_search_iter, cv=n_splits, param_distributions=params, scoring=scoring, refit=refit, random_state=0) random_search.fit(X, y) random_searches.append(random_search) compare_cv_results_multimetric_with_single(*random_searches) compare_refit_methods_when_refit_with_acc( random_searches[0], random_searches[1], refit) def compare_cv_results_multimetric_with_single( search_multi, search_acc, search_rec): """Compare multi-metric cv_results with the ensemble of multiple single metric cv_results from single metric grid/random search""" assert search_multi.multimetric_ assert_array_equal(sorted(search_multi.scorer_), ('accuracy', 'recall')) cv_results_multi = search_multi.cv_results_ cv_results_acc_rec = {re.sub('_score$', '_accuracy', k): v for k, v in search_acc.cv_results_.items()} cv_results_acc_rec.update({re.sub('_score$', '_recall', k): v for k, v in search_rec.cv_results_.items()}) # Check if score and timing are reasonable, also checks if the keys # are present assert all((np.all(cv_results_multi[k] <= 1) for k in ( 'mean_score_time', 'std_score_time', 'mean_fit_time', 'std_fit_time'))) # Compare the keys, other than time keys, among multi-metric and # single metric grid search results. 
np.testing.assert_equal performs a # deep nested comparison of the two cv_results dicts np.testing.assert_equal({k: v for k, v in cv_results_multi.items() if not k.endswith('_time')}, {k: v for k, v in cv_results_acc_rec.items() if not k.endswith('_time')}) def compare_refit_methods_when_refit_with_acc(search_multi, search_acc, refit): """Compare refit multi-metric search methods with single metric methods""" assert search_acc.refit == refit if refit: assert search_multi.refit == 'accuracy' else: assert not search_multi.refit return # search cannot predict/score without refit X, y = make_blobs(n_samples=100, n_features=4, random_state=42) for method in ('predict', 'predict_proba', 'predict_log_proba'): assert_almost_equal(getattr(search_multi, method)(X), getattr(search_acc, method)(X)) assert_almost_equal(search_multi.score(X, y), search_acc.score(X, y)) for key in ('best_index_', 'best_score_', 'best_params_'): assert getattr(search_multi, key) == getattr(search_acc, key) def test_search_cv_results_rank_tie_breaking(): X, y = make_blobs(n_samples=50, random_state=42) # The two C values are close enough to give similar models # which would result in a tie of their mean cv-scores param_grid = {'C': [1, 1.001, 0.001]} grid_search = GridSearchCV(SVC(), param_grid=param_grid, return_train_score=True) random_search = RandomizedSearchCV(SVC(), n_iter=3, param_distributions=param_grid, return_train_score=True) for search in (grid_search, random_search): search.fit(X, y) cv_results = search.cv_results_ # Check tie breaking strategy - # Check that there is a tie in the mean scores between # candidates 1 and 2 alone assert_almost_equal(cv_results['mean_test_score'][0], cv_results['mean_test_score'][1]) assert_almost_equal(cv_results['mean_train_score'][0], cv_results['mean_train_score'][1]) assert not np.allclose(cv_results['mean_test_score'][1], cv_results['mean_test_score'][2]) assert not np.allclose(cv_results['mean_train_score'][1], cv_results['mean_train_score'][2]) # 'min' rank should be assigned to the tied candidates assert_almost_equal(search.cv_results_['rank_test_score'], [1, 1, 3]) def test_search_cv_results_none_param(): X, y = [[1], [2], [3], [4], [5]], [0, 0, 0, 0, 1] estimators = (DecisionTreeRegressor(), DecisionTreeClassifier()) est_parameters = {"random_state": [0, None]} cv = KFold() for est in estimators: grid_search = GridSearchCV(est, est_parameters, cv=cv, ).fit(X, y) assert_array_equal(grid_search.cv_results_['param_random_state'], [0, None]) @ignore_warnings() def test_search_cv_timing(): svc = LinearSVC(random_state=0) X = [[1, ], [2, ], [3, ], [4, ]] y = [0, 1, 1, 0] gs = GridSearchCV(svc, {'C': [0, 1]}, cv=2, error_score=0) rs = RandomizedSearchCV(svc, {'C': [0, 1]}, cv=2, error_score=0, n_iter=2) for search in (gs, rs): search.fit(X, y) for key in ['mean_fit_time', 'std_fit_time']: # NOTE The precision of time.time in windows is not high # enough for the fit/score times to be non-zero for trivial X and y assert np.all(search.cv_results_[key] >= 0) assert np.all(search.cv_results_[key] < 1) for key in ['mean_score_time', 'std_score_time']: assert search.cv_results_[key][1] >= 0 assert search.cv_results_[key][0] == 0.0 assert np.all(search.cv_results_[key] < 1) assert hasattr(search, "refit_time_") assert isinstance(search.refit_time_, float) assert search.refit_time_ >= 0 def test_grid_search_correct_score_results(): # test that correct scores are used n_splits = 3 clf = LinearSVC(random_state=0) X, y = make_blobs(random_state=0, centers=2) Cs = [.1, 1, 10] for score in 
['f1', 'roc_auc']: grid_search = GridSearchCV(clf, {'C': Cs}, scoring=score, cv=n_splits) cv_results = grid_search.fit(X, y).cv_results_ # Test scorer names result_keys = list(cv_results.keys()) expected_keys = (("mean_test_score", "rank_test_score") + tuple("split%d_test_score" % cv_i for cv_i in range(n_splits))) assert all(np.in1d(expected_keys, result_keys)) cv = StratifiedKFold(n_splits=n_splits) n_splits = grid_search.n_splits_ for candidate_i, C in enumerate(Cs): clf.set_params(C=C) cv_scores = np.array( list(grid_search.cv_results_['split%d_test_score' % s][candidate_i] for s in range(n_splits))) for i, (train, test) in enumerate(cv.split(X, y)): clf.fit(X[train], y[train]) if score == "f1": correct_score = f1_score(y[test], clf.predict(X[test])) elif score == "roc_auc": dec = clf.decision_function(X[test]) correct_score = roc_auc_score(y[test], dec) assert_almost_equal(correct_score, cv_scores[i]) # FIXME remove test_fit_grid_point as the function will be removed on 0.25 @ignore_warnings(category=FutureWarning) def test_fit_grid_point(): X, y = make_classification(random_state=0) cv = StratifiedKFold() svc = LinearSVC(random_state=0) scorer = make_scorer(accuracy_score) for params in ({'C': 0.1}, {'C': 0.01}, {'C': 0.001}): for train, test in cv.split(X, y): this_scores, this_params, n_test_samples = fit_grid_point( X, y, clone(svc), params, train, test, scorer, verbose=False) est = clone(svc).set_params(**params) est.fit(X[train], y[train]) expected_score = scorer(est, X[test], y[test]) # Test the return values of fit_grid_point assert_almost_equal(this_scores, expected_score) assert params == this_params assert n_test_samples == test.size # Should raise an error upon multimetric scorer assert_raise_message(ValueError, "For evaluating multiple scores, use " "sklearn.model_selection.cross_validate instead.", fit_grid_point, X, y, svc, params, train, test, {'score': scorer}, verbose=True) # FIXME remove test_fit_grid_point_deprecated as # fit_grid_point will be removed on 0.25 def test_fit_grid_point_deprecated(): X, y = make_classification(random_state=0) svc = LinearSVC(random_state=0) scorer = make_scorer(accuracy_score) msg = ("fit_grid_point is deprecated in version 0.23 " "and will be removed in version 0.25") params = {'C': 0.1} train, test = next(StratifiedKFold().split(X, y)) with pytest.warns(FutureWarning, match=msg): fit_grid_point(X, y, svc, params, train, test, scorer, verbose=False) def test_pickle(): # Test that a fit search can be pickled clf = MockClassifier() grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}, refit=True, cv=3) grid_search.fit(X, y) grid_search_pickled = pickle.loads(pickle.dumps(grid_search)) assert_array_almost_equal(grid_search.predict(X), grid_search_pickled.predict(X)) random_search = RandomizedSearchCV(clf, {'foo_param': [1, 2, 3]}, refit=True, n_iter=3, cv=3) random_search.fit(X, y) random_search_pickled = pickle.loads(pickle.dumps(random_search)) assert_array_almost_equal(random_search.predict(X), random_search_pickled.predict(X)) def test_grid_search_with_multioutput_data(): # Test search with multi-output estimator X, y = make_multilabel_classification(return_indicator=True, random_state=0) est_parameters = {"max_depth": [1, 2, 3, 4]} cv = KFold() estimators = [DecisionTreeRegressor(random_state=0), DecisionTreeClassifier(random_state=0)] # Test with grid search cv for est in estimators: grid_search = GridSearchCV(est, est_parameters, cv=cv) grid_search.fit(X, y) res_params = grid_search.cv_results_['params'] for cand_i in 
range(len(res_params)): est.set_params(**res_params[cand_i]) for i, (train, test) in enumerate(cv.split(X, y)): est.fit(X[train], y[train]) correct_score = est.score(X[test], y[test]) assert_almost_equal( correct_score, grid_search.cv_results_['split%d_test_score' % i][cand_i]) # Test with a randomized search for est in estimators: random_search = RandomizedSearchCV(est, est_parameters, cv=cv, n_iter=3) random_search.fit(X, y) res_params = random_search.cv_results_['params'] for cand_i in range(len(res_params)): est.set_params(**res_params[cand_i]) for i, (train, test) in enumerate(cv.split(X, y)): est.fit(X[train], y[train]) correct_score = est.score(X[test], y[test]) assert_almost_equal( correct_score, random_search.cv_results_['split%d_test_score' % i][cand_i]) def test_predict_proba_disabled(): # Test predict_proba when disabled on estimator. X = np.arange(20).reshape(5, -1) y = [0, 0, 1, 1, 1] clf = SVC(probability=False) gs = GridSearchCV(clf, {}, cv=2).fit(X, y) assert not hasattr(gs, "predict_proba") def test_grid_search_allows_nans(): # Test GridSearchCV with SimpleImputer X = np.arange(20, dtype=np.float64).reshape(5, -1) X[2, :] = np.nan y = [0, 0, 1, 1, 1] p = Pipeline([ ('imputer', SimpleImputer(strategy='mean', missing_values=np.nan)), ('classifier', MockClassifier()), ]) GridSearchCV(p, {'classifier__foo_param': [1, 2, 3]}, cv=2).fit(X, y) class FailingClassifier(BaseEstimator): """Classifier that raises a ValueError on fit()""" FAILING_PARAMETER = 2 def __init__(self, parameter=None): self.parameter = parameter def fit(self, X, y=None): if self.parameter == FailingClassifier.FAILING_PARAMETER: raise ValueError("Failing classifier failed as required") def predict(self, X): return np.zeros(X.shape[0]) def score(self, X=None, Y=None): return 0. def test_grid_search_failing_classifier(): # GridSearchCV with on_error != 'raise' # Ensures that a warning is raised and score reset where appropriate. X, y = make_classification(n_samples=20, n_features=10, random_state=0) clf = FailingClassifier() # refit=False because we only want to check that errors caused by fits # to individual folds will be caught and warnings raised instead. If # refit was done, then an exception would be raised on refit and not # caught by grid_search (expected behavior), and this would cause an # error in this test. gs = GridSearchCV(clf, [{'parameter': [0, 1, 2]}], scoring='accuracy', refit=False, error_score=0.0) assert_warns(FitFailedWarning, gs.fit, X, y) n_candidates = len(gs.cv_results_['params']) # Ensure that grid scores were set to zero as required for those fits # that are expected to fail. 
def get_cand_scores(i): return np.array(list(gs.cv_results_['split%d_test_score' % s][i] for s in range(gs.n_splits_))) assert all((np.all(get_cand_scores(cand_i) == 0.0) for cand_i in range(n_candidates) if gs.cv_results_['param_parameter'][cand_i] == FailingClassifier.FAILING_PARAMETER)) gs = GridSearchCV(clf, [{'parameter': [0, 1, 2]}], scoring='accuracy', refit=False, error_score=float('nan')) assert_warns(FitFailedWarning, gs.fit, X, y) n_candidates = len(gs.cv_results_['params']) assert all(np.all(np.isnan(get_cand_scores(cand_i))) for cand_i in range(n_candidates) if gs.cv_results_['param_parameter'][cand_i] == FailingClassifier.FAILING_PARAMETER) ranks = gs.cv_results_['rank_test_score'] # Check that succeeded estimators have lower ranks assert ranks[0] <= 2 and ranks[1] <= 2 # Check that failed estimator has the highest rank assert ranks[clf.FAILING_PARAMETER] == 3 assert gs.best_index_ != clf.FAILING_PARAMETER def test_grid_search_failing_classifier_raise(): # GridSearchCV with on_error == 'raise' raises the error X, y = make_classification(n_samples=20, n_features=10, random_state=0) clf = FailingClassifier() # refit=False because we want to test the behaviour of the grid search part gs = GridSearchCV(clf, [{'parameter': [0, 1, 2]}], scoring='accuracy', refit=False, error_score='raise') # FailingClassifier issues a ValueError so this is what we look for. assert_raises(ValueError, gs.fit, X, y) def test_parameters_sampler_replacement(): # raise warning if n_iter is bigger than total parameter space params = [{'first': [0, 1], 'second': ['a', 'b', 'c']}, {'third': ['two', 'values']}] sampler = ParameterSampler(params, n_iter=9) n_iter = 9 grid_size = 8 expected_warning = ('The total space of parameters %d is smaller ' 'than n_iter=%d. Running %d iterations. For ' 'exhaustive searches, use GridSearchCV.' % (grid_size, n_iter, grid_size)) assert_warns_message(UserWarning, expected_warning, list, sampler) # degenerates to GridSearchCV if n_iter the same as grid_size sampler = ParameterSampler(params, n_iter=8) samples = list(sampler) assert len(samples) == 8 for values in ParameterGrid(params): assert values in samples # test sampling without replacement in a large grid params = {'a': range(10), 'b': range(10), 'c': range(10)} sampler = ParameterSampler(params, n_iter=99, random_state=42) samples = list(sampler) assert len(samples) == 99 hashable_samples = ["a%db%dc%d" % (p['a'], p['b'], p['c']) for p in samples] assert len(set(hashable_samples)) == 99 # doesn't go into infinite loops params_distribution = {'first': bernoulli(.5), 'second': ['a', 'b', 'c']} sampler = ParameterSampler(params_distribution, n_iter=7) samples = list(sampler) assert len(samples) == 7 def test_stochastic_gradient_loss_param(): # Make sure the predict_proba works when loss is specified # as one of the parameters in the param_grid. param_grid = { 'loss': ['log'], } X = np.arange(24).reshape(6, -1) y = [0, 0, 0, 1, 1, 1] clf = GridSearchCV(estimator=SGDClassifier(loss='hinge'), param_grid=param_grid, cv=3) # When the estimator is not fitted, `predict_proba` is not available as the # loss is 'hinge'. 
assert not hasattr(clf, "predict_proba") clf.fit(X, y) clf.predict_proba(X) clf.predict_log_proba(X) # Make sure `predict_proba` is not available when setting loss=['hinge'] # in param_grid param_grid = { 'loss': ['hinge'], } clf = GridSearchCV(estimator=SGDClassifier(loss='hinge'), param_grid=param_grid, cv=3) assert not hasattr(clf, "predict_proba") clf.fit(X, y) assert not hasattr(clf, "predict_proba") def test_search_train_scores_set_to_false(): X = np.arange(6).reshape(6, -1) y = [0, 0, 0, 1, 1, 1] clf = LinearSVC(random_state=0) gs = GridSearchCV(clf, param_grid={'C': [0.1, 0.2]}, cv=3) gs.fit(X, y) def test_grid_search_cv_splits_consistency(): # Check if a one time iterable is accepted as a cv parameter. n_samples = 100 n_splits = 5 X, y = make_classification(n_samples=n_samples, random_state=0) gs = GridSearchCV(LinearSVC(random_state=0), param_grid={'C': [0.1, 0.2, 0.3]}, cv=OneTimeSplitter(n_splits=n_splits, n_samples=n_samples), return_train_score=True) gs.fit(X, y) gs2 = GridSearchCV(LinearSVC(random_state=0), param_grid={'C': [0.1, 0.2, 0.3]}, cv=KFold(n_splits=n_splits), return_train_score=True) gs2.fit(X, y) # Give generator as a cv parameter assert isinstance(KFold(n_splits=n_splits, shuffle=True, random_state=0).split(X, y), GeneratorType) gs3 = GridSearchCV(LinearSVC(random_state=0), param_grid={'C': [0.1, 0.2, 0.3]}, cv=KFold(n_splits=n_splits, shuffle=True, random_state=0).split(X, y), return_train_score=True) gs3.fit(X, y) gs4 = GridSearchCV(LinearSVC(random_state=0), param_grid={'C': [0.1, 0.2, 0.3]}, cv=KFold(n_splits=n_splits, shuffle=True, random_state=0), return_train_score=True) gs4.fit(X, y) def _pop_time_keys(cv_results): for key in ('mean_fit_time', 'std_fit_time', 'mean_score_time', 'std_score_time'): cv_results.pop(key) return cv_results # Check if generators are supported as cv and # that the splits are consistent np.testing.assert_equal(_pop_time_keys(gs3.cv_results_), _pop_time_keys(gs4.cv_results_)) # OneTimeSplitter is a non-re-entrant cv where split can be called only # once if ``cv.split`` is called once per param setting in GridSearchCV.fit # the 2nd and 3rd parameter will not be evaluated as no train/test indices # will be generated for the 2nd and subsequent cv.split calls. # This is a check to make sure cv.split is not called once per param # setting. 
np.testing.assert_equal({k: v for k, v in gs.cv_results_.items() if not k.endswith('_time')}, {k: v for k, v in gs2.cv_results_.items() if not k.endswith('_time')}) # Check consistency of folds across the parameters gs = GridSearchCV(LinearSVC(random_state=0), param_grid={'C': [0.1, 0.1, 0.2, 0.2]}, cv=KFold(n_splits=n_splits, shuffle=True), return_train_score=True) gs.fit(X, y) # As the first two param settings (C=0.1) and the next two param # settings (C=0.2) are same, the test and train scores must also be # same as long as the same train/test indices are generated for all # the cv splits, for both param setting for score_type in ('train', 'test'): per_param_scores = {} for param_i in range(4): per_param_scores[param_i] = list( gs.cv_results_['split%d_%s_score' % (s, score_type)][param_i] for s in range(5)) assert_array_almost_equal(per_param_scores[0], per_param_scores[1]) assert_array_almost_equal(per_param_scores[2], per_param_scores[3]) def test_transform_inverse_transform_round_trip(): clf = MockClassifier() grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}, cv=3, verbose=3) grid_search.fit(X, y) X_round_trip = grid_search.inverse_transform(grid_search.transform(X)) assert_array_equal(X, X_round_trip) def test_custom_run_search(): def check_results(results, gscv): exp_results = gscv.cv_results_ assert sorted(results.keys()) == sorted(exp_results) for k in results: if not k.endswith('_time'): # XXX: results['params'] is a list :| results[k] = np.asanyarray(results[k]) if results[k].dtype.kind == 'O': assert_array_equal(exp_results[k], results[k], err_msg='Checking ' + k) else: assert_allclose(exp_results[k], results[k], err_msg='Checking ' + k) def fit_grid(param_grid): return GridSearchCV(clf, param_grid, return_train_score=True).fit(X, y) class CustomSearchCV(BaseSearchCV): def __init__(self, estimator, **kwargs): super().__init__(estimator, **kwargs) def _run_search(self, evaluate): results = evaluate([{'max_depth': 1}, {'max_depth': 2}]) check_results(results, fit_grid({'max_depth': [1, 2]})) results = evaluate([{'min_samples_split': 5}, {'min_samples_split': 10}]) check_results(results, fit_grid([{'max_depth': [1, 2]}, {'min_samples_split': [5, 10]}])) # Using regressor to make sure each score differs clf = DecisionTreeRegressor(random_state=0) X, y = make_classification(n_samples=100, n_informative=4, random_state=0) mycv = CustomSearchCV(clf, return_train_score=True).fit(X, y) gscv = fit_grid([{'max_depth': [1, 2]}, {'min_samples_split': [5, 10]}]) results = mycv.cv_results_ check_results(results, gscv) for attr in dir(gscv): if (attr[0].islower() and attr[-1:] == '_' and attr not in {'cv_results_', 'best_estimator_', 'refit_time_', 'classes_'}): assert getattr(gscv, attr) == getattr(mycv, attr), \ "Attribute %s not equal" % attr def test__custom_fit_no_run_search(): class NoRunSearchSearchCV(BaseSearchCV): def __init__(self, estimator, **kwargs): super().__init__(estimator, **kwargs) def fit(self, X, y=None, groups=None, **fit_params): return self # this should not raise any exceptions NoRunSearchSearchCV(SVC()).fit(X, y) class BadSearchCV(BaseSearchCV): def __init__(self, estimator, **kwargs): super().__init__(estimator, **kwargs) with pytest.raises(NotImplementedError, match="_run_search not implemented."): # this should raise a NotImplementedError BadSearchCV(SVC()).fit(X, y) def test_empty_cv_iterator_error(): # Use global X, y # create cv cv = KFold(n_splits=3).split(X) # pop all of it, this should cause the expected ValueError [u for u in cv] # cv is empty now 
train_size = 100 ridge = RandomizedSearchCV(Ridge(), {'alpha': [1e-3, 1e-2, 1e-1]}, cv=cv, n_jobs=4) # assert that this raises an error with pytest.raises(ValueError, match='No fits were performed. ' 'Was the CV iterator empty\\? ' 'Were there no candidates\\?'): ridge.fit(X[:train_size], y[:train_size]) def test_random_search_bad_cv(): # Use global X, y class BrokenKFold(KFold): def get_n_splits(self, *args, **kw): return 1 # create bad cv cv = BrokenKFold(n_splits=3) train_size = 100 ridge = RandomizedSearchCV(Ridge(), {'alpha': [1e-3, 1e-2, 1e-1]}, cv=cv, n_jobs=4) # assert that this raises an error with pytest.raises(ValueError, match='cv.split and cv.get_n_splits returned ' 'inconsistent results. Expected \\d+ ' 'splits, got \\d+'): ridge.fit(X[:train_size], y[:train_size]) def test_n_features_in(): # make sure grid search and random search delegate n_features_in to the # best estimator n_features = 4 X, y = make_classification(n_features=n_features) gbdt = HistGradientBoostingClassifier() param_grid = {'max_iter': [3, 4]} gs = GridSearchCV(gbdt, param_grid) rs = RandomizedSearchCV(gbdt, param_grid, n_iter=1) assert not hasattr(gs, 'n_features_in_') assert not hasattr(rs, 'n_features_in_') gs.fit(X, y) rs.fit(X, y) assert gs.n_features_in_ == n_features assert rs.n_features_in_ == n_features def test_search_cv__pairwise_property_delegated_to_base_estimator(): """ Test implementation of BaseSearchCV has the _pairwise property which matches the _pairwise property of its estimator. This test make sure _pairwise is delegated to the base estimator. Non-regression test for issue #13920. """ est = BaseEstimator() attr_message = "BaseSearchCV _pairwise property must match estimator" for _pairwise_setting in [True, False]: setattr(est, '_pairwise', _pairwise_setting) cv = GridSearchCV(est, {'n_neighbors': [10]}) assert _pairwise_setting == cv._pairwise, attr_message def test_search_cv__pairwise_property_equivalence_of_precomputed(): """ Test implementation of BaseSearchCV has the _pairwise property which matches the _pairwise property of its estimator. This test ensures the equivalence of 'precomputed'. Non-regression test for issue #13920. 
""" n_samples = 50 n_splits = 2 X, y = make_classification(n_samples=n_samples, random_state=0) grid_params = {'n_neighbors': [10]} # defaults to euclidean metric (minkowski p = 2) clf = KNeighborsClassifier() cv = GridSearchCV(clf, grid_params, cv=n_splits) cv.fit(X, y) preds_original = cv.predict(X) # precompute euclidean metric to validate _pairwise is working X_precomputed = euclidean_distances(X) clf = KNeighborsClassifier(metric='precomputed') cv = GridSearchCV(clf, grid_params, cv=n_splits) cv.fit(X_precomputed, y) preds_precomputed = cv.predict(X_precomputed) attr_message = "GridSearchCV not identical with precomputed metric" assert (preds_original == preds_precomputed).all(), attr_message @pytest.mark.parametrize( "SearchCV, param_search", [(GridSearchCV, {'a': [0.1, 0.01]}), (RandomizedSearchCV, {'a': uniform(1, 3)})] ) def test_scalar_fit_param(SearchCV, param_search): # unofficially sanctioned tolerance for scalar values in fit_params # non-regression test for: # https://github.com/scikit-learn/scikit-learn/issues/15805 class TestEstimator(BaseEstimator, ClassifierMixin): def __init__(self, a=None): self.a = a def fit(self, X, y, r=None): self.r_ = r def predict(self, X): return np.zeros(shape=(len(X))) model = SearchCV(TestEstimator(), param_search) X, y = make_classification(random_state=42) model.fit(X, y, r=42) assert model.best_estimator_.r_ == 42 @pytest.mark.parametrize( "SearchCV, param_search", [(GridSearchCV, {'alpha': [0.1, 0.01]}), (RandomizedSearchCV, {'alpha': uniform(0.01, 0.1)})] ) def test_scalar_fit_param_compat(SearchCV, param_search): # check support for scalar values in fit_params, for instance in LightGBM # that do not exactly respect the scikit-learn API contract but that we do # not want to break without an explicit deprecation cycle and API # recommendations for implementing early stopping with a user provided # validation set. non-regression test for: # https://github.com/scikit-learn/scikit-learn/issues/15805 X_train, X_valid, y_train, y_valid = train_test_split( *make_classification(random_state=42), random_state=42 ) class _FitParamClassifier(SGDClassifier): def fit(self, X, y, sample_weight=None, tuple_of_arrays=None, scalar_param=None, callable_param=None): super().fit(X, y, sample_weight=sample_weight) assert scalar_param > 0 assert callable(callable_param) # The tuple of arrays should be preserved as tuple. assert isinstance(tuple_of_arrays, tuple) assert tuple_of_arrays[0].ndim == 2 assert tuple_of_arrays[1].ndim == 1 return self def _fit_param_callable(): pass model = SearchCV( _FitParamClassifier(), param_search ) # NOTE: `fit_params` should be data dependent (e.g. `sample_weight`) which # is not the case for the following parameters. But this abuse is common in # popular third-party libraries and we should tolerate this behavior for # now and be careful not to break support for those without following # proper deprecation cycle. fit_params = { 'tuple_of_arrays': (X_valid, y_valid), 'callable_param': _fit_param_callable, 'scalar_param': 42, } model.fit(X_train, y_train, **fit_params)
38.18767
79
0.629904
3,692
0.052744
0
0
11,765
0.168076
0
0
17,052
0.243607
8a20b188a03474ef2ca2b4263906e89597ed6c86
6,029
py
Python
src/economy/migrations/0027_zettlebalance_zettlereceipt.py
bornhack/bornhack-website
40ed0875f5129a4c8ae1887e33e7dedb4981dadc
[ "BSD-3-Clause" ]
7
2017-04-14T15:28:29.000Z
2021-09-10T09:45:38.000Z
src/economy/migrations/0027_zettlebalance_zettlereceipt.py
bornhack/bornhack-website
40ed0875f5129a4c8ae1887e33e7dedb4981dadc
[ "BSD-3-Clause" ]
799
2016-04-28T09:31:50.000Z
2022-03-29T09:05:02.000Z
src/economy/migrations/0027_zettlebalance_zettlereceipt.py
bornhack/bornhack-website
40ed0875f5129a4c8ae1887e33e7dedb4981dadc
[ "BSD-3-Clause" ]
35
2016-04-28T09:23:53.000Z
2021-05-02T12:36:01.000Z
# Generated by Django 3.2.7 on 2021-09-13 03:52

import uuid

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("economy", "0026_alter_clearhaussettlement_options"),
    ]

    operations = [
        migrations.CreateModel(
            name="ZettleBalance",
            fields=[
                (
                    "uuid",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("created", models.DateTimeField(auto_now_add=True)),
                ("updated", models.DateTimeField(auto_now=True)),
                (
                    "statement_time",
                    models.DateTimeField(
                        help_text="The date and time this movement was added to the account statement."
                    ),
                ),
                (
                    "payment_time",
                    models.DateTimeField(
                        blank=True,
                        help_text="The date and time this payment was made. Can be empty if this transaction is not a customer payment.",
                        null=True,
                    ),
                ),
                (
                    "payment_reference",
                    models.IntegerField(
                        blank=True,
                        help_text="The reference for this payment. Can be empty if this transaction is not a customer payment.",
                        null=True,
                    ),
                ),
                (
                    "description",
                    models.CharField(
                        help_text="The description of this transaction.", max_length=100
                    ),
                ),
                (
                    "amount",
                    models.DecimalField(
                        decimal_places=2,
                        help_text="The amount of this transaction",
                        max_digits=12,
                    ),
                ),
                (
                    "balance",
                    models.DecimalField(
                        decimal_places=2,
                        help_text="Our balance in Zettle's systems after this transaction.",
                        max_digits=12,
                    ),
                ),
            ],
            options={
                "ordering": ["-statement_time"],
                "get_latest_by": ["statement_time"],
            },
        ),
        migrations.CreateModel(
            name="ZettleReceipt",
            fields=[
                (
                    "uuid",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("created", models.DateTimeField(auto_now_add=True)),
                ("updated", models.DateTimeField(auto_now=True)),
                (
                    "zettle_created",
                    models.DateTimeField(
                        help_text="The date and time this receipt was created on Zettle's end"
                    ),
                ),
                (
                    "receipt_number",
                    models.IntegerField(help_text="The Zettle receipt number."),
                ),
                (
                    "vat",
                    models.DecimalField(
                        decimal_places=2,
                        help_text="The part of the total amount which is VAT",
                        max_digits=12,
                    ),
                ),
                (
                    "total",
                    models.DecimalField(
                        decimal_places=2,
                        help_text="The total amount the customer paid",
                        max_digits=12,
                    ),
                ),
                (
                    "fee",
                    models.DecimalField(
                        decimal_places=2,
                        help_text="The payment fee BornHack has to pay to receive this payment",
                        max_digits=12,
                    ),
                ),
                (
                    "net",
                    models.DecimalField(
                        decimal_places=2,
                        help_text="The part of the payment which goes to BornHack after fees have been subtracted.",
                        max_digits=12,
                    ),
                ),
                (
                    "payment_method",
                    models.CharField(help_text="The payment method", max_length=100),
                ),
                (
                    "card_issuer",
                    models.CharField(
                        blank=True,
                        help_text="The card issuer. Can be empty if this was not a card payment.",
                        max_length=100,
                        null=True,
                    ),
                ),
                (
                    "staff",
                    models.CharField(
                        help_text="The Zettle account which was used to make this sale.",
                        max_length=100,
                    ),
                ),
                (
                    "description",
                    models.CharField(
                        help_text="The description of this transaction.", max_length=255
                    ),
                ),
                ("sold_via", models.CharField(help_text="Always POS?", max_length=100)),
            ],
            options={
                "ordering": ["-zettle_created"],
                "get_latest_by": ["zettle_created"],
            },
        ),
    ]
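# A minimal usage sketch (assumes a standard Django project layout with the
# "economy" app installed; both commands are Django's own management commands):
#
#   python manage.py migrate economy          # apply this migration
#   python manage.py sqlmigrate economy 0027  # inspect the SQL it will run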
35.674556
137
0.379499
5,923
0.982418
0
0
0
0
0
0
1,369
0.227069
8a20b1d12635ada6c636b100e165021b86485320
2,854
py
Python
main.py
vkumarma/Complete-Interpreter
5ec15ea84b0e7e735328511cc504efa43638f720
[ "MIT" ]
null
null
null
main.py
vkumarma/Complete-Interpreter
5ec15ea84b0e7e735328511cc504efa43638f720
[ "MIT" ]
null
null
null
main.py
vkumarma/Complete-Interpreter
5ec15ea84b0e7e735328511cc504efa43638f720
[ "MIT" ]
null
null
null
import re
import sys


class Lexer:
    def __init__(self, inp_str):
        self.index = 0
        self.s = inp_str

    def get_char(self):
        # Returns the next character, or None once the input is exhausted.
        if self.index < len(self.s):
            var = self.s[self.index]
            self.index += 1
            return var


input_file = open(str(sys.argv[1]), 'r')  # Open file for reading
line = input_file.read()
# "if z then while x * 4 - 2 do skip endwhile else x := 7 endif; y := 1"
input_string = line.strip("\n")

lexer = Lexer(input_string)
hashtable = {}
tokens_list = []


def token_check(lexeme):
    # Classify a lexeme and record it in the symbol table and the token list.
    # (Parameter renamed from `input` to avoid shadowing the builtin.)
    if re.fullmatch("if|then|else|endif|while|do|endwhile|skip", lexeme):
        hashtable[lexeme] = "KEYWORD"
        tokens_list.append(lexeme)
    elif re.search("([a-z]|[A-Z])([a-z]|[A-Z]|[0-9])*", lexeme):
        hashtable[lexeme] = "IDENTIFIER"
        tokens_list.append(lexeme)
    elif re.search("[0-9]+", lexeme):
        hashtable[lexeme] = "NUMBER"
        tokens_list.append(lexeme)
    elif re.fullmatch(r"\+|\-|\*|/|\(|\)|:=|;", lexeme):  # raw string avoids invalid escape sequences
        hashtable[lexeme] = "SYMBOL"
        tokens_list.append(lexeme)
    else:
        hashtable[lexeme] = "ERROR READING"


def digit(curr_char, lexer):
    # Consume a run of digits; the one-character lookahead is pushed onto `new`.
    sub = ""
    while curr_char.isdigit():
        sub += curr_char
        curr_char = lexer.get_char()
        if curr_char is None:
            break
    new.append(curr_char)
    return sub


def longest_sub_string(curr_char, lexer):
    # Consume the longest run of letters/digits (keywords and identifiers).
    sub = ""
    while curr_char.isalpha() or curr_char.isdigit():
        sub += curr_char
        curr_char = lexer.get_char()
        if curr_char is None:
            break
    new.append(curr_char)
    return sub


def symbol(curr_char, lexer):
    # Single-character symbol; push the following character as lookahead.
    sym = curr_char
    curr_char = lexer.get_char()
    new.append(curr_char)
    return sym


def assignment(curr_char, lexer):
    # ':' either starts ':=' or stands alone. Push the lookahead either way;
    # the original pushed the character *after* the lookahead, silently
    # dropping one character when ':' was not followed by '='.
    sub = curr_char
    next_char = lexer.get_char()
    if next_char == "=":
        sub += next_char
        new.append(next_char)
        return sub
    new.append(next_char)
    return sub


new = []  # one-character lookahead stack

curr_char = lexer.get_char()
while curr_char is not None:
    while curr_char == ' ' or curr_char == '':
        curr_char = lexer.get_char()
        if curr_char is None:
            break
    if curr_char is None:  # input ended in whitespace
        break
    if curr_char.isdigit():
        token_check(digit(curr_char, lexer))
        curr_char = new.pop()
    elif curr_char.isalpha():
        token_check(longest_sub_string(curr_char, lexer))
        curr_char = new.pop()
    elif curr_char in "+-/*();":
        token_check(symbol(curr_char, lexer))
        curr_char = new.pop()
    elif curr_char == ":":
        token_check(assignment(curr_char, lexer))
        curr_char = new.pop()
        if curr_char == "=":
            curr_char = lexer.get_char()
    else:
        token_check(curr_char)
        curr_char = lexer.get_char()


def tokens():
    return hashtable

# print(tokens_list)
# print(tokens())
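# A hedged usage sketch (assumes this file is saved as main.py and that
# source.txt exists; the expected output below is inferred from the code
# above, not taken from the original repository):
#
#   $ echo "x := 7; y := x * 2" > source.txt
#   $ python main.py source.txt
#
# Afterwards tokens_list should hold ['x', ':=', '7', ';', 'y', ':=', 'x', '*', '2']
# and tokens() should map e.g. 'x' -> 'IDENTIFIER', ':=' -> 'SYMBOL', '7' -> 'NUMBER'.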
23.983193
72
0.590049
243
0.085144
0
0
0
0
0
0
377
0.132095
8a20dee928bb3a353769ebc5d7c40156ab5eb131
306
py
Python
deduplicate.py
Ghostofapacket/NewsGrabber-Deduplicate
0b8152af2e1c6c87cf8540970f42084b96a99d9c
[ "Unlicense" ]
null
null
null
deduplicate.py
Ghostofapacket/NewsGrabber-Deduplicate
0b8152af2e1c6c87cf8540970f42084b96a99d9c
[ "Unlicense" ]
null
null
null
deduplicate.py
Ghostofapacket/NewsGrabber-Deduplicate
0b8152af2e1c6c87cf8540970f42084b96a99d9c
[ "Unlicense" ]
null
null
null
import sys
sys.path.append('/usr/local/lib/python3.4/site-packages/')  # make the system-wide warc_dedup install importable

from warc_dedup import deduplicate


def main():
    if len(sys.argv) == 1:
        raise Exception('Please provide the WARC file as an argument.')
    deduplicate.Warc(*sys.argv[1:]).deduplicate()


if __name__ == '__main__':
    main()
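# A minimal usage sketch (the WARC filename is illustrative; the Warc
# signature is taken on faith from the warc_dedup import above):
#
#   $ python deduplicate.py crawl-00001.warc.gz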
20.4
68
0.679739
0
0
0
0
0
0
0
0
94
0.30719
8a20fc9b93bd3fc7e19c79190d5875b049bc7526
4,136
py
Python
build/lib/FinMesh/usgov/__init__.py
johnjdailey/FinMesh
64048b02bfec1a24de840877b38e82f4fa813d22
[ "MIT" ]
1
2020-08-14T16:09:54.000Z
2020-08-14T16:09:54.000Z
build/lib/FinMesh/usgov/__init__.py
johnjdailey/FinMesh
64048b02bfec1a24de840877b38e82f4fa813d22
[ "MIT" ]
null
null
null
build/lib/FinMesh/usgov/__init__.py
johnjdailey/FinMesh
64048b02bfec1a24de840877b38e82f4fa813d22
[ "MIT" ]
null
null
null
import os import requests import xmltodict import csv import json # # # # # # # # # # # FRED DATA BELOW # # # # # # # # # # # FRED_BASE_URL = 'https://api.stlouisfed.org/fred/' GEOFRED_BASE_URL = 'https://api.stlouisfed.org/geofred/' def append_fred_token(url): token = os.getenv('FRED_TOKEN') return f'{url}&api_key={token}' FRED_SERIES_OBS_URL = FRED_BASE_URL + 'series/observations?' def fred_series(series, file_type=None, realtime_start=None, realtime_end=None, limit=None, offset=None, sort_order=None, observation_start=None, observation_end=None, units=None, frequency=None, aggregation_method=None, output_type=None, vintage_dates=None): ## Returns time series historical data for the requested FRED data. url = FRED_SERIES_OBS_URL + f'series_id={series}' if file_type: url += f'&file_type={file_type}' if realtime_start: url += f'&realtime_start={realtime_start}' if realtime_end: url += f'&realtime_end={realtime_end}' if limit: url += f'&limit={limit}' if offset: url += f'&offset={offset}' if sort_order: url += f'&sort_order={sort_order}' if observation_start: url += f'&observation_start={observation_start}' if observation_end: url += f'&observation_end={observation_end}' if units: url += f'&units={units}' if frequency: url += f'&frequency={frequency}' if aggregation_method: url += f'&aggregation_method={aggregation_method}' if output_type: url += f'&output_type={output_type}' if vintage_dates: url += f'&vintage_dates={vintage_dates}' url = append_fred_token(url) result = requests.get(url) return result.text GEOFRED_SERIES_META_URL = GEOFRED_BASE_URL + 'series/group?' def geofred_series_meta(series_id, file_type=None): ## Returns meta data for the requested FRED data. url = GEOFRED_SERIES_META_URL + f'series_id={series_id}' if file_type: url += f'&file_type={file_type}' url = append_fred_token(url) result = requests.get(url) return result.text GEOFRED_REGIONAL_SERIES_URL = GEOFRED_BASE_URL + 'series/data?' def geofred_regional_series(series_id, file_type=None, date=None, start_date=None): ## Returns the historical, geographically organized time series data for the requested FRED data. url = GEOFRED_REGIONAL_SERIES_URL + f'series_id={series_id}' if file_type: url += f'&file_type={file_type}' if date: url += f'&date={date}' if start_date: url += f'&start_date={start_date}' url = append_fred_token(url) result = requests.get(url) return result.text # # # # # # # # # # # # # # # # # GOVERNMENT YIELD CURVE DATA # # # # # # # # # # # # # # # # # GOV_YIELD_URL = 'https://data.treasury.gov/feed.svc/DailyTreasuryYieldCurveRateData?$filter=month(NEW_DATE)%20eq%204%20and%20year(NEW_DATE)%20eq%202019' def get_yield(): ## Returns government treasury bond yields. Organized in Python dictionary format by bond length. # Formatting of XML to Python Dict curve = requests.get(GOV_YIELD_URL) parse_curve = xmltodict.parse(curve.content) # This is based around retrieving the n last dates or average of n days. feed = parse_curve['feed'] entry = feed['entry'] last_entry = len(entry)-1 content = entry[last_entry]['content']['m:properties'] # Dict that contains the whole yield curve so there is no need to bring in each rate. 
yield_curve_values = { 'date' : entry[last_entry]['content']['m:properties']['d:NEW_DATE']['#text'], '1month' : float(content['d:BC_1MONTH']['#text']), '2month' : float(content['d:BC_2MONTH']['#text']), '3month' : float(content['d:BC_3MONTH']['#text']), '6month' : float(content['d:BC_6MONTH']['#text']), '1year' : float(content['d:BC_1YEAR']['#text']), '2year' : float(content['d:BC_2YEAR']['#text']), '3year' : float(content['d:BC_3YEAR']['#text']), '5year' : float(content['d:BC_5YEAR']['#text']), '10year' : float(content['d:BC_10YEAR']['#text']), '20year' : float(content['d:BC_20YEAR']['#text']), '30year' : float(content['d:BC_30YEAR']['#text']), } return yield_curve_values
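# A hedged usage sketch (assumes a valid FRED API key in the FRED_TOKEN
# environment variable; 'GDP' is a real FRED series id, used here only as
# an illustration):
#
#   import os
#   os.environ['FRED_TOKEN'] = '<your-api-key>'
#   print(fred_series('GDP', file_type='json', limit=5))  # five GDP observations
#   print(get_yield()['10year'])                          # latest 10-year treasury yield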
44
259
0.676499
0
0
0
0
0
0
0
0
1,869
0.451886
8a22e67655b4062b0aecbc7e8062db32e1383d10
1,268
py
Python
settings.py
Cradac/mattermost-octane-integration
6a3cb4d2e0854cbf190f66467b604e6e4344a907
[ "MIT" ]
null
null
null
settings.py
Cradac/mattermost-octane-integration
6a3cb4d2e0854cbf190f66467b604e6e4344a907
[ "MIT" ]
null
null
null
settings.py
Cradac/mattermost-octane-integration
6a3cb4d2e0854cbf190f66467b604e6e4344a907
[ "MIT" ]
null
null
null
'''
This is the settings file for the Mattermost-Octane bridge.
You can change various variables here to customize and set up the client.
'''

'''----------------------Mattermost Webhook Configuration----------------------'''
#URL of the webhook from Mattermost. To create one, go to `Main Menu -> Integrations -> Incoming Webhooks` and press `Add Incoming Webhook`
mm_webhook_url = 'http://localhost:8065/hooks/yuro8xrfeffj787cj1bwc4ziue'

#Override the channel to send the notifications to; use the channel name as a string
mm_channel = None

#Set a custom username to display in Mattermost
mm_username = 'Defect Notification'

#Set a custom profile image for the client
mm_profileimage = 'https://i.imgur.com/7Wg3Tgs.png' #Telekom T image

#The latter two overrides need to be enabled in the settings.json of the Mattermost server

'''----------------------------Flask Configuration----------------------------'''
#Set the external IP for the Flask server that provides the webhook endpoint for ALM Octane
#local: 127.0.0.1 / False
#default external: 0.0.0.0 (listens on all available external addresses)
external_ip = False

#default: 5000
port = 5000

#The external webhook verification token can be set here; if set to `None` it will be autogenerated & changed on each startup.
wh_token = None
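#A hypothetical consumption sketch (the module and payload names are
#assumptions, not part of this settings file; Mattermost incoming webhooks
#accept a JSON body with text/username/channel/icon_url fields):
#
#   import requests
#   import settings
#
#   payload = {'text': 'New defect filed', 'username': settings.mm_username,
#              'icon_url': settings.mm_profileimage}
#   if settings.mm_channel:
#       payload['channel'] = settings.mm_channel
#   requests.post(settings.mm_webhook_url, json=payload)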
34.27027
138
0.706625
0
0
0
0
0
0
0
0
1,123
0.885647
8a23dbe8e2d12d3a5feea4e2c5b3fc204f837e5d
10
py
Python
python/testData/console/indent7.after.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/console/indent7.after.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/console/indent7.after.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
print(1)
10
10
0.6
0
0
0
0
0
0
0
0
0
0