repo_name (string, 7-94 chars) | repo_path (string, 4-237 chars) | repo_head_hexsha (string, 40 chars) | content (string, 10-680k chars) | apis (string, 2-680k chars)
---|---|---|---|---|
rafaelols/airflow | dags/treinos_igti/treino03.py | 8e4af5fb576a9568af476c0607819649b724adea | from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.operators.python_operator import PythonOperator, BranchPythonOperator
from datetime import datetime, timedelta
import pandas as pd
import random
# Default args definition
default_args = {
'owner': 'Rafael',
'depends_on_past': False,
'start_date': datetime(2020, 11, 29, 18, 20),
'email': ['example1@example.com', 'example2@example.com'],
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
    'retry_delay': timedelta(minutes=1)
}
# Dag definition
dag = DAG(
'treino-03',
description="Extrai dados do Titanic e calcula idade media para homens ou mulheres",
default_args = default_args,
schedule_interval='*/20 * * * *'
)
get_data = BashOperator(
task_id='get-data',
bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o /usr/local/airflow/data/train.csv',
dag=dag
)
def sorteia_h_m():
return random.choice(['male', 'female'])
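# The value returned by the callable above is pushed to XCom automatically by
# the PythonOperator, so the branch callable below can read it via xcom_pull.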
escolhe_h_m = PythonOperator(
task_id='escolhe-h-m',
python_callable=sorteia_h_m,
dag=dag
)
def MouF(**context):
    value = context['task_instance'].xcom_pull(task_ids='escolhe-h-m')
if value == 'male':
return 'branch_homem'
else:
return 'branch_mulher'
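# BranchPythonOperator runs MouF and follows only the task whose task_id is
# returned; the task in the other branch is skipped.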
male_female = BranchPythonOperator(
task_id='condicional',
python_callable=MouF,
provide_context=True,
dag=dag
)
def mean_homem():
df = pd.read_csv('/usr/local/airflow/data/train.csv')
med = df.loc[df.Sex == 'male'].Age.mean()
print(f'Media de idade dos homens no Titanic: {med}')
branch_homem = PythonOperator(
task_id='branch_homem',
python_callable=mean_homem,
dag=dag
)
def mean_mulher():
df = pd.read_csv('/usr/local/airflow/data/train.csv')
med = df.loc[df.Sex == 'female'].Age.mean()
print(f'Media de idade das mulheres no Titanic: {med}')
branch_mulher = PythonOperator(
task_id='branch_mulher',
python_callable=mean_mulher,
dag=dag
)
get_data >> escolhe_h_m >> male_female >> [branch_homem, branch_mulher]
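# Pipeline summary: download the Titanic CSV, randomly pick a sex, branch on
# the choice, then compute the mean age for the selected group.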
| [((593, 764), 'airflow.DAG', 'DAG', (['"""treino-03"""'], {'description': '"""Extrai dados do Titanic e calcula idade media para homens ou mulheres"""', 'default_args': 'default_args', 'schedule_interval': '"""*/20 * * * *"""'}), "('treino-03', description=\n 'Extrai dados do Titanic e calcula idade media para homens ou mulheres',\n default_args=default_args, schedule_interval='*/20 * * * *')\n", (596, 764), False, 'from airflow import DAG\n'), ((788, 983), 'airflow.operators.bash_operator.BashOperator', 'BashOperator', ([], {'task_id': '"""get-data"""', 'bash_command': '"""curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o /usr/local/airflow/data/train.csv"""', 'dag': 'dag'}), "(task_id='get-data', bash_command=\n 'curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o /usr/local/airflow/data/train.csv'\n , dag=dag)\n", (800, 983), False, 'from airflow.operators.bash_operator import BashOperator\n'), ((1068, 1143), 'airflow.operators.python_operator.PythonOperator', 'PythonOperator', ([], {'task_id': '"""escolhe-h-m"""', 'python_callable': 'sorteia_h_m', 'dag': 'dag'}), "(task_id='escolhe-h-m', python_callable=sorteia_h_m, dag=dag)\n", (1082, 1143), False, 'from airflow.operators.python_operator import PythonOperator, BranchPythonOperator\n'), ((1359, 1459), 'airflow.operators.python_operator.BranchPythonOperator', 'BranchPythonOperator', ([], {'task_id': '"""condicional"""', 'python_callable': 'MouF', 'provide_context': '(True)', 'dag': 'dag'}), "(task_id='condicional', python_callable=MouF,\n provide_context=True, dag=dag)\n", (1379, 1459), False, 'from airflow.operators.python_operator import PythonOperator, BranchPythonOperator\n'), ((1671, 1746), 'airflow.operators.python_operator.PythonOperator', 'PythonOperator', ([], {'task_id': '"""branch_homem"""', 'python_callable': 'mean_homem', 'dag': 'dag'}), "(task_id='branch_homem', python_callable=mean_homem, dag=dag)\n", (1685, 1746), False, 'from airflow.operators.python_operator import PythonOperator, BranchPythonOperator\n'), ((1964, 2041), 'airflow.operators.python_operator.PythonOperator', 'PythonOperator', ([], {'task_id': '"""branch_mulher"""', 'python_callable': 'mean_mulher', 'dag': 'dag'}), "(task_id='branch_mulher', python_callable=mean_mulher, dag=dag)\n", (1978, 2041), False, 'from airflow.operators.python_operator import PythonOperator, BranchPythonOperator\n'), ((354, 384), 'datetime.datetime', 'datetime', (['(2020)', '(11)', '(29)', '(18)', '(20)'], {}), '(2020, 11, 29, 18, 20)\n', (362, 384), False, 'from datetime import datetime, timedelta\n'), ((546, 566), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(1)'}), '(minutes=1)\n', (555, 566), False, 'from datetime import datetime, timedelta\n'), ((1019, 1052), 'random.choice', 'random.choice', (["['male', 'female']"], {}), "(['male', 'female'])\n", (1032, 1052), False, 'import random\n'), ((1502, 1550), 'pandas.read_csv', 'pd.read_csv', (['"""/usr/local/airflow/data/train.csv"""'], {}), "('/usr/local/airflow/data/train.csv')\n", (1513, 1550), True, 'import pandas as pd\n'), ((1790, 1838), 'pandas.read_csv', 'pd.read_csv', (['"""/usr/local/airflow/data/train.csv"""'], {}), "('/usr/local/airflow/data/train.csv')\n", (1801, 1838), True, 'import pandas as pd\n')] |
DannyPol/flatcam | tclCommands/TclCommandListSys.py | 25a8634d0658e98b7fae31a095f8bef40c1b3067 | # ##########################################################
# FlatCAM: 2D Post-processing for Manufacturing #
# File Author: Marius Adrian Stanciu (c) #
# Date: 8/17/2019 #
# MIT Licence #
# ##########################################################
from tclCommands.TclCommand import *
class TclCommandListSys(TclCommand):
"""
Tcl shell command to get the list of system variables
example:
list_sys
"""
# List of all command aliases, to be able use old names for backward compatibility (add_poly, add_polygon)
aliases = ['list_sys', 'listsys']
description = '%s %s' % ("--", "Outputs in Tcl Shell the list with the names of system variables.")
# Dictionary of types from Tcl command, needs to be ordered
arg_names = collections.OrderedDict([
('selection', str),
])
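    # 'selection' is an optional prefix; execute() uses it to filter the
    # returned system variable names.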
# Dictionary of types from Tcl command, needs to be ordered , this is for options like -optionname value
option_types = collections.OrderedDict([
])
# array of mandatory options for current Tcl command: required = {'name','outname'}
required = []
# structured help for current command, args needs to be ordered
help = {
'main': "Returns the list of the names of system variables.\n"
"Without an argument it will list all the system parameters. "
"As an argument use first letter or first letters from the name "
"of the system variable.\n"
"In that case it will list only the system variables that starts with that string.\n"
"Main categories start with: gerber or excellon or geometry or cncjob or global.\n"
"Note: Use 'get_sys system variable' to get the value and 'set_sys system variable value' to set it.\n",
'args': collections.OrderedDict([
]),
'examples': ['list_sys',
'list_sys ser',
'list_sys gerber',
'list_sys cncj']
}
def execute(self, args, unnamed_args):
"""
:param args:
:param unnamed_args:
:return:
"""
if 'selection' in args:
argument = args['selection']
return str([k for k in self.app.defaults.keys() if str(k).startswith(str(argument))])
else:
ret_val = list(self.app.defaults.keys())
return str(ret_val)
# return str([*self.app.defaults])
| [] |
MuellerSeb/ogs5py | ogs5py/fileclasses/mcp/core.py | 752e7bd2298fbd476406d168f6b7d1a85863dccd | # -*- coding: utf-8 -*-
"""Class for the ogs COMPONENT_PROPERTIES file."""
from ogs5py.fileclasses.base import BlockFile
class MCP(BlockFile):
"""
Class for the ogs COMPONENT_PROPERTIES file.
Parameters
----------
task_root : str, optional
        Path to the destination model folder.
Default: cwd+"ogs5model"
task_id : str, optional
Name for the ogs task.
Default: "model"
Notes
-----
Main-Keywords (#):
- COMPONENT_PROPERTIES
Sub-Keywords ($) per Main-Keyword:
- COMPONENT_PROPERTIES
- ACENTRIC_FACTOR
- A_ZERO
- BUBBLE_VELOCITY
- CRITICAL_PRESSURE
- CRITICAL_TEMPERATURE
- DECAY
- DIFFUSION
- FLUID_ID
- FLUID_PHASE
- FORMULA
- ISOTHERM
- MAXIMUM_AQUEOUS_SOLUBILITY
- MINERAL_DENSITY
- MOBILE
- MOLAR_DENSITY
- MOLAR_VOLUME
- MOLAR_WEIGHT
- MOL_MASS
- NAME
- OutputMassOfComponentInModel
- TRANSPORT_PHASE
- VALENCE
- VOLUME_DIFFUSION
Standard block:
None
Keyword documentation:
https://ogs5-keywords.netlify.com/ogs/wiki/public/doc-auto/by_ext/mcp
Reading routines:
https://github.com/ufz/ogs5/blob/master/FEM/rfmat_cp.cpp#L269
See Also
--------
add_block
"""
MKEYS = ["COMPONENT_PROPERTIES"]
# sorted
SKEYS = [
[
"NAME",
"FORMULA",
"MOBILE",
"TRANSPORT_PHASE",
"FLUID_PHASE",
"MOL_MASS",
"CRITICAL_PRESSURE",
"CRITICAL_TEMPERATURE",
"ACENTRIC_FACTOR",
"FLUID_ID",
"MOLAR_VOLUME",
"VOLUME_DIFFUSION",
"MINERAL_DENSITY",
"DIFFUSION",
"DECAY",
"ISOTHERM",
"BUBBLE_VELOCITY",
"MOLAR_DENSITY",
"MOLAR_WEIGHT",
"MAXIMUM_AQUEOUS_SOLUBILITY",
"OutputMassOfComponentInModel",
"VALENCE",
"A_ZERO",
"CRITICAL_VOLUME", # really?
"CRITICAL_DENSITY", # really?
"COMP_CAPACITY", # really?
"COMP_CONDUCTIVITY", # really?
"SOLUTE", # really?
"MOLECULAR_WEIGHT", # really?
]
]
STD = {}
def __init__(self, **OGS_Config):
super().__init__(**OGS_Config)
self.file_ext = ".mcp"
| [] |
crowdy/keystone | keystone/tests/unit/test_v3_assignment.py | 1e7ecca881a51144d61ae8026e1a77d6669997e2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import random
import uuid
import freezegun
import http.client
from testtools import matchers
from keystone.common import provider_api
import keystone.conf
from keystone import exception
from keystone.resource.backends import base as resource_base
from keystone.tests import unit
from keystone.tests.unit import test_v3
CONF = keystone.conf.CONF
PROVIDERS = provider_api.ProviderAPIs
class SystemRoleAssignmentMixin(object):
def _create_new_role(self):
"""Create a role available for use anywhere and return the ID."""
ref = unit.new_role_ref()
response = self.post('/roles', body={'role': ref})
# We only really need the role ID, so omit the rest of the response and
# return the ID of the role we just created.
return response.json_body['role']['id']
def _create_group(self):
body = {
'group': {
'domain_id': self.domain_id,
'name': uuid.uuid4().hex
}
}
response = self.post('/groups/', body=body)
return response.json_body['group']
def _create_user(self):
body = {
'user': {
'domain_id': self.domain_id,
'name': uuid.uuid4().hex
}
}
response = self.post('/users/', body=body)
return response.json_body['user']
class AssignmentTestCase(test_v3.RestfulTestCase,
test_v3.AssignmentTestMixin,
SystemRoleAssignmentMixin):
"""Test roles and role assignments."""
def setUp(self):
super(AssignmentTestCase, self).setUp()
self.group = unit.new_group_ref(domain_id=self.domain_id)
self.group = PROVIDERS.identity_api.create_group(self.group)
self.group_id = self.group['id']
# Role CRUD tests
def test_create_role(self):
"""Call ``POST /roles``."""
ref = unit.new_role_ref()
r = self.post(
'/roles',
body={'role': ref})
return self.assertValidRoleResponse(r, ref)
def test_create_role_bad_request(self):
"""Call ``POST /roles``."""
self.post('/roles', body={'role': {}},
expected_status=http.client.BAD_REQUEST)
def test_list_head_roles(self):
"""Call ``GET & HEAD /roles``."""
resource_url = '/roles'
r = self.get(resource_url)
self.assertValidRoleListResponse(r, ref=self.role,
resource_url=resource_url)
self.head(resource_url, expected_status=http.client.OK)
def test_get_head_role(self):
"""Call ``GET & HEAD /roles/{role_id}``."""
resource_url = '/roles/%(role_id)s' % {
'role_id': self.role_id}
r = self.get(resource_url)
self.assertValidRoleResponse(r, self.role)
self.head(resource_url, expected_status=http.client.OK)
def test_update_role(self):
"""Call ``PATCH /roles/{role_id}``."""
ref = unit.new_role_ref()
del ref['id']
r = self.patch('/roles/%(role_id)s' % {
'role_id': self.role_id},
body={'role': ref})
self.assertValidRoleResponse(r, ref)
def test_delete_role(self):
"""Call ``DELETE /roles/{role_id}``."""
self.delete('/roles/%(role_id)s' % {
'role_id': self.role_id})
# Role Grants tests
def test_crud_user_project_role_grants(self):
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
collection_url = (
'/projects/%(project_id)s/users/%(user_id)s/roles' % {
'project_id': self.project['id'],
'user_id': self.user['id']})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': role['id']}
# There is a role assignment for self.user on self.project
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=self.role,
expected_length=1)
self.put(member_url)
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=role,
resource_url=collection_url,
expected_length=2)
self.head(collection_url, expected_status=http.client.OK)
self.delete(member_url)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=self.role, expected_length=1)
self.assertIn(collection_url, r.result['links']['self'])
self.head(collection_url, expected_status=http.client.OK)
def test_crud_user_project_role_grants_no_user(self):
"""Grant role on a project to a user that doesn't exist.
        When granting a role on a project to a user that doesn't exist, the
        server returns 404 Not Found for the user.
"""
user_id = uuid.uuid4().hex
collection_url = (
'/projects/%(project_id)s/users/%(user_id)s/roles' % {
'project_id': self.project['id'], 'user_id': user_id})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
self.put(member_url, expected_status=http.client.NOT_FOUND)
self.head(member_url, expected_status=http.client.NOT_FOUND)
self.get(member_url, expected_status=http.client.NOT_FOUND)
def test_crud_user_domain_role_grants(self):
time = datetime.datetime.utcnow()
with freezegun.freeze_time(time) as frozen_datetime:
collection_url = (
'/domains/%(domain_id)s/users/%(user_id)s/roles' % {
'domain_id': self.domain_id,
'user_id': self.user['id']})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
self.put(member_url)
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=self.role,
resource_url=collection_url)
self.head(collection_url, expected_status=http.client.OK)
self.delete(member_url)
# NOTE(lbragstad): Make sure we wait a second before we ask for the
# roles. This ensures the token we use isn't considered revoked
# because it was issued within the same second as a revocation
# event.
frozen_datetime.tick(delta=datetime.timedelta(seconds=1))
r = self.get(collection_url)
self.assertValidRoleListResponse(r, expected_length=0,
resource_url=collection_url)
self.head(collection_url, expected_status=http.client.OK)
def test_crud_user_domain_role_grants_no_user(self):
"""Grant role on a domain to a user that doesn't exist.
        When granting a role on a domain to a user that doesn't exist, the server
returns 404 Not Found for the user.
"""
user_id = uuid.uuid4().hex
collection_url = (
'/domains/%(domain_id)s/users/%(user_id)s/roles' % {
'domain_id': self.domain_id, 'user_id': user_id})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
self.put(member_url, expected_status=http.client.NOT_FOUND)
self.head(member_url, expected_status=http.client.NOT_FOUND)
self.get(member_url, expected_status=http.client.NOT_FOUND)
def test_crud_group_project_role_grants(self):
time = datetime.datetime.utcnow()
with freezegun.freeze_time(time) as frozen_datetime:
collection_url = (
'/projects/%(project_id)s/groups/%(group_id)s/roles' % {
'project_id': self.project_id,
'group_id': self.group_id})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
self.put(member_url)
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=self.role,
resource_url=collection_url)
self.head(collection_url, expected_status=http.client.OK)
self.delete(member_url)
# NOTE(lbragstad): Make sure we wait a second before we ask for the
# roles. This ensures the token we use isn't considered revoked
# because it was issued within the same second as a revocation
# event.
frozen_datetime.tick(delta=datetime.timedelta(seconds=1))
r = self.get(collection_url)
self.assertValidRoleListResponse(r, expected_length=0,
resource_url=collection_url)
self.head(collection_url, expected_status=http.client.OK)
def test_crud_group_project_role_grants_no_group(self):
"""Grant role on a project to a group that doesn't exist.
        When granting a role on a project to a group that doesn't exist, the
server returns 404 Not Found for the group.
"""
group_id = uuid.uuid4().hex
collection_url = (
'/projects/%(project_id)s/groups/%(group_id)s/roles' % {
'project_id': self.project_id,
'group_id': group_id})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
self.put(member_url, expected_status=http.client.NOT_FOUND)
self.head(member_url, expected_status=http.client.NOT_FOUND)
self.get(member_url, expected_status=http.client.NOT_FOUND)
def test_crud_group_domain_role_grants(self):
time = datetime.datetime.utcnow()
with freezegun.freeze_time(time) as frozen_datetime:
collection_url = (
'/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
'domain_id': self.domain_id,
'group_id': self.group_id})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
self.put(member_url)
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=self.role,
resource_url=collection_url)
self.head(collection_url, expected_status=http.client.OK)
self.delete(member_url)
# NOTE(lbragstad): Make sure we wait a second before we ask for the
# roles. This ensures the token we use isn't considered revoked
# because it was issued within the same second as a revocation
# event.
frozen_datetime.tick(delta=datetime.timedelta(seconds=1))
r = self.get(collection_url)
self.assertValidRoleListResponse(r, expected_length=0,
resource_url=collection_url)
self.head(collection_url, expected_status=http.client.OK)
def test_crud_group_domain_role_grants_no_group(self):
"""Grant role on a domain to a group that doesn't exist.
        When granting a role on a domain to a group that doesn't exist, the server
returns 404 Not Found for the group.
"""
group_id = uuid.uuid4().hex
collection_url = (
'/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
'domain_id': self.domain_id,
'group_id': group_id})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
self.put(member_url, expected_status=http.client.NOT_FOUND)
self.head(member_url, expected_status=http.client.NOT_FOUND)
self.get(member_url, expected_status=http.client.NOT_FOUND)
def _create_new_user_and_assign_role_on_project(self):
"""Create a new user and assign user a role on a project."""
# Create a new user
new_user = unit.new_user_ref(domain_id=self.domain_id)
user_ref = PROVIDERS.identity_api.create_user(new_user)
# Assign the user a role on the project
collection_url = (
'/projects/%(project_id)s/users/%(user_id)s/roles' % {
'project_id': self.project_id,
'user_id': user_ref['id']})
member_url = ('%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id})
self.put(member_url)
# Check the user has the role assigned
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
return member_url, user_ref
def test_delete_user_before_removing_role_assignment_succeeds(self):
"""Call ``DELETE`` on the user before the role assignment."""
member_url, user = self._create_new_user_and_assign_role_on_project()
# Delete the user from identity backend
PROVIDERS.identity_api.driver.delete_user(user['id'])
# Clean up the role assignment
self.delete(member_url)
# Make sure the role is gone
self.head(member_url, expected_status=http.client.NOT_FOUND)
def test_delete_group_before_removing_role_assignment_succeeds(self):
# Disable the cache so that we perform a fresh check of the identity
# backend when attempting to remove the role assignment.
self.config_fixture.config(group='cache', enabled=False)
# Create a new group
group = unit.new_group_ref(domain_id=self.domain_id)
group_ref = PROVIDERS.identity_api.create_group(group)
# Assign the user a role on the project
collection_url = (
'/projects/%(project_id)s/groups/%(group_id)s/roles' % {
'project_id': self.project_id,
'group_id': group_ref['id']})
member_url = ('%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id})
self.put(member_url)
# Check the user has the role assigned
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
# Simulate removing the group via LDAP by directly removing it from the
# identity backend.
PROVIDERS.identity_api.driver.delete_group(group_ref['id'])
# Ensure we can clean up the role assignment even though the group
# doesn't exist
self.delete(member_url)
def test_delete_user_before_removing_system_assignments_succeeds(self):
system_role = self._create_new_role()
user = self._create_user()
path = (
'/system/users/%(user_id)s/roles/%(role_id)s' %
{'user_id': user['id'], 'role_id': system_role}
)
self.put(path)
response = self.get('/role_assignments')
number_of_assignments = len(response.json_body['role_assignments'])
path = '/users/%(user_id)s' % {'user_id': user['id']}
self.delete(path)
# The user with the system role assignment is a new user and only has
# one role on the system. We should expect one less role assignment in
# the list.
response = self.get('/role_assignments')
self.assertValidRoleAssignmentListResponse(
response, expected_length=number_of_assignments - 1
)
def test_delete_user_and_check_role_assignment_fails(self):
"""Call ``DELETE`` on the user and check the role assignment."""
member_url, user = self._create_new_user_and_assign_role_on_project()
# Delete the user from identity backend
PROVIDERS.identity_api.delete_user(user['id'])
# We should get a 404 Not Found when looking for the user in the
# identity backend because we're not performing a delete operation on
# the role.
self.head(member_url, expected_status=http.client.NOT_FOUND)
def test_token_revoked_once_group_role_grant_revoked(self):
"""Test token invalid when direct & indirect role on user is revoked.
When a role granted to a group is revoked for a given scope,
and user direct role is revoked, then tokens created
by user will be invalid.
"""
time = datetime.datetime.utcnow()
with freezegun.freeze_time(time) as frozen_datetime:
# creates grant from group on project.
PROVIDERS.assignment_api.create_grant(
role_id=self.role['id'], project_id=self.project['id'],
group_id=self.group['id']
)
# adds user to the group.
PROVIDERS.identity_api.add_user_to_group(
user_id=self.user['id'], group_id=self.group['id']
)
# creates a token for the user
auth_body = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'],
project_id=self.project['id'])
token_resp = self.post('/auth/tokens', body=auth_body)
token = token_resp.headers.get('x-subject-token')
# validates the returned token; it should be valid.
self.head('/auth/tokens',
headers={'x-subject-token': token},
expected_status=http.client.OK)
frozen_datetime.tick(delta=datetime.timedelta(seconds=1))
# revokes the grant from group on project.
PROVIDERS.assignment_api.delete_grant(
role_id=self.role['id'], project_id=self.project['id'],
group_id=self.group['id'])
# revokes the direct role form user on project
PROVIDERS.assignment_api.delete_grant(
role_id=self.role['id'], project_id=self.project['id'],
user_id=self.user['id']
)
frozen_datetime.tick(delta=datetime.timedelta(seconds=1))
            # validates the same token again; it should no longer be valid.
self.head('/auth/tokens', token=token,
expected_status=http.client.UNAUTHORIZED)
def test_delete_group_before_removing_system_assignments_succeeds(self):
system_role = self._create_new_role()
group = self._create_group()
path = (
'/system/groups/%(group_id)s/roles/%(role_id)s' %
{'group_id': group['id'], 'role_id': system_role}
)
self.put(path)
response = self.get('/role_assignments')
number_of_assignments = len(response.json_body['role_assignments'])
path = '/groups/%(group_id)s' % {'group_id': group['id']}
self.delete(path)
# The group with the system role assignment is a new group and only has
# one role on the system. We should expect one less role assignment in
# the list.
response = self.get('/role_assignments')
self.assertValidRoleAssignmentListResponse(
response, expected_length=number_of_assignments - 1
)
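    # The cache-invalidation tests below only make sense when the assignment
    # cache is enabled; unit.skip_if_cache_disabled skips them otherwise.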
@unit.skip_if_cache_disabled('assignment')
def test_delete_grant_from_user_and_project_invalidate_cache(self):
# create a new project
new_project = unit.new_project_ref(domain_id=self.domain_id)
PROVIDERS.resource_api.create_project(new_project['id'], new_project)
collection_url = (
'/projects/%(project_id)s/users/%(user_id)s/roles' % {
'project_id': new_project['id'],
'user_id': self.user['id']})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
# create the user a grant on the new project
self.put(member_url)
# check the grant that was just created
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
resp = self.get(collection_url)
self.assertValidRoleListResponse(resp, ref=self.role,
resource_url=collection_url)
# delete the grant
self.delete(member_url)
# get the collection and ensure there are no roles on the project
resp = self.get(collection_url)
self.assertListEqual(resp.json_body['roles'], [])
@unit.skip_if_cache_disabled('assignment')
def test_delete_grant_from_user_and_domain_invalidates_cache(self):
# create a new domain
new_domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(new_domain['id'], new_domain)
collection_url = (
'/domains/%(domain_id)s/users/%(user_id)s/roles' % {
'domain_id': new_domain['id'],
'user_id': self.user['id']})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
# create the user a grant on the new domain
self.put(member_url)
# check the grant that was just created
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
resp = self.get(collection_url)
self.assertValidRoleListResponse(resp, ref=self.role,
resource_url=collection_url)
# delete the grant
self.delete(member_url)
# get the collection and ensure there are no roles on the domain
resp = self.get(collection_url)
self.assertListEqual(resp.json_body['roles'], [])
@unit.skip_if_cache_disabled('assignment')
def test_delete_grant_from_group_and_project_invalidates_cache(self):
# create a new project
new_project = unit.new_project_ref(domain_id=self.domain_id)
PROVIDERS.resource_api.create_project(new_project['id'], new_project)
collection_url = (
'/projects/%(project_id)s/groups/%(group_id)s/roles' % {
'project_id': new_project['id'],
'group_id': self.group['id']})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
# create the group a grant on the new project
self.put(member_url)
# check the grant that was just created
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
resp = self.get(collection_url)
self.assertValidRoleListResponse(resp, ref=self.role,
resource_url=collection_url)
# delete the grant
self.delete(member_url)
# get the collection and ensure there are no roles on the project
resp = self.get(collection_url)
self.assertListEqual(resp.json_body['roles'], [])
@unit.skip_if_cache_disabled('assignment')
def test_delete_grant_from_group_and_domain_invalidates_cache(self):
# create a new domain
new_domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(new_domain['id'], new_domain)
collection_url = (
'/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
'domain_id': new_domain['id'],
'group_id': self.group['id']})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id}
# create the group a grant on the new domain
self.put(member_url)
# check the grant that was just created
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
resp = self.get(collection_url)
self.assertValidRoleListResponse(resp, ref=self.role,
resource_url=collection_url)
# delete the grant
self.delete(member_url)
# get the collection and ensure there are no roles on the domain
resp = self.get(collection_url)
self.assertListEqual(resp.json_body['roles'], [])
# Role Assignments tests
def test_get_head_role_assignments(self):
"""Call ``GET & HEAD /role_assignments``.
The sample data set up already has a user, group and project
that is part of self.domain. We use these plus a new user
we create as our data set, making sure we ignore any
role assignments that are already in existence.
Since we don't yet support a first class entity for role
assignments, we are only testing the LIST API. To create
and delete the role assignments we use the old grant APIs.
Test Plan:
- Create extra user for tests
- Get a list of all existing role assignments
- Add a new assignment for each of the four combinations, i.e.
group+domain, user+domain, group+project, user+project, using
the same role each time
- Get a new list of all role assignments, checking these four new
ones have been added
- Then delete the four we added
- Get a new list of all role assignments, checking the four have
been removed
"""
time = datetime.datetime.utcnow()
with freezegun.freeze_time(time) as frozen_datetime:
# Since the default fixtures already assign some roles to the
# user it creates, we also need a new user that will not have any
# existing assignments
user1 = unit.new_user_ref(domain_id=self.domain['id'])
user1 = PROVIDERS.identity_api.create_user(user1)
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
collection_url = '/role_assignments'
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r, resource_url=collection_url)
self.head(collection_url, expected_status=http.client.OK)
existing_assignments = len(r.result.get('role_assignments'))
# Now add one of each of the four types of assignment, making sure
# that we get them all back.
gd_entity = self.build_role_assignment_entity(
domain_id=self.domain_id,
group_id=self.group_id,
role_id=role['id'])
self.put(gd_entity['links']['assignment'])
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 1,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, gd_entity)
self.head(collection_url, expected_status=http.client.OK)
ud_entity = self.build_role_assignment_entity(
domain_id=self.domain_id,
user_id=user1['id'],
role_id=role['id'])
self.put(ud_entity['links']['assignment'])
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 2,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, ud_entity)
self.head(collection_url, expected_status=http.client.OK)
gp_entity = self.build_role_assignment_entity(
project_id=self.project_id, group_id=self.group_id,
role_id=role['id'])
self.put(gp_entity['links']['assignment'])
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 3,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, gp_entity)
self.head(collection_url, expected_status=http.client.OK)
up_entity = self.build_role_assignment_entity(
project_id=self.project_id, user_id=user1['id'],
role_id=role['id'])
self.put(up_entity['links']['assignment'])
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 4,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, up_entity)
self.head(collection_url, expected_status=http.client.OK)
# Now delete the four we added and make sure they are removed
# from the collection.
self.delete(gd_entity['links']['assignment'])
self.delete(ud_entity['links']['assignment'])
self.delete(gp_entity['links']['assignment'])
self.delete(up_entity['links']['assignment'])
frozen_datetime.tick(delta=datetime.timedelta(seconds=1))
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments,
resource_url=collection_url)
self.assertRoleAssignmentNotInListResponse(r, gd_entity)
self.assertRoleAssignmentNotInListResponse(r, ud_entity)
self.assertRoleAssignmentNotInListResponse(r, gp_entity)
self.assertRoleAssignmentNotInListResponse(r, up_entity)
self.head(collection_url, expected_status=http.client.OK)
def test_get_effective_role_assignments(self):
"""Call ``GET /role_assignments?effective``.
Test Plan:
- Create two extra user for tests
- Add these users to a group
- Add a role assignment for the group on a domain
- Get a list of all role assignments, checking one has been added
- Then get a list of all effective role assignments - the group
assignment should have turned into assignments on the domain
for each of the group members.
"""
user1 = unit.create_user(PROVIDERS.identity_api,
domain_id=self.domain['id'])
user2 = unit.create_user(PROVIDERS.identity_api,
domain_id=self.domain['id'])
PROVIDERS.identity_api.add_user_to_group(user1['id'], self.group['id'])
PROVIDERS.identity_api.add_user_to_group(user2['id'], self.group['id'])
collection_url = '/role_assignments'
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
resource_url=collection_url)
existing_assignments = len(r.result.get('role_assignments'))
gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
group_id=self.group_id,
role_id=self.role_id)
self.put(gd_entity['links']['assignment'])
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 1,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, gd_entity)
# Now re-read the collection asking for effective roles - this
# should mean the group assignment is translated into the two
# member user assignments
collection_url = '/role_assignments?effective'
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 2,
resource_url=collection_url)
ud_entity = self.build_role_assignment_entity(
link=gd_entity['links']['assignment'], domain_id=self.domain_id,
user_id=user1['id'], role_id=self.role_id)
self.assertRoleAssignmentInListResponse(r, ud_entity)
ud_entity = self.build_role_assignment_entity(
link=gd_entity['links']['assignment'], domain_id=self.domain_id,
user_id=user2['id'], role_id=self.role_id)
self.assertRoleAssignmentInListResponse(r, ud_entity)
def test_check_effective_values_for_role_assignments(self):
"""Call ``GET & HEAD /role_assignments?effective=value``.
Check the various ways of specifying the 'effective'
query parameter. If the 'effective' query parameter
is included then this should always be treated as meaning 'True'
unless it is specified as:
{url}?effective=0
This is by design to match the agreed way of handling
policy checking on query/filter parameters.
Test Plan:
- Create two extra user for tests
- Add these users to a group
- Add a role assignment for the group on a domain
- Get a list of all role assignments, checking one has been added
- Then issue various request with different ways of defining
the 'effective' query parameter. As we have tested the
correctness of the data coming back when we get effective roles
in other tests, here we just use the count of entities to
know if we are getting effective roles or not
"""
user1 = unit.create_user(PROVIDERS.identity_api,
domain_id=self.domain['id'])
user2 = unit.create_user(PROVIDERS.identity_api,
domain_id=self.domain['id'])
PROVIDERS.identity_api.add_user_to_group(user1['id'], self.group['id'])
PROVIDERS.identity_api.add_user_to_group(user2['id'], self.group['id'])
collection_url = '/role_assignments'
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
resource_url=collection_url)
existing_assignments = len(r.result.get('role_assignments'))
gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
group_id=self.group_id,
role_id=self.role_id)
self.put(gd_entity['links']['assignment'])
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 1,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, gd_entity)
# Now re-read the collection asking for effective roles,
# using the most common way of defining "effective'. This
# should mean the group assignment is translated into the two
# member user assignments
collection_url = '/role_assignments?effective'
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 2,
resource_url=collection_url)
# Now set 'effective' to false explicitly - should get
# back the regular roles
collection_url = '/role_assignments?effective=0'
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 1,
resource_url=collection_url)
        # Now try setting 'effective' to 'False' explicitly - this is
# NOT supported as a way of setting a query or filter
# parameter to false by design. Hence we should get back
# effective roles.
collection_url = '/role_assignments?effective=False'
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 2,
resource_url=collection_url)
# Now set 'effective' to True explicitly
collection_url = '/role_assignments?effective=True'
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 2,
resource_url=collection_url)
def test_filtered_role_assignments(self):
"""Call ``GET /role_assignments?filters``.
Test Plan:
- Create extra users, group, role and project for tests
- Make the following assignments:
Give group1, role1 on project1 and domain
Give user1, role2 on project1 and domain
Make User1 a member of Group1
- Test a series of single filter list calls, checking that
the correct results are obtained
- Test a multi-filtered list call
- Test listing all effective roles for a given user
- Test the equivalent of the list of roles in a project scoped
token (all effective roles for a user on a project)
"""
# Since the default fixtures already assign some roles to the
# user it creates, we also need a new user that will not have any
# existing assignments
user1 = unit.create_user(PROVIDERS.identity_api,
domain_id=self.domain['id'])
user2 = unit.create_user(PROVIDERS.identity_api,
domain_id=self.domain['id'])
group1 = unit.new_group_ref(domain_id=self.domain['id'])
group1 = PROVIDERS.identity_api.create_group(group1)
PROVIDERS.identity_api.add_user_to_group(user1['id'], group1['id'])
PROVIDERS.identity_api.add_user_to_group(user2['id'], group1['id'])
project1 = unit.new_project_ref(domain_id=self.domain['id'])
PROVIDERS.resource_api.create_project(project1['id'], project1)
self.role1 = unit.new_role_ref()
PROVIDERS.role_api.create_role(self.role1['id'], self.role1)
self.role2 = unit.new_role_ref()
PROVIDERS.role_api.create_role(self.role2['id'], self.role2)
# Now add one of each of the six types of assignment
gd_entity = self.build_role_assignment_entity(
domain_id=self.domain_id, group_id=group1['id'],
role_id=self.role1['id'])
self.put(gd_entity['links']['assignment'])
ud_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
user_id=user1['id'],
role_id=self.role2['id'])
self.put(ud_entity['links']['assignment'])
gp_entity = self.build_role_assignment_entity(
project_id=project1['id'],
group_id=group1['id'],
role_id=self.role1['id'])
self.put(gp_entity['links']['assignment'])
up_entity = self.build_role_assignment_entity(
project_id=project1['id'],
user_id=user1['id'],
role_id=self.role2['id'])
self.put(up_entity['links']['assignment'])
gs_entity = self.build_role_assignment_entity(
system='all',
group_id=group1['id'],
role_id=self.role1['id'])
self.put(gs_entity['links']['assignment'])
us_entity = self.build_role_assignment_entity(
system='all',
user_id=user1['id'],
role_id=self.role2['id'])
self.put(us_entity['links']['assignment'])
us2_entity = self.build_role_assignment_entity(
system='all',
user_id=user2['id'],
role_id=self.role2['id'])
self.put(us2_entity['links']['assignment'])
# Now list by various filters to make sure we get back the right ones
collection_url = ('/role_assignments?scope.project.id=%s' %
project1['id'])
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=2,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, up_entity)
self.assertRoleAssignmentInListResponse(r, gp_entity)
collection_url = ('/role_assignments?scope.domain.id=%s' %
self.domain['id'])
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=2,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, ud_entity)
self.assertRoleAssignmentInListResponse(r, gd_entity)
collection_url = '/role_assignments?user.id=%s' % user1['id']
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=3,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, up_entity)
self.assertRoleAssignmentInListResponse(r, ud_entity)
collection_url = '/role_assignments?group.id=%s' % group1['id']
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=3,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, gd_entity)
self.assertRoleAssignmentInListResponse(r, gp_entity)
collection_url = '/role_assignments?role.id=%s' % self.role1['id']
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=3,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, gd_entity)
self.assertRoleAssignmentInListResponse(r, gp_entity)
self.assertRoleAssignmentInListResponse(r, gs_entity)
collection_url = '/role_assignments?role.id=%s' % self.role2['id']
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=4,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, ud_entity)
self.assertRoleAssignmentInListResponse(r, up_entity)
self.assertRoleAssignmentInListResponse(r, us_entity)
        # Let's try combining two filters together....
collection_url = (
'/role_assignments?user.id=%(user_id)s'
'&scope.project.id=%(project_id)s' % {
'user_id': user1['id'],
'project_id': project1['id']})
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=1,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, up_entity)
# Now for a harder one - filter for user with effective
        # roles - this should return role assignments that were directly
# assigned as well as by virtue of group membership
collection_url = ('/role_assignments?effective&user.id=%s' %
user1['id'])
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=4,
resource_url=collection_url)
# Should have the two direct roles...
self.assertRoleAssignmentInListResponse(r, up_entity)
self.assertRoleAssignmentInListResponse(r, ud_entity)
# ...and the two via group membership...
gp1_link = self.build_role_assignment_link(
project_id=project1['id'],
group_id=group1['id'],
role_id=self.role1['id'])
gd1_link = self.build_role_assignment_link(domain_id=self.domain_id,
group_id=group1['id'],
role_id=self.role1['id'])
up1_entity = self.build_role_assignment_entity(
link=gp1_link, project_id=project1['id'],
user_id=user1['id'], role_id=self.role1['id'])
ud1_entity = self.build_role_assignment_entity(
link=gd1_link, domain_id=self.domain_id, user_id=user1['id'],
role_id=self.role1['id'])
self.assertRoleAssignmentInListResponse(r, up1_entity)
self.assertRoleAssignmentInListResponse(r, ud1_entity)
# ...and for the grand-daddy of them all, simulate the request
# that would generate the list of effective roles in a project
# scoped token.
collection_url = (
'/role_assignments?effective&user.id=%(user_id)s'
'&scope.project.id=%(project_id)s' % {
'user_id': user1['id'],
'project_id': project1['id']})
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=2,
resource_url=collection_url)
# Should have one direct role and one from group membership...
self.assertRoleAssignmentInListResponse(r, up_entity)
self.assertRoleAssignmentInListResponse(r, up1_entity)
def test_list_system_role_assignments(self):
# create a bunch of roles
user_system_role_id = self._create_new_role()
user_domain_role_id = self._create_new_role()
user_project_role_id = self._create_new_role()
group_system_role_id = self._create_new_role()
group_domain_role_id = self._create_new_role()
group_project_role_id = self._create_new_role()
# create a user and grant the user a role on the system, domain, and
# project
user = self._create_user()
url = '/system/users/%s/roles/%s' % (user['id'], user_system_role_id)
self.put(url)
url = '/domains/%s/users/%s/roles/%s' % (
self.domain_id, user['id'], user_domain_role_id
)
self.put(url)
url = '/projects/%s/users/%s/roles/%s' % (
self.project_id, user['id'], user_project_role_id
)
self.put(url)
# create a group and grant the group a role on the system, domain, and
# project
group = self._create_group()
url = '/system/groups/%s/roles/%s' % (
group['id'], group_system_role_id
)
self.put(url)
url = '/domains/%s/groups/%s/roles/%s' % (
self.domain_id, group['id'], group_domain_role_id
)
self.put(url)
url = '/projects/%s/groups/%s/roles/%s' % (
self.project_id, group['id'], group_project_role_id
)
self.put(url)
# /v3/role_assignments?scope.system=all should return two assignments
response = self.get('/role_assignments?scope.system=all')
self.assertValidRoleAssignmentListResponse(response, expected_length=2)
for assignment in response.json_body['role_assignments']:
self.assertTrue(assignment['scope']['system']['all'])
if assignment.get('user'):
self.assertEqual(user_system_role_id, assignment['role']['id'])
if assignment.get('group'):
self.assertEqual(
group_system_role_id,
assignment['role']['id']
)
# /v3/role_assignments?scope_system=all&user.id=$USER_ID should return
# one role assignment
url = '/role_assignments?scope.system=all&user.id=%s' % user['id']
response = self.get(url)
self.assertValidRoleAssignmentListResponse(response, expected_length=1)
self.assertEqual(
user_system_role_id,
response.json_body['role_assignments'][0]['role']['id']
)
# /v3/role_assignments?scope_system=all&group.id=$GROUP_ID should
# return one role assignment
url = '/role_assignments?scope.system=all&group.id=%s' % group['id']
response = self.get(url)
self.assertValidRoleAssignmentListResponse(response, expected_length=1)
self.assertEqual(
group_system_role_id,
response.json_body['role_assignments'][0]['role']['id']
)
# /v3/role_assignments?user.id=$USER_ID should return 3 assignments
# and system should be in that list of assignments
url = '/role_assignments?user.id=%s' % user['id']
response = self.get(url)
self.assertValidRoleAssignmentListResponse(response, expected_length=3)
for assignment in response.json_body['role_assignments']:
if 'system' in assignment['scope']:
self.assertEqual(
user_system_role_id, assignment['role']['id']
)
if 'domain' in assignment['scope']:
self.assertEqual(
user_domain_role_id, assignment['role']['id']
)
if 'project' in assignment['scope']:
self.assertEqual(
user_project_role_id, assignment['role']['id']
)
# /v3/role_assignments?group.id=$GROUP_ID should return 3 assignments
# and system should be in that list of assignments
url = '/role_assignments?group.id=%s' % group['id']
response = self.get(url)
self.assertValidRoleAssignmentListResponse(response, expected_length=3)
for assignment in response.json_body['role_assignments']:
if 'system' in assignment['scope']:
self.assertEqual(
group_system_role_id, assignment['role']['id']
)
if 'domain' in assignment['scope']:
self.assertEqual(
group_domain_role_id, assignment['role']['id']
)
if 'project' in assignment['scope']:
self.assertEqual(
group_project_role_id, assignment['role']['id']
)
class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase,
test_v3.AssignmentTestMixin):
"""Base class for testing /v3/role_assignments API behavior."""
MAX_HIERARCHY_BREADTH = 3
MAX_HIERARCHY_DEPTH = CONF.max_project_tree_depth - 1
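    # MAX_HIERARCHY_DEPTH keeps one level in reserve for the base project
    # created in load_sample_data, so the randomly generated hierarchy below
    # it stays within the configured project tree depth.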
def load_sample_data(self):
"""Create sample data to be used on tests.
        The created data are: i) a role, and ii) a domain containing a project
        hierarchy and 3 users within 3 groups.
"""
def create_project_hierarchy(parent_id, depth):
"""Create a random project hierarchy."""
if depth == 0:
return
breadth = random.randint(1, self.MAX_HIERARCHY_BREADTH)
subprojects = []
for i in range(breadth):
subprojects.append(unit.new_project_ref(
domain_id=self.domain_id, parent_id=parent_id))
PROVIDERS.resource_api.create_project(
subprojects[-1]['id'], subprojects[-1]
)
new_parent = subprojects[random.randint(0, breadth - 1)]
create_project_hierarchy(new_parent['id'], depth - 1)
super(RoleAssignmentBaseTestCase, self).load_sample_data()
# Create a domain
self.domain = unit.new_domain_ref()
self.domain_id = self.domain['id']
PROVIDERS.resource_api.create_domain(self.domain_id, self.domain)
# Create a project hierarchy
self.project = unit.new_project_ref(domain_id=self.domain_id)
self.project_id = self.project['id']
PROVIDERS.resource_api.create_project(self.project_id, self.project)
# Create a random project hierarchy
create_project_hierarchy(self.project_id,
random.randint(1, self.MAX_HIERARCHY_DEPTH))
# Create 3 users
self.user_ids = []
for i in range(3):
user = unit.new_user_ref(domain_id=self.domain_id)
user = PROVIDERS.identity_api.create_user(user)
self.user_ids.append(user['id'])
# Create 3 groups
self.group_ids = []
for i in range(3):
group = unit.new_group_ref(domain_id=self.domain_id)
group = PROVIDERS.identity_api.create_group(group)
self.group_ids.append(group['id'])
# Put 2 members on each group
PROVIDERS.identity_api.add_user_to_group(
user_id=self.user_ids[i], group_id=group['id']
)
PROVIDERS.identity_api.add_user_to_group(
user_id=self.user_ids[i % 2], group_id=group['id']
)
PROVIDERS.assignment_api.create_grant(
user_id=self.user_id, project_id=self.project_id,
role_id=self.role_id
)
# Create a role
self.role = unit.new_role_ref()
self.role_id = self.role['id']
PROVIDERS.role_api.create_role(self.role_id, self.role)
# Set default user and group to be used on tests
self.default_user_id = self.user_ids[0]
self.default_group_id = self.group_ids[0]
def get_role_assignments(self, expected_status=http.client.OK, **filters):
"""Return the result from querying role assignment API + queried URL.
Calls GET /v3/role_assignments?<params> and returns its result, where
<params> is the HTTP query parameters form of effective option plus
filters, if provided. Queried URL is returned as well.
:returns: a tuple containing the list role assignments API response and
queried URL.
"""
query_url = self._get_role_assignments_query_url(**filters)
response = self.get(query_url, expected_status=expected_status)
return (response, query_url)
def _get_role_assignments_query_url(self, **filters):
"""Return non-effective role assignments query URL from given filters.
:param filters: query parameters are created with the provided filters
on role assignments attributes. Valid filters are:
role_id, domain_id, project_id, group_id, user_id and
inherited_to_projects.
:returns: role assignments query URL.
"""
return self.build_role_assignment_query_url(**filters)
class RoleAssignmentFailureTestCase(RoleAssignmentBaseTestCase):
"""Class for testing invalid query params on /v3/role_assignments API.
    Querying domain and project, or user and group, results in an HTTP 400 Bad
Request, since a role assignment must contain only a single pair of (actor,
target). In addition, since filtering on role assignments applies only to
the final result, effective mode cannot be combined with i) group or ii)
domain and inherited, because it would always result in an empty list.
"""
def test_get_role_assignments_by_domain_and_project(self):
self.get_role_assignments(domain_id=self.domain_id,
project_id=self.project_id,
expected_status=http.client.BAD_REQUEST)
def test_get_role_assignments_by_user_and_group(self):
self.get_role_assignments(user_id=self.default_user_id,
group_id=self.default_group_id,
expected_status=http.client.BAD_REQUEST)
def test_get_role_assignments_by_effective_and_inherited(self):
self.get_role_assignments(domain_id=self.domain_id, effective=True,
inherited_to_projects=True,
expected_status=http.client.BAD_REQUEST)
def test_get_role_assignments_by_effective_and_group(self):
self.get_role_assignments(effective=True,
group_id=self.default_group_id,
expected_status=http.client.BAD_REQUEST)
class RoleAssignmentDirectTestCase(RoleAssignmentBaseTestCase):
"""Class for testing direct assignments on /v3/role_assignments API.
    Direct assignments on a domain or project take effect on that target
    directly, instead of on its project hierarchy, i.e. they are non-inherited.
    In addition, direct assignments to a group are not expanded to the group's
    users.
Tests on this class make assertions on the representation and API filtering
of direct assignments.
"""
def _test_get_role_assignments(self, **filters):
"""Generic filtering test method.
According to the provided filters, this method:
- creates a new role assignment;
        - asserts that the list role assignments API responds correctly;
- deletes the created role assignment.
:param filters: filters to be considered when listing role assignments.
Valid filters are: role_id, domain_id, project_id,
group_id, user_id and inherited_to_projects.
"""
# Fills default assignment with provided filters
test_assignment = self._set_default_assignment_attributes(**filters)
# Create new role assignment for this test
PROVIDERS.assignment_api.create_grant(**test_assignment)
# Get expected role assignments
expected_assignments = self._list_expected_role_assignments(
**test_assignment)
# Get role assignments from API
response, query_url = self.get_role_assignments(**test_assignment)
self.assertValidRoleAssignmentListResponse(response,
resource_url=query_url)
self.assertEqual(len(expected_assignments),
len(response.result.get('role_assignments')))
# Assert that expected role assignments were returned by the API call
for assignment in expected_assignments:
self.assertRoleAssignmentInListResponse(response, assignment)
# Delete created role assignment
PROVIDERS.assignment_api.delete_grant(**test_assignment)
def _set_default_assignment_attributes(self, **attribs):
"""Insert default values for missing attributes of role assignment.
        If an actor, target or role is not provided, it will default to a
        value from the sample data.
:param attribs: info from a role assignment entity. Valid attributes
are: role_id, domain_id, project_id, group_id, user_id
and inherited_to_projects.
"""
if not any(target in attribs
                   for target in ('domain_id', 'project_id')):
attribs['project_id'] = self.project_id
if not any(actor in attribs for actor in ('user_id', 'group_id')):
attribs['user_id'] = self.default_user_id
if 'role_id' not in attribs:
attribs['role_id'] = self.role_id
return attribs
def _list_expected_role_assignments(self, **filters):
"""Given the filters, it returns expected direct role assignments.
:param filters: filters that will be considered when listing role
assignments. Valid filters are: role_id, domain_id,
project_id, group_id, user_id and
inherited_to_projects.
:returns: the list of the expected role assignments.
"""
return [self.build_role_assignment_entity(**filters)]
# Test cases below call the generic test method, providing different filter
# combinations. Filters are provided as specified in the method name, after
# 'by'. For example, test_get_role_assignments_by_project_user_and_role
# calls the generic test method with project_id, user_id and role_id.
def test_get_role_assignments_by_domain(self, **filters):
self._test_get_role_assignments(domain_id=self.domain_id, **filters)
def test_get_role_assignments_by_project(self, **filters):
self._test_get_role_assignments(project_id=self.project_id, **filters)
def test_get_role_assignments_by_user(self, **filters):
self._test_get_role_assignments(user_id=self.default_user_id,
**filters)
def test_get_role_assignments_by_group(self, **filters):
self._test_get_role_assignments(group_id=self.default_group_id,
**filters)
def test_get_role_assignments_by_role(self, **filters):
self._test_get_role_assignments(role_id=self.role_id, **filters)
def test_get_role_assignments_by_domain_and_user(self, **filters):
self.test_get_role_assignments_by_domain(user_id=self.default_user_id,
**filters)
def test_get_role_assignments_by_domain_and_group(self, **filters):
self.test_get_role_assignments_by_domain(
group_id=self.default_group_id, **filters)
def test_get_role_assignments_by_project_and_user(self, **filters):
self.test_get_role_assignments_by_project(user_id=self.default_user_id,
**filters)
def test_get_role_assignments_by_project_and_group(self, **filters):
self.test_get_role_assignments_by_project(
group_id=self.default_group_id, **filters)
def test_get_role_assignments_by_domain_user_and_role(self, **filters):
self.test_get_role_assignments_by_domain_and_user(role_id=self.role_id,
**filters)
def test_get_role_assignments_by_domain_group_and_role(self, **filters):
self.test_get_role_assignments_by_domain_and_group(
role_id=self.role_id, **filters)
def test_get_role_assignments_by_project_user_and_role(self, **filters):
self.test_get_role_assignments_by_project_and_user(
role_id=self.role_id, **filters)
def test_get_role_assignments_by_project_group_and_role(self, **filters):
self.test_get_role_assignments_by_project_and_group(
role_id=self.role_id, **filters)
class RoleAssignmentInheritedTestCase(RoleAssignmentDirectTestCase):
"""Class for testing inherited assignments on /v3/role_assignments API.
Inherited assignments on a domain or project have no effect on them
directly, but on the projects under them instead.
Tests on this class do not make assertions on the effect of inherited
    assignments, but on their representation and API filtering.
"""
def _test_get_role_assignments(self, **filters):
"""Add inherited_to_project filter to expected entity in tests."""
super(RoleAssignmentInheritedTestCase,
self)._test_get_role_assignments(inherited_to_projects=True,
**filters)
class RoleAssignmentEffectiveTestCase(RoleAssignmentInheritedTestCase):
"""Class for testing inheritance effects on /v3/role_assignments API.
Inherited assignments on a domain or project have no effect on them
directly, but on the projects under them instead.
Tests on this class make assertions on the effect of inherited assignments
and API filtering.
"""
def _get_role_assignments_query_url(self, **filters):
"""Return effective role assignments query URL from given filters.
        For test methods in this class, effective will always be true. Since
        in effective mode inherited_to_projects, group_id, domain_id and
        project_id have no effect on the listed assignments, they are always
        dropped from the provided filters.
:param filters: query parameters are created with the provided filters.
Valid filters are: role_id, domain_id, project_id,
group_id, user_id and inherited_to_projects.
:returns: role assignments query URL.
"""
query_filters = filters.copy()
query_filters.pop('inherited_to_projects')
query_filters.pop('group_id', None)
query_filters.pop('domain_id', None)
query_filters.pop('project_id', None)
return self.build_role_assignment_query_url(effective=True,
**query_filters)
def _list_expected_role_assignments(self, **filters):
"""Given the filters, it returns expected direct role assignments.
:param filters: filters that will be considered when listing role
assignments. Valid filters are: role_id, domain_id,
project_id, group_id, user_id and
inherited_to_projects.
:returns: the list of the expected role assignments.
"""
# Get assignment link, to be put on 'links': {'assignment': link}
assignment_link = self.build_role_assignment_link(**filters)
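        # Note: this link records the original assignment (possibly a group
        # or inherited domain one); every expanded entity built below reuses
        # it as its 'links': {'assignment': ...} value.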
# Expand group membership
user_ids = [None]
if filters.get('group_id'):
user_ids = [user['id'] for user in
PROVIDERS.identity_api.list_users_in_group(
filters['group_id'])]
else:
user_ids = [self.default_user_id]
# Expand role inheritance
project_ids = [None]
if filters.get('domain_id'):
project_ids = [project['id'] for project in
PROVIDERS.resource_api.list_projects_in_domain(
filters.pop('domain_id'))]
else:
project_ids = [project['id'] for project in
PROVIDERS.resource_api.list_projects_in_subtree(
self.project_id)]
# Compute expected role assignments
assignments = []
for project_id in project_ids:
filters['project_id'] = project_id
for user_id in user_ids:
filters['user_id'] = user_id
assignments.append(self.build_role_assignment_entity(
link=assignment_link, **filters))
return assignments
class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
test_v3.AssignmentTestMixin):
"""Test inheritance crud and its effects."""
def test_get_token_from_inherited_user_domain_role_grants(self):
# Create a new user to ensure that no grant is loaded from sample data
user = unit.create_user(
PROVIDERS.identity_api, domain_id=self.domain_id
)
# Define domain and project authentication data
domain_auth_data = self.build_authentication_request(
user_id=user['id'],
password=user['password'],
domain_id=self.domain_id)
project_auth_data = self.build_authentication_request(
user_id=user['id'],
password=user['password'],
project_id=self.project_id)
# Check the user cannot get a domain nor a project token
self.v3_create_token(domain_auth_data,
expected_status=http.client.UNAUTHORIZED)
self.v3_create_token(project_auth_data,
expected_status=http.client.UNAUTHORIZED)
# Grant non-inherited role for user on domain
non_inher_ud_link = self.build_role_assignment_link(
domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id)
self.put(non_inher_ud_link)
# Check the user can get only a domain token
self.v3_create_token(domain_auth_data)
self.v3_create_token(project_auth_data,
expected_status=http.client.UNAUTHORIZED)
# Create inherited role
inherited_role = unit.new_role_ref(name='inherited')
PROVIDERS.role_api.create_role(inherited_role['id'], inherited_role)
# Grant inherited role for user on domain
inher_ud_link = self.build_role_assignment_link(
domain_id=self.domain_id, user_id=user['id'],
role_id=inherited_role['id'], inherited_to_projects=True)
self.put(inher_ud_link)
# Check the user can get both a domain and a project token
self.v3_create_token(domain_auth_data)
self.v3_create_token(project_auth_data)
# Delete inherited grant
self.delete(inher_ud_link)
# Check the user can only get a domain token
self.v3_create_token(domain_auth_data)
self.v3_create_token(project_auth_data,
expected_status=http.client.UNAUTHORIZED)
# Delete non-inherited grant
self.delete(non_inher_ud_link)
# Check the user cannot get a domain token anymore
self.v3_create_token(domain_auth_data,
expected_status=http.client.UNAUTHORIZED)
def test_get_token_from_inherited_group_domain_role_grants(self):
# Create a new group and put a new user in it to
# ensure that no grant is loaded from sample data
user = unit.create_user(
PROVIDERS.identity_api, domain_id=self.domain_id
)
group = unit.new_group_ref(domain_id=self.domain['id'])
group = PROVIDERS.identity_api.create_group(group)
PROVIDERS.identity_api.add_user_to_group(user['id'], group['id'])
# Define domain and project authentication data
domain_auth_data = self.build_authentication_request(
user_id=user['id'],
password=user['password'],
domain_id=self.domain_id)
project_auth_data = self.build_authentication_request(
user_id=user['id'],
password=user['password'],
project_id=self.project_id)
# Check the user cannot get a domain nor a project token
self.v3_create_token(domain_auth_data,
expected_status=http.client.UNAUTHORIZED)
self.v3_create_token(project_auth_data,
expected_status=http.client.UNAUTHORIZED)
# Grant non-inherited role for user on domain
non_inher_gd_link = self.build_role_assignment_link(
domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id)
self.put(non_inher_gd_link)
# Check the user can get only a domain token
self.v3_create_token(domain_auth_data)
self.v3_create_token(project_auth_data,
expected_status=http.client.UNAUTHORIZED)
# Create inherited role
inherited_role = unit.new_role_ref(name='inherited')
PROVIDERS.role_api.create_role(inherited_role['id'], inherited_role)
# Grant inherited role for user on domain
inher_gd_link = self.build_role_assignment_link(
domain_id=self.domain_id, user_id=user['id'],
role_id=inherited_role['id'], inherited_to_projects=True)
self.put(inher_gd_link)
# Check the user can get both a domain and a project token
self.v3_create_token(domain_auth_data)
self.v3_create_token(project_auth_data)
# Delete inherited grant
self.delete(inher_gd_link)
# Check the user can only get a domain token
self.v3_create_token(domain_auth_data)
self.v3_create_token(project_auth_data,
expected_status=http.client.UNAUTHORIZED)
# Delete non-inherited grant
self.delete(non_inher_gd_link)
# Check the user cannot get a domain token anymore
self.v3_create_token(domain_auth_data,
expected_status=http.client.UNAUTHORIZED)
def _test_crud_inherited_and_direct_assignment_on_target(self, target_url):
time = datetime.datetime.utcnow()
with freezegun.freeze_time(time) as frozen_datetime:
# Create a new role to avoid assignments loaded from sample data
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
# Define URLs
direct_url = '%s/users/%s/roles/%s' % (
target_url, self.user_id, role['id'])
inherited_url = ('/OS-INHERIT/%s/inherited_to_projects' %
direct_url.lstrip('/'))
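            # Illustrative expansion (placeholder IDs): with a domain target,
            # direct_url is '/domains/d1/users/u1/roles/r1' and inherited_url
            # becomes
            # '/OS-INHERIT/domains/d1/users/u1/roles/r1/inherited_to_projects';
            # a project target produces the same paths under '/projects/p1'.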
# Create the direct assignment
self.put(direct_url)
# Check the direct assignment exists, but the inherited one does
# not
self.head(direct_url)
self.head(inherited_url, expected_status=http.client.NOT_FOUND)
# Now add the inherited assignment
self.put(inherited_url)
# Check both the direct and inherited assignment exist
self.head(direct_url)
self.head(inherited_url)
            # Delete the inherited assignment
self.delete(inherited_url)
frozen_datetime.tick(delta=datetime.timedelta(seconds=1))
# Check the direct assignment exists, but the inherited one does
# not
self.head(direct_url)
self.head(inherited_url, expected_status=http.client.NOT_FOUND)
            # Now delete the direct assignment
self.delete(direct_url)
# Check that none of them exist
self.head(direct_url, expected_status=http.client.NOT_FOUND)
self.head(inherited_url, expected_status=http.client.NOT_FOUND)
def test_crud_inherited_and_direct_assignment_on_domains(self):
self._test_crud_inherited_and_direct_assignment_on_target(
'/domains/%s' % self.domain_id)
def test_crud_inherited_and_direct_assignment_on_projects(self):
self._test_crud_inherited_and_direct_assignment_on_target(
'/projects/%s' % self.project_id)
def test_crud_user_inherited_domain_role_grants(self):
role_list = []
for _ in range(2):
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
role_list.append(role)
# Create a non-inherited role as a spoiler
PROVIDERS.assignment_api.create_grant(
role_list[1]['id'], user_id=self.user['id'],
domain_id=self.domain_id)
base_collection_url = (
'/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
'domain_id': self.domain_id,
'user_id': self.user['id']})
member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
'collection_url': base_collection_url,
'role_id': role_list[0]['id']}
collection_url = base_collection_url + '/inherited_to_projects'
self.put(member_url)
# Check we can read it back
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=role_list[0],
resource_url=collection_url)
        # Now delete it and check it's gone
self.delete(member_url)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, expected_length=0,
resource_url=collection_url)
def test_list_role_assignments_for_inherited_domain_grants(self):
"""Call ``GET /role_assignments with inherited domain grants``.
Test Plan:
- Create 4 roles
- Create a domain with a user and two projects
- Assign two direct roles to project1
- Assign a spoiler role to project2
- Issue the URL to add inherited role to the domain
- Issue the URL to check it is indeed on the domain
- Issue the URL to check effective roles on project1 - this
should return 3 roles.
"""
role_list = []
for _ in range(4):
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
role_list.append(role)
domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(domain['id'], domain)
user1 = unit.create_user(
PROVIDERS.identity_api, domain_id=domain['id']
)
project1 = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project1['id'], project1)
project2 = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project2['id'], project2)
# Add some roles to the project
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project1['id'], role_list[0]['id'])
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project1['id'], role_list[1]['id'])
# ..and one on a different project as a spoiler
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project2['id'], role_list[2]['id'])
# Now create our inherited role on the domain
base_collection_url = (
'/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
'domain_id': domain['id'],
'user_id': user1['id']})
member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
'collection_url': base_collection_url,
'role_id': role_list[3]['id']}
collection_url = base_collection_url + '/inherited_to_projects'
self.put(member_url)
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=role_list[3],
resource_url=collection_url)
# Now use the list domain role assignments api to check if this
# is included
collection_url = (
'/role_assignments?user.id=%(user_id)s'
'&scope.domain.id=%(domain_id)s' % {
'user_id': user1['id'],
'domain_id': domain['id']})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=1,
resource_url=collection_url)
ud_entity = self.build_role_assignment_entity(
domain_id=domain['id'], user_id=user1['id'],
role_id=role_list[3]['id'], inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, ud_entity)
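        # Note: the inherited domain assignment entity above carries
        # 'scope': {'domain': {...}, 'OS-INHERIT:inherited_to': 'projects'},
        # which distinguishes it from the effective project-scoped entity
        # asserted further below.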
# Now ask for effective list role assignments - the role should
# turn into a project role, along with the two direct roles that are
# on the project
collection_url = (
'/role_assignments?effective&user.id=%(user_id)s'
'&scope.project.id=%(project_id)s' % {
'user_id': user1['id'],
'project_id': project1['id']})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=3,
resource_url=collection_url)
# An effective role for an inherited role will be a project
# entity, with a domain link to the inherited assignment
ud_url = self.build_role_assignment_link(
domain_id=domain['id'], user_id=user1['id'],
role_id=role_list[3]['id'], inherited_to_projects=True)
up_entity = self.build_role_assignment_entity(
link=ud_url, project_id=project1['id'],
user_id=user1['id'], role_id=role_list[3]['id'],
inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, up_entity)
def _test_list_role_assignments_include_names(self, role1):
"""Call ``GET /role_assignments with include names``.
Test Plan:
- Create a domain with a group and a user
- Create a project with a group and a user
"""
role1 = unit.new_role_ref()
PROVIDERS.role_api.create_role(role1['id'], role1)
user1 = unit.create_user(
PROVIDERS.identity_api, domain_id=self.domain_id
)
group = unit.new_group_ref(domain_id=self.domain_id)
group = PROVIDERS.identity_api.create_group(group)
project1 = unit.new_project_ref(domain_id=self.domain_id)
PROVIDERS.resource_api.create_project(project1['id'], project1)
expected_entity1 = self.build_role_assignment_entity_include_names(
role_ref=role1,
project_ref=project1,
user_ref=user1)
self.put(expected_entity1['links']['assignment'])
expected_entity2 = self.build_role_assignment_entity_include_names(
role_ref=role1,
domain_ref=self.domain,
group_ref=group)
self.put(expected_entity2['links']['assignment'])
expected_entity3 = self.build_role_assignment_entity_include_names(
role_ref=role1,
domain_ref=self.domain,
user_ref=user1)
self.put(expected_entity3['links']['assignment'])
expected_entity4 = self.build_role_assignment_entity_include_names(
role_ref=role1,
project_ref=project1,
group_ref=group)
self.put(expected_entity4['links']['assignment'])
collection_url_domain = (
'/role_assignments?include_names&scope.domain.id=%(domain_id)s' % {
'domain_id': self.domain_id})
rs_domain = self.get(collection_url_domain)
collection_url_project = (
'/role_assignments?include_names&'
'scope.project.id=%(project_id)s' % {
'project_id': project1['id']})
rs_project = self.get(collection_url_project)
collection_url_group = (
'/role_assignments?include_names&group.id=%(group_id)s' % {
'group_id': group['id']})
rs_group = self.get(collection_url_group)
collection_url_user = (
'/role_assignments?include_names&user.id=%(user_id)s' % {
'user_id': user1['id']})
rs_user = self.get(collection_url_user)
collection_url_role = (
'/role_assignments?include_names&role.id=%(role_id)s' % {
'role_id': role1['id']})
rs_role = self.get(collection_url_role)
# Make sure all entities were created successfully
self.assertEqual(http.client.OK, rs_domain.status_int)
self.assertEqual(http.client.OK, rs_project.status_int)
self.assertEqual(http.client.OK, rs_group.status_int)
self.assertEqual(http.client.OK, rs_user.status_int)
# Make sure we can get back the correct number of entities
self.assertValidRoleAssignmentListResponse(
rs_domain,
expected_length=2,
resource_url=collection_url_domain)
self.assertValidRoleAssignmentListResponse(
rs_project,
expected_length=2,
resource_url=collection_url_project)
self.assertValidRoleAssignmentListResponse(
rs_group,
expected_length=2,
resource_url=collection_url_group)
self.assertValidRoleAssignmentListResponse(
rs_user,
expected_length=2,
resource_url=collection_url_user)
self.assertValidRoleAssignmentListResponse(
rs_role,
expected_length=4,
resource_url=collection_url_role)
# Verify all types of entities have the correct format
self.assertRoleAssignmentInListResponse(rs_domain, expected_entity2)
self.assertRoleAssignmentInListResponse(rs_project, expected_entity1)
self.assertRoleAssignmentInListResponse(rs_group, expected_entity4)
self.assertRoleAssignmentInListResponse(rs_user, expected_entity3)
self.assertRoleAssignmentInListResponse(rs_role, expected_entity1)
def test_list_role_assignments_include_names_global_role(self):
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
self._test_list_role_assignments_include_names(role)
def test_list_role_assignments_include_names_domain_role(self):
role = unit.new_role_ref(domain_id=self.domain['id'])
PROVIDERS.role_api.create_role(role['id'], role)
self._test_list_role_assignments_include_names(role)
def test_remove_assignment_for_project_acting_as_domain(self):
"""Test goal: remove assignment for project acting as domain.
        Ensure that when we have two role assignments for a project acting as
        a domain, one treating it as a domain and the other as a project, we
        are still able to remove those assignments later.
Test plan:
- Create a role and a domain with a user;
- Grant a role for this user in this domain;
- Grant a role for this user in the same entity as a project;
        - Ensure that both assignments were created and are valid;
        - Remove the domain assignment for the user and show that the project
          assignment for the user is still valid
"""
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(domain['id'], domain)
user = unit.create_user(PROVIDERS.identity_api, domain_id=domain['id'])
assignment_domain = self.build_role_assignment_entity(
role_id=role['id'], domain_id=domain['id'], user_id=user['id'],
inherited_to_projects=False)
assignment_project = self.build_role_assignment_entity(
role_id=role['id'], project_id=domain['id'], user_id=user['id'],
inherited_to_projects=False)
self.put(assignment_domain['links']['assignment'])
self.put(assignment_project['links']['assignment'])
collection_url = '/role_assignments?user.id=%(user_id)s' % (
{'user_id': user['id']})
result = self.get(collection_url)
# We have two role assignments based in both roles for the domain and
# project scope
self.assertValidRoleAssignmentListResponse(
result, expected_length=2, resource_url=collection_url)
self.assertRoleAssignmentInListResponse(result, assignment_domain)
domain_url = '/domains/%s/users/%s/roles/%s' % (
domain['id'], user['id'], role['id'])
self.delete(domain_url)
collection_url = '/role_assignments?user.id=%(user_id)s' % (
{'user_id': user['id']})
result = self.get(collection_url)
# Now we only have one assignment for the project scope since the
# domain scope was removed.
self.assertValidRoleAssignmentListResponse(
result, expected_length=1, resource_url=collection_url)
self.assertRoleAssignmentInListResponse(result, assignment_project)
def test_list_inherited_role_assignments_include_names(self):
"""Call ``GET /role_assignments?include_names``.
Test goal: ensure calling list role assignments including names
honors the inherited role assignments flag.
Test plan:
- Create a role and a domain with a user;
        - Create an inherited role assignment;
- List role assignments for that user;
- List role assignments for that user including names.
"""
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(domain['id'], domain)
user = unit.create_user(PROVIDERS.identity_api, domain_id=domain['id'])
# Create and store expected assignment refs
assignment = self.build_role_assignment_entity(
role_id=role['id'], domain_id=domain['id'], user_id=user['id'],
inherited_to_projects=True)
assignment_names = self.build_role_assignment_entity_include_names(
role_ref=role, domain_ref=domain, user_ref=user,
inherited_assignment=True)
# Ensure expected assignment refs are inherited and have the same URL
self.assertEqual('projects',
assignment['scope']['OS-INHERIT:inherited_to'])
self.assertEqual('projects',
assignment_names['scope']['OS-INHERIT:inherited_to'])
self.assertEqual(assignment['links']['assignment'],
assignment_names['links']['assignment'])
self.put(assignment['links']['assignment'])
collection_url = '/role_assignments?user.id=%(user_id)s' % (
{'user_id': user['id']})
result = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
result, expected_length=1, resource_url=collection_url)
self.assertRoleAssignmentInListResponse(result, assignment)
collection_url = ('/role_assignments?include_names&'
'user.id=%(user_id)s' % {'user_id': user['id']})
result = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
result, expected_length=1, resource_url=collection_url)
self.assertRoleAssignmentInListResponse(result, assignment_names)
def test_list_role_assignments_for_disabled_inheritance_extension(self):
"""Call ``GET /role_assignments with inherited domain grants``.
Test Plan:
- Issue the URL to add inherited role to the domain
- Issue the URL to check effective roles on project include the
inherited role
- Disable the extension
- Re-check the effective roles, proving the inherited role no longer
shows up.
"""
role_list = []
for _ in range(4):
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
role_list.append(role)
domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(domain['id'], domain)
user1 = unit.create_user(
PROVIDERS.identity_api, domain_id=domain['id']
)
project1 = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project1['id'], project1)
project2 = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project2['id'], project2)
# Add some roles to the project
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project1['id'], role_list[0]['id'])
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project1['id'], role_list[1]['id'])
# ..and one on a different project as a spoiler
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project2['id'], role_list[2]['id'])
# Now create our inherited role on the domain
base_collection_url = (
'/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
'domain_id': domain['id'],
'user_id': user1['id']})
member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
'collection_url': base_collection_url,
'role_id': role_list[3]['id']}
collection_url = base_collection_url + '/inherited_to_projects'
self.put(member_url)
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=role_list[3],
resource_url=collection_url)
# Get effective list role assignments - the role should
# turn into a project role, along with the two direct roles that are
# on the project
collection_url = (
'/role_assignments?effective&user.id=%(user_id)s'
'&scope.project.id=%(project_id)s' % {
'user_id': user1['id'],
'project_id': project1['id']})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=3,
resource_url=collection_url)
ud_url = self.build_role_assignment_link(
domain_id=domain['id'], user_id=user1['id'],
role_id=role_list[3]['id'], inherited_to_projects=True)
up_entity = self.build_role_assignment_entity(
link=ud_url, project_id=project1['id'],
user_id=user1['id'], role_id=role_list[3]['id'],
inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, up_entity)
def test_list_role_assignments_for_inherited_group_domain_grants(self):
"""Call ``GET /role_assignments with inherited group domain grants``.
Test Plan:
- Create 4 roles
- Create a domain with a user and two projects
- Assign two direct roles to project1
- Assign a spoiler role to project2
- Issue the URL to add inherited role to the domain
- Issue the URL to check it is indeed on the domain
- Issue the URL to check effective roles on project1 - this
should return 3 roles.
"""
role_list = []
for _ in range(4):
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
role_list.append(role)
domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(domain['id'], domain)
user1 = unit.create_user(
PROVIDERS.identity_api, domain_id=domain['id']
)
user2 = unit.create_user(
PROVIDERS.identity_api, domain_id=domain['id']
)
group1 = unit.new_group_ref(domain_id=domain['id'])
group1 = PROVIDERS.identity_api.create_group(group1)
PROVIDERS.identity_api.add_user_to_group(
user1['id'], group1['id']
)
PROVIDERS.identity_api.add_user_to_group(
user2['id'], group1['id']
)
project1 = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project1['id'], project1)
project2 = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project2['id'], project2)
# Add some roles to the project
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project1['id'], role_list[0]['id'])
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project1['id'], role_list[1]['id'])
# ..and one on a different project as a spoiler
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project2['id'], role_list[2]['id'])
# Now create our inherited role on the domain
base_collection_url = (
'/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
'domain_id': domain['id'],
'group_id': group1['id']})
member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
'collection_url': base_collection_url,
'role_id': role_list[3]['id']}
collection_url = base_collection_url + '/inherited_to_projects'
self.put(member_url)
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=role_list[3],
resource_url=collection_url)
# Now use the list domain role assignments api to check if this
# is included
collection_url = (
'/role_assignments?group.id=%(group_id)s'
'&scope.domain.id=%(domain_id)s' % {
'group_id': group1['id'],
'domain_id': domain['id']})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=1,
resource_url=collection_url)
gd_entity = self.build_role_assignment_entity(
domain_id=domain['id'], group_id=group1['id'],
role_id=role_list[3]['id'], inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, gd_entity)
# Now ask for effective list role assignments - the role should
# turn into a user project role, along with the two direct roles
# that are on the project
collection_url = (
'/role_assignments?effective&user.id=%(user_id)s'
'&scope.project.id=%(project_id)s' % {
'user_id': user1['id'],
'project_id': project1['id']})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=3,
resource_url=collection_url)
# An effective role for an inherited role will be a project
# entity, with a domain link to the inherited assignment
up_entity = self.build_role_assignment_entity(
link=gd_entity['links']['assignment'], project_id=project1['id'],
user_id=user1['id'], role_id=role_list[3]['id'],
inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, up_entity)
def test_filtered_role_assignments_for_inherited_grants(self):
"""Call ``GET /role_assignments?scope.OS-INHERIT:inherited_to``.
Test Plan:
- Create 5 roles
- Create a domain with a user, group and two projects
- Assign three direct spoiler roles to projects
- Issue the URL to add an inherited user role to the domain
- Issue the URL to add an inherited group role to the domain
- Issue the URL to filter by inherited roles - this should
return just the 2 inherited roles.
"""
role_list = []
for _ in range(5):
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
role_list.append(role)
domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(domain['id'], domain)
user1 = unit.create_user(
PROVIDERS.identity_api, domain_id=domain['id']
)
group1 = unit.new_group_ref(domain_id=domain['id'])
group1 = PROVIDERS.identity_api.create_group(group1)
project1 = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project1['id'], project1)
project2 = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project2['id'], project2)
# Add some spoiler roles to the projects
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project1['id'], role_list[0]['id'])
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project2['id'], role_list[1]['id'])
# Create a non-inherited role as a spoiler
PROVIDERS.assignment_api.create_grant(
role_list[2]['id'], user_id=user1['id'], domain_id=domain['id'])
# Now create two inherited roles on the domain, one for a user
        # and one for a group
base_collection_url = (
'/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
'domain_id': domain['id'],
'user_id': user1['id']})
member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
'collection_url': base_collection_url,
'role_id': role_list[3]['id']}
collection_url = base_collection_url + '/inherited_to_projects'
self.put(member_url)
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=role_list[3],
resource_url=collection_url)
base_collection_url = (
'/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
'domain_id': domain['id'],
'group_id': group1['id']})
member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
'collection_url': base_collection_url,
'role_id': role_list[4]['id']}
collection_url = base_collection_url + '/inherited_to_projects'
self.put(member_url)
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=role_list[4],
resource_url=collection_url)
# Now use the list role assignments api to get a list of inherited
# roles on the domain - should get back the two roles
collection_url = (
'/role_assignments?scope.OS-INHERIT:inherited_to=projects')
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=2,
resource_url=collection_url)
ud_entity = self.build_role_assignment_entity(
domain_id=domain['id'], user_id=user1['id'],
role_id=role_list[3]['id'], inherited_to_projects=True)
gd_entity = self.build_role_assignment_entity(
domain_id=domain['id'], group_id=group1['id'],
role_id=role_list[4]['id'], inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, ud_entity)
self.assertRoleAssignmentInListResponse(r, gd_entity)
def _setup_hierarchical_projects_scenario(self):
"""Create basic hierarchical projects scenario.
This basic scenario contains a root with one leaf project and
two roles with the following names: non-inherited and inherited.
"""
# Create project hierarchy
root = unit.new_project_ref(domain_id=self.domain['id'])
leaf = unit.new_project_ref(domain_id=self.domain['id'],
parent_id=root['id'])
PROVIDERS.resource_api.create_project(root['id'], root)
PROVIDERS.resource_api.create_project(leaf['id'], leaf)
# Create 'non-inherited' and 'inherited' roles
non_inherited_role = unit.new_role_ref(name='non-inherited')
PROVIDERS.role_api.create_role(
non_inherited_role['id'], non_inherited_role
)
inherited_role = unit.new_role_ref(name='inherited')
PROVIDERS.role_api.create_role(inherited_role['id'], inherited_role)
return (root['id'], leaf['id'],
non_inherited_role['id'], inherited_role['id'])
def test_get_token_from_inherited_user_project_role_grants(self):
# Create default scenario
root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
self._setup_hierarchical_projects_scenario())
# Define root and leaf projects authentication data
root_project_auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'],
project_id=root_id)
leaf_project_auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'],
project_id=leaf_id)
# Check the user cannot get a token on root nor leaf project
self.v3_create_token(root_project_auth_data,
expected_status=http.client.UNAUTHORIZED)
self.v3_create_token(leaf_project_auth_data,
expected_status=http.client.UNAUTHORIZED)
# Grant non-inherited role for user on leaf project
non_inher_up_link = self.build_role_assignment_link(
project_id=leaf_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_up_link)
# Check the user can only get a token on leaf project
self.v3_create_token(root_project_auth_data,
expected_status=http.client.UNAUTHORIZED)
self.v3_create_token(leaf_project_auth_data)
# Grant inherited role for user on root project
inher_up_link = self.build_role_assignment_link(
project_id=root_id, user_id=self.user['id'],
role_id=inherited_role_id, inherited_to_projects=True)
self.put(inher_up_link)
        # Check the user can still get a token only on the leaf project
self.v3_create_token(root_project_auth_data,
expected_status=http.client.UNAUTHORIZED)
self.v3_create_token(leaf_project_auth_data)
# Delete non-inherited grant
self.delete(non_inher_up_link)
# Check the inherited role still applies for leaf project
self.v3_create_token(root_project_auth_data,
expected_status=http.client.UNAUTHORIZED)
self.v3_create_token(leaf_project_auth_data)
# Delete inherited grant
self.delete(inher_up_link)
# Check the user cannot get a token on leaf project anymore
self.v3_create_token(leaf_project_auth_data,
expected_status=http.client.UNAUTHORIZED)
def test_get_token_from_inherited_group_project_role_grants(self):
# Create default scenario
root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
self._setup_hierarchical_projects_scenario())
# Create group and add user to it
group = unit.new_group_ref(domain_id=self.domain['id'])
group = PROVIDERS.identity_api.create_group(group)
PROVIDERS.identity_api.add_user_to_group(self.user['id'], group['id'])
# Define root and leaf projects authentication data
root_project_auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'],
project_id=root_id)
leaf_project_auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'],
project_id=leaf_id)
# Check the user cannot get a token on root nor leaf project
self.v3_create_token(root_project_auth_data,
expected_status=http.client.UNAUTHORIZED)
self.v3_create_token(leaf_project_auth_data,
expected_status=http.client.UNAUTHORIZED)
# Grant non-inherited role for group on leaf project
non_inher_gp_link = self.build_role_assignment_link(
project_id=leaf_id, group_id=group['id'],
role_id=non_inherited_role_id)
self.put(non_inher_gp_link)
# Check the user can only get a token on leaf project
self.v3_create_token(root_project_auth_data,
expected_status=http.client.UNAUTHORIZED)
self.v3_create_token(leaf_project_auth_data)
# Grant inherited role for group on root project
inher_gp_link = self.build_role_assignment_link(
project_id=root_id, group_id=group['id'],
role_id=inherited_role_id, inherited_to_projects=True)
self.put(inher_gp_link)
        # Check the user can still get a token only on the leaf project
self.v3_create_token(root_project_auth_data,
expected_status=http.client.UNAUTHORIZED)
self.v3_create_token(leaf_project_auth_data)
        # Delete non-inherited grant
self.delete(non_inher_gp_link)
# Check the inherited role still applies for leaf project
self.v3_create_token(leaf_project_auth_data)
# Delete inherited grant
self.delete(inher_gp_link)
# Check the user cannot get a token on leaf project anymore
self.v3_create_token(leaf_project_auth_data,
expected_status=http.client.UNAUTHORIZED)
def test_get_role_assignments_for_project_hierarchy(self):
"""Call ``GET /role_assignments``.
Test Plan:
- Create 2 roles
- Create a hierarchy of projects with one root and one leaf project
- Issue the URL to add a non-inherited user role to the root project
- Issue the URL to add an inherited user role to the root project
- Issue the URL to get all role assignments - this should return just
2 roles (non-inherited and inherited) in the root project.
"""
# Create default scenario
root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
self._setup_hierarchical_projects_scenario())
# Grant non-inherited role
non_inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_up_entity['links']['assignment'])
# Grant inherited role
inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=inherited_role_id, inherited_to_projects=True)
self.put(inher_up_entity['links']['assignment'])
# Get role assignments
collection_url = '/role_assignments'
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
resource_url=collection_url)
# Assert that the user has non-inherited role on root project
self.assertRoleAssignmentInListResponse(r, non_inher_up_entity)
# Assert that the user has inherited role on root project
self.assertRoleAssignmentInListResponse(r, inher_up_entity)
# Assert that the user does not have non-inherited role on leaf project
non_inher_up_entity = self.build_role_assignment_entity(
project_id=leaf_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
# Assert that the user does not have inherited role on leaf project
inher_up_entity['scope']['project']['id'] = leaf_id
self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)
def test_get_effective_role_assignments_for_project_hierarchy(self):
"""Call ``GET /role_assignments?effective``.
Test Plan:
- Create 2 roles
- Create a hierarchy of projects with one root and one leaf project
- Issue the URL to add a non-inherited user role to the root project
- Issue the URL to add an inherited user role to the root project
- Issue the URL to get effective role assignments - this should return
1 role (non-inherited) on the root project and 1 role (inherited) on
the leaf project.
"""
# Create default scenario
root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
self._setup_hierarchical_projects_scenario())
# Grant non-inherited role
non_inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_up_entity['links']['assignment'])
# Grant inherited role
inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=inherited_role_id, inherited_to_projects=True)
self.put(inher_up_entity['links']['assignment'])
# Get effective role assignments
collection_url = '/role_assignments?effective'
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
resource_url=collection_url)
# Assert that the user has non-inherited role on root project
self.assertRoleAssignmentInListResponse(r, non_inher_up_entity)
# Assert that the user does not have inherited role on root project
self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)
# Assert that the user does not have non-inherited role on leaf project
non_inher_up_entity = self.build_role_assignment_entity(
project_id=leaf_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
# Assert that the user has inherited role on leaf project
inher_up_entity['scope']['project']['id'] = leaf_id
self.assertRoleAssignmentInListResponse(r, inher_up_entity)
def test_project_id_specified_if_include_subtree_specified(self):
"""When using include_subtree, you must specify a project ID."""
r = self.get('/role_assignments?include_subtree=True',
expected_status=http.client.BAD_REQUEST)
error_msg = ("scope.project.id must be specified if include_subtree "
"is also specified")
self.assertEqual(error_msg, r.result['error']['message'])
r = self.get('/role_assignments?scope.project.id&'
'include_subtree=True',
expected_status=http.client.BAD_REQUEST)
self.assertEqual(error_msg, r.result['error']['message'])
def test_get_role_assignments_for_project_tree(self):
"""Get role_assignment?scope.project.id=X&include_subtree``.
Test Plan:
- Create 2 roles and a hierarchy of projects with one root and one leaf
- Issue the URL to add a non-inherited user role to the root project
and the leaf project
- Issue the URL to get role assignments for the root project but
not the subtree - this should return just the root assignment
- Issue the URL to get role assignments for the root project and
          its subtree - this should return both assignments
        - Check that explicitly setting include_subtree to False is
          equivalent to not including it at all in the query.
"""
# Create default scenario
root_id, leaf_id, non_inherited_role_id, unused_role_id = (
self._setup_hierarchical_projects_scenario())
# Grant non-inherited role to root and leaf projects
non_inher_entity_root = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_entity_root['links']['assignment'])
non_inher_entity_leaf = self.build_role_assignment_entity(
project_id=leaf_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_entity_leaf['links']['assignment'])
# Without the subtree, we should get the one assignment on the
# root project
collection_url = (
'/role_assignments?scope.project.id=%(project)s' % {
'project': root_id})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r, resource_url=collection_url)
self.assertThat(r.result['role_assignments'], matchers.HasLength(1))
self.assertRoleAssignmentInListResponse(r, non_inher_entity_root)
# With the subtree, we should get both assignments
collection_url = (
'/role_assignments?scope.project.id=%(project)s'
'&include_subtree=True' % {
'project': root_id})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r, resource_url=collection_url)
self.assertThat(r.result['role_assignments'], matchers.HasLength(2))
self.assertRoleAssignmentInListResponse(r, non_inher_entity_root)
self.assertRoleAssignmentInListResponse(r, non_inher_entity_leaf)
# With subtree=0, we should also only get the one assignment on the
# root project
collection_url = (
'/role_assignments?scope.project.id=%(project)s'
'&include_subtree=0' % {
'project': root_id})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r, resource_url=collection_url)
self.assertThat(r.result['role_assignments'], matchers.HasLength(1))
self.assertRoleAssignmentInListResponse(r, non_inher_entity_root)
def test_get_effective_role_assignments_for_project_tree(self):
"""Get role_assignment ?project_id=X&include_subtree=True&effective``.
Test Plan:
- Create 2 roles and a hierarchy of projects with one root and 4 levels
of child project
- Issue the URL to add a non-inherited user role to the root project
and a level 1 project
- Issue the URL to add an inherited user role on the level 2 project
- Issue the URL to get effective role assignments for the level 1
          project and its subtree - this should return a role (non-inherited)
          on the level 1 project and roles (inherited) on each of the level
          3 and 4 projects
"""
# Create default scenario
root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
self._setup_hierarchical_projects_scenario())
# Add some extra projects to the project hierarchy
level2 = unit.new_project_ref(domain_id=self.domain['id'],
parent_id=leaf_id)
level3 = unit.new_project_ref(domain_id=self.domain['id'],
parent_id=level2['id'])
level4 = unit.new_project_ref(domain_id=self.domain['id'],
parent_id=level3['id'])
PROVIDERS.resource_api.create_project(level2['id'], level2)
PROVIDERS.resource_api.create_project(level3['id'], level3)
PROVIDERS.resource_api.create_project(level4['id'], level4)
# Grant non-inherited role to root (as a spoiler) and to
# the level 1 (leaf) project
non_inher_entity_root = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_entity_root['links']['assignment'])
non_inher_entity_leaf = self.build_role_assignment_entity(
project_id=leaf_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_entity_leaf['links']['assignment'])
# Grant inherited role to level 2
inher_entity = self.build_role_assignment_entity(
project_id=level2['id'], user_id=self.user['id'],
role_id=inherited_role_id, inherited_to_projects=True)
self.put(inher_entity['links']['assignment'])
# Get effective role assignments
collection_url = (
'/role_assignments?scope.project.id=%(project)s'
'&include_subtree=True&effective' % {
'project': leaf_id})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r, resource_url=collection_url)
# There should be three assignments returned in total
self.assertThat(r.result['role_assignments'], matchers.HasLength(3))
        # Assert that the user does not have the non-inherited role on the
        # root project
self.assertRoleAssignmentNotInListResponse(r, non_inher_entity_root)
# Assert that the user does have non-inherited role on leaf project
self.assertRoleAssignmentInListResponse(r, non_inher_entity_leaf)
# Assert that the user has inherited role on levels 3 and 4
inher_entity['scope']['project']['id'] = level3['id']
self.assertRoleAssignmentInListResponse(r, inher_entity)
inher_entity['scope']['project']['id'] = level4['id']
self.assertRoleAssignmentInListResponse(r, inher_entity)
def test_get_inherited_role_assignments_for_project_hierarchy(self):
"""Call ``GET /role_assignments?scope.OS-INHERIT:inherited_to``.
Test Plan:
- Create 2 roles
- Create a hierarchy of projects with one root and one leaf project
- Issue the URL to add a non-inherited user role to the root project
- Issue the URL to add an inherited user role to the root project
- Issue the URL to filter inherited to projects role assignments - this
should return 1 role (inherited) on the root project.
"""
# Create default scenario
root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
self._setup_hierarchical_projects_scenario())
# Grant non-inherited role
non_inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_up_entity['links']['assignment'])
# Grant inherited role
inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=inherited_role_id, inherited_to_projects=True)
self.put(inher_up_entity['links']['assignment'])
# Get inherited role assignments
collection_url = ('/role_assignments'
'?scope.OS-INHERIT:inherited_to=projects')
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
resource_url=collection_url)
# Assert that the user does not have non-inherited role on root project
self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
# Assert that the user has inherited role on root project
self.assertRoleAssignmentInListResponse(r, inher_up_entity)
# Assert that the user does not have non-inherited role on leaf project
non_inher_up_entity = self.build_role_assignment_entity(
project_id=leaf_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
# Assert that the user does not have inherited role on leaf project
inher_up_entity['scope']['project']['id'] = leaf_id
self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)
class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin,
unit.TestCase):
def _create_role(self):
"""Call ``POST /roles``."""
ref = unit.new_role_ref()
r = self.post('/roles', body={'role': ref})
return self.assertValidRoleResponse(r, ref)
def test_list_implied_roles_none(self):
self.prior = self._create_role()
url = '/roles/%s/implies' % (self.prior['id'])
response = self.get(url).json["role_inference"]
self.head(url, expected_status=http.client.OK)
self.assertEqual(self.prior['id'], response['prior_role']['id'])
self.assertEqual(0, len(response['implies']))
def _create_implied_role(self, prior, implied):
self.put('/roles/%s/implies/%s' % (prior['id'], implied['id']),
expected_status=http.client.CREATED)
def _delete_implied_role(self, prior, implied):
self.delete('/roles/%s/implies/%s' % (prior['id'], implied['id']))
def _setup_prior_two_implied(self):
self.prior = self._create_role()
self.implied1 = self._create_role()
self._create_implied_role(self.prior, self.implied1)
self.implied2 = self._create_role()
self._create_implied_role(self.prior, self.implied2)
def _assert_expected_implied_role_response(
self, expected_prior_id, expected_implied_ids):
r = self.get('/roles/%s/implies' % expected_prior_id)
response = r.json
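        # Illustrative shape of the response asserted below (placeholder IDs):
        #   {'role_inference': {'prior_role': {'id': 'p', 'links': {...}},
        #                       'implies': [{'id': 'i1', 'links': {...}},
        #                                   {'id': 'i2', 'links': {...}}]},
        #    'links': {'self': 'http://.../v3/roles/p/implies'}}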
role_inference = response['role_inference']
self.assertEqual(expected_prior_id, role_inference['prior_role']['id'])
prior_link = '/v3/roles/' + expected_prior_id + '/implies'
self.assertThat(response['links']['self'],
matchers.EndsWith(prior_link))
actual_implied_ids = [implied['id']
for implied in role_inference['implies']]
self.assertItemsEqual(expected_implied_ids, actual_implied_ids)
self.assertIsNotNone(role_inference['prior_role']['links']['self'])
for implied in role_inference['implies']:
self.assertIsNotNone(implied['links']['self'])
def _assert_expected_role_inference_rule_response(
self, expected_prior_id, expected_implied_id):
url = '/roles/%s/implies/%s' % (expected_prior_id, expected_implied_id)
response = self.get(url).json
self.assertThat(response['links']['self'],
matchers.EndsWith('/v3%s' % url))
role_inference = response['role_inference']
prior_role = role_inference['prior_role']
self.assertEqual(expected_prior_id, prior_role['id'])
self.assertIsNotNone(prior_role['name'])
self.assertThat(prior_role['links']['self'],
matchers.EndsWith('/v3/roles/%s' % expected_prior_id))
implied_role = role_inference['implies']
self.assertEqual(expected_implied_id, implied_role['id'])
self.assertIsNotNone(implied_role['name'])
self.assertThat(implied_role['links']['self'], matchers.EndsWith(
'/v3/roles/%s' % expected_implied_id))
def _assert_two_roles_implied(self):
self._assert_expected_implied_role_response(
self.prior['id'], [self.implied1['id'], self.implied2['id']])
self._assert_expected_role_inference_rule_response(
self.prior['id'], self.implied1['id'])
self._assert_expected_role_inference_rule_response(
self.prior['id'], self.implied2['id'])
def _assert_one_role_implied(self):
self._assert_expected_implied_role_response(
self.prior['id'], [self.implied1['id']])
self.get('/roles/%s/implies/%s' %
(self.prior['id'], self.implied2['id']),
expected_status=http.client.NOT_FOUND)
def _assert_two_rules_defined(self):
r = self.get('/role_inferences/')
rules = r.result['role_inferences']
self.assertEqual(self.prior['id'], rules[0]['prior_role']['id'])
self.assertEqual(2, len(rules[0]['implies']))
implied_ids = [implied['id'] for implied in rules[0]['implies']]
implied_names = [implied['name'] for implied in rules[0]['implies']]
self.assertIn(self.implied1['id'], implied_ids)
self.assertIn(self.implied2['id'], implied_ids)
self.assertIn(self.implied1['name'], implied_names)
self.assertIn(self.implied2['name'], implied_names)
def _assert_one_rule_defined(self):
r = self.get('/role_inferences/')
rules = r.result['role_inferences']
self.assertEqual(self.prior['id'], rules[0]['prior_role']['id'])
self.assertEqual(self.implied1['id'], rules[0]['implies'][0]['id'])
self.assertEqual(self.implied1['name'], rules[0]['implies'][0]['name'])
self.assertEqual(1, len(rules[0]['implies']))
def test_list_all_rules(self):
self._setup_prior_two_implied()
self._assert_two_rules_defined()
self._delete_implied_role(self.prior, self.implied2)
self._assert_one_rule_defined()
def test_CRD_implied_roles(self):
self._setup_prior_two_implied()
self._assert_two_roles_implied()
self._delete_implied_role(self.prior, self.implied2)
self._assert_one_role_implied()
def _create_three_roles(self):
self.role_list = []
for _ in range(3):
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
self.role_list.append(role)
def _create_test_domain_user_project(self):
domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(domain['id'], domain)
user = unit.create_user(PROVIDERS.identity_api, domain_id=domain['id'])
project = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project['id'], project)
return domain, user, project
def _assign_top_role_to_user_on_project(self, user, project):
PROVIDERS.assignment_api.add_role_to_user_and_project(
user['id'], project['id'], self.role_list[0]['id'])
def _build_effective_role_assignments_url(self, user):
return '/role_assignments?effective&user.id=%(user_id)s' % {
'user_id': user['id']}
def _assert_all_roles_in_assignment(self, response, user):
# Now use the list role assignments api to check that all three roles
# appear in the collection
self.assertValidRoleAssignmentListResponse(
response,
expected_length=len(self.role_list),
resource_url=self._build_effective_role_assignments_url(user))
def _assert_initial_assignment_in_effective(self, response, user, project):
# The initial assignment should be there (the link url will be
# generated and checked automatically since it matches the assignment)
entity = self.build_role_assignment_entity(
project_id=project['id'],
user_id=user['id'], role_id=self.role_list[0]['id'])
self.assertRoleAssignmentInListResponse(response, entity)
def _assert_effective_role_for_implied_has_prior_in_links(
self, response, user, project, prior_index, implied_index):
# An effective role for an implied role will have the prior role
# assignment in the links
prior_link = '/prior_roles/%(prior)s/implies/%(implied)s' % {
'prior': self.role_list[prior_index]['id'],
'implied': self.role_list[implied_index]['id']}
link = self.build_role_assignment_link(
project_id=project['id'], user_id=user['id'],
role_id=self.role_list[prior_index]['id'])
entity = self.build_role_assignment_entity(
link=link, project_id=project['id'],
user_id=user['id'], role_id=self.role_list[implied_index]['id'],
prior_link=prior_link)
self.assertRoleAssignmentInListResponse(response, entity)
def test_list_role_assignments_with_implied_roles(self):
"""Call ``GET /role_assignments`` with implied role grant.
Test Plan:
- Create a domain with a user and a project
- Create 3 roles
- Role 0 implies role 1 and role 1 implies role 2
- Assign the top role to the project
- Issue the URL to check effective roles on project - this
should return all 3 roles.
- Check the links of the 3 roles indicate the prior role where
appropriate
"""
(domain, user, project) = self._create_test_domain_user_project()
self._create_three_roles()
self._create_implied_role(self.role_list[0], self.role_list[1])
self._create_implied_role(self.role_list[1], self.role_list[2])
self._assign_top_role_to_user_on_project(user, project)
response = self.get(self._build_effective_role_assignments_url(user))
r = response
self._assert_all_roles_in_assignment(r, user)
self._assert_initial_assignment_in_effective(response, user, project)
self._assert_effective_role_for_implied_has_prior_in_links(
response, user, project, 0, 1)
self._assert_effective_role_for_implied_has_prior_in_links(
response, user, project, 1, 2)
def _create_named_role(self, name):
role = unit.new_role_ref()
role['name'] = name
PROVIDERS.role_api.create_role(role['id'], role)
return role
def test_root_role_as_implied_role_forbidden(self):
"""Test root role is forbidden to be set as an implied role.
Create 2 roles that are prohibited from being an implied role.
Create 1 additional role which should be accepted as an implied
role. Assure the prohibited role names cannot be set as an implied
        role. Assure that the accepted role name, which is not a member of
        the prohibited implied role list, can be successfully set as an
        implied role.
"""
prohibited_name1 = 'root1'
prohibited_name2 = 'root2'
accepted_name1 = 'implied1'
prohibited_names = [prohibited_name1, prohibited_name2]
self.config_fixture.config(group='assignment',
prohibited_implied_role=prohibited_names)
prior_role = self._create_role()
prohibited_role1 = self._create_named_role(prohibited_name1)
url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format(
prior_role_id=prior_role['id'],
implied_role_id=prohibited_role1['id'])
self.put(url, expected_status=http.client.FORBIDDEN)
prohibited_role2 = self._create_named_role(prohibited_name2)
url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format(
prior_role_id=prior_role['id'],
implied_role_id=prohibited_role2['id'])
self.put(url, expected_status=http.client.FORBIDDEN)
accepted_role1 = self._create_named_role(accepted_name1)
url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format(
prior_role_id=prior_role['id'],
implied_role_id=accepted_role1['id'])
self.put(url, expected_status=http.client.CREATED)
def test_trusts_from_implied_role(self):
self._create_three_roles()
self._create_implied_role(self.role_list[0], self.role_list[1])
self._create_implied_role(self.role_list[1], self.role_list[2])
self._assign_top_role_to_user_on_project(self.user, self.project)
# Create a trustee and assign the prior role to her
trustee = unit.create_user(
PROVIDERS.identity_api, domain_id=self.domain_id
)
ref = unit.new_trust_ref(
trustor_user_id=self.user['id'],
trustee_user_id=trustee['id'],
project_id=self.project['id'],
role_ids=[self.role_list[0]['id']])
r = self.post('/OS-TRUST/trusts', body={'trust': ref})
trust = r.result['trust']
# Only the role that was specified is in the trust, NOT implied roles
self.assertEqual(self.role_list[0]['id'], trust['roles'][0]['id'])
self.assertThat(trust['roles'], matchers.HasLength(1))
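        # Implied roles are not stored on the trust itself; they are expanded
        # when the trust-scoped token is issued, as the assertions below show.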
# Authenticate as the trustee
auth_data = self.build_authentication_request(
user_id=trustee['id'],
password=trustee['password'],
trust_id=trust['id'])
r = self.v3_create_token(auth_data)
token = r.result['token']
self.assertThat(token['roles'],
matchers.HasLength(len(self.role_list)))
for role in token['roles']:
self.assertIn(role, self.role_list)
for role in self.role_list:
self.assertIn(role, token['roles'])
def test_trusts_from_domain_specific_implied_role(self):
self._create_three_roles()
# Overwrite the first role with a domain specific role
role = unit.new_role_ref(domain_id=self.domain_id)
self.role_list[0] = PROVIDERS.role_api.create_role(role['id'], role)
self._create_implied_role(self.role_list[0], self.role_list[1])
self._create_implied_role(self.role_list[1], self.role_list[2])
self._assign_top_role_to_user_on_project(self.user, self.project)
# Create a trustee and assign the prior role to her
trustee = unit.create_user(
PROVIDERS.identity_api, domain_id=self.domain_id
)
ref = unit.new_trust_ref(
trustor_user_id=self.user['id'],
trustee_user_id=trustee['id'],
project_id=self.project['id'],
role_ids=[self.role_list[0]['id']])
r = self.post('/OS-TRUST/trusts', body={'trust': ref})
trust = r.result['trust']
# Only the role that was specified is in the trust, NOT implied roles
self.assertEqual(self.role_list[0]['id'], trust['roles'][0]['id'])
self.assertThat(trust['roles'], matchers.HasLength(1))
# Authenticate as the trustee
auth_data = self.build_authentication_request(
user_id=trustee['id'],
password=trustee['password'],
trust_id=trust['id'])
r = self.v3_create_token(auth_data)
token = r.result['token']
        # The token should have the roles implied by the domain specific role,
# but not the domain specific role itself.
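        # i.e. only role_list[1] and role_list[2] are expected here, since
        # role_list[0] was overwritten with a domain specific role above.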
self.assertThat(token['roles'],
matchers.HasLength(len(self.role_list) - 1))
for role in token['roles']:
self.assertIn(role, self.role_list)
for role in [self.role_list[1], self.role_list[2]]:
self.assertIn(role, token['roles'])
self.assertNotIn(self.role_list[0], token['roles'])
def test_global_role_cannot_imply_domain_specific_role(self):
domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(domain['id'], domain)
domain_role_ref = unit.new_role_ref(domain_id=domain['id'])
domain_role = PROVIDERS.role_api.create_role(
domain_role_ref['id'], domain_role_ref
)
global_role_ref = unit.new_role_ref()
global_role = PROVIDERS.role_api.create_role(
global_role_ref['id'], global_role_ref
)
self.put('/roles/%s/implies/%s' % (global_role['id'],
domain_role['id']),
expected_status=http.client.FORBIDDEN)
class DomainSpecificRoleTests(test_v3.RestfulTestCase, unit.TestCase):
def setUp(self):
def create_role(domain_id=None):
"""Call ``POST /roles``."""
ref = unit.new_role_ref(domain_id=domain_id)
r = self.post(
'/roles',
body={'role': ref})
return self.assertValidRoleResponse(r, ref)
super(DomainSpecificRoleTests, self).setUp()
self.domainA = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(self.domainA['id'], self.domainA)
self.domainB = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(self.domainB['id'], self.domainB)
self.global_role1 = create_role()
self.global_role2 = create_role()
        # Since there may be other global roles already created, let's count
        # them, so we can check that subsequent list responses are correct
r = self.get('/roles')
self.existing_global_roles = len(r.result['roles'])
# And now create some domain specific roles
self.domainA_role1 = create_role(domain_id=self.domainA['id'])
self.domainA_role2 = create_role(domain_id=self.domainA['id'])
self.domainB_role = create_role(domain_id=self.domainB['id'])
def test_get_and_list_domain_specific_roles(self):
# Check we can get a domain specific role
r = self.get('/roles/%s' % self.domainA_role1['id'])
self.assertValidRoleResponse(r, self.domainA_role1)
# If we list without specifying a domain, we should only get global
# roles back.
r = self.get('/roles')
self.assertValidRoleListResponse(
r, expected_length=self.existing_global_roles)
self.assertRoleInListResponse(r, self.global_role1)
self.assertRoleInListResponse(r, self.global_role2)
self.assertRoleNotInListResponse(r, self.domainA_role1)
self.assertRoleNotInListResponse(r, self.domainA_role2)
self.assertRoleNotInListResponse(r, self.domainB_role)
# Now list those in domainA, making sure that's all we get back
r = self.get('/roles?domain_id=%s' % self.domainA['id'])
self.assertValidRoleListResponse(r, expected_length=2)
self.assertRoleInListResponse(r, self.domainA_role1)
self.assertRoleInListResponse(r, self.domainA_role2)
def test_update_domain_specific_roles(self):
self.domainA_role1['name'] = uuid.uuid4().hex
self.patch('/roles/%(role_id)s' % {
'role_id': self.domainA_role1['id']},
body={'role': self.domainA_role1})
r = self.get('/roles/%s' % self.domainA_role1['id'])
self.assertValidRoleResponse(r, self.domainA_role1)
def test_delete_domain_specific_roles(self):
# Check delete only removes that one domain role
self.delete('/roles/%(role_id)s' % {
'role_id': self.domainA_role1['id']})
self.get('/roles/%s' % self.domainA_role1['id'],
expected_status=http.client.NOT_FOUND)
# Now re-list those in domainA, making sure there's only one left
r = self.get('/roles?domain_id=%s' % self.domainA['id'])
self.assertValidRoleListResponse(r, expected_length=1)
self.assertRoleInListResponse(r, self.domainA_role2)
def test_same_domain_assignment(self):
user = unit.create_user(PROVIDERS.identity_api,
domain_id=self.domainA['id'])
projectA = unit.new_project_ref(domain_id=self.domainA['id'])
PROVIDERS.resource_api.create_project(projectA['id'], projectA)
PROVIDERS.assignment_api.create_grant(
self.domainA_role1['id'], user_id=user['id'],
project_id=projectA['id']
)
def test_cross_domain_assignment_valid(self):
user = unit.create_user(PROVIDERS.identity_api,
domain_id=self.domainB['id'])
projectA = unit.new_project_ref(domain_id=self.domainA['id'])
PROVIDERS.resource_api.create_project(projectA['id'], projectA)
# Positive: a role on domainA can be assigned to a user from domainB
# but only for use on a project from domainA
PROVIDERS.assignment_api.create_grant(
self.domainA_role1['id'], user_id=user['id'],
project_id=projectA['id']
)
def test_cross_domain_assignment_invalid(self):
user = unit.create_user(PROVIDERS.identity_api,
domain_id=self.domainB['id'])
projectB = unit.new_project_ref(domain_id=self.domainB['id'])
PROVIDERS.resource_api.create_project(projectB['id'], projectB)
# Negative: a role on domainA can be assigned to a user from domainB
# only for a project from domainA
self.assertRaises(exception.DomainSpecificRoleMismatch,
PROVIDERS.assignment_api.create_grant,
self.domainA_role1['id'],
user_id=user['id'],
project_id=projectB['id'])
def test_cross_domain_implied_roles_authentication(self):
# Create a user in domainB
user = unit.create_user(PROVIDERS.identity_api,
domain_id=self.domainB['id'])
# Create project in domainA
projectA = unit.new_project_ref(domain_id=self.domainA['id'])
PROVIDERS.resource_api.create_project(projectA['id'], projectA)
# Now we create an implied rule from a role in domainA to a
# role in domainB
self.put('/roles/%s/implies/%s' %
(self.domainA_role1['id'], self.domainB_role['id']),
expected_status=http.client.CREATED)
# A role in domainA can be assigned to a user from domainB
# only for a project from domainA
PROVIDERS.assignment_api.create_grant(
self.domainA_role1['id'], user_id=user['id'],
project_id=projectA['id']
)
# The role assignments should return an empty list since domain roles
        # can only be used to imply other roles
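        # (here even the implied role, self.domainB_role, is domain specific,
        # so nothing is expanded into an effective assignment)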
assignments = PROVIDERS.assignment_api.list_role_assignments(
user_id=user['id'], effective=True)
self.assertEqual([], assignments)
# This also means we can't authenticate using the existing assignment
auth_body = self.build_authentication_request(
user_id=user['id'],
password=user['password'],
project_id=projectA['id'])
self.post('/auth/tokens', body=auth_body,
expected_status=http.client.UNAUTHORIZED)
class ListUserProjectsTestCase(test_v3.RestfulTestCase):
"""Test for /users/<user>/projects."""
def load_sample_data(self):
# do not load base class's data, keep it focused on the tests
self.auths = []
self.domains = []
self.projects = []
self.roles = []
self.users = []
root_domain = unit.new_domain_ref(
id=resource_base.NULL_DOMAIN_ID,
name=resource_base.NULL_DOMAIN_ID
)
self.resource_api.create_domain(resource_base.NULL_DOMAIN_ID,
root_domain)
# Create 3 sets of domain, roles, projects, and users to demonstrate
# the right user's data is loaded and only projects they can access
# are returned.
for _ in range(3):
domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(domain['id'], domain)
user = unit.create_user(
PROVIDERS.identity_api, domain_id=domain['id']
)
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
PROVIDERS.assignment_api.create_grant(
role['id'], user_id=user['id'], domain_id=domain['id']
)
project = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project['id'], project)
PROVIDERS.assignment_api.create_grant(
role['id'], user_id=user['id'], project_id=project['id']
)
auth = self.build_authentication_request(
user_id=user['id'],
password=user['password'],
domain_id=domain['id'])
self.auths.append(auth)
self.domains.append(domain)
self.projects.append(project)
self.roles.append(role)
self.users.append(user)
def test_list_head_all(self):
for i in range(len(self.users)):
user = self.users[i]
auth = self.auths[i]
url = '/users/%s/projects' % user['id']
result = self.get(url, auth=auth)
projects_result = result.json['projects']
self.assertEqual(1, len(projects_result))
self.assertEqual(self.projects[i]['id'], projects_result[0]['id'])
self.head(url, auth=auth, expected_status=http.client.OK)
def test_list_enabled(self):
for i in range(len(self.users)):
user = self.users[i]
auth = self.auths[i]
# There are no disabled projects
url = '/users/%s/projects?enabled=True' % user['id']
result = self.get(url, auth=auth)
projects_result = result.json['projects']
self.assertEqual(1, len(projects_result))
self.assertEqual(self.projects[i]['id'], projects_result[0]['id'])
def test_list_disabled(self):
for i in range(len(self.users)):
user = self.users[i]
auth = self.auths[i]
project = self.projects[i]
# There are no disabled projects
url = '/users/%s/projects?enabled=False' % user['id']
result = self.get(url, auth=auth)
self.assertEqual(0, len(result.json['projects']))
# disable this one and check again
project['enabled'] = False
PROVIDERS.resource_api.update_project(project['id'], project)
result = self.get(url, auth=auth)
projects_result = result.json['projects']
self.assertEqual(1, len(projects_result))
self.assertEqual(self.projects[i]['id'], projects_result[0]['id'])
def test_list_by_domain_id(self):
for i in range(len(self.users)):
user = self.users[i]
domain = self.domains[i]
auth = self.auths[i]
# Try looking for projects with a non-existent domain_id
url = '/users/%s/projects?domain_id=%s' % (user['id'],
uuid.uuid4().hex)
result = self.get(url, auth=auth)
self.assertEqual(0, len(result.json['projects']))
# Now try a valid one
url = '/users/%s/projects?domain_id=%s' % (user['id'],
domain['id'])
result = self.get(url, auth=auth)
projects_result = result.json['projects']
self.assertEqual(1, len(projects_result))
self.assertEqual(self.projects[i]['id'], projects_result[0]['id'])
# FIXME(lbragstad): These tests contain system-level API calls, which means
# they will log a warning message if they are called with a project-scoped
# token, regardless of the role assignment on the project. We need to fix
# them by using a proper system-scoped admin token to make the call instead
# of a project scoped token.
class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase,
SystemRoleAssignmentMixin):
def test_assign_system_role_to_user(self):
system_role_id = self._create_new_role()
# assign the user a role on the system
member_url = (
'/system/users/%(user_id)s/roles/%(role_id)s' % {
'user_id': self.user['id'],
'role_id': system_role_id
}
)
self.put(member_url)
# validate the role assignment
self.head(member_url)
# list system roles
collection_url = (
'/system/users/%(user_id)s/roles' % {'user_id': self.user['id']}
)
roles = self.get(collection_url).json_body['roles']
self.assertEqual(len(roles), 1)
self.assertEqual(roles[0]['id'], system_role_id)
self.head(collection_url, expected_status=http.client.OK)
response = self.get(
'/role_assignments?scope.system=all&user.id=%(user_id)s' % {
'user_id': self.user['id']
}
)
self.assertValidRoleAssignmentListResponse(response)
def test_list_role_assignments_for_user_returns_all_assignments(self):
system_role_id = self._create_new_role()
# assign the user a role on the system
member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % {
'user_id': self.user['id'],
'role_id': system_role_id
}
self.put(member_url)
# the response should contain one role assignment for the system role
        # and one for a role that was set up during setUp().
response = self.get(
'/role_assignments?user.id=%(user_id)s' % {
'user_id': self.user['id']
}
)
self.assertValidRoleAssignmentListResponse(response, expected_length=2)
def test_list_system_roles_for_user_returns_none_without_assignment(self):
# list system roles for user
collection_url = '/system/users/%(user_id)s/roles' % {
'user_id': self.user['id']
}
response = self.get(collection_url)
# assert that the user doesn't have any system role assignments, which
# is denoted by an empty list
self.assertEqual(response.json_body['roles'], [])
response = self.get(
'/role_assignments?scope.system=all&user.id=%(user_id)s' % {
'user_id': self.user['id']
}
)
self.assertEqual(len(response.json_body['role_assignments']), 0)
self.assertValidRoleAssignmentListResponse(response)
def test_list_system_roles_for_user_does_not_return_project_roles(self):
system_role_id = self._create_new_role()
# assign the user a role on the system
member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % {
'user_id': self.user['id'],
'role_id': system_role_id
}
self.put(member_url)
# list project role assignments and save the role id of that
        # assignment; this assignment was created during setUp
response = self.get(
'/projects/%(project_id)s/users/%(user_id)s/roles' % {
'project_id': self.project['id'],
'user_id': self.user['id']
}
)
self.assertEqual(len(response.json_body['roles']), 1)
project_role_id = response.json_body['roles'][0]['id']
# list system role assignments
collection_url = '/system/users/%(user_id)s/roles' % {
'user_id': self.user['id']
}
response = self.get(collection_url)
# assert the project role assignment is not in the system role
# assignments
for role in response.json_body['roles']:
self.assertNotEqual(role['id'], project_role_id)
# make sure the role_assignment API filters correctly based on system
# scope
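        # (a system-scoped assignment is expected to carry
        # scope == {"system": {"all": true}}, as asserted below, while the
        # project assignment carries scope == {"project": {"id": ...}})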
response = self.get(
'/role_assignments?scope.system=all&user.id=%(user_id)s' % {
'user_id': self.user['id']
}
)
self.assertEqual(len(response.json_body['role_assignments']), 1)
system_assignment = response.json_body['role_assignments'][0]
self.assertEqual(system_assignment['role']['id'], system_role_id)
self.assertTrue(system_assignment['scope']['system']['all'])
# make sure the role_assignment API doesn't include the system role
# assignment when we filter based on project
path = (
'/role_assignments?scope.project.id=%(project_id)s&'
'user.id=%(user_id)s'
) % {'project_id': self.project['id'],
'user_id': self.user['id']}
response = self.get(path)
self.assertEqual(len(response.json_body['role_assignments']), 1)
project_assignment = response.json_body['role_assignments'][0]
self.assertEqual(project_assignment['role']['id'], project_role_id)
def test_list_system_roles_for_user_does_not_return_domain_roles(self):
system_role_id = self._create_new_role()
domain_role_id = self._create_new_role()
# assign a role to the user on a domain
domain_member_url = (
'/domains/%(domain_id)s/users/%(user_id)s/roles/%(role_id)s' % {
'domain_id': self.user['domain_id'],
'user_id': self.user['id'],
'role_id': domain_role_id
}
)
self.put(domain_member_url)
# assign the user a role on the system
member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % {
'user_id': self.user['id'],
'role_id': system_role_id
}
self.put(member_url)
# list domain role assignments
response = self.get(
'/domains/%(domain_id)s/users/%(user_id)s/roles' % {
'domain_id': self.user['domain_id'],
'user_id': self.user['id']
}
)
self.assertEqual(len(response.json_body['roles']), 1)
# list system role assignments
collection_url = '/system/users/%(user_id)s/roles' % {
'user_id': self.user['id']
}
response = self.get(collection_url)
# assert the domain role assignment is not in the system role
# assignments
for role in response.json_body['roles']:
self.assertNotEqual(role['id'], domain_role_id)
# make sure the role_assignment API filters correctly based on system
# scope
response = self.get(
'/role_assignments?scope.system=all&user.id=%(user_id)s' % {
'user_id': self.user['id']
}
)
self.assertEqual(len(response.json_body['role_assignments']), 1)
system_assignment = response.json_body['role_assignments'][0]
self.assertEqual(system_assignment['role']['id'], system_role_id)
self.assertTrue(system_assignment['scope']['system']['all'])
# make sure the role_assignment API doesn't include the system role
# assignment when we filter based on domain
path = (
'/role_assignments?scope.domain.id=%(domain_id)s&'
'user.id=%(user_id)s'
) % {'domain_id': self.user['domain_id'],
'user_id': self.user['id']}
response = self.get(path)
self.assertEqual(len(response.json_body['role_assignments']), 1)
domain_assignment = response.json_body['role_assignments'][0]
self.assertEqual(domain_assignment['role']['id'], domain_role_id)
def test_check_user_has_system_role_when_assignment_exists(self):
system_role_id = self._create_new_role()
# assign the user a role on the system
member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % {
'user_id': self.user['id'],
'role_id': system_role_id
}
self.put(member_url)
# check the user has the system role assignment
self.head(member_url)
def test_check_user_does_not_have_system_role_without_assignment(self):
system_role_id = self._create_new_role()
        # check the user doesn't have the system role assignment
member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % {
'user_id': self.user['id'],
'role_id': system_role_id
}
self.head(member_url, expected_status=http.client.NOT_FOUND)
response = self.get(
'/role_assignments?scope.system=all&user.id=%(user_id)s' % {
'user_id': self.user['id']
}
)
self.assertEqual(len(response.json_body['role_assignments']), 0)
self.assertValidRoleAssignmentListResponse(response)
def test_unassign_system_role_from_user(self):
system_role_id = self._create_new_role()
# assign the user a role on the system
member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % {
'user_id': self.user['id'],
'role_id': system_role_id
}
self.put(member_url)
# ensure the user has the role assignment
self.head(member_url)
response = self.get(
'/role_assignments?scope.system=all&user.id=%(user_id)s' % {
'user_id': self.user['id']
}
)
self.assertEqual(len(response.json_body['role_assignments']), 1)
self.assertValidRoleAssignmentListResponse(response)
# remove the system role assignment from the user
self.delete(member_url)
# ensure the user doesn't have any system role assignments
collection_url = '/system/users/%(user_id)s/roles' % {
'user_id': self.user['id']
}
response = self.get(collection_url)
self.assertEqual(len(response.json_body['roles']), 0)
response = self.get(
'/role_assignments?scope.system=all&user.id=%(user_id)s' % {
'user_id': self.user['id']
}
)
self.assertValidRoleAssignmentListResponse(response, expected_length=0)
def test_query_for_system_scope_and_domain_scope_fails(self):
# When asking for assignments and providing query parameters, we
# shouldn't be able to ask for two different types of scope. This is
# also true for project + domain scope.
path = (
'/role_assignments?scope.system=all'
'&scope.domain.id=%(domain_id)s'
) % {'domain_id': self.domain_id}
self.get(path, expected_status=http.client.BAD_REQUEST)
def test_query_for_system_scope_and_project_scope_fails(self):
# When asking for assignments and providing query parameters, we
# shouldn't be able to ask for two different types of scope. This is
# also true for project + domain scope.
path = (
'/role_assignments?scope.system=all'
'&scope.project.id=%(project_id)s'
) % {'project_id': self.project_id}
self.get(path, expected_status=http.client.BAD_REQUEST)
def test_query_for_role_id_does_not_return_system_user_roles(self):
system_role_id = self._create_new_role()
# assign the user a role on the system
member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % {
'user_id': self.user['id'],
'role_id': system_role_id
}
self.put(member_url)
# Make sure we only get one role assignment back since the system role
# assignment shouldn't be returned.
path = (
'/role_assignments?role.id=%(role_id)s&user.id=%(user_id)s'
) % {'role_id': self.role_id, 'user_id': self.user['id']}
response = self.get(path)
self.assertValidRoleAssignmentListResponse(response, expected_length=1)
# FIXME(lbragstad): These tests contain system-level API calls, which means
# they will log a warning message if they are called with a project-scoped
# token, regardless of the role assignment on the project. We need to fix
# them by using a proper system-scoped admin token to make the call instead
# of a project scoped token.
class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase,
SystemRoleAssignmentMixin):
def test_assign_system_role_to_group(self):
system_role_id = self._create_new_role()
group = self._create_group()
# assign the role to the group globally
member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % {
'group_id': group['id'],
'role_id': system_role_id
}
self.put(member_url)
# validate the role assignment
self.head(member_url)
# list global roles
collection_url = '/system/groups/%(group_id)s/roles' % {
'group_id': group['id']
}
roles = self.get(collection_url).json_body['roles']
self.assertEqual(len(roles), 1)
self.assertEqual(roles[0]['id'], system_role_id)
self.head(collection_url, expected_status=http.client.OK)
response = self.get(
'/role_assignments?scope.system=all&group.id=%(group_id)s' % {
'group_id': group['id']
}
)
self.assertValidRoleAssignmentListResponse(response, expected_length=1)
self.assertEqual(
response.json_body['role_assignments'][0]['role']['id'],
system_role_id
)
def test_assign_system_role_to_non_existant_group_fails(self):
system_role_id = self._create_new_role()
group_id = uuid.uuid4().hex
# assign the role to the group globally
member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % {
'group_id': group_id,
'role_id': system_role_id
}
self.put(member_url, expected_status=http.client.NOT_FOUND)
def test_list_role_assignments_for_group_returns_all_assignments(self):
system_role_id = self._create_new_role()
group = self._create_group()
# assign the role to the group globally and on a single project
member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % {
'group_id': group['id'],
'role_id': system_role_id
}
self.put(member_url)
member_url = (
'/projects/%(project_id)s/groups/%(group_id)s/'
'roles/%(role_id)s'
) % {
'project_id': self.project_id,
'group_id': group['id'],
'role_id': system_role_id
}
self.put(member_url)
        # make sure both assignments exist in the response; there should be two
response = self.get(
'/role_assignments?group.id=%(group_id)s' % {
'group_id': group['id']
}
)
self.assertValidRoleAssignmentListResponse(response, expected_length=2)
def test_list_system_roles_for_group_returns_none_without_assignment(self):
group = self._create_group()
# list global roles for group
collection_url = '/system/groups/%(group_id)s/roles' % {
'group_id': group['id']
}
response = self.get(collection_url)
# assert that the group doesn't have any system role assignments, which
# is denoted by an empty list
self.assertEqual(response.json_body['roles'], [])
response = self.get(
'/role_assignments?scope.system=all&group.id=%(group_id)s' % {
'group_id': group['id']
}
)
self.assertValidRoleAssignmentListResponse(response, expected_length=0)
def test_list_system_roles_for_group_does_not_return_project_roles(self):
system_role_id = self._create_new_role()
project_role_id = self._create_new_role()
group = self._create_group()
# assign the group a role on the system and a role on a project
member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % {
'group_id': group['id'], 'role_id': system_role_id
}
self.put(member_url)
member_url = (
'/projects/%(project_id)s/groups/%(group_id)s/'
'roles/%(role_id)s'
) % {
'project_id': self.project_id,
'group_id': group['id'],
'role_id': project_role_id
}
self.put(member_url)
# list system role assignments
collection_url = '/system/groups/%(group_id)s/roles' % {
'group_id': group['id']
}
response = self.get(collection_url)
# assert the project role assignment is not in the system role
# assignments
for role in response.json_body['roles']:
self.assertNotEqual(role['id'], project_role_id)
response = self.get(
'/role_assignments?scope.system=all&group.id=%(group_id)s' % {
'group_id': group['id']
}
)
self.assertValidRoleAssignmentListResponse(response, expected_length=1)
def test_list_system_roles_for_group_does_not_return_domain_roles(self):
system_role_id = self._create_new_role()
domain_role_id = self._create_new_role()
group = self._create_group()
# assign a role to the group on a domain
domain_member_url = (
'/domains/%(domain_id)s/groups/%(group_id)s/'
'roles/%(role_id)s' % {
'domain_id': group['domain_id'],
'group_id': group['id'],
'role_id': domain_role_id
}
)
self.put(domain_member_url)
# assign the group a role on the system
member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % {
'group_id': group['id'],
'role_id': system_role_id
}
self.put(member_url)
# list domain role assignments
response = self.get(
'/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
'domain_id': group['domain_id'], 'group_id': group['id']
}
)
self.assertEqual(len(response.json_body['roles']), 1)
# list system role assignments
collection_url = '/system/groups/%(group_id)s/roles' % {
'group_id': group['id']
}
response = self.get(collection_url)
# assert the domain role assignment is not in the system role
# assignments
for role in response.json_body['roles']:
self.assertNotEqual(role['id'], domain_role_id)
response = self.get(
'/role_assignments?scope.system=all&group.id=%(group_id)s' % {
'group_id': group['id']
}
)
self.assertValidRoleAssignmentListResponse(response, expected_length=1)
def test_check_group_has_system_role_when_assignment_exists(self):
system_role_id = self._create_new_role()
group = self._create_group()
# assign the group a role on the system
member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % {
'group_id': group['id'],
'role_id': system_role_id
}
self.put(member_url)
# check the group has the system role assignment
self.head(member_url)
response = self.get(
'/role_assignments?scope.system=all&group.id=%(group_id)s' % {
'group_id': group['id']
}
)
self.assertValidRoleAssignmentListResponse(response, expected_length=1)
self.assertEqual(
response.json_body['role_assignments'][0]['role']['id'],
system_role_id
)
def test_check_group_does_not_have_system_role_without_assignment(self):
system_role_id = self._create_new_role()
group = self._create_group()
        # check the group doesn't have the system role assignment
member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % {
'group_id': group['id'],
'role_id': system_role_id
}
self.head(member_url, expected_status=http.client.NOT_FOUND)
response = self.get(
'/role_assignments?scope.system=all&group.id=%(group_id)s' % {
'group_id': group['id']
}
)
self.assertValidRoleAssignmentListResponse(response, expected_length=0)
def test_unassign_system_role_from_group(self):
system_role_id = self._create_new_role()
group = self._create_group()
# assign the group a role on the system
member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % {
'group_id': group['id'],
'role_id': system_role_id
}
self.put(member_url)
# ensure the group has the role assignment
self.head(member_url)
response = self.get(
'/role_assignments?scope.system=all&group.id=%(group_id)s' % {
'group_id': group['id']
}
)
self.assertEqual(len(response.json_body['role_assignments']), 1)
self.assertValidRoleAssignmentListResponse(response)
# remove the system role assignment from the group
self.delete(member_url)
# ensure the group doesn't have any system role assignments
collection_url = '/system/groups/%(group_id)s/roles' % {
'group_id': group['id']
}
response = self.get(collection_url)
self.assertEqual(len(response.json_body['roles']), 0)
response = self.get(
'/role_assignments?scope.system=all&group.id=%(group_id)s' % {
'group_id': group['id']
}
)
self.assertValidRoleAssignmentListResponse(response, expected_length=0)
def test_query_for_role_id_does_not_return_system_group_roles(self):
system_role_id = self._create_new_role()
group = self._create_group()
# assign the group a role on the system
member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % {
'group_id': group['id'],
'role_id': system_role_id
}
self.put(member_url)
        # assign the group a role on the project
member_url = (
'/projects/%(project_id)s/groups/%(group_id)s/roles/%(role_id)s' %
{'project_id': self.project_id,
'group_id': group['id'],
'role_id': self.role_id}
)
self.put(member_url)
# Make sure we only get one role assignment back since the system role
# assignment shouldn't be returned.
path = (
'/role_assignments?role.id=%(role_id)s&group.id=%(group_id)s'
) % {'role_id': self.role_id, 'group_id': group['id']}
response = self.get(path)
self.assertValidRoleAssignmentListResponse(response, expected_length=1)
| [((20387, 20428), 'keystone.tests.unit.skip_if_cache_disabled', 'unit.skip_if_cache_disabled', (['"""assignment"""'], {}), "('assignment')\n", (20414, 20428), False, 'from keystone.tests import unit\n'), ((21651, 21692), 'keystone.tests.unit.skip_if_cache_disabled', 'unit.skip_if_cache_disabled', (['"""assignment"""'], {}), "('assignment')\n", (21678, 21692), False, 'from keystone.tests import unit\n'), ((22879, 22920), 'keystone.tests.unit.skip_if_cache_disabled', 'unit.skip_if_cache_disabled', (['"""assignment"""'], {}), "('assignment')\n", (22906, 22920), False, 'from keystone.tests import unit\n'), ((24150, 24191), 'keystone.tests.unit.skip_if_cache_disabled', 'unit.skip_if_cache_disabled', (['"""assignment"""'], {}), "('assignment')\n", (24177, 24191), False, 'from keystone.tests import unit\n'), ((1113, 1132), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (1130, 1132), False, 'from keystone.tests import unit\n'), ((2213, 2257), 'keystone.tests.unit.new_group_ref', 'unit.new_group_ref', ([], {'domain_id': 'self.domain_id'}), '(domain_id=self.domain_id)\n', (2231, 2257), False, 'from keystone.tests import unit\n'), ((2474, 2493), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (2491, 2493), False, 'from keystone.tests import unit\n'), ((3563, 3582), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (3580, 3582), False, 'from keystone.tests import unit\n'), ((4023, 4042), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (4040, 4042), False, 'from keystone.tests import unit\n'), ((6219, 6245), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (6243, 6245), False, 'import datetime\n'), ((8525, 8551), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (8549, 8551), False, 'import datetime\n'), ((10868, 10894), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (10892, 10894), False, 'import datetime\n'), ((13310, 13353), 'keystone.tests.unit.new_user_ref', 'unit.new_user_ref', ([], {'domain_id': 'self.domain_id'}), '(domain_id=self.domain_id)\n', (13327, 13353), False, 'from keystone.tests import unit\n'), ((14842, 14886), 'keystone.tests.unit.new_group_ref', 'unit.new_group_ref', ([], {'domain_id': 'self.domain_id'}), '(domain_id=self.domain_id)\n', (14860, 14886), False, 'from keystone.tests import unit\n'), ((17606, 17632), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (17630, 17632), False, 'import datetime\n'), ((20554, 20600), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': 'self.domain_id'}), '(domain_id=self.domain_id)\n', (20574, 20600), False, 'from keystone.tests import unit\n'), ((21816, 21837), 'keystone.tests.unit.new_domain_ref', 'unit.new_domain_ref', ([], {}), '()\n', (21835, 21837), False, 'from keystone.tests import unit\n'), ((23048, 23094), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': 'self.domain_id'}), '(domain_id=self.domain_id)\n', (23068, 23094), False, 'from keystone.tests import unit\n'), ((24316, 24337), 'keystone.tests.unit.new_domain_ref', 'unit.new_domain_ref', ([], {}), '()\n', (24335, 24337), False, 'from keystone.tests import unit\n'), ((26516, 26542), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (26540, 26542), False, 'import datetime\n'), ((31321, 31390), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': 
"self.domain['id']"}), "(PROVIDERS.identity_api, domain_id=self.domain['id'])\n", (31337, 31390), False, 'from keystone.tests import unit\n'), ((31440, 31509), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "self.domain['id']"}), "(PROVIDERS.identity_api, domain_id=self.domain['id'])\n", (31456, 31509), False, 'from keystone.tests import unit\n'), ((34562, 34631), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "self.domain['id']"}), "(PROVIDERS.identity_api, domain_id=self.domain['id'])\n", (34578, 34631), False, 'from keystone.tests import unit\n'), ((34681, 34750), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "self.domain['id']"}), "(PROVIDERS.identity_api, domain_id=self.domain['id'])\n", (34697, 34750), False, 'from keystone.tests import unit\n'), ((38917, 38986), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "self.domain['id']"}), "(PROVIDERS.identity_api, domain_id=self.domain['id'])\n", (38933, 38986), False, 'from keystone.tests import unit\n'), ((39036, 39105), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "self.domain['id']"}), "(PROVIDERS.identity_api, domain_id=self.domain['id'])\n", (39052, 39105), False, 'from keystone.tests import unit\n'), ((39157, 39204), 'keystone.tests.unit.new_group_ref', 'unit.new_group_ref', ([], {'domain_id': "self.domain['id']"}), "(domain_id=self.domain['id'])\n", (39175, 39204), False, 'from keystone.tests import unit\n'), ((39437, 39486), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "self.domain['id']"}), "(domain_id=self.domain['id'])\n", (39457, 39486), False, 'from keystone.tests import unit\n'), ((39580, 39599), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (39597, 39599), False, 'from keystone.tests import unit\n'), ((39690, 39709), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (39707, 39709), False, 'from keystone.tests import unit\n'), ((54280, 54301), 'keystone.tests.unit.new_domain_ref', 'unit.new_domain_ref', ([], {}), '()\n', (54299, 54301), False, 'from keystone.tests import unit\n'), ((54480, 54526), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': 'self.domain_id'}), '(domain_id=self.domain_id)\n', (54500, 54526), False, 'from keystone.tests import unit\n'), ((55834, 55853), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (55851, 55853), False, 'from keystone.tests import unit\n'), ((69343, 69409), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': 'self.domain_id'}), '(PROVIDERS.identity_api, domain_id=self.domain_id)\n', (69359, 69409), False, 'from keystone.tests import unit\n'), ((70647, 70682), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {'name': '"""inherited"""'}), "(name='inherited')\n", (70664, 70682), False, 'from keystone.tests import unit\n'), ((71936, 72002), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': 'self.domain_id'}), '(PROVIDERS.identity_api, domain_id=self.domain_id)\n', (71952, 72002), False, 'from keystone.tests import unit\n'), ((72042, 72089), 'keystone.tests.unit.new_group_ref', 'unit.new_group_ref', ([], {'domain_id': "self.domain['id']"}), "(domain_id=self.domain['id'])\n", (72060, 
72089), False, 'from keystone.tests import unit\n'), ((73438, 73473), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {'name': '"""inherited"""'}), "(name='inherited')\n", (73455, 73473), False, 'from keystone.tests import unit\n'), ((74622, 74648), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (74646, 74648), False, 'import datetime\n'), ((78879, 78900), 'keystone.tests.unit.new_domain_ref', 'unit.new_domain_ref', ([], {}), '()\n', (78898, 78900), False, 'from keystone.tests import unit\n'), ((78984, 79048), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "domain['id']"}), "(PROVIDERS.identity_api, domain_id=domain['id'])\n", (79000, 79048), False, 'from keystone.tests import unit\n'), ((79090, 79134), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "domain['id']"}), "(domain_id=domain['id'])\n", (79110, 79134), False, 'from keystone.tests import unit\n'), ((79226, 79270), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "domain['id']"}), "(domain_id=domain['id'])\n", (79246, 79270), False, 'from keystone.tests import unit\n'), ((82891, 82910), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (82908, 82910), False, 'from keystone.tests import unit\n'), ((82986, 83052), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': 'self.domain_id'}), '(PROVIDERS.identity_api, domain_id=self.domain_id)\n', (83002, 83052), False, 'from keystone.tests import unit\n'), ((83091, 83135), 'keystone.tests.unit.new_group_ref', 'unit.new_group_ref', ([], {'domain_id': 'self.domain_id'}), '(domain_id=self.domain_id)\n', (83109, 83135), False, 'from keystone.tests import unit\n'), ((83214, 83260), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': 'self.domain_id'}), '(domain_id=self.domain_id)\n', (83234, 83260), False, 'from keystone.tests import unit\n'), ((86927, 86946), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (86944, 86946), False, 'from keystone.tests import unit\n'), ((87150, 87196), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {'domain_id': "self.domain['id']"}), "(domain_id=self.domain['id'])\n", (87167, 87196), False, 'from keystone.tests import unit\n'), ((88067, 88086), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (88084, 88086), False, 'from keystone.tests import unit\n'), ((88161, 88182), 'keystone.tests.unit.new_domain_ref', 'unit.new_domain_ref', ([], {}), '()\n', (88180, 88182), False, 'from keystone.tests import unit\n'), ((88265, 88329), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "domain['id']"}), "(PROVIDERS.identity_api, domain_id=domain['id'])\n", (88281, 88329), False, 'from keystone.tests import unit\n'), ((90383, 90402), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (90400, 90402), False, 'from keystone.tests import unit\n'), ((90477, 90498), 'keystone.tests.unit.new_domain_ref', 'unit.new_domain_ref', ([], {}), '()\n', (90496, 90498), False, 'from keystone.tests import unit\n'), ((90581, 90645), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "domain['id']"}), "(PROVIDERS.identity_api, domain_id=domain['id'])\n", (90597, 90645), False, 'from keystone.tests import unit\n'), ((92927, 92948), 
'keystone.tests.unit.new_domain_ref', 'unit.new_domain_ref', ([], {}), '()\n', (92946, 92948), False, 'from keystone.tests import unit\n'), ((93032, 93096), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "domain['id']"}), "(PROVIDERS.identity_api, domain_id=domain['id'])\n", (93048, 93096), False, 'from keystone.tests import unit\n'), ((93138, 93182), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "domain['id']"}), "(domain_id=domain['id'])\n", (93158, 93182), False, 'from keystone.tests import unit\n'), ((93274, 93318), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "domain['id']"}), "(domain_id=domain['id'])\n", (93294, 93318), False, 'from keystone.tests import unit\n'), ((96515, 96536), 'keystone.tests.unit.new_domain_ref', 'unit.new_domain_ref', ([], {}), '()\n', (96534, 96536), False, 'from keystone.tests import unit\n'), ((96620, 96684), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "domain['id']"}), "(PROVIDERS.identity_api, domain_id=domain['id'])\n", (96636, 96684), False, 'from keystone.tests import unit\n'), ((96723, 96787), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "domain['id']"}), "(PROVIDERS.identity_api, domain_id=domain['id'])\n", (96739, 96787), False, 'from keystone.tests import unit\n'), ((96827, 96869), 'keystone.tests.unit.new_group_ref', 'unit.new_group_ref', ([], {'domain_id': "domain['id']"}), "(domain_id=domain['id'])\n", (96845, 96869), False, 'from keystone.tests import unit\n'), ((97146, 97190), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "domain['id']"}), "(domain_id=domain['id'])\n", (97166, 97190), False, 'from keystone.tests import unit\n'), ((97282, 97326), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "domain['id']"}), "(domain_id=domain['id'])\n", (97302, 97326), False, 'from keystone.tests import unit\n'), ((101305, 101326), 'keystone.tests.unit.new_domain_ref', 'unit.new_domain_ref', ([], {}), '()\n', (101324, 101326), False, 'from keystone.tests import unit\n'), ((101410, 101474), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "domain['id']"}), "(PROVIDERS.identity_api, domain_id=domain['id'])\n", (101426, 101474), False, 'from keystone.tests import unit\n'), ((101514, 101556), 'keystone.tests.unit.new_group_ref', 'unit.new_group_ref', ([], {'domain_id': "domain['id']"}), "(domain_id=domain['id'])\n", (101532, 101556), False, 'from keystone.tests import unit\n'), ((101637, 101681), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "domain['id']"}), "(domain_id=domain['id'])\n", (101657, 101681), False, 'from keystone.tests import unit\n'), ((101773, 101817), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "domain['id']"}), "(domain_id=domain['id'])\n", (101793, 101817), False, 'from keystone.tests import unit\n'), ((105223, 105272), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "self.domain['id']"}), "(domain_id=self.domain['id'])\n", (105243, 105272), False, 'from keystone.tests import unit\n'), ((105288, 105359), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "self.domain['id']", 'parent_id': "root['id']"}), "(domain_id=self.domain['id'], parent_id=root['id'])\n", 
(105308, 105359), False, 'from keystone.tests import unit\n'), ((105610, 105649), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {'name': '"""non-inherited"""'}), "(name='non-inherited')\n", (105627, 105649), False, 'from keystone.tests import unit\n'), ((105782, 105817), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {'name': '"""inherited"""'}), "(name='inherited')\n", (105799, 105817), False, 'from keystone.tests import unit\n'), ((108866, 108913), 'keystone.tests.unit.new_group_ref', 'unit.new_group_ref', ([], {'domain_id': "self.domain['id']"}), "(domain_id=self.domain['id'])\n", (108884, 108913), False, 'from keystone.tests import unit\n'), ((120674, 120742), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "self.domain['id']", 'parent_id': 'leaf_id'}), "(domain_id=self.domain['id'], parent_id=leaf_id)\n", (120694, 120742), False, 'from keystone.tests import unit\n'), ((120798, 120871), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "self.domain['id']", 'parent_id': "level2['id']"}), "(domain_id=self.domain['id'], parent_id=level2['id'])\n", (120818, 120871), False, 'from keystone.tests import unit\n'), ((120927, 121000), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "self.domain['id']", 'parent_id': "level3['id']"}), "(domain_id=self.domain['id'], parent_id=level3['id'])\n", (120947, 121000), False, 'from keystone.tests import unit\n'), ((125829, 125848), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (125846, 125848), False, 'from keystone.tests import unit\n'), ((131271, 131292), 'keystone.tests.unit.new_domain_ref', 'unit.new_domain_ref', ([], {}), '()\n', (131290, 131292), False, 'from keystone.tests import unit\n'), ((131375, 131439), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "domain['id']"}), "(PROVIDERS.identity_api, domain_id=domain['id'])\n", (131391, 131439), False, 'from keystone.tests import unit\n'), ((131458, 131502), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "domain['id']"}), "(domain_id=domain['id'])\n", (131478, 131502), False, 'from keystone.tests import unit\n'), ((135032, 135051), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (135049, 135051), False, 'from keystone.tests import unit\n'), ((137288, 137354), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': 'self.domain_id'}), '(PROVIDERS.identity_api, domain_id=self.domain_id)\n', (137304, 137354), False, 'from keystone.tests import unit\n'), ((137391, 137545), 'keystone.tests.unit.new_trust_ref', 'unit.new_trust_ref', ([], {'trustor_user_id': "self.user['id']", 'trustee_user_id': "trustee['id']", 'project_id': "self.project['id']", 'role_ids': "[self.role_list[0]['id']]"}), "(trustor_user_id=self.user['id'], trustee_user_id=trustee\n ['id'], project_id=self.project['id'], role_ids=[self.role_list[0]['id']])\n", (137409, 137545), False, 'from keystone.tests import unit\n'), ((138635, 138678), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {'domain_id': 'self.domain_id'}), '(domain_id=self.domain_id)\n', (138652, 138678), False, 'from keystone.tests import unit\n'), ((139053, 139119), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': 'self.domain_id'}), '(PROVIDERS.identity_api, domain_id=self.domain_id)\n', (139069, 
139119), False, 'from keystone.tests import unit\n'), ((139156, 139310), 'keystone.tests.unit.new_trust_ref', 'unit.new_trust_ref', ([], {'trustor_user_id': "self.user['id']", 'trustee_user_id': "trustee['id']", 'project_id': "self.project['id']", 'role_ids': "[self.role_list[0]['id']]"}), "(trustor_user_id=self.user['id'], trustee_user_id=trustee\n ['id'], project_id=self.project['id'], role_ids=[self.role_list[0]['id']])\n", (139174, 139310), False, 'from keystone.tests import unit\n'), ((140528, 140549), 'keystone.tests.unit.new_domain_ref', 'unit.new_domain_ref', ([], {}), '()\n', (140547, 140549), False, 'from keystone.tests import unit\n'), ((140644, 140685), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {'domain_id': "domain['id']"}), "(domain_id=domain['id'])\n", (140661, 140685), False, 'from keystone.tests import unit\n'), ((140827, 140846), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (140844, 140846), False, 'from keystone.tests import unit\n'), ((141598, 141619), 'keystone.tests.unit.new_domain_ref', 'unit.new_domain_ref', ([], {}), '()\n', (141617, 141619), False, 'from keystone.tests import unit\n'), ((141722, 141743), 'keystone.tests.unit.new_domain_ref', 'unit.new_domain_ref', ([], {}), '()\n', (141741, 141743), False, 'from keystone.tests import unit\n'), ((144530, 144600), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "self.domainA['id']"}), "(PROVIDERS.identity_api, domain_id=self.domainA['id'])\n", (144546, 144600), False, 'from keystone.tests import unit\n'), ((144653, 144703), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "self.domainA['id']"}), "(domain_id=self.domainA['id'])\n", (144673, 144703), False, 'from keystone.tests import unit\n'), ((144996, 145066), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "self.domainB['id']"}), "(PROVIDERS.identity_api, domain_id=self.domainB['id'])\n", (145012, 145066), False, 'from keystone.tests import unit\n'), ((145119, 145169), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "self.domainA['id']"}), "(domain_id=self.domainA['id'])\n", (145139, 145169), False, 'from keystone.tests import unit\n'), ((145594, 145664), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "self.domainB['id']"}), "(PROVIDERS.identity_api, domain_id=self.domainB['id'])\n", (145610, 145664), False, 'from keystone.tests import unit\n'), ((145717, 145767), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "self.domainB['id']"}), "(domain_id=self.domainB['id'])\n", (145737, 145767), False, 'from keystone.tests import unit\n'), ((146353, 146423), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "self.domainB['id']"}), "(PROVIDERS.identity_api, domain_id=self.domainB['id'])\n", (146369, 146423), False, 'from keystone.tests import unit\n'), ((146512, 146562), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "self.domainA['id']"}), "(domain_id=self.domainA['id'])\n", (146532, 146562), False, 'from keystone.tests import unit\n'), ((148156, 148248), 'keystone.tests.unit.new_domain_ref', 'unit.new_domain_ref', ([], {'id': 'resource_base.NULL_DOMAIN_ID', 'name': 'resource_base.NULL_DOMAIN_ID'}), '(id=resource_base.NULL_DOMAIN_ID, name=resource_base.\n NULL_DOMAIN_ID)\n', (148175, 148248), 
False, 'from keystone.tests import unit\n'), ((5624, 5636), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (5634, 5636), False, 'import uuid\n'), ((6259, 6286), 'freezegun.freeze_time', 'freezegun.freeze_time', (['time'], {}), '(time)\n', (6280, 6286), False, 'import freezegun\n'), ((7935, 7947), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (7945, 7947), False, 'import uuid\n'), ((8565, 8592), 'freezegun.freeze_time', 'freezegun.freeze_time', (['time'], {}), '(time)\n', (8586, 8592), False, 'import freezegun\n'), ((10255, 10267), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (10265, 10267), False, 'import uuid\n'), ((10908, 10935), 'freezegun.freeze_time', 'freezegun.freeze_time', (['time'], {}), '(time)\n', (10929, 10935), False, 'import freezegun\n'), ((12591, 12603), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (12601, 12603), False, 'import uuid\n'), ((17646, 17673), 'freezegun.freeze_time', 'freezegun.freeze_time', (['time'], {}), '(time)\n', (17667, 17673), False, 'import freezegun\n'), ((26556, 26583), 'freezegun.freeze_time', 'freezegun.freeze_time', (['time'], {}), '(time)\n', (26577, 26583), False, 'import freezegun\n'), ((26811, 26857), 'keystone.tests.unit.new_user_ref', 'unit.new_user_ref', ([], {'domain_id': "self.domain['id']"}), "(domain_id=self.domain['id'])\n", (26828, 26857), False, 'from keystone.tests import unit\n'), ((26940, 26959), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (26957, 26959), False, 'from keystone.tests import unit\n'), ((53657, 53702), 'random.randint', 'random.randint', (['(1)', 'self.MAX_HIERARCHY_BREADTH'], {}), '(1, self.MAX_HIERARCHY_BREADTH)\n', (53671, 53702), False, 'import random\n'), ((54777, 54820), 'random.randint', 'random.randint', (['(1)', 'self.MAX_HIERARCHY_DEPTH'], {}), '(1, self.MAX_HIERARCHY_DEPTH)\n', (54791, 54820), False, 'import random\n'), ((54921, 54964), 'keystone.tests.unit.new_user_ref', 'unit.new_user_ref', ([], {'domain_id': 'self.domain_id'}), '(domain_id=self.domain_id)\n', (54938, 54964), False, 'from keystone.tests import unit\n'), ((55172, 55216), 'keystone.tests.unit.new_group_ref', 'unit.new_group_ref', ([], {'domain_id': 'self.domain_id'}), '(domain_id=self.domain_id)\n', (55190, 55216), False, 'from keystone.tests import unit\n'), ((74662, 74689), 'freezegun.freeze_time', 'freezegun.freeze_time', (['time'], {}), '(time)\n', (74683, 74689), False, 'import freezegun\n'), ((74806, 74825), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (74823, 74825), False, 'from keystone.tests import unit\n'), ((76775, 76794), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (76792, 76794), False, 'from keystone.tests import unit\n'), ((78745, 78764), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (78762, 78764), False, 'from keystone.tests import unit\n'), ((92793, 92812), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (92810, 92812), False, 'from keystone.tests import unit\n'), ((96381, 96400), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (96398, 96400), False, 'from keystone.tests import unit\n'), ((101171, 101190), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (101188, 101190), False, 'from keystone.tests import unit\n'), ((118472, 118493), 'testtools.matchers.HasLength', 'matchers.HasLength', (['(1)'], {}), '(1)\n', (118490, 118493), False, 'from testtools import matchers\n'), ((118982, 119003), 'testtools.matchers.HasLength', 
'matchers.HasLength', (['(2)'], {}), '(2)\n', (119000, 119003), False, 'from testtools import matchers\n'), ((119603, 119624), 'testtools.matchers.HasLength', 'matchers.HasLength', (['(1)'], {}), '(1)\n', (119621, 119624), False, 'from testtools import matchers\n'), ((122557, 122578), 'testtools.matchers.HasLength', 'matchers.HasLength', (['(3)'], {}), '(3)\n', (122575, 122578), False, 'from testtools import matchers\n'), ((127402, 127431), 'testtools.matchers.EndsWith', 'matchers.EndsWith', (['prior_link'], {}), '(prior_link)\n', (127419, 127431), False, 'from testtools import matchers\n'), ((128117, 128149), 'testtools.matchers.EndsWith', 'matchers.EndsWith', (["('/v3%s' % url)"], {}), "('/v3%s' % url)\n", (128134, 128149), False, 'from testtools import matchers\n'), ((128441, 128494), 'testtools.matchers.EndsWith', 'matchers.EndsWith', (["('/v3/roles/%s' % expected_prior_id)"], {}), "('/v3/roles/%s' % expected_prior_id)\n", (128458, 128494), False, 'from testtools import matchers\n'), ((128717, 128772), 'testtools.matchers.EndsWith', 'matchers.EndsWith', (["('/v3/roles/%s' % expected_implied_id)"], {}), "('/v3/roles/%s' % expected_implied_id)\n", (128734, 128772), False, 'from testtools import matchers\n'), ((131084, 131103), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (131101, 131103), False, 'from keystone.tests import unit\n'), ((137881, 137902), 'testtools.matchers.HasLength', 'matchers.HasLength', (['(1)'], {}), '(1)\n', (137899, 137902), False, 'from testtools import matchers\n'), ((139646, 139667), 'testtools.matchers.HasLength', 'matchers.HasLength', (['(1)'], {}), '(1)\n', (139664, 139667), False, 'from testtools import matchers\n'), ((141337, 141375), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {'domain_id': 'domain_id'}), '(domain_id=domain_id)\n', (141354, 141375), False, 'from keystone.tests import unit\n'), ((143613, 143625), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (143623, 143625), False, 'import uuid\n'), ((148628, 148649), 'keystone.tests.unit.new_domain_ref', 'unit.new_domain_ref', ([], {}), '()\n', (148647, 148649), False, 'from keystone.tests import unit\n'), ((148741, 148805), 'keystone.tests.unit.create_user', 'unit.create_user', (['PROVIDERS.identity_api'], {'domain_id': "domain['id']"}), "(PROVIDERS.identity_api, domain_id=domain['id'])\n", (148757, 148805), False, 'from keystone.tests import unit\n'), ((148856, 148875), 'keystone.tests.unit.new_role_ref', 'unit.new_role_ref', ([], {}), '()\n', (148873, 148875), False, 'from keystone.tests import unit\n'), ((149097, 149141), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': "domain['id']"}), "(domain_id=domain['id'])\n", (149117, 149141), False, 'from keystone.tests import unit\n'), ((166396, 166408), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (166406, 166408), False, 'import uuid\n'), ((1512, 1524), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1522, 1524), False, 'import uuid\n'), ((1785, 1797), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1795, 1797), False, 'import uuid\n'), ((7375, 7404), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (7393, 7404), False, 'import datetime\n'), ((9686, 9715), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (9704, 9715), False, 'import datetime\n'), ((12025, 12054), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (12043, 12054), False, 'import datetime\n'), ((18721, 
18750), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (18739, 18750), False, 'import datetime\n'), ((19249, 19278), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (19267, 19278), False, 'import datetime\n'), ((30179, 30208), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (30197, 30208), False, 'import datetime\n'), ((53805, 53872), 'keystone.tests.unit.new_project_ref', 'unit.new_project_ref', ([], {'domain_id': 'self.domain_id', 'parent_id': 'parent_id'}), '(domain_id=self.domain_id, parent_id=parent_id)\n', (53825, 53872), False, 'from keystone.tests import unit\n'), ((54065, 54095), 'random.randint', 'random.randint', (['(0)', '(breadth - 1)'], {}), '(0, breadth - 1)\n', (54079, 54095), False, 'import random\n'), ((75767, 75796), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (75785, 75796), False, 'import datetime\n'), ((151873, 151885), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (151883, 151885), False, 'import uuid\n')] |
agustinhenze/mibs.snmplabs.com | pysnmp-with-texts/XXX-MIB.py | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | #
# PySNMP MIB module XXX-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/XXX-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:44:42 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
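# --- Editor-added usage sketch (not part of the pysmi output) ----------------
# A minimal, hypothetical example of how a compiled module such as this one is
# typically loaded into pysnmp and one of its symbols resolved to an OID. The
# helper name and the mib_dir default are placeholders.
def _example_load_this_mib(mib_dir='.'):
    from pysnmp.smi import builder, view, rfc1902
    mb = builder.MibBuilder()
    mb.loadTexts = True                              # keep DESCRIPTION/UNITS clauses
    mb.addMibSources(builder.DirMibSource(mib_dir))  # directory holding XXX-MIB.py
    mb.loadModules('XXX-MIB')
    mib_view = view.MibViewController(mb)
    # shelfNum (defined below) resolves to 1.3.6.1.4.1.6688.1.1.1.1
    return rfc1902.ObjectIdentity('XXX-MIB', 'shelfNum').resolveWithMib(mib_view).getOid()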
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Gauge32, ModuleIdentity, iso, Integer32, enterprises, ObjectIdentity, Unsigned32, Counter64, IpAddress, Bits, Counter32, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "ModuleIdentity", "iso", "Integer32", "enterprises", "ObjectIdentity", "Unsigned32", "Counter64", "IpAddress", "Bits", "Counter32", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "NotificationType")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
company = ModuleIdentity((1, 3, 6, 1, 4, 1, 6688))
company.setRevisions(('2009-03-05 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: company.setRevisionsDescriptions(('1G MC supported',))
if mibBuilder.loadTexts: company.setLastUpdated('200903050000Z')
if mibBuilder.loadTexts: company.setOrganization('FiberRoad')
if mibBuilder.loadTexts: company.setContactInfo('www.fiberroad.com.cn')
if mibBuilder.loadTexts: company.setDescription('Media Converter NMS SNMP mib file')
ipProduct = ObjectIdentity((1, 3, 6, 1, 4, 1, 6688, 1))
if mibBuilder.loadTexts: ipProduct.setStatus('current')
if mibBuilder.loadTexts: ipProduct.setDescription('IP product line')
height2HU = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1))
systemMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1))
alarmMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2))
shelfNum = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: shelfNum.setStatus('current')
if mibBuilder.loadTexts: shelfNum.setDescription('The number of shelves in the current system')
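# Editor-added sketch (hypothetical helper, not generated by pysmi): reading the
# shelfNum scalar defined above with pysnmp's high-level getCmd. Host and
# community are placeholders; the compiled XXX-MIB must be on the MIB path.
def _example_get_shelf_num(host='192.0.2.1', community='public'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, getCmd)
    errInd, errStat, errIdx, varBinds = next(getCmd(
        SnmpEngine(), CommunityData(community), UdpTransportTarget((host, 161)),
        ContextData(), ObjectType(ObjectIdentity('XXX-MIB', 'shelfNum', 0))))
    if errInd or errStat:
        raise RuntimeError(errInd or errStat.prettyPrint())
    return varBinds[0][1]                            # number of shelves (1..4)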
shelfTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2), )
if mibBuilder.loadTexts: shelfTable.setStatus('current')
if mibBuilder.loadTexts: shelfTable.setDescription('Shelf table')
shelfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1), ).setIndexNames((0, "XXX-MIB", "shelfName"))
if mibBuilder.loadTexts: shelfEntry.setStatus('current')
if mibBuilder.loadTexts: shelfEntry.setDescription('Shelf entry definition')
shelfName = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("master", 1), ("slave1", 2), ("slave2", 3), ("slave3", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: shelfName.setStatus('current')
if mibBuilder.loadTexts: shelfName.setDescription('Shelf name')
psuA = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("nc", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: psuA.setStatus('current')
if mibBuilder.loadTexts: psuA.setDescription('The status of PSU A of the current shelf')
psuB = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("nc", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: psuB.setStatus('current')
if mibBuilder.loadTexts: psuB.setDescription('The status of PSU B of the current shelf')
volA = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("normal", 1), ("abnormal", 2), ("nc", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: volA.setStatus('current')
if mibBuilder.loadTexts: volA.setDescription('The voltage status of psuA of current shelf')
volB = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("normal", 1), ("abnormal", 2), ("nc", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: volB.setStatus('current')
if mibBuilder.loadTexts: volB.setDescription('The voltage status of psuB of current shelf')
fan = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("nc", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fan.setStatus('current')
if mibBuilder.loadTexts: fan.setDescription('The status of the fan of the current shelf')
temperature = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 7), Integer32()).setUnits(' oC').setMaxAccess("readonly")
if mibBuilder.loadTexts: temperature.setStatus('current')
if mibBuilder.loadTexts: temperature.setDescription('The temperature status of current shelf')
coCardNum = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: coCardNum.setStatus('current')
if mibBuilder.loadTexts: coCardNum.setDescription('The number of center cards inserted in the current shelf')
rmtCardNum = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rmtCardNum.setStatus('current')
if mibBuilder.loadTexts: rmtCardNum.setDescription('The number of remote cards inserted in the current shelf')
slotObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3))
slotTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1), )
if mibBuilder.loadTexts: slotTable.setStatus('current')
if mibBuilder.loadTexts: slotTable.setDescription('Sparse table containing one entry for each slot of every existing chassis in the system, indexed by shelfIdx and slotIdx.')
slotEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1), ).setIndexNames((0, "XXX-MIB", "shelfIdx"), (0, "XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: slotEntry.setStatus('current')
if mibBuilder.loadTexts: slotEntry.setDescription("In this table the user can find the type of the converter module inserted in each of the system's slots; the detailed information about the specified type can then be found in the cardObjects table")
shelfIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("master", 1), ("slave1", 2), ("slave2", 3), ("slave3", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: shelfIdx.setStatus('current')
if mibBuilder.loadTexts: shelfIdx.setDescription('Chassis index - 1 = master management module, 2-4 = slave management module')
slotIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17))).clone(namedValues=NamedValues(("slot01", 1), ("slot02", 2), ("slot03", 3), ("slot04", 4), ("slot05", 5), ("slot06", 6), ("slot07", 7), ("slot08", 8), ("slot09", 9), ("slot10", 10), ("slot11", 11), ("slot12", 12), ("slot13", 13), ("slot14", 14), ("slot15", 15), ("slot16", 16), ("slot17", 17)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slotIdx.setStatus('current')
if mibBuilder.loadTexts: slotIdx.setDescription("Chassis slot, which is an index into this table")
coCardType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 100, 101, 102))).clone(namedValues=NamedValues(("no-card", 0), ("ip113s", 1), ("ip113f", 2), ("mc-1g-e2o", 3), ("mc-1g-o2o", 4), ("mc-4-25g-oeo", 5), ("mc-ip175d", 6), ("mc-10g-oeo", 7), ("mc-10g-oee", 8), ("mc-FAN", 9), ("mc-10g-oeo-1r", 10), ("mc-2-5g", 11), ("mc-40g-oeo", 12), ("mc-2-5g-t", 13), ("mc-2-5g-f", 14), ("mc-2-5g-mux-t", 15), ("mc-2-5g-mux-f", 16), ("mc-1g-e2o-backup", 17), ("mc-e1-1sfp", 18), ("mc-e1-2sfp", 19), ("mc-100m-sfp", 20), ("mc-1g-o2o-backup", 21), ("mc-cwdm-4", 22), ("mc-cwdm-8", 23), ("mc-10g-oeo-2r", 24), ("mc-qca8334", 25), ("mc-e1t1", 26), ("fr600f-mm", 100), ("fr600f-ms", 101), ("not-support", 102)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: coCardType.setStatus('current')
if mibBuilder.loadTexts: coCardType.setDescription("Type of the local card inserted in the chassis")
coCardDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: coCardDesc.setStatus('current')
if mibBuilder.loadTexts: coCardDesc.setDescription("local card's description")
rmtCardType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 100, 101, 102))).clone(namedValues=NamedValues(("no-card", 0), ("ip113sr", 1), ("ip113f", 2), ("mc-1g-e2o", 3), ("mc-1g-o2o", 4), ("mc-4-25g-oeor", 5), ("mc-ip175dr", 6), ("mc-10g-oeor", 7), ("mc-10g-oeer", 8), ("mc-FANr", 9), ("mc-10g-oeo-1rr", 10), ("mc-2-5gr", 11), ("mc-40g-oeor", 12), ("mc-2-5g-tr", 13), ("mc-2-5g-fr", 14), ("mc-2-5g-mux-tr", 15), ("mc-2-5g-mux-fr", 16), ("mc-1g-e2o-backupr", 17), ("mc-e1-1sfpr", 18), ("mc-e1-2sfpr", 19), ("mc-100m-sfpr", 20), ("mc-1g-o2o-backupr", 21), ("mc-cwdmr-4", 22), ("mc-cwdmr-8", 23), ("mc-10g-oeo-2rr", 24), ("mc-qca8334r", 25), ("mc-e1t1r", 26), ("fr600f-mm", 100), ("fr600f-ms", 101), ("not-support", 102)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rmtCardType.setStatus('current')
if mibBuilder.loadTexts: rmtCardType.setDescription("Type of the remote card connected with the local converter")
rmtCardDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 6), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rmtCardDesc.setStatus('current')
if mibBuilder.loadTexts: rmtCardDesc.setDescription("remote card's description")
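# Editor-added sketch (hypothetical helper, not part of the MIB itself): walking
# the coCardType column of the slotTable above with nextCmd to list the card
# type found in every slot. Host and community are placeholders.
def _example_walk_slot_types(host='192.0.2.1', community='public'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, nextCmd)
    for errInd, errStat, errIdx, varBinds in nextCmd(
            SnmpEngine(), CommunityData(community), UdpTransportTarget((host, 161)),
            ContextData(), ObjectType(ObjectIdentity('XXX-MIB', 'coCardType')),
            lexicographicMode=False):                # stop at the end of the column
        if errInd or errStat:
            break
        for varBind in varBinds:
            print(varBind.prettyPrint())             # e.g. XXX-MIB::coCardType.1.3 = ip113s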
cardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4))
nmuObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1))
nmuConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1))
nmuType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(100, 101, 102))).clone(namedValues=NamedValues(("fr600f-mm", 100), ("fr600f-ms", 101), ("not-support", 102)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nmuType.setStatus('current')
if mibBuilder.loadTexts: nmuType.setDescription('The type of NMU (network management unit)')
ipaddr = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipaddr.setStatus('current')
if mibBuilder.loadTexts: ipaddr.setDescription('The ethernet IP address of NMU (network management unit)')
subnet = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: subnet.setStatus('current')
if mibBuilder.loadTexts: subnet.setDescription('The ethernet subnet mask of NMU (network management unit)')
gateway = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gateway.setStatus('current')
if mibBuilder.loadTexts: gateway.setDescription('The ethernet gateway address of NMU (network management unit)')
sysContact = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sysContact.setStatus('current')
if mibBuilder.loadTexts: sysContact.setDescription('Mirror of the system.sysContact.0')
sysName = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 6), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sysName.setStatus('current')
if mibBuilder.loadTexts: sysName.setDescription('Mirror of the system.sysName.0')
sysLocation = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sysLocation.setStatus('current')
if mibBuilder.loadTexts: sysLocation.setDescription('Mirror of the system.sysLocation.0')
trapHost1 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 8), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapHost1.setStatus('current')
if mibBuilder.loadTexts: trapHost1.setDescription("The IP address of the first host used to receive trap messages; setting it to 0 simply deletes this entry. This applies to trap hosts 2~4 below as well.")
trapHost2 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 9), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapHost2.setStatus('current')
if mibBuilder.loadTexts: trapHost2.setDescription("The second host's IP address used to receive trap messages")
trapHost3 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 10), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapHost3.setStatus('current')
if mibBuilder.loadTexts: trapHost3.setDescription("The third host's IP address used to receive trap messages")
trapHost4 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 11), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapHost4.setStatus('current')
if mibBuilder.loadTexts: trapHost4.setDescription("The fourth host's IP address used to receive trap messages")
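# Editor-added sketch (hypothetical helper): configuring the first trap receiver
# through the writable trapHost1 scalar above. Addresses and community are
# placeholders; per the description, writing address 0 clears the entry again.
def _example_set_trap_host(host='192.0.2.1', community='private', receiver='192.0.2.10'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, setCmd)
    from pysnmp.proto.rfc1902 import IpAddress
    errInd, errStat, errIdx, varBinds = next(setCmd(
        SnmpEngine(), CommunityData(community), UdpTransportTarget((host, 161)),
        ContextData(),
        ObjectType(ObjectIdentity('XXX-MIB', 'trapHost1', 0), IpAddress(receiver))))
    return errInd or errStat.prettyPrint()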
mcCmObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2))
mcCmTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1), )
if mibBuilder.loadTexts: mcCmTable.setStatus('current')
if mibBuilder.loadTexts: mcCmTable.setDescription('MC Configuration table')
mcCmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcCmEntry.setStatus('current')
if mibBuilder.loadTexts: mcCmEntry.setDescription('MC Configuration entry definition')
mcShelfIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("master", 1), ("slave1", 2), ("slave2", 3), ("slave3", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcShelfIdx.setStatus('current')
if mibBuilder.loadTexts: mcShelfIdx.setDescription('Shelf index')
mcCardIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16))).clone(namedValues=NamedValues(("card01", 1), ("card02", 2), ("card03", 3), ("card04", 4), ("card05", 5), ("card06", 6), ("card07", 7), ("card08", 8), ("card09", 9), ("card10", 10), ("card11", 11), ("card12", 12), ("card13", 13), ("card14", 14), ("card15", 15), ("card16", 16)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcCardIdx.setStatus('current')
if mibBuilder.loadTexts: mcCardIdx.setDescription('Card index')
mcType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26))).clone(namedValues=NamedValues(("no-card", 0), ("ip113s", 1), ("ip113f", 2), ("mc-1g-e2o", 3), ("mc-1g-o2o", 4), ("mc-4-25g-oeo", 5), ("mc-ip175d", 6), ("mc-10g-oeo", 7), ("mc-10g-oee", 8), ("mc-FAN", 9), ("mc-10g-oeo-1r", 10), ("mc-2-5g", 11), ("mc-40g-oeo", 12), ("mc-2-5g-t", 13), ("mc-2-5g-f", 14), ("mc-2-5g-mux-t", 15), ("mc-2-5g-mux-f", 16), ("mc-1g-e2o-backup", 17), ("mc-e1-1sfp", 18), ("mc-e1-2sfp", 19), ("mc-100m-sfp", 20), ("mc-1g-o2o-backup", 21), ("mc-cwdm-4", 22), ("mc-cwdm-8", 23), ("mc-10g-oeo-2r", 24), ("mc-qca8334", 25), ("mc-e1t1", 26)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcType.setStatus('current')
if mibBuilder.loadTexts: mcType.setDescription("Center card's type")
mcTransceiverMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("bidi", 1), ("duplex-fiber", 2), ("sfp", 3), ("not-support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcTransceiverMode.setStatus('current')
if mibBuilder.loadTexts: mcTransceiverMode.setDescription("Center card's optical transceiver mode. 100M card support bidi/duplex-fiber; 1G card support bidi/duplex-fiber/sfp. Once sfp is given, the following mcTransceiverDist should be ignored.")
mcTransceiverDist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 120))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcTransceiverDist.setStatus('current')
if mibBuilder.loadTexts: mcTransceiverDist.setDescription("Center card's optical transceiver distance, 1 means 550m for duplex-fiber mode in case of 1G card, otherwise it represents the real distance (unit of km).")
mcPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("locked", 1), ("unlocked", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcPortState.setStatus('current')
if mibBuilder.loadTexts: mcPortState.setDescription("Center card's port status, locked or unlocked")
mcTransmitMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("cut-through", 1), ("store-forward", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcTransmitMode.setStatus('current')
if mibBuilder.loadTexts: mcTransmitMode.setDescription("Center card's transmit mode")
mcCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcCurWorkMode.setDescription("Center card's current work mode")
mcCfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcCfgWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcCfgWorkMode.setDescription("Center card's configurable work mode")
mcLFPCfg = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcLFPCfg.setStatus('current')
if mibBuilder.loadTexts: mcLFPCfg.setDescription('Remote fault detect function, valid only on center MC card')
mcUpStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 11), Gauge32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcUpStream.setStatus('current')
if mibBuilder.loadTexts: mcUpStream.setDescription("Center card's up stream of MC")
mcDownStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 12), Gauge32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcDownStream.setStatus('current')
if mibBuilder.loadTexts: mcDownStream.setDescription("Center card's down stream of MC")
mcTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcTxlink.setStatus('current')
if mibBuilder.loadTexts: mcTxlink.setDescription("Center card's electrical port's link status")
mcFxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcFxlink.setStatus('current')
if mibBuilder.loadTexts: mcFxlink.setDescription("Center card's optical port's link status")
mcHWLFP = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcHWLFP.setStatus('current')
if mibBuilder.loadTexts: mcHWLFP.setDescription("Center card's HW LFP, not applicable for 1G card")
mcHWTransmitMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("cut-through", 1), ("store-forward", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcHWTransmitMode.setStatus('current')
if mibBuilder.loadTexts: mcHWTransmitMode.setDescription("Center card's HW transmit mode, not applicable for 1G card")
mcHWWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcHWWorkMode.setStatus('current')
if mibBuilder.loadTexts: mcHWWorkMode.setDescription("Center card's HW work mode, not applicable for 1G card")
mcHWRmtCtrlMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcHWRmtCtrlMode.setStatus('current')
if mibBuilder.loadTexts: mcHWRmtCtrlMode.setDescription("Center card's HW remote control mode (only valid for local card). the disable mode indicates that all SET operations must be prohibited")
mcNtwSfpExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("inserted", 1), ("removed", 2), ("na", 3), ("not-support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcNtwSfpExist.setStatus('current')
if mibBuilder.loadTexts: mcNtwSfpExist.setDescription("Center 1G card's Network SFP indication")
mcAccSfpExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("inserted", 1), ("removed", 2), ("na", 3), ("not-support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcAccSfpExist.setStatus('current')
if mibBuilder.loadTexts: mcAccSfpExist.setDescription("Center 1G card's Access SFP indication, applicable only for O2O type")
mcUtility = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("idle", 1), ("reset", 2), ("default", 3), ("set2hw", 4), ("not-support", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcUtility.setStatus('current')
if mibBuilder.loadTexts: mcUtility.setDescription('reset, default to factory, set to HW word, etc...')
mcRmtDetect = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("no-remote", 0), ("yes", 1), ("not-support", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtDetect.setStatus('current')
if mibBuilder.loadTexts: mcRmtDetect.setDescription('An identifier indicating whether a remote MC is currently connected to the system or not')
mcRmtType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26))).clone(namedValues=NamedValues(("no-card", 0), ("ip113sr", 1), ("ip113f", 2), ("mc-1g-e2or", 3), ("mc-1g-o2or", 4), ("mc-4-25g-oeor", 5), ("mc-ip175dr", 6), ("mc-10g-oeor", 7), ("mc-10g-oeer", 8), ("mc-FANr", 9), ("mc-10g-oeo-1rr", 10), ("mc-2-5gr", 11), ("mc-40g-oeor", 12), ("mc-2-5g-tr", 13), ("mc-2-5g-fr", 14), ("mc-2-5g-mux-tr", 15), ("mc-2-5g-mux-fr", 16), ("mc-1g-e2o-backupr", 17), ("mc-e1-1sfpr", 18), ("mc-e1-2sfpr", 19), ("mc-100m-sfpr", 20), ("mc-1g-o2o-backupr", 21), ("mc-cwdmr-4", 22), ("mc-cwdmr-8", 23), ("mc-10g-oeo-2rr", 24), ("mc-qca8334r", 25), ("mc-e1t1r", 26)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtType.setStatus('current')
if mibBuilder.loadTexts: mcRmtType.setDescription("Remote card's type")
mcRmtTransmitMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("cut-through", 1), ("store-forward", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtTransmitMode.setStatus('current')
if mibBuilder.loadTexts: mcRmtTransmitMode.setDescription("Remote card's transmit mode")
mcRmtCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmtCurWorkMode.setDescription("Remote card's current work mode")
mcRmtCfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtCfgWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmtCfgWorkMode.setDescription("Remote card's configurable work mode")
mcRmtLFP = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 27), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtLFP.setStatus('current')
if mibBuilder.loadTexts: mcRmtLFP.setDescription("Remote card's LFP lamp state")
mcRmtTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 28), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtTxlink.setStatus('current')
if mibBuilder.loadTexts: mcRmtTxlink.setDescription("Remote card's electrical port status")
mcRmtHWLFP = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 29), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtHWLFP.setStatus('current')
if mibBuilder.loadTexts: mcRmtHWLFP.setDescription("Remote card's HW LFP, not applicable for 1G card")
mcRmtHWTransmitMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 30), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("cut-through", 1), ("store-forward", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtHWTransmitMode.setStatus('current')
if mibBuilder.loadTexts: mcRmtHWTransmitMode.setDescription("Remote card's HW transmit mode, not applicable for 1G card")
mcRmtHWWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 31), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtHWWorkMode.setStatus('current')
if mibBuilder.loadTexts: mcRmtHWWorkMode.setDescription("Remote card's HW work mode, not applicable for 1G card")
mcRmtLoopback = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 32), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtLoopback.setStatus('current')
if mibBuilder.loadTexts: mcRmtLoopback.setDescription("Remote card's HW Loopback state")
mcRmtPwrDown = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 33), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("powerdown", 1), ("normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtPwrDown.setStatus('current')
if mibBuilder.loadTexts: mcRmtPwrDown.setDescription("Remote card's power down state")
mcRmtAccSfpExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 34), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtAccSfpExist.setStatus('current')
if mibBuilder.loadTexts: mcRmtAccSfpExist.setDescription("Remote 1G card's Access SFP indication, applicable only for O2O type")
mcRmtUtility = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 35), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("no-card", 0), ("idle", 1), ("reset", 2), ("default", 3), ("set2hw", 4), ("not-support", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtUtility.setStatus('current')
if mibBuilder.loadTexts: mcRmtUtility.setDescription("Remote card's reset, default to factory, set to HW word, etc...")
mcCm1gSpecificObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2))
mcCm1gIpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 1))
mcCm1gIpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 1, 1), )
if mibBuilder.loadTexts: mcCm1gIpTable.setStatus('current')
if mibBuilder.loadTexts: mcCm1gIpTable.setDescription('MC 1G Ip address table')
mcCm1gIpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcLoOrRmtFg"))
if mibBuilder.loadTexts: mcCm1gIpEntry.setStatus('current')
if mibBuilder.loadTexts: mcCm1gIpEntry.setDescription('MC 1G Ip address entry definition')
mcLoOrRmtFg = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("local", 1), ("remote", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcLoOrRmtFg.setStatus('current')
if mibBuilder.loadTexts: mcLoOrRmtFg.setDescription('location index, local or remote')
mcIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 1, 1, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIpAddr.setStatus('current')
if mibBuilder.loadTexts: mcIpAddr.setDescription('The IP address of the node')
mcCm1gSfpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2))
mcCm1gSfpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1), )
if mibBuilder.loadTexts: mcCm1gSfpTable.setStatus('current')
if mibBuilder.loadTexts: mcCm1gSfpTable.setDescription('MC 1G SFP table')
mcCm1gSfpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcLoOrRmtFg"))
if mibBuilder.loadTexts: mcCm1gSfpEntry.setStatus('current')
if mibBuilder.loadTexts: mcCm1gSfpEntry.setDescription('MC 1G SFP entry definition')
getSfpCmd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: getSfpCmd.setStatus('current')
if mibBuilder.loadTexts: getSfpCmd.setDescription('This command fetches the updated SFP information. Send this command prior to reading the following params, otherwise stale (historical) SFP information will be returned.')
sfpCompliance = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpCompliance.setStatus('current')
if mibBuilder.loadTexts: sfpCompliance.setDescription('SFP compliance (one byte); if 0 then the attributes of sfpTemperature/sfpTranPower/sfpRecvPower should be ignored')
sfpConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpConnector.setStatus('current')
if mibBuilder.loadTexts: sfpConnector.setDescription('SFP connector type (one byte) 0x01: SC 0x07: LC 0x22: RJ45 others: unsupported')
sfpTransCode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpTransCode.setStatus('current')
if mibBuilder.loadTexts: sfpTransCode.setDescription('SFP transceiver code (one byte) bit0: SingleMode bit1: Copper Module bit2: MultiMode bit3: MultiMode others: unsupported')
sfpSmLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpSmLength.setStatus('current')
if mibBuilder.loadTexts: sfpSmLength.setDescription('SFP link length for SingleMode, units of km. (one byte) applicable only when sfpTransCode is SingleMode')
sfpMmLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpMmLength.setStatus('current')
if mibBuilder.loadTexts: sfpMmLength.setDescription('SFP link length for MultiMode, units of 10m (one byte) applicable only when sfpTransCode is MultiMode')
sfpCopperLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpCopperLength.setStatus('current')
if mibBuilder.loadTexts: sfpCopperLength.setDescription('SFP link length for Copper, units of m (one byte) applicable only when sfpConnector is RJ45')
sfpBrSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpBrSpeed.setStatus('current')
if mibBuilder.loadTexts: sfpBrSpeed.setDescription('SFP nominal signalling rate, units of 100Mbit/s (one byte)')
sfpWavelength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpWavelength.setStatus('current')
if mibBuilder.loadTexts: sfpWavelength.setDescription('SFP laser wavelength (one word)')
sfpTemperature = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpTemperature.setStatus('current')
if mibBuilder.loadTexts: sfpTemperature.setDescription('SFP temperature (one type, signed)')
sfpTranPower = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpTranPower.setStatus('current')
if mibBuilder.loadTexts: sfpTranPower.setDescription('SFP tx power (one type, signed)')
sfpRecvPower = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpRecvPower.setStatus('current')
if mibBuilder.loadTexts: sfpRecvPower.setDescription('SFP rx power (one type, signed)')
sfpVoltage = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpVoltage.setStatus('current')
if mibBuilder.loadTexts: sfpVoltage.setDescription('SFP voltage, units of 0.1mV (one word)')
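# Editor-added sketch (hypothetical helper): the read-out sequence implied by
# getSfpCmd above -- first write the local/remote selector for a given
# (shelf, card) pair, then read back the freshly sampled diagnostics.
# Host and community are placeholders; side=1 selects the local card.
def _example_read_sfp_ddm(host='192.0.2.1', community='private', shelf=1, card=1, side=1):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, getCmd, setCmd)
    from pysnmp.proto.rfc1902 import Integer32
    engine, auth, target, ctx = (SnmpEngine(), CommunityData(community),
                                 UdpTransportTarget((host, 161)), ContextData())
    idx = (shelf, card, side)                        # mcShelfIdx.mcCardIdx.mcLoOrRmtFg
    next(setCmd(engine, auth, target, ctx,
                ObjectType(ObjectIdentity('XXX-MIB', 'getSfpCmd', *idx), Integer32(side))))
    errInd, errStat, errIdx, varBinds = next(getCmd(
        engine, auth, target, ctx,
        ObjectType(ObjectIdentity('XXX-MIB', 'sfpTemperature', *idx)),
        ObjectType(ObjectIdentity('XXX-MIB', 'sfpTranPower', *idx)),
        ObjectType(ObjectIdentity('XXX-MIB', 'sfpRecvPower', *idx))))
    return varBinds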
mcCm1gAccSfpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3))
mcCm1gAccSfpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1), )
if mibBuilder.loadTexts: mcCm1gAccSfpTable.setStatus('current')
if mibBuilder.loadTexts: mcCm1gAccSfpTable.setDescription('MC 1G Access SFP table')
mcCm1gAccSfpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcLoOrRmtFg"))
if mibBuilder.loadTexts: mcCm1gAccSfpEntry.setStatus('current')
if mibBuilder.loadTexts: mcCm1gAccSfpEntry.setDescription('MC 1G Access SFP entry definition')
getAccSfpCmd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: getAccSfpCmd.setStatus('current')
if mibBuilder.loadTexts: getAccSfpCmd.setDescription('This command fetches the updated SFP information. Send this command prior to reading the following params, otherwise stale (historical) SFP information will be returned.')
accsfpCompliance = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpCompliance.setStatus('current')
if mibBuilder.loadTexts: accsfpCompliance.setDescription('SFP compliance (one byte); if 0 then the attributes of sfpTemperature/sfpTranPower/sfpRecvPower should be ignored')
accsfpConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpConnector.setStatus('current')
if mibBuilder.loadTexts: accsfpConnector.setDescription('SFP connector type (one byte) 0x01: SC 0x07: LC 0x22: RJ45 others: unsupported')
accsfpTransCode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpTransCode.setStatus('current')
if mibBuilder.loadTexts: accsfpTransCode.setDescription('SFP transceiver code (one byte) bit0: SingleMode bit2: MultiMode bit3: MultiMode others: unsupported')
accsfpSmLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpSmLength.setStatus('current')
if mibBuilder.loadTexts: accsfpSmLength.setDescription('SFP link length for SingleMode, units of km. (one byte) applicable only when sfpTransCode is SingleMode')
accsfpMmLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpMmLength.setStatus('current')
if mibBuilder.loadTexts: accsfpMmLength.setDescription('SFP link length for MultiMode, units of 10m (one byte) applicable only when sfpTransCode is MultiMode')
accsfpCopperLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpCopperLength.setStatus('current')
if mibBuilder.loadTexts: accsfpCopperLength.setDescription('SFP link length for Copper, units of m (one byte) applicable only when sfpConnector is RJ45')
accsfpBrSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpBrSpeed.setStatus('current')
if mibBuilder.loadTexts: accsfpBrSpeed.setDescription('SFP nominal signalling rate, units of 100Mbit/s (one byte)')
accsfpWavelength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpWavelength.setStatus('current')
if mibBuilder.loadTexts: accsfpWavelength.setDescription('SFP laser wavelength (one word)')
accsfpTemperature = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpTemperature.setStatus('current')
if mibBuilder.loadTexts: accsfpTemperature.setDescription('SFP temperature (one type, signed)')
accsfpTranPower = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpTranPower.setStatus('current')
if mibBuilder.loadTexts: accsfpTranPower.setDescription('SFP tx power (one type, signed)')
accsfpRecvPower = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpRecvPower.setStatus('current')
if mibBuilder.loadTexts: accsfpRecvPower.setDescription('SFP rx power (one type, signed)')
accsfpVoltage = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpVoltage.setStatus('current')
if mibBuilder.loadTexts: accsfpVoltage.setDescription('SFP voltage, units of 0.1mV (one word)')
mcIP175DObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3))
mcIP175DCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 1))
mcIP175DCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 1, 1), )
if mibBuilder.loadTexts: mcIP175DCardTable.setStatus('current')
if mibBuilder.loadTexts: mcIP175DCardTable.setDescription('MC IP175D Configuration table')
mcIP175DCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcIP175DCardEntry.setStatus('current')
if mibBuilder.loadTexts: mcIP175DCardEntry.setDescription('MC Configuration entry definition')
mcIP175DVlanMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("Normal", 1), ("mode1", 2), ("mode2", 3), ("not-support", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIP175DVlanMode.setStatus('current')
if mibBuilder.loadTexts: mcIP175DVlanMode.setDescription("Center card's vlan mode")
mcIP175DPortObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2))
mcIP175DPortTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1), )
if mibBuilder.loadTexts: mcIP175DPortTable.setStatus('current')
if mibBuilder.loadTexts: mcIP175DPortTable.setDescription('MC IP175D Configuration table')
mcIP175DPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcIP175DPortIdx"))
if mibBuilder.loadTexts: mcIP175DPortEntry.setStatus('current')
if mibBuilder.loadTexts: mcIP175DPortEntry.setDescription('MC Configuration entry definition')
mcIP175DPortIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("port1", 1), ("port2", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcIP175DPortIdx.setStatus('current')
if mibBuilder.loadTexts: mcIP175DPortIdx.setDescription('Port index')
mcIP175DCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcIP175DCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcIP175DCurWorkMode.setDescription("Center card's port current work mode")
mcIP175DCfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIP175DCfgWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcIP175DCfgWorkMode.setDescription("Center card's port configurable work mode")
mcIP175DUpStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 4), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(64, 100000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIP175DUpStream.setStatus('current')
if mibBuilder.loadTexts: mcIP175DUpStream.setDescription("Center card's port up stream of MC")
mcIP175DDownStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 5), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(64, 100000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIP175DDownStream.setStatus('current')
if mibBuilder.loadTexts: mcIP175DDownStream.setDescription("Center card's port down stream of MC")
mcIP175DTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcIP175DTxlink.setStatus('current')
if mibBuilder.loadTexts: mcIP175DTxlink.setDescription("Center card's port 1 electrical port's link status")
mcIP175DRmtCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcIP175DRmtCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcIP175DRmtCurWorkMode.setDescription("Remote card's port 1 current work mode")
mcIP175DRmtCfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIP175DRmtCfgWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcIP175DRmtCfgWorkMode.setDescription("Remote card's port1 configurable work mode")
mcIP175DRmtTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcIP175DRmtTxlink.setStatus('current')
if mibBuilder.loadTexts: mcIP175DRmtTxlink.setDescription("Remote card's electrical port status")
mc4_25G_OEOObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4)).setLabel("mc4-25G-OEOObjects")
mc4_25G_OEOCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1)).setLabel("mc4-25G-OEOCardObjects")
mc4_25G_OEOCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1), ).setLabel("mc4-25G-OEOCardTable")
if mibBuilder.loadTexts: mc4_25G_OEOCardTable.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOCardTable.setDescription('MC 4.25G OEO Configuration table')
mc4_25G_OEOCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1), ).setLabel("mc4-25G-OEOCardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc4_25G_OEOCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOCardEntry.setDescription('MC Configuration entry definition')
mc4_25G_OEOCurSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mc4-25G-OEOCurSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEOCurSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc4_25G_OEOCurSpdMode.setDescription("Center card's current speed mode")
mc4_25G_OEOCfgSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mc4-25G-OEOCfgSpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc4_25G_OEOCfgSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc4_25G_OEOCfgSpdMode.setDescription("Center card's configured speed mode")
mc4_25G_OEOLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc4-25G-OEOLoopback").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc4_25G_OEOLoopback.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOLoopback.setDescription("card's Loopback state")
mc4_25G_OEOWorkMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("repeater", 1), ("retimer", 2), ("not-support", 3)))).setLabel("mc4-25G-OEOWorkMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc4_25G_OEOWorkMode.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOWorkMode.setDescription("card's Work Mode")
mc4_25G_OEONtwPD = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc4-25G-OEONtwPD").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEONtwPD.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEONtwPD.setDescription("Center card's network side PD status")
mc4_25G_OEOAccPD = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc4-25G-OEOAccPD").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEOAccPD.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOAccPD.setDescription("Center card's access side PD status")
mc4_25G_OEOHWSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mc4-25G-OEOHWSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEOHWSpdMode.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOHWSpdMode.setDescription("Center card's HW speed mode")
mc4_25G_OEOHWLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc4-25G-OEOHWLoopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEOHWLoopback.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOHWLoopback.setDescription("card's HW Loopback state")
mc4_25G_OEOHWWorkMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("repeater", 1), ("retimer", 2), ("not-support", 3)))).setLabel("mc4-25G-OEOHWWorkMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEOHWWorkMode.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOHWWorkMode.setDescription("card's HW Work Mode")
mc4_25G_OEO_Test_Lock = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Lock", 1), ("Unlock", 2)))).setLabel("mc4-25G-OEO-Test-Lock").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Lock.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Lock.setDescription('test result lock or unlock')
mc4_25G_OEO_Test_Error_Counter = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 11), Integer32()).setLabel("mc4-25G-OEO-Test-Error-Counter").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Error_Counter.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Error_Counter.setDescription('test result error counter')
mc4_25G_OEO_Test_Continue_Time = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 12), Integer32()).setLabel("mc4-25G-OEO-Test-Continue-Time").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Continue_Time.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Continue_Time.setDescription('test continue time unit is second')
mc4_25G_OEO_Test_Result = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Pass", 1), ("Error", 2)))).setLabel("mc4-25G-OEO-Test-Result").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Result.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Result.setDescription('test result')
mc4_25G_OEO_Start_Test = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Start", 1), ("Stop", 2)))).setLabel("mc4-25G-OEO-Start-Test").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc4_25G_OEO_Start_Test.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Start_Test.setDescription('start test and stop test')
mc4_25G_OEO_Get_Test_Rst = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("Get", 1)))).setLabel("mc4-25G-OEO-Get-Test-Rst").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc4_25G_OEO_Get_Test_Rst.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Get_Test_Rst.setDescription('get test result')
mcRmt4_25G_OEOCurSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mcRmt4-25G-OEOCurSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt4_25G_OEOCurSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmt4_25G_OEOCurSpdMode.setDescription("Remote card's current speed mode")
mcRmt4_25G_OEOCfgSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mcRmt4-25G-OEOCfgSpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmt4_25G_OEOCfgSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmt4_25G_OEOCfgSpdMode.setDescription("Remote card's config speed mode")
mcRmt4_25G_OEOLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mcRmt4-25G-OEOLoopback").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmt4_25G_OEOLoopback.setStatus('current')
if mibBuilder.loadTexts: mcRmt4_25G_OEOLoopback.setDescription("card's Loopback state")
mcRmt4_25G_OEOWorkMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("repeater", 1), ("retimer", 2), ("not-support", 3)))).setLabel("mcRmt4-25G-OEOWorkMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmt4_25G_OEOWorkMode.setStatus('current')
if mibBuilder.loadTexts: mcRmt4_25G_OEOWorkMode.setDescription("card's Work Mode")
mcRmt4_25G_OEOHWSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mcRmt4-25G-OEOHWSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWSpdMode.setStatus('current')
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWSpdMode.setDescription("Remote card's HW speed mode")
mcRmt4_25G_OEOHWLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mcRmt4-25G-OEOHWLoopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWLoopback.setStatus('current')
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWLoopback.setDescription("card's HW Loopback state")
mcRmt4_25G_OEOHWWorkMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("repeater", 1), ("retimer", 2), ("not-support", 3)))).setLabel("mcRmt4-25G-OEOHWWorkMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWWorkMode.setStatus('current')
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWWorkMode.setDescription("card's HW Work Mode")
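# --- MC 10G OEO card objects (subtree 1.3.6.1.4.1.6688.1.1.1.4.2.5), local and remote ---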
mc10G_OEOObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5)).setLabel("mc10G-OEOObjects")
mc10G_OEOCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1)).setLabel("mc10G-OEOCardObjects")
mc10G_OEOCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1), ).setLabel("mc10G-OEOCardTable")
if mibBuilder.loadTexts: mc10G_OEOCardTable.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOCardTable.setDescription('MC 10G OEO Configuration table')
mc10G_OEOCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1), ).setLabel("mc10G-OEOCardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc10G_OEOCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOCardEntry.setDescription('MC Configuration entry definition')
mc10G_OEOCurSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mc10G-OEOCurSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEOCurSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEOCurSpdMode.setDescription("Center card's current speed mode 10G LAN(10.3125G) and 10G WAN(9.95328G)")
mc10G_OEOCfgSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mc10G-OEOCfgSpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEOCfgSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEOCfgSpdMode.setDescription("Center card's config speed mode 10G LAN(10.3125G) and 10G WAN(9.95328G)")
mc10G_OEOLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEOLoopback").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEOLoopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOLoopback.setDescription("card's Loopback state")
mc10G_OEOSFP1 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEOSFP1").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEOSFP1.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOSFP1.setDescription("Center card's SFP1 link status")
mc10G_OEOSFP2 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEOSFP2").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEOSFP2.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOSFP2.setDescription("Center card's SFP2 link status")
mc10G_OEOHWSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mc10G-OEOHWSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEOHWSpdMode.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOHWSpdMode.setDescription("Center card's HW speed mode")
mc10G_OEOHWLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEOHWLoopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEOHWLoopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOHWLoopback.setDescription("card's HW Loopback state")
mc10G_OEO_Test_Lock = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Lock", 1), ("Unlock", 2)))).setLabel("mc10G-OEO-Test-Lock").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_Test_Lock.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Test_Lock.setDescription('test result lock or unlock')
mc10G_OEO_Test_Error_Counter = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 9), Integer32()).setLabel("mc10G-OEO-Test-Error-Counter").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_Test_Error_Counter.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Test_Error_Counter.setDescription('test result error counter')
mc10G_OEO_Test_Continue_Time = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 10), Integer32()).setLabel("mc10G-OEO-Test-Continue-Time").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_Test_Continue_Time.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Test_Continue_Time.setDescription('test continue time unit is second')
mc10G_OEO_Test_Result = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Pass", 1), ("Error", 2)))).setLabel("mc10G-OEO-Test-Result").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_Test_Result.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Test_Result.setDescription('test result')
mc10G_OEO_Start_Test = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Start", 1), ("Stop", 2)))).setLabel("mc10G-OEO-Start-Test").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEO_Start_Test.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Start_Test.setDescription('start test and stop test')
mc10G_OEO_Get_Test_Rst = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("Get", 1)))).setLabel("mc10G-OEO-Get-Test-Rst").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEO_Get_Test_Rst.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Get_Test_Rst.setDescription('get test result')
mcRmt10G_OEOCurSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mcRmt10G-OEOCurSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEOCurSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmt10G_OEOCurSpdMode.setDescription("Remote card's current speed mode 10G LAN(10.3125G) and 10G WAN(9.95328G)")
mcRmt10G_OEOCfgSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mcRmt10G-OEOCfgSpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmt10G_OEOCfgSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmt10G_OEOCfgSpdMode.setDescription("Remote card's config speed mode 10G LAN(10.3125G) and 10G WAN(9.95328G)")
mcRmt10G_OEOLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mcRmt10G-OEOLoopback").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmt10G_OEOLoopback.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEOLoopback.setDescription("card's Loopback state")
mcRmt10G_OEOHWSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mcRmt10G-OEOHWSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEOHWSpdMode.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEOHWSpdMode.setDescription("Remote card's HW speed mode 10G LAN(10.3125G) and 10G WAN(9.95328G)")
mcRmt10G_OEOHWLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mcRmt10G-OEOHWLoopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEOHWLoopback.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEOHWLoopback.setDescription("card's HW Loopback state")
mcRmt10G_OEOSFP1 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mcRmt10G-OEOSFP1").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEOSFP1.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEOSFP1.setDescription("Remote card's SFP1 link status")
mc10G_OEO_accType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mc10G-OEO-accType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_accType.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_accType.setDescription('')
mc10G_OEO_ntwType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mc10G-OEO-ntwType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_ntwType.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_ntwType.setDescription('')
mcRmt10G_OEO_accType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mcRmt10G-OEO-accType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEO_accType.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEO_accType.setDescription('')
mcRmt10G_OEO_ntwType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mcRmt10G-OEO-ntwType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEO_ntwType.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEO_ntwType.setDescription('')
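# --- MC 10G OEE card objects (subtree 1.3.6.1.4.1.6688.1.1.1.4.2.6) ---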
mc10G_OEEObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6)).setLabel("mc10G-OEEObjects")
mc10G_OEECardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1)).setLabel("mc10G-OEECardObjects")
mc10G_OEECardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1), ).setLabel("mc10G-OEECardTable")
if mibBuilder.loadTexts: mc10G_OEECardTable.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEECardTable.setDescription('MC 10G OEE Configuration table')
mc10G_OEECardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1), ).setLabel("mc10G-OEECardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc10G_OEECardEntry.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEECardEntry.setDescription('MC Configuration entry definition')
mc10G_OEETxlink = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEETxlink").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEETxlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEETxlink.setDescription("Center card's electrical port's link status")
mc10G_OEEFxlink = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEEFxlink").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEEFxlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEEFxlink.setDescription("Center card's optical port's link status")
mc10G_OEECurSpd = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("no-card", 0), ("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("m10G-Master", 7), ("m10G-Slave", 8), ("not-support", 9)))).setLabel("mc10G-OEECurSpd").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEECurSpd.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEECurSpd.setDescription("Local card's current speed")
mc10G_OEELoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEELoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEELoopMode.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEELoopMode.setDescription("card's Loopback state")
mc10G_OEESpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 7, 8))).clone(namedValues=NamedValues(("auto", 1), ("m10G-Master", 7), ("m10G-Slave", 8)))).setLabel("mc10G-OEESpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEESpdMode.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEESpdMode.setDescription("card's speed mode")
mc10G_OEEHWLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEEHWLoopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEEHWLoopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEEHWLoopback.setDescription("card's Loopback state")
mc10G_OEE_ntwType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mc10G-OEE-ntwType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEE_ntwType.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEE_ntwType.setDescription('')
mc10G_OEE_checkResult = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 8), Integer32()).setLabel("mc10G-OEE-checkResult").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEE_checkResult.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEE_checkResult.setDescription('test result')
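# --- MC fan card objects (subtree 1.3.6.1.4.1.6688.1.1.1.4.2.7) ---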
mcFanObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 7))
mcFanCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 7, 1))
mcFanCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 7, 1, 1), )
if mibBuilder.loadTexts: mcFanCardTable.setStatus('current')
if mibBuilder.loadTexts: mcFanCardTable.setDescription('MC fan card table')
mcFanCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 7, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcFanCardEntry.setStatus('current')
if mibBuilder.loadTexts: mcFanCardEntry.setDescription('MC Configuration entry definition')
mcFanStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 7, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Normal", 1), ("Abnormal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcFanStatus.setStatus('mandatory')
if mibBuilder.loadTexts: mcFanStatus.setDescription("Center card's fan status")
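# --- MC 40G OEO card objects (subtree 1.3.6.1.4.1.6688.1.1.1.4.2.8) ---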
mc40G_OEOObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8)).setLabel("mc40G-OEOObjects")
mc40G_OEOCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1)).setLabel("mc40G-OEOCardObjects")
mc40G_OEOCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1), ).setLabel("mc40G-OEOCardTable")
if mibBuilder.loadTexts: mc40G_OEOCardTable.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOCardTable.setDescription('MC 40G OEO Configuration table')
mc40G_OEOCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1), ).setLabel("mc40G-OEOCardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc40G_OEOCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOCardEntry.setDescription('MC Configuration entry definition')
mc40G_OEOQsfp1Lane1_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp1Lane1-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane1_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane1_link.setDescription("Center card's Qsfp1 Lane1 link status")
mc40G_OEOQsfp1Lane2_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp1Lane2-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane2_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane2_link.setDescription("Center card's Qsfp1 Lane2 link status")
mc40G_OEOQsfp1Lane3_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp1Lane3-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane3_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane3_link.setDescription("Center card's Qsfp1 Lane3 link status")
mc40G_OEOQsfp1Lane4_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp1Lane4-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane4_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane4_link.setDescription("Center card's Qsfp1 Lane4 link status")
mc40G_OEOQsfp2Lane1_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp2Lane1-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane1_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane1_link.setDescription("Center card's Qsfp2 Lane1 link status")
mc40G_OEOQsfp2Lane2_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp2Lane2-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane2_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane2_link.setDescription("Center card's Qsfp2 Lane2 link status")
mc40G_OEOQsfp2Lane3_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp2Lane3-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane3_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane3_link.setDescription("Center card's Qsfp2 Lane3 link status")
mc40G_OEOQsfp2Lane4_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp2Lane4-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane4_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane4_link.setDescription("Center card's Qsfp2 Lane4 link status")
mc40G_OEOLane1LoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("line-side-enable", 1), ("host-side-enable", 2), ("disable", 3), ("not-support", 4)))).setLabel("mc40G-OEOLane1LoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOLane1LoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOLane1LoopMode.setDescription("card's Lane1 Loopback state")
mc40G_OEOLane2LoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("line-side-enable", 1), ("host-side-enable", 2), ("disable", 3), ("not-support", 4)))).setLabel("mc40G-OEOLane2LoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOLane2LoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOLane2LoopMode.setDescription("card's Lane2 Loopback state")
mc40G_OEOLane3LoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("line-side-enable", 1), ("host-side-enable", 2), ("disable", 3), ("not-support", 4)))).setLabel("mc40G-OEOLane3LoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOLane3LoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOLane3LoopMode.setDescription("card's Lane3 Loopback state")
mc40G_OEOLane4LoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("line-side-enable", 1), ("host-side-enable", 2), ("disable", 3), ("not-support", 4)))).setLabel("mc40G-OEOLane4LoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOLane4LoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOLane4LoopMode.setDescription("card's Lane4 Loopback state")
mc40G_OEOLoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("all", 1), ("line-side-enable", 2), ("host-side-enable", 3), ("disable", 4), ("not-support", 5)))).setLabel("mc40G-OEOLoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOLoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOLoopMode.setDescription("card's Loopback state")
mc40G_OEOSpeedMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("no-card", 0), ("mc40GSpeed-1", 1), ("mc40GSpeed-2", 2), ("mc40GSpeed-3", 3), ("mc40GSpeed-4", 4), ("mc40GSpeed-5", 5), ("mc40GSpeed-6", 6), ("mc40GSpeed-7", 7), ("mc40GSpeed-8", 8), ("mc40GSpeed-9", 9), ("mc40GSpeed-10", 10), ("mc40GSpeed-11", 11), ("mc40GSpeed-12", 12), ("mc40GSpeed-13", 13), ("not-support", 14)))).setLabel("mc40G-OEOSpeedMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOSpeedMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOSpeedMode.setDescription('speed1: 1X40G: 10G LAN(10312.5Mbps) speed2: 1X40G: OTU3(10754.60325Mbps) speed3: 1X40G: OTU3e2(11145.83875Mbps) speed4: 4X10G: 10G LAN(10312.5Mbps) speed5: 4X10G: CPRI(9830.4 Mbps) speed6: 4X10G: OC-192/STM-64(9953.28Mbps) speed7: 4X10G: OC-192/STM-64(10664.228571427Mbps) speed8: 4X10G: OC-192/STM-64(10709.225316455Mbps) speed9: 4X10G: 10G Ethernet(11049.107142857Mbps) speed10: 4X10G: 10GFibreChannel(10518.750Mbps) speed11: 4X10G: 10GFibreChannel(11270.089285714Mbps) speed12: 4X10G: 10GFibreChannel(11317.642405063Mbps) speed13: 4X10G: 10GInfiniband(10000.00Mbps)')
mc40G_OEOHWLoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("line-side-enable", 1), ("host-side-enable", 2), ("disable", 3), ("not-support", 4)))).setLabel("mc40G-OEOHWLoopMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOHWLoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOHWLoopMode.setDescription("card's HW Loopback state")
mc40G_OEOHWSpeedMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=NamedValues(("no-card", 0), ("mc40GSpeed-1", 1), ("mc40GSpeed-2", 2), ("mc40GSpeed-3", 3), ("mc40GSpeed-4", 4), ("mc40GSpeed-5", 5), ("mc40GSpeed-6", 6), ("mc40GSpeed-7", 7), ("mc40GSpeed-8", 8), ("mc40GSpeed-9", 9), ("mc40GSpeed-10", 10), ("mc40GSpeed-11", 11), ("mc40GSpeed-12", 12), ("not-support", 13)))).setLabel("mc40G-OEOHWSpeedMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOHWSpeedMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOHWSpeedMode.setDescription('speed1: 1X40G: 10G LAN(10312.5Mbps) speed2: 1X40G: OTU3(10754.60325Mbps) speed3: 1X40G: OTU3e2(11145.83875Mbps) speed4: 4X10G: 10G LAN(10312.5Mbps) speed5: 4X10G: CPRI(9830.4 Mbps) speed6: 4X10G: OC-192/STM-64(9953.28Mbps) speed7: 4X10G: OC-192/STM-64(10664.228571427Mbps) speed8: 4X10G: OC-192/STM-64(10709.225316455Mbps) speed9: 4X10G: 10G Ethernet(11049.107142857Mbps) speed10: 4X10G: 10GFibreChannel(10518.750Mbps) speed11: 4X10G: 10GFibreChannel(11270.089285714Mbps) speed12: 4X10G: 10GFibreChannel(11317.642405063Mbps)')
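# --- QSFP diagnostic objects (subtree 1.3.6.1.4.1.6688.1.1.1.4.2.9): network side ---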
mcQsfpSpecificObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9))
mcNtwQSfpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1))
mcNtwQSfpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1), )
if mibBuilder.loadTexts: mcNtwQSfpTable.setStatus('current')
if mibBuilder.loadTexts: mcNtwQSfpTable.setDescription('MC Ntw QSFP table')
mcNtwQSfpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcNtwQSfpEntry.setStatus('current')
if mibBuilder.loadTexts: mcNtwQSfpEntry.setDescription('MC Ntw QSFP entry definition')
getNtwQSfpCmd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: getNtwQSfpCmd.setStatus('current')
if mibBuilder.loadTexts: getNtwQSfpCmd.setDescription('This command refreshes the SFP information. Send it before reading the following parameters; otherwise the previously cached SFP information will be returned.')
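# A minimal sketch of the read sequence implied by the description above, using
# the pysnmp hlapi.  The agent address (192.0.2.1), community strings and the
# shelf/card index (1, 1) are hypothetical placeholders; only the object names
# and the "XXX-MIB" module name come from this file.  getNtwQSfpCmd is written
# first (local(1)) so the qsfpNtw* columns return fresh rather than cached data:
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity,
#                             Integer32, setCmd, getCmd)
#   engine, target, ctx = SnmpEngine(), UdpTransportTarget(('192.0.2.1', 161)), ContextData()
#   # refresh the diagnostics for the local card at shelf 1, card 1
#   next(setCmd(engine, CommunityData('private'), target, ctx,
#               ObjectType(ObjectIdentity('XXX-MIB', 'getNtwQSfpCmd', 1, 1), Integer32(1))))
#   # then read one of the diagnostic columns
#   errInd, errStat, errIdx, varBinds = next(getCmd(engine, CommunityData('public'), target, ctx,
#               ObjectType(ObjectIdentity('XXX-MIB', 'qsfpNtwTemperature', 1, 1))))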
qsfpNtwConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwConnector.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwConnector.setDescription('SFP connector type (one byte) 0x07: LC 0x0B: Optical Pigtail 0x0C: MPO 0x21: Copper Pigtail others: unsupported')
qsfpNtwTemperature = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwTemperature.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwTemperature.setDescription('SFP temperature (one type, signed)')
qsfpNtwTxPower1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwTxPower1.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwTxPower1.setDescription('SFP tx power (one type, signed)')
qsfpNtwTxPower2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwTxPower2.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwTxPower2.setDescription('SFP tx power (one type, signed)')
qsfpNtwTxPower3 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwTxPower3.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwTxPower3.setDescription('SFP tx power (one type, signed)')
qsfpNtwTxPower4 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwTxPower4.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwTxPower4.setDescription('SFP tx power (one type, signed)')
qsfpNtwRxPower1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwRxPower1.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwRxPower1.setDescription('SFP rx power (one type, signed)')
qsfpNtwRxPower2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwRxPower2.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwRxPower2.setDescription('SFP rx power (one type, signed)')
qsfpNtwRxPower3 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwRxPower3.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwRxPower3.setDescription('SFP rx power (one type, signed)')
qsfpNtwRxPower4 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwRxPower4.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwRxPower4.setDescription('SFP rx power (one type, signed)')
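# --- QSFP diagnostic objects (subtree 1.3.6.1.4.1.6688.1.1.1.4.2.9): access side ---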
mcAccQSfpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2))
mcAccQSfpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1), )
if mibBuilder.loadTexts: mcAccQSfpTable.setStatus('current')
if mibBuilder.loadTexts: mcAccQSfpTable.setDescription('MC Acc QSFP table')
mcAccQSfpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcAccQSfpEntry.setStatus('current')
if mibBuilder.loadTexts: mcAccQSfpEntry.setDescription('MC Acc QSFP entry definition')
getAccQSfpCmd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: getAccQSfpCmd.setStatus('current')
if mibBuilder.loadTexts: getAccQSfpCmd.setDescription('This command refreshes the SFP information. Send it before reading the following parameters; otherwise the previously cached SFP information will be returned.')
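# The same refresh sequence applies here: set getAccQSfpCmd before reading the
# qsfpAcc* columns (see the sketch after getNtwQSfpCmd above).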
qsfpAccConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccConnector.setStatus('current')
if mibBuilder.loadTexts: qsfpAccConnector.setDescription('SFP connector type (one byte) 0x07: LC 0x0B: Optical Pigtail 0x0C: MPO 0x21: Copper Pigtail others: unsupported')
qsfpAccTemperature = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccTemperature.setStatus('current')
if mibBuilder.loadTexts: qsfpAccTemperature.setDescription('SFP temperature (one type, signed)')
qsfpAccTxPower1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccTxPower1.setStatus('current')
if mibBuilder.loadTexts: qsfpAccTxPower1.setDescription('SFP tx power (one type, signed)')
qsfpAccTxPower2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccTxPower2.setStatus('current')
if mibBuilder.loadTexts: qsfpAccTxPower2.setDescription('SFP tx power (one type, signed)')
qsfpAccTxPower3 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccTxPower3.setStatus('current')
if mibBuilder.loadTexts: qsfpAccTxPower3.setDescription('SFP tx power (one type, signed)')
qsfpAccTxPower4 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccTxPower4.setStatus('current')
if mibBuilder.loadTexts: qsfpAccTxPower4.setDescription('SFP tx power (one type, signed)')
qsfpAccRxPower1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccRxPower1.setStatus('current')
if mibBuilder.loadTexts: qsfpAccRxPower1.setDescription('SFP rx power (one type, signed)')
qsfpAccRxPower2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccRxPower2.setStatus('current')
if mibBuilder.loadTexts: qsfpAccRxPower2.setDescription('SFP rx power (one type, signed)')
qsfpAccRxPower3 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccRxPower3.setStatus('current')
if mibBuilder.loadTexts: qsfpAccRxPower3.setDescription('SFP rx power (one type, signed)')
qsfpAccRxPower4 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccRxPower4.setStatus('current')
if mibBuilder.loadTexts: qsfpAccRxPower4.setDescription('SFP rx power (one type, signed)')
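# --- MC 2.5G card objects (subtree 1.3.6.1.4.1.6688.1.1.1.4.2.10), including the 1G SFP3 diagnostics ---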
mc2_5GMCObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10)).setLabel("mc2-5GMCObjects")
mc2_5GMCSFP3Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1)).setLabel("mc2-5GMCSFP3Objects")
mc2_5Cm1gSfpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1), ).setLabel("mc2-5Cm1gSfpTable")
if mibBuilder.loadTexts: mc2_5Cm1gSfpTable.setStatus('current')
if mibBuilder.loadTexts: mc2_5Cm1gSfpTable.setDescription('MC 1G SFP table')
mc2_5Cm1gSfpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1), ).setLabel("mc2-5Cm1gSfpEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcLoOrRmtFg"))
if mibBuilder.loadTexts: mc2_5Cm1gSfpEntry.setStatus('current')
if mibBuilder.loadTexts: mc2_5Cm1gSfpEntry.setDescription('MC 1G SFP entry definition')
mc2_5g_getSfpCmd = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setLabel("mc2-5g-getSfpCmd").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc2_5g_getSfpCmd.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_getSfpCmd.setDescription('This command refreshes the SFP information. Send it before reading the following parameters; otherwise the previously cached SFP information will be returned.')
mc2_5g_sfpCompliance = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 2), Integer32()).setLabel("mc2-5g-sfpCompliance").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpCompliance.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpCompliance.setDescription('SFP compliance (one byte); if 0 then the attributes sfpTemperature/sfpTranPower/sfpRecvPower should be ignored')
mc2_5g_sfpConnector = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 3), Integer32()).setLabel("mc2-5g-sfpConnector").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpConnector.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpConnector.setDescription('SFP connector type (one byte) 0x01: SC 0x07: LC 0x22: RJ45 others: unsupported')
mc2_5g_sfpTransCode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 4), Integer32()).setLabel("mc2-5g-sfpTransCode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpTransCode.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpTransCode.setDescription('SFP transceiver code (one byte) bit0: SingleMode bit2: MultiMode bit3: MultiMode others: unsupported')
mc2_5g_sfpSmLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 5), Integer32()).setLabel("mc2-5g-sfpSmLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpSmLength.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpSmLength.setDescription('SFP link length for SingleMode, units of km. (one byte) applicable only when sfpTransCode is SingleMode')
mc2_5g_sfpMmLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 6), Integer32()).setLabel("mc2-5g-sfpMmLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpMmLength.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpMmLength.setDescription('SFP link length for MultiMode, units of 10m (one byte) applicable only when sfpTransCode is MultiMode')
mc2_5g_sfpCopperLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 7), Integer32()).setLabel("mc2-5g-sfpCopperLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpCopperLength.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpCopperLength.setDescription('SFP link length for Copper, units of m (one byte) applicable only when sfpConnector is RJ45')
mc2_5g_sfpBrSpeed = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 8), Integer32()).setLabel("mc2-5g-sfpBrSpeed").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpBrSpeed.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpBrSpeed.setDescription('SFP nominal signalling rate, units of 100Mbit/s (one byte)')
mc2_5g_sfpWavelength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 9), Integer32()).setLabel("mc2-5g-sfpWavelength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpWavelength.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpWavelength.setDescription('SFP laser wavelength (one word)')
mc2_5g_sfpTemperature = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 10), Integer32()).setLabel("mc2-5g-sfpTemperature").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpTemperature.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpTemperature.setDescription('SFP temperature (one type, signed)')
mc2_5g_sfpTranPower = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 11), Integer32()).setLabel("mc2-5g-sfpTranPower").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpTranPower.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpTranPower.setDescription('SFP tx power (one type, signed)')
mc2_5g_sfpRecvPower = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 12), Integer32()).setLabel("mc2-5g-sfpRecvPower").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpRecvPower.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpRecvPower.setDescription('SFP rx power (one type, signed)')
mc2_5g_sfpVoltage = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 13), Integer32()).setLabel("mc2-5g-sfpVoltage").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpVoltage.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpVoltage.setDescription('SFP voltage, units of 0.1mV (one word)')
mc2_5GMCCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2)).setLabel("mc2-5GMCCardObjects")
mc2_5GMCCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1), ).setLabel("mc2-5GMCCardTable")
if mibBuilder.loadTexts: mc2_5GMCCardTable.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCCardTable.setDescription('MC 2-5GMC Configuration table')
mc2_5GMCCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1, 1), ).setLabel("mc2-5GMCCardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc2_5GMCCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCCardEntry.setDescription('MC Configuration entry definition')
mc2_5GMCSfp3Exist = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("inserted", 1), ("removed", 2), ("na", 3), ("not-support", 4)))).setLabel("mc2-5GMCSfp3Exist").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5GMCSfp3Exist.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCSfp3Exist.setDescription("Center 1G card's SFP3 presence indication")
mc2_5GMCPort1link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc2-5GMCPort1link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5GMCPort1link.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCPort1link.setDescription("Center card's electrical port1's link status")
mc2_5GMCPort2link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc2-5GMCPort2link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5GMCPort2link.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCPort2link.setDescription("Center card's electrical port2's link status")
mc2_5GMCPort3link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc2-5GMCPort3link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5GMCPort3link.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCPort3link.setDescription("Center card's electrical port3's link status")
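# --- MC E1 + Ethernet card objects (subtree 1.3.6.1.4.1.6688.1.1.1.4.2.11), local and remote ---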
mcE1Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11))
mcE1CardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1))
mcE1CardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1), )
if mibBuilder.loadTexts: mcE1CardTable.setStatus('current')
if mibBuilder.loadTexts: mcE1CardTable.setDescription('MC E1 + Eth Configuration table')
mcE1CardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcE1CardEntry.setStatus('current')
if mibBuilder.loadTexts: mcE1CardEntry.setDescription('MC Configuration entry definition')
mcE1Txlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Txlink.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1Txlink.setDescription("Center card's electrical port's link status")
mcE1TxCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1TxCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1TxCurWorkMode.setDescription("Center card's current work mode")
mcE1SFP1Link = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1SFP1Link.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1SFP1Link.setDescription("Center card's SFP1 port's link status")
mcE1Port1LOS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port1LOS.setStatus('current')
if mibBuilder.loadTexts: mcE1Port1LOS.setDescription("card's E1 Port1 LOS state")
mcE1Port1AIS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port1AIS.setStatus('current')
if mibBuilder.loadTexts: mcE1Port1AIS.setDescription("card's E1 Port1 AIS state")
mcE1Port1CV = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port1CV.setStatus('current')
if mibBuilder.loadTexts: mcE1Port1CV.setDescription("card's E1 Port1 CV state")
mcE1Port2LOS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port2LOS.setStatus('current')
if mibBuilder.loadTexts: mcE1Port2LOS.setDescription("card's E1 Port2 LOS state")
mcE1Port2AIS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port2AIS.setStatus('current')
if mibBuilder.loadTexts: mcE1Port2AIS.setDescription("card's E1 Port2 AIS state")
mcE1Port2CV = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port2CV.setStatus('current')
if mibBuilder.loadTexts: mcE1Port2CV.setDescription("card's E1 Port2 CV state")
mcE1Port1Loop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("external", 1), ("internal", 2), ("disabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1Port1Loop.setStatus('current')
if mibBuilder.loadTexts: mcE1Port1Loop.setDescription("card's Port1 Loopback state")
mcE1Port2Loop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("external", 1), ("internal", 2), ("disabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1Port2Loop.setStatus('current')
if mibBuilder.loadTexts: mcE1Port2Loop.setDescription("card's Port2 Loopback state")
mcRmtE1Txlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Txlink.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmtE1Txlink.setDescription("Remote card's electrical port's link status")
mcRmtE1TxCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1TxCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmtE1TxCurWorkMode.setDescription("Remote card's current work mode")
mcRmtE1SFP1Link = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1SFP1Link.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmtE1SFP1Link.setDescription("Remote card's SFP1 port's link status")
mcRmtE1Port1LOS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port1LOS.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port1LOS.setDescription("Remote card's E1 Port1 Los state")
mcRmtE1Port1AIS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port1AIS.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port1AIS.setDescription("Remote card's E1 Port1 AIS state")
mcRmtE1Port1CV = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port1CV.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port1CV.setDescription("Remote card's E1 Port1 CV state")
mcRmtE1Port2LOS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port2LOS.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port2LOS.setDescription("Remote card's E1 Port2 Los state")
mcRmtE1Port2AIS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port2AIS.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port2AIS.setDescription("Remote card's E1 Port2 AIS state")
mcRmtE1Port2CV = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port2CV.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port2CV.setDescription("Remote card's E1 Port2 CV state")
mcRmtE1Port1Loop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("external", 1), ("internal", 2), ("disabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtE1Port1Loop.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port1Loop.setDescription("Remote card's Port1 Loopback state")
mcRmtE1Port2Loop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("external", 1), ("internal", 2), ("disabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtE1Port2Loop.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port2Loop.setDescription("Remote card's Port2 Loopback state")
mc1GE2OObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12))
mc1GE2OCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1))
mc1GE2OCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1), )
if mibBuilder.loadTexts: mc1GE2OCardTable.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OCardTable.setDescription('MC E2O Fiber backup Configuration table')
mc1GE2OCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc1GE2OCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OCardEntry.setDescription('MC Configuration entry definition')
mc1GE2OPort1SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OPort1SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2OPort1SFPlink.setDescription("Center card's port1 SFP's link status")
mc1GE2OPort2SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OPort2SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2OPort2SFPlink.setDescription("Center card's port2 SFP's link status")
mc1GE2OTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OTxlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2OTxlink.setDescription("Center card's electrical port's link status")
mc1GE2OPortPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc1GE2OPortPri.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OPortPri.setDescription("Center card's Port Pri state")
mc1GE2OPort1SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OPort1SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OPort1SFPExist.setDescription('E2O Port1 SFP indication')
mc1GE2OPort2SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OPort2SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OPort2SFPExist.setDescription('E2O Port2 SFP indication')
mc1GE2OPortHWPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OPortHWPri.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OPortHWPri.setDescription("Center card's Port Hardware Pri state")
mc1GE2ORmtPort1SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtPort1SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2ORmtPort1SFPlink.setDescription("Remote card's port1 SFP's link status")
mc1GE2ORmtPort2SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtPort2SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2ORmtPort2SFPlink.setDescription("Remote card's port2 SFP's link status")
mc1GE2ORmtTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtTxlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2ORmtTxlink.setDescription("Remote card's electrical port's link status")
mc1GE2ORmtPort1SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtPort1SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GE2ORmtPort1SFPExist.setDescription('E2O Port1 SFP indication')
mc1GE2ORmtPort2SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtPort2SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GE2ORmtPort2SFPExist.setDescription('E2O Port2 SFP indication')
mc1GE2ORmtPortHWPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtPortHWPri.setStatus('current')
if mibBuilder.loadTexts: mc1GE2ORmtPortHWPri.setDescription("Remote card's Port Hardware Pri state")
mc1GO2OObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13))
mc1GO2OCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1))
mc1GO2OCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1), )
if mibBuilder.loadTexts: mc1GO2OCardTable.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OCardTable.setDescription('MC O2O Fiber backup Configuration table')
mc1GO2OCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc1GO2OCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OCardEntry.setDescription('MC Configuration entry definition')
mc1GO2OPort1SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort1SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2OPort1SFPlink.setDescription("Center card's port1 SFP's link status")
mc1GO2OPort2SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort2SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2OPort2SFPlink.setDescription("Center card's port2 SFP's link status")
mc1GO2OPort3SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort3SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2OPort3SFPlink.setDescription("Center card's port3 SFP's link status")
mc1GO2OPortPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc1GO2OPortPri.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPortPri.setDescription("Center card's Port Pri state")
mc1GO2OPort1SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort1SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPort1SFPExist.setDescription('O2O Port1 SFP indication')
mc1GO2OPort2SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort2SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPort2SFPExist.setDescription('O2O Port2 SFP indication')
mc1GO2OPort3SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("inserted", 1), ("removed", 2), ("na", 3), ("not-support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort3SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPort3SFPExist.setDescription('O2O Port3 SFP indication')
mc1GO2OPortHWPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPortHWPri.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPortHWPri.setDescription("Local card's Port Hardware Pri state")
mc1GO2OPort3HWSpd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("M100", 1), ("M1000", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort3HWSpd.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPort3HWSpd.setDescription("Local card's Port3 Hardware Speed state")
mc1GO2ORmtPort1SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort1SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2ORmtPort1SFPlink.setDescription("Remote card's port1 SFP's link status")
mc1GO2ORmtPort2SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort2SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2ORmtPort2SFPlink.setDescription("Remote card's port2 SFP's link status")
mc1GO2ORmtPort3SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort3SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2ORmtPort3SFPlink.setDescription("Remote card's port3 SFP's link status")
mc1GO2ORmtPort1SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort1SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2ORmtPort1SFPExist.setDescription('O2O Port1 SFP indication')
mc1GO2ORmtPort2SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort2SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2ORmtPort2SFPExist.setDescription('O2O Port2 SFP indication')
mc1GO2ORmtPort3SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("inserted", 1), ("removed", 2), ("na", 3), ("not-support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort3SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2ORmtPort3SFPExist.setDescription("Remote card's SFP3 indication")
mc1GO2ORmtPortHWPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPortHWPri.setStatus('current')
if mibBuilder.loadTexts: mc1GO2ORmtPortHWPri.setDescription("Remote card's Port Hardware Pri state")
mc1GO2ORmtPort3HWSpd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("M100", 1), ("M1000", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort3HWSpd.setStatus('current')
if mibBuilder.loadTexts: mc1GO2ORmtPort3HWSpd.setDescription("Remote card's Port3 Hardware Speed state")
mc1GO2OSFP3Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2))
mc1GO2OSfp3Table = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1), )
if mibBuilder.loadTexts: mc1GO2OSfp3Table.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OSfp3Table.setDescription('MC 1G SFP table')
mc1GO2OSfp3Entry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcLoOrRmtFg"))
if mibBuilder.loadTexts: mc1GO2OSfp3Entry.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OSfp3Entry.setDescription('MC 1G O2O SFP3 entry definition')
mc1go2o_getSfpCmd = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setLabel("mc1go2o-getSfpCmd").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc1go2o_getSfpCmd.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_getSfpCmd.setDescription('This command will get the updated sfp information. Please send this command prior to getting the following params, otherwise the history sfp information will be sent back.')
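# Usage sketch (illustrative, not generated from the MIB): per the description of
# mc1go2o-getSfpCmd above, an SNMP SET on that object should precede reads of the
# mc1go2o-sfp* objects, otherwise stale values are returned. The helper below shows
# that refresh-then-read sequence with pysnmp's hlapi; the agent address, community
# string and shelf/card/local-or-remote index values are placeholders, and symbol
# resolution is assumed to go through this compiled module ("XXX-MIB").
def _example_refresh_and_read_sfp3(shelf=1, card=1, loOrRmt=1,
                                   agent='192.0.2.1', community='private'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              setCmd, getCmd)
    engine, auth, target, ctx = (SnmpEngine(), CommunityData(community),
                                 UdpTransportTarget((agent, 161)), ContextData())
    index = (shelf, card, loOrRmt)
    # Step 1: ask the card to refresh its SFP3 readings (1 == local side).
    next(setCmd(engine, auth, target, ctx,
                ObjectType(ObjectIdentity('XXX-MIB', 'mc1go2o-getSfpCmd', *index), 1)))
    # Step 2: read back the now-current diagnostic values.
    errInd, errStat, errIdx, varBinds = next(getCmd(
        engine, auth, target, ctx,
        ObjectType(ObjectIdentity('XXX-MIB', 'mc1go2o-sfpTemperature', *index)),
        ObjectType(ObjectIdentity('XXX-MIB', 'mc1go2o-sfpRecvPower', *index))))
    return None if (errInd or errStat) else varBinds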
mc1go2o_sfpCompliance = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 2), Integer32()).setLabel("mc1go2o-sfpCompliance").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpCompliance.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpCompliance.setDescription('SFP compliance (one byte) if 0 then the attributs of sfpTemperature/sfpTranPower/sfpRecvPower should be ignored')
mc1go2o_sfpConnector = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 3), Integer32()).setLabel("mc1go2o-sfpConnector").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpConnector.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpConnector.setDescription('SFP connector type (one byte) 0x01: SC 0x07: LC 0x22: RJ45 others: unsupported')
mc1go2o_sfpTransCode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 4), Integer32()).setLabel("mc1go2o-sfpTransCode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpTransCode.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpTransCode.setDescription('SFP transceiver code (one byte) bit0: SingleMode bit2: MultiMode bit3: MultiMode others: unsupported')
mc1go2o_sfpSmLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 5), Integer32()).setLabel("mc1go2o-sfpSmLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpSmLength.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpSmLength.setDescription('SFP link length for SingleMode, units of km. (one byte) applicable only when sfpTransCode is SingleMode')
mc1go2o_sfpMmLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 6), Integer32()).setLabel("mc1go2o-sfpMmLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpMmLength.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpMmLength.setDescription('SFP link length for MultiMode, units of 10m (one byte) applicable only when sfpTransCode is MultiMode')
mc1go2o_sfpCopperLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 7), Integer32()).setLabel("mc1go2o-sfpCopperLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpCopperLength.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpCopperLength.setDescription('SFP link length for Copper, units of m (one byte) applicable only when sfpConnector is RJ45')
mc1go2o_sfpBrSpeed = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 8), Integer32()).setLabel("mc1go2o-sfpBrSpeed").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpBrSpeed.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpBrSpeed.setDescription('SFP nominal signalling rate, units of 100Mbit/s (one byte)')
mc1go2o_sfpWavelength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 9), Integer32()).setLabel("mc1go2o-sfpWavelength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpWavelength.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpWavelength.setDescription('SFP laser wavelength (one word)')
mc1go2o_sfpTemperature = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 10), Integer32()).setLabel("mc1go2o-sfpTemperature").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpTemperature.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpTemperature.setDescription('SFP temperature (one type, signed)')
mc1go2o_sfpTranPower = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 11), Integer32()).setLabel("mc1go2o-sfpTranPower").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpTranPower.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpTranPower.setDescription('SFP tx power (one type, signed)')
mc1go2o_sfpRecvPower = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 12), Integer32()).setLabel("mc1go2o-sfpRecvPower").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpRecvPower.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpRecvPower.setDescription('SFP rx power (one type, signed)')
mc1go2o_sfpVoltage = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 13), Integer32()).setLabel("mc1go2o-sfpVoltage").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpVoltage.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpVoltage.setDescription('SFP voltage, units of 0.1mV (one word)')
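# Illustrative conversion helper (not part of the MIB): the raw mc1go2o-sfp* length,
# rate and voltage objects above are reported in the units stated in their
# descriptions (km, 10 m, m, 100 Mbit/s and 0.1 mV respectively); this sketch turns
# one set of raw readings into conventional engineering units.
def _scale_sfp3_readings(smLength, mmLength, copperLength, brSpeed, voltage):
    return {
        'single-mode-km': smLength,        # mc1go2o-sfpSmLength: already km
        'multi-mode-m': mmLength * 10,     # mc1go2o-sfpMmLength: units of 10 m
        'copper-m': copperLength,          # mc1go2o-sfpCopperLength: already m
        'bit-rate-Mbps': brSpeed * 100,    # mc1go2o-sfpBrSpeed: units of 100 Mbit/s
        'voltage-mV': voltage / 10.0,      # mc1go2o-sfpVoltage: units of 0.1 mV
    }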
mc10GOEO1RObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14))
mc10GOEO1RCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1))
mc10GOEO1RCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1), )
if mibBuilder.loadTexts: mc10GOEO1RCardTable.setStatus('current')
if mibBuilder.loadTexts: mc10GOEO1RCardTable.setDescription('MC 10G OEO 1R Configuration table')
mc10GOEO1RCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc10GOEO1RCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc10GOEO1RCardEntry.setDescription('MC Configuration entry definition')
mcAccXFP1WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcAccXFP1WaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: mcAccXFP1WaveLengthTunability.setDescription("XFP1's wavelength tunability")
mcAccXFP1WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcAccXFP1WaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: mcAccXFP1WaveLengthTunable.setDescription("XFP1's wavelength tunable status")
mcAccXFP1WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcAccXFP1WaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: mcAccXFP1WaveLength.setDescription("XFP1's wavelength")
mcNtwXFP2WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcNtwXFP2WaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: mcNtwXFP2WaveLengthTunability.setDescription("XFP2's wavelength tunability")
mcNtwXFP2WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcNtwXFP2WaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: mcNtwXFP2WaveLengthTunable.setDescription("XFP2's wavelength tunable status")
mcNtwXFP2WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcNtwXFP2WaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: mcNtwXFP2WaveLength.setDescription("XFP2's wavelength")
mcAccXFP1TunableType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcAccXFP1TunableType.setStatus('mandatory')
if mibBuilder.loadTexts: mcAccXFP1TunableType.setDescription("XFP1's wavelength tunable type")
mcNtwXFP2TunableType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcNtwXFP2TunableType.setStatus('mandatory')
if mibBuilder.loadTexts: mcNtwXFP2TunableType.setDescription("XFP2's wavelength tunable type")
mc10GOEO3RObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15))
mc10GOEO3RCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1))
mc10GOEO3RCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1), )
if mibBuilder.loadTexts: mc10GOEO3RCardTable.setStatus('current')
if mibBuilder.loadTexts: mc10GOEO3RCardTable.setDescription('MC 10G OEO 3R tunable wavelength Configuration table')
mc10GOEO3RCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc10GOEO3RCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc10GOEO3RCardEntry.setDescription('MC Configuration entry definition')
accXFP1WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: accXFP1WaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: accXFP1WaveLengthTunability.setDescription("XFP1's wavelength tunability")
accXFP1WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: accXFP1WaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: accXFP1WaveLengthTunable.setDescription("XFP1's wavelength tunable status")
accXFP1WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: accXFP1WaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: accXFP1WaveLength.setDescription("XFP1's wavelength")
ntwXFP2WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntwXFP2WaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: ntwXFP2WaveLengthTunability.setDescription("XFP2's wavelength tunability")
ntwXFP2WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntwXFP2WaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: ntwXFP2WaveLengthTunable.setDescription("XFP2's wavelength tunable status")
ntwXFP2WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ntwXFP2WaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: ntwXFP2WaveLength.setDescription("XFP2's wavelength")
accXFP1TunableType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: accXFP1TunableType.setStatus('mandatory')
if mibBuilder.loadTexts: accXFP1TunableType.setDescription("XFP1's wavelength tunable type")
ntwXFP2TunableType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntwXFP2TunableType.setStatus('mandatory')
if mibBuilder.loadTexts: ntwXFP2TunableType.setDescription("XFP2's wavelength tunable type")
mcCWDMObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16))
mcCWDMCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1))
mcCWDMCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1), )
if mibBuilder.loadTexts: mcCWDMCardTable.setStatus('current')
if mibBuilder.loadTexts: mcCWDMCardTable.setDescription('MC CWDM table')
mcCWDMCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcCWDMCardEntry.setStatus('current')
if mibBuilder.loadTexts: mcCWDMCardEntry.setDescription('MC Configuration entry definition')
cwdmWavelengthCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelengthCount.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelengthCount.setDescription('wavelength count')
cwdmWavelength1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength1.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength1.setDescription('CWDM Card wavelength 1')
cwdmWavelength2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength2.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength2.setDescription('CWDM Card wavelength 2')
cwdmWavelength3 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength3.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength3.setDescription('CWDM Card wavelength 3')
cwdmWavelength4 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength4.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength4.setDescription('CWDM Card wavelength 4')
cwdmWavelength5 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength5.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength5.setDescription('CWDM Card wavelength 5')
cwdmWavelength6 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength6.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength6.setDescription('CWDM Card wavelength 6')
cwdmWavelength7 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength7.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength7.setDescription('CWDM Card wavelength 7')
cwdmWavelength8 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength8.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength8.setDescription('CWDM Card wavelength 8')
mc10G_OEO2RObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17)).setLabel("mc10G-OEO2RObjects")
mc10G_OEO2RCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1)).setLabel("mc10G-OEO2RCardObjects")
mc10G_OEO2RCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1), ).setLabel("mc10G-OEO2RCardTable")
if mibBuilder.loadTexts: mc10G_OEO2RCardTable.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RCardTable.setDescription('MC 10G OEO 2R Configuration table')
mc10G_OEO2RCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1), ).setLabel("mc10G-OEO2RCardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc10G_OEO2RCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RCardEntry.setDescription('MC Configuration entry definition')
mc10G_OEO2RCurSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("Speed85", 1), ("Speed103to117", 2), ("Speed995to113", 3), ("not-support", 4)))).setLabel("mc10G-OEO2RCurSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2RCurSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEO2RCurSpdMode.setDescription("Center card's current speed mode 1: 8.5G 2: 10.3G-11.7G 3: 9.95G-11.3G ")
mc10G_OEO2RCfgSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("Speed85", 1), ("Speed103to117", 2), ("Speed995to113", 3), ("not-support", 4)))).setLabel("mc10G-OEO2RCfgSpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEO2RCfgSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEO2RCfgSpdMode.setDescription("Center card's configured speed mode 1: 8.5G 2: 10.3G-11.7G 3: 9.95G-11.3G ")
mc10G_OEO2RSFP1Loopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RSFP1Loopback").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEO2RSFP1Loopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RSFP1Loopback.setDescription("card's SFP1 Loopback state")
mc10G_OEO2RSFP2Loopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RSFP2Loopback").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEO2RSFP2Loopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RSFP2Loopback.setDescription("card's SFP2 Loopback state")
mc10G_OEO2RSFP1 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RSFP1").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2RSFP1.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RSFP1.setDescription("Center card's SFP1 link status")
mc10G_OEO2RSFP2 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RSFP2").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2RSFP2.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RSFP2.setDescription("Center card's SFP2 link status")
mc10G_OEO2RHWSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("Speed85", 1), ("Speed103to117", 2), ("Speed995to113", 3), ("not-support", 4)))).setLabel("mc10G-OEO2RHWSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2RHWSpdMode.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RHWSpdMode.setDescription("Center card's hardware speed mode 1: 8.5G 2: 10.3G-11.7G 3: 9.95G-11.3G ")
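# Display helper (illustrative): the mc10G-OEO2RCurSpdMode, mc10G-OEO2RCfgSpdMode and
# mc10G-OEO2RHWSpdMode objects above all use the same enumeration, documented in their
# descriptions; this table maps the raw values to readable rate ranges.
_MC10G_OEO2R_SPEED_MODE_LABELS = {
    1: '8.5 Gbit/s',
    2: '10.3 - 11.7 Gbit/s',
    3: '9.95 - 11.3 Gbit/s',
    4: 'not supported',
}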
mc10G_OEO2RHWSFP1Loopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RHWSFP1Loopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2RHWSFP1Loopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RHWSFP1Loopback.setDescription("card's HW Loopback state")
mc10G_OEO2RHWSFP2Loopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RHWSFP2Loopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2RHWSFP2Loopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RHWSFP2Loopback.setDescription("card's HW Loopback state")
mc10G_OEO2RVersion = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 10), DisplayString()).setLabel("mc10G-OEO2RVersion").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2RVersion.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEO2RVersion.setDescription('MC version')
mc10GXFP1WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10GXFP1WaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: mc10GXFP1WaveLengthTunability.setDescription("XFP1's wavelength tunability")
mc10GXFP1WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10GXFP1WaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: mc10GXFP1WaveLengthTunable.setDescription("XFP1's wavelength tunable status")
mc10GXFP1WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 13), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10GXFP1WaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: mc10GXFP1WaveLength.setDescription("XFP1's wavelength")
mc10GXFP2WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10GXFP2WaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: mc10GXFP2WaveLengthTunability.setDescription("XFP2's wavelength tunability")
mc10GXFP2WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10GXFP2WaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: mc10GXFP2WaveLengthTunable.setDescription("XFP2's wavelength tunable status")
mc10GXFP2WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 16), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10GXFP2WaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: mc10GXFP2WaveLength.setDescription("XFP2's wavelength")
mc10G_OEO2R_accType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mc10G-OEO2R-accType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2R_accType.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2R_accType.setDescription('')
mc10G_OEO2R_ntwType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mc10G-OEO2R-ntwType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2R_ntwType.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2R_ntwType.setDescription('')
mc10G_OEO2R_accTunableType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setLabel("mc10G-OEO2R-accTunableType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2R_accTunableType.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEO2R_accTunableType.setDescription("XFP1's wavelength tunable type")
mc10G_OEO2R_ntwTunableType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setLabel("mc10G-OEO2R-ntwTunableType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2R_ntwTunableType.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEO2R_ntwTunableType.setDescription("XFP2's wavelength tunable type")
mcQCA8334Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18))
mcQCA8334CardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 1))
mcQCA8334CardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 1, 1), )
if mibBuilder.loadTexts: mcQCA8334CardTable.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334CardTable.setDescription('MC QCA8334 Configuration table')
mcQCA8334CardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcQCA8334CardEntry.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334CardEntry.setDescription('MC Configuration entry definition')
mcQCA8334VlanMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("Normal", 1), ("mode1", 2), ("mode2", 3), ("not-support", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcQCA8334VlanMode.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334VlanMode.setDescription("Center card's vlan mode")
mcQCA8334PortObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2))
mcQCA8334PortTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1), )
if mibBuilder.loadTexts: mcQCA8334PortTable.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334PortTable.setDescription('MC QCA8334 Configuration table')
mcQCA8334PortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcQCA8334PortIdx"))
if mibBuilder.loadTexts: mcQCA8334PortEntry.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334PortEntry.setDescription('MC Configuration entry definition')
mcQCA8334PortIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("port1", 1), ("port2", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcQCA8334PortIdx.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334PortIdx.setDescription('Port index')
mcQCA8334CurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcQCA8334CurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcQCA8334CurWorkMode.setDescription("Center card's port current work mode")
mcQCA8334CfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcQCA8334CfgWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcQCA8334CfgWorkMode.setDescription("Center card's port configurable work mode")
mcQCA8334UpStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 4), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(32, 1000000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcQCA8334UpStream.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334UpStream.setDescription("Center card's port upstream of MC")
mcQCA8334DownStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 5), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(32, 1000000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcQCA8334DownStream.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334DownStream.setDescription("Center card's port downstream of MC")
mcQCA8334Txlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcQCA8334Txlink.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334Txlink.setDescription("Center card's electrical port's link status")
mcQCA8334RmtCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcQCA8334RmtCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcQCA8334RmtCurWorkMode.setDescription("Remote card's port 1 current work mode")
mcQCA8334RmtCfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcQCA8334RmtCfgWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcQCA8334RmtCfgWorkMode.setDescription("Remote card's port1 configurable work mode")
mcQCA8334RmtTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcQCA8334RmtTxlink.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334RmtTxlink.setDescription("Remote card's electrical port's link status")
mcE1T1Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19))
mcE1T1CardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1))
mcE1T1CardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1), )
if mibBuilder.loadTexts: mcE1T1CardTable.setStatus('current')
if mibBuilder.loadTexts: mcE1T1CardTable.setDescription('MC E1T1 Configuration table')
mcE1T1CardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcE1T1CardEntry.setStatus('current')
if mibBuilder.loadTexts: mcE1T1CardEntry.setDescription('MC Configuration entry definition')
mcE1T1Type = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("E1", 1), ("T1", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1Type.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1T1Type.setDescription("Center card's current type")
mcE1T1FLink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Up", 1), ("Down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1FLink.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1T1FLink.setDescription("Center card's current link")
mcE1T1FLossAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1FLossAlarm.setStatus('current')
if mibBuilder.loadTexts: mcE1T1FLossAlarm.setDescription('')
mcE1T1TLossAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1TLossAlarm.setStatus('current')
if mibBuilder.loadTexts: mcE1T1TLossAlarm.setDescription('')
mcE1T1AISAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1AISAlarm.setStatus('current')
if mibBuilder.loadTexts: mcE1T1AISAlarm.setDescription('')
mcE1T1TLoop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1T1TLoop.setStatus('current')
if mibBuilder.loadTexts: mcE1T1TLoop.setDescription('Tx Loopback state')
mcE1T1FLoop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1T1FLoop.setStatus('current')
if mibBuilder.loadTexts: mcE1T1FLoop.setDescription('Fx Loopback state')
mcE1T1CodeType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("E1-HDB3-Or-T1-B8ZS", 1), ("AMI", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1T1CodeType.setStatus('current')
if mibBuilder.loadTexts: mcE1T1CodeType.setDescription('coding type')
mcE1T1Version = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1Version.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1T1Version.setDescription('MC version')
mcE1T1RmtFLink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Up", 1), ("Down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1RmtFLink.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1T1RmtFLink.setDescription("Remote card's current link")
mcE1T1RmtFLossAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1RmtFLossAlarm.setStatus('current')
if mibBuilder.loadTexts: mcE1T1RmtFLossAlarm.setDescription('')
mcE1T1RmtTLossAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1RmtTLossAlarm.setStatus('current')
if mibBuilder.loadTexts: mcE1T1RmtTLossAlarm.setDescription('')
mcE1T1RmtAISAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1RmtAISAlarm.setStatus('current')
if mibBuilder.loadTexts: mcE1T1RmtAISAlarm.setDescription('')
mcE1T1RmtTLoop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1T1RmtTLoop.setStatus('current')
if mibBuilder.loadTexts: mcE1T1RmtTLoop.setDescription('Tx Loopback state')
mcE1T1RmtFLoop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1T1RmtFLoop.setStatus('current')
if mibBuilder.loadTexts: mcE1T1RmtFLoop.setDescription('Fx Loopback state')
mcE1T1RmtCodeType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("E1-HDB3-Or-T1-B8ZS", 1), ("AMI", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1T1RmtCodeType.setStatus('current')
if mibBuilder.loadTexts: mcE1T1RmtCodeType.setDescription('coding type')
mc10GOEEXFPTunableObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20))
mc10GOEEXFPTunableCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1))
mc10GOEEXFPTunableCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1), )
if mibBuilder.loadTexts: mc10GOEEXFPTunableCardTable.setStatus('current')
if mibBuilder.loadTexts: mc10GOEEXFPTunableCardTable.setDescription('MC 10G OEE tunable wavelength Configuration table')
mc10GOEEXFPTunableCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc10GOEEXFPTunableCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc10GOEEXFPTunableCardEntry.setDescription('MC Configuration entry definition')
xfpWaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: xfpWaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: xfpWaveLengthTunability.setDescription("XFP's wavelength tunability")
xfpWaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: xfpWaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: xfpWaveLengthTunable.setDescription("XFP's wavelength tunable status")
xfpWaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: xfpWaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: xfpWaveLength.setDescription("XFP's wavelength")
xfpTunableType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: xfpTunableType.setStatus('mandatory')
if mibBuilder.loadTexts: xfpTunableType.setDescription("XFP's wavelength tunable type")
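# --- Illustrative helper (not part of the generated MIB objects above) ---
# The tunable-XFP columns in mc10GOEEXFPTunableCardEntry are indexed by mcShelfIdx and
# mcCardIdx, so a manager addresses one card's xfpWaveLength instance by appending those
# two index values to the column OID. This is only a sketch of that OID arithmetic; the
# shelf/card numbers are whatever the caller supplies.
def _xfpWaveLengthInstanceOid(shelfIdx, cardIdx):
    """Return the full instance OID (as a tuple) for xfpWaveLength of one shelf/card."""
    columnOid = (1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1, 1, 3)  # xfpWaveLength column
    return columnOid + (int(shelfIdx), int(cardIdx))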
mcPmObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3))
mcPmTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1), )
if mibBuilder.loadTexts: mcPmTable.setStatus('current')
if mibBuilder.loadTexts: mcPmTable.setDescription('MC Performance table')
mcPmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcPmEntry.setStatus('current')
if mibBuilder.loadTexts: mcPmEntry.setDescription('MC Performance entry definition')
mcRxByteHi = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRxByteHi.setStatus('current')
if mibBuilder.loadTexts: mcRxByteHi.setDescription('The total number of receive bytes (high)')
mcRxByteLo = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRxByteLo.setStatus('current')
if mibBuilder.loadTexts: mcRxByteLo.setDescription('The total number of receive bytes (low)')
mcTxByteHi = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcTxByteHi.setStatus('current')
if mibBuilder.loadTexts: mcTxByteHi.setDescription('The total number of transmit bytes (high)')
mcTxByteLo = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcTxByteLo.setStatus('current')
if mibBuilder.loadTexts: mcTxByteLo.setDescription('The total number of transmit bytes (low)')
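# --- Illustrative helper (not part of the generated MIB objects above) ---
# mcRxByteHi/mcRxByteLo and mcTxByteHi/mcTxByteLo split one byte counter into two
# Counter32 words. Assuming a plain high/low 32-bit word split, the 64-bit total is
# (hi << 32) | lo; this sketch only shows that combination.
def _combineCounter32Pair(hi, lo):
    """Combine a high/low Counter32 pair into a single 64-bit byte count."""
    return (int(hi) << 32) | (int(lo) & 0xFFFFFFFF)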
mcPmRest = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("idle", 1), ("reset", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcPmRest.setStatus('current')
if mibBuilder.loadTexts: mcPmRest.setDescription('reset counter')
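# --- Illustrative sketch (not part of the generated MIB objects above) ---
# Writing reset(2) to mcPmRest.<shelf>.<card> clears that card's performance counters.
# A minimal manager-side sketch, assuming pysnmp's hlapi (pysnmp 4.x) is available; the
# host and community strings are placeholders chosen by the caller.
def _resetPmCounters(host, community, shelfIdx, cardIdx):
    """Issue an SNMP SET of reset(2) on mcPmRest for one shelf/card (sketch only)."""
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, Integer32, setCmd)
    instanceOid = (1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1, 5, int(shelfIdx), int(cardIdx))
    errorIndication, errorStatus, _, _ = next(
        setCmd(SnmpEngine(), CommunityData(community), UdpTransportTarget((host, 161)),
               ContextData(), ObjectType(ObjectIdentity(instanceOid), Integer32(2))))
    return errorIndication or errorStatus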
shelf_Detected = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 1)).setLabel("shelf-Detected").setObjects(("XXX-MIB", "shelfIdx"))
if mibBuilder.loadTexts: shelf_Detected.setStatus('current')
if mibBuilder.loadTexts: shelf_Detected.setDescription('A slave shelf is detected (1~19)')
shelf_Lost = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 2)).setLabel("shelf-Lost").setObjects(("XXX-MIB", "shelfIdx"))
if mibBuilder.loadTexts: shelf_Lost.setStatus('current')
if mibBuilder.loadTexts: shelf_Lost.setDescription('A shelf is lost')
shelf_psuA_On = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 3)).setLabel("shelf-psuA-On").setObjects(("XXX-MIB", "shelfIdx"))
if mibBuilder.loadTexts: shelf_psuA_On.setStatus('current')
if mibBuilder.loadTexts: shelf_psuA_On.setDescription('PSU A is detected')
shelf_psuA_Off = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 4)).setLabel("shelf-psuA-Off").setObjects(("XXX-MIB", "shelfIdx"))
if mibBuilder.loadTexts: shelf_psuA_Off.setStatus('current')
if mibBuilder.loadTexts: shelf_psuA_Off.setDescription('PSU A is lost')
shelf_psuB_On = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 5)).setLabel("shelf-psuB-On").setObjects(("XXX-MIB", "shelfIdx"))
if mibBuilder.loadTexts: shelf_psuB_On.setStatus('current')
if mibBuilder.loadTexts: shelf_psuB_On.setDescription('PSU B is detected')
shelf_psuB_Off = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 6)).setLabel("shelf-psuB-Off").setObjects(("XXX-MIB", "shelfIdx"))
if mibBuilder.loadTexts: shelf_psuB_Off.setStatus('current')
if mibBuilder.loadTexts: shelf_psuB_Off.setDescription('PSU B is lost')
shelf_fan_On = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 7)).setLabel("shelf-fan-On").setObjects(("XXX-MIB", "shelfIdx"))
if mibBuilder.loadTexts: shelf_fan_On.setStatus('current')
if mibBuilder.loadTexts: shelf_fan_On.setDescription('Fan A is detected')
shelf_fan_Off = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 8)).setLabel("shelf-fan-Off").setObjects(("XXX-MIB", "shelfIdx"))
if mibBuilder.loadTexts: shelf_fan_Off.setStatus('current')
if mibBuilder.loadTexts: shelf_fan_Off.setDescription('Fan A is lost')
card_Detected = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 20)).setLabel("card-Detected").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_Detected.setStatus('current')
if mibBuilder.loadTexts: card_Detected.setDescription('A card is detected (20~29)')
card_Lost = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 21)).setLabel("card-Lost").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_Lost.setStatus('current')
if mibBuilder.loadTexts: card_Lost.setDescription('A card is lost')
card_MC_Co_Tx_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 30)).setLabel("card-MC-Co-Tx-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Tx_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Tx_Up.setDescription('The tx link of mc in center side is up (above 30)')
card_MC_Co_Tx_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 31)).setLabel("card-MC-Co-Tx-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Tx_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Tx_Down.setDescription('The tx link of mc in center side is broken')
card_MC_Co_Fx_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 32)).setLabel("card-MC-Co-Fx-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Fx_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Fx_Up.setDescription('The fx link of mc in center side is up')
card_MC_Co_Fx_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 33)).setLabel("card-MC-Co-Fx-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Fx_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Fx_Down.setDescription('The fx link of mc in center side is broken')
card_MC_Rmt_Tx_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 34)).setLabel("card-MC-Rmt-Tx-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up.setDescription('The tx link of mc in customer side is up')
card_MC_Rmt_Tx_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 35)).setLabel("card-MC-Rmt-Tx-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down.setDescription('The tx link of mc in customer side is broken')
card_MC_Rmt_PwrDown = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 36)).setLabel("card-MC-Rmt-PwrDown").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_PwrDown.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_PwrDown.setDescription('Remote mc power down detected')
card_MC_Co_Ntw_SFP_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 37)).setLabel("card-MC-Co-Ntw-SFP-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Ntw_SFP_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Ntw_SFP_Inserted.setDescription('Local network port SFP inserted')
card_MC_Co_Ntw_SFP_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 38)).setLabel("card-MC-Co-Ntw-SFP-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Ntw_SFP_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Ntw_SFP_Removed.setDescription('Local network port SFP removed')
card_MC_Co_Acc_SFP_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 39)).setLabel("card-MC-Co-Acc-SFP-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Acc_SFP_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Acc_SFP_Inserted.setDescription('Local access port SFP inserted')
card_MC_Co_Acc_SFP_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 40)).setLabel("card-MC-Co-Acc-SFP-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Acc_SFP_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Acc_SFP_Removed.setDescription('Local access port SFP removed')
card_MC_Rmt_Acc_SFP_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 41)).setLabel("card-MC-Rmt-Acc-SFP-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_Acc_SFP_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_Acc_SFP_Inserted.setDescription('Remote access port SFP inserted')
card_MC_Rmt_Acc_SFP_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 42)).setLabel("card-MC-Rmt-Acc-SFP-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_Acc_SFP_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_Acc_SFP_Removed.setDescription('Remote access port SFP removed')
card_MC_Co_Tx_Up1 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 43)).setLabel("card-MC-Co-Tx-Up1").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Tx_Up1.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Tx_Up1.setDescription('The tx1 link of mc in center side is up')
card_MC_Co_Tx_Down1 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 44)).setLabel("card-MC-Co-Tx-Down1").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Tx_Down1.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Tx_Down1.setDescription('The tx1 link of mc in center side is broken')
card_MC_Co_Tx_Up2 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 45)).setLabel("card-MC-Co-Tx-Up2").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Tx_Up2.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Tx_Up2.setDescription('The tx2 link of mc in center side is up')
card_MC_Co_Tx_Down2 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 46)).setLabel("card-MC-Co-Tx-Down2").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Tx_Down2.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Tx_Down2.setDescription('The tx2 link of mc in center side is broken')
card_MC_Rmt_Tx_Up1 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 47)).setLabel("card-MC-Rmt-Tx-Up1").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up1.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up1.setDescription('The tx1 link of mc in customer side is up')
card_MC_Rmt_Tx_Down1 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 48)).setLabel("card-MC-Rmt-Tx-Down1").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down1.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down1.setDescription('The tx1 link of mc in customer side is broken')
card_MC_Rmt_Tx_Up2 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 49)).setLabel("card-MC-Rmt-Tx-Up2").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up2.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up2.setDescription('The tx2 link of mc in customer side is up')
card_MC_Rmt_Tx_Down2 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 50)).setLabel("card-MC-Rmt-Tx-Down2").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down2.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down2.setDescription('The tx2 link of mc in customer side is broken')
card_MC_Co_SFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 51)).setLabel("card-MC-Co-SFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP1_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP1_Inserted.setDescription('Local SFP1 inserted')
card_MC_Co_SFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 52)).setLabel("card-MC-Co-SFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP1_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP1_Removed.setDescription('Local SFP1 removed')
card_MC_Co_SFP2_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 53)).setLabel("card-MC-Co-SFP2-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP2_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP2_Inserted.setDescription('Local SFP2 inserted')
card_MC_Co_SFP2_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 54)).setLabel("card-MC-Co-SFP2-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP2_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP2_Removed.setDescription('Local SFP2 removed')
card_MC_Co_SFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 55)).setLabel("card-MC-Co-SFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP1_Up.setDescription('The SFP1 link of mc in center side is up')
card_MC_Co_SFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 56)).setLabel("card-MC-Co-SFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP1_Down.setDescription('The SFP1 link of mc in center side is broken')
card_MC_Co_SFP2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 57)).setLabel("card-MC-Co-SFP2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP2_Up.setDescription('The SFP2 link of mc in center side is up')
card_MC_Co_SFP2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 58)).setLabel("card-MC-Co-SFP2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP2_Down.setDescription('The SFP2 link of mc in center side is broken')
card_MC_Rmt_SFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 59)).setLabel("card-MC-Rmt-SFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Inserted.setDescription('Remote SFP1 inserted')
card_MC_Rmt_SFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 60)).setLabel("card-MC-Rmt-SFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Removed.setDescription('Remote SFP1 removed')
card_MC_Rmt_SFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 61)).setLabel("card-MC-Rmt-SFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Up.setDescription('The SFP1 link of mc in customer side is up')
card_MC_Rmt_SFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 62)).setLabel("card-MC-Rmt-SFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Down.setDescription('The SFP1 link of mc in customer side is broken')
card_MC_Co_SFPSFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 63)).setLabel("card-MC-Co-SFPSFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Inserted.setDescription('Local SFP+1 inserted')
card_MC_Co_SFPSFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 64)).setLabel("card-MC-Co-SFPSFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Removed.setDescription('Local SFP+1 removed')
card_MC_Co_SFPSFP2_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 65)).setLabel("card-MC-Co-SFPSFP2-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Inserted.setDescription('Local SFP+2 inserted')
card_MC_Co_SFPSFP2_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 66)).setLabel("card-MC-Co-SFPSFP2-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Removed.setDescription('Local SFP+2 removed')
card_MC_Rmt_SFPSFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 67)).setLabel("card-MC-Rmt-SFPSFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Inserted.setDescription('Remote SFP+1 inserted')
card_MC_Rmt_SFPSFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 68)).setLabel("card-MC-Rmt-SFPSFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Removed.setDescription('Remote SFP+1 removed')
card_MC_Co_XFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 69)).setLabel("card-MC-Co-XFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_XFP1_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_XFP1_Inserted.setDescription('Local XFP+1 inserted')
card_MC_Co_XFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 70)).setLabel("card-MC-Co-XFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_XFP1_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_XFP1_Removed.setDescription('Local XFP+1 removed')
card_MC_Co_XFP2_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 71)).setLabel("card-MC-Co-XFP2-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_XFP2_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_XFP2_Inserted.setDescription('Local XFP2 inserted')
card_MC_Co_XFP2_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 72)).setLabel("card-MC-Co-XFP2-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_XFP2_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_XFP2_Removed.setDescription('Local XFP+2 removed')
card_MC_Rmt_XFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 73)).setLabel("card-MC-Rmt-XFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Inserted.setDescription('Remote XFP+1 inserted')
card_MC_Rmt_XFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 74)).setLabel("card-MC-Rmt-XFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Removed.setDescription('Remote XFP+1 removed')
card_MC_Co_SFPSFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 75)).setLabel("card-MC-Co-SFPSFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Up.setDescription('The SFP+1 link of mc in center side is up')
card_MC_Co_SFPSFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 76)).setLabel("card-MC-Co-SFPSFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Down.setDescription('The SFP+1 link of mc in center side is broken')
card_MC_Co_SFPSFP2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 77)).setLabel("card-MC-Co-SFPSFP2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Up.setDescription('The SFP+2 link of mc in center side is up')
card_MC_Co_SFPSFP2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 78)).setLabel("card-MC-Co-SFPSFP2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Down.setDescription('The SFP+2 link of mc in center side is broken')
card_MC_Rmt_SFPSFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 79)).setLabel("card-MC-Rmt-SFPSFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Up.setDescription('The SFP+1 link of mc in customer side is up')
card_MC_Rmt_SFPSFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 80)).setLabel("card-MC-Rmt-SFPSFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Down.setDescription('The SFP+1 link of mc in customer side is broken')
card_MC_Co_XFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 81)).setLabel("card-MC-Co-XFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_XFP1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_XFP1_Up.setDescription('The XFP1 link of mc in center side is up')
card_MC_Co_XFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 82)).setLabel("card-MC-Co-XFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_XFP1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_XFP1_Down.setDescription('The XFP1 link of mc in center side is broken')
card_MC_Co_XFP2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 83)).setLabel("card-MC-Co-XFP2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_XFP2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_XFP2_Up.setDescription('The XFP2 link of mc in center side is up')
card_MC_Co_XFP2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 84)).setLabel("card-MC-Co-XFP2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_XFP2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_XFP2_Down.setDescription('The XFP2 link of mc in center side is broken')
card_MC_Rmt_XFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 85)).setLabel("card-MC-Rmt-XFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Up.setDescription('The XFP1 link of mc in customer side is up')
card_MC_Rmt_XFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 86)).setLabel("card-MC-Rmt-XFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Down.setDescription('The XFP1 link of mc in customer side is broken')
card_MC_Co_SFP3_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 87)).setLabel("card-MC-Co-SFP3-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP3_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP3_Inserted.setDescription('Local SFP3 inserted')
card_MC_Co_SFP3_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 88)).setLabel("card-MC-Co-SFP3-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP3_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP3_Removed.setDescription('Local SFP3 removed')
card_MC_Co_Port1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 89)).setLabel("card-MC-Co-Port1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Port1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Port1_Up.setDescription('The Port1 link of mc in center side is up')
card_MC_Co_Port1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 90)).setLabel("card-MC-Co-Port1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Port1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Port1_Down.setDescription('The Port1 link of mc in center side is broken')
card_MC_Co_Port2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 91)).setLabel("card-MC-Co-Port2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Port2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Port2_Up.setDescription('The Port2 link of mc in center side is up')
card_MC_Co_Port2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 92)).setLabel("card-MC-Co-Port2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Port2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Port2_Down.setDescription('The Port2 link of mc in center side is broken')
card_MC_Co_Port3_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 93)).setLabel("card-MC-Co-Port3-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Port3_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Port3_Up.setDescription('The Port3 link of mc in center side is up')
card_MC_Co_Port3_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 94)).setLabel("card-MC-Co-Port3-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Port3_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Port3_Down.setDescription('The Port3 link of mc in center side is broken')
card_MC_FAN_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 100)).setLabel("card-MC-FAN-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_FAN_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_FAN_Normal.setDescription('Fan card works normally')
card_MC_FAN_Abnormal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 101)).setLabel("card-MC-FAN-Abnormal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_FAN_Abnormal.setStatus('current')
if mibBuilder.loadTexts: card_MC_FAN_Abnormal.setDescription('Fan card works abnormally')
card_MC_Co_QSFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 102)).setLabel("card-MC-Co-QSFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Inserted.setDescription('Local QSFP1 inserted')
card_MC_Co_QSFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 103)).setLabel("card-MC-Co-QSFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Removed.setDescription('Local QSFP1 removed')
card_MC_Co_QSFP2_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 104)).setLabel("card-MC-Co-QSFP2-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Inserted.setDescription('Local QSFP2 inserted')
card_MC_Co_QSFP2_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 105)).setLabel("card-MC-Co-QSFP2-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Removed.setDescription('Local QSFP2 removed')
card_MC_Co_QSFP1_Lane1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 106)).setLabel("card-MC-Co-QSFP1-Lane1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane1_Up.setDescription('The QSFP1 Lane1 link of mc in center side is up')
card_MC_Co_QSFP1_Lane1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 107)).setLabel("card-MC-Co-QSFP1-Lane1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane1_Down.setDescription('The QSFP1 lane1 link of mc in center side is broken')
card_MC_Co_QSFP1_Lane2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 108)).setLabel("card-MC-Co-QSFP1-Lane2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane2_Up.setDescription('The QSFP1 Lane2 link of mc in center side is up')
card_MC_Co_QSFP1_Lane2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 109)).setLabel("card-MC-Co-QSFP1-Lane2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane2_Down.setDescription('The QSFP1 lane2 link of mc in center side is broken')
card_MC_Co_QSFP1_Lane3_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 110)).setLabel("card-MC-Co-QSFP1-Lane3-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane3_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane3_Up.setDescription('The QSFP1 Lane3 link of mc in center side is up')
card_MC_Co_QSFP1_Lane3_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 111)).setLabel("card-MC-Co-QSFP1-Lane3-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane3_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane3_Down.setDescription('The QSFP1 lane3 link of mc in center side is broken')
card_MC_Co_QSFP1_Lane4_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 112)).setLabel("card-MC-Co-QSFP1-Lane4-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane4_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane4_Up.setDescription('The QSFP1 Lane4 link of mc in center side is up')
card_MC_Co_QSFP1_Lane4_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 113)).setLabel("card-MC-Co-QSFP1-Lane4-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane4_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane4_Down.setDescription('The QSFP1 lane4 link of mc in center side is broken')
card_MC_Co_QSFP2_Lane1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 114)).setLabel("card-MC-Co-QSFP2-Lane1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane1_Up.setDescription('The QSFP2 Lane1 link of mc in center side is up')
card_MC_Co_QSFP2_Lane1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 115)).setLabel("card-MC-Co-QSFP2-Lane1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane1_Down.setDescription('The QSFP2 lane1 link of mc in center side is broken')
card_MC_Co_QSFP2_Lane2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 116)).setLabel("card-MC-Co-QSFP2-Lane2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane2_Up.setDescription('The QSFP2 Lane2 link of mc in center side is up')
card_MC_Co_QSFP2_Lane2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 117)).setLabel("card-MC-Co-QSFP2-Lane2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane2_Down.setDescription('The QSFP2 lane2 link of mc in center side is broken')
card_MC_Co_QSFP2_Lane3_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 118)).setLabel("card-MC-Co-QSFP2-Lane3-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane3_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane3_Up.setDescription('The QSFP2 Lane3 link of mc in center side is up')
card_MC_Co_QSFP2_Lane3_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 119)).setLabel("card-MC-Co-QSFP2-Lane3-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane3_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane3_Down.setDescription('The QSFP2 lane3 link of mc in center side is broken')
card_MC_Co_QSFP2_Lane4_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 120)).setLabel("card-MC-Co-QSFP2-Lane4-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane4_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane4_Up.setDescription('The QSFP2 Lane4 link of mc in center side is up')
card_MC_Co_QSFP2_Lane4_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 121)).setLabel("card-MC-Co-QSFP2-Lane4-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane4_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane4_Down.setDescription('The QSFP2 lane4 link of mc in center side is broken')
card_MC_Rmt_SFP2_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 122)).setLabel("card-MC-Rmt-SFP2-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Inserted.setDescription('Remote SFP2 inserted')
card_MC_Rmt_SFP2_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 123)).setLabel("card-MC-Rmt-SFP2-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Removed.setDescription('Remote SFP2 removed')
card_MC_Rmt_SFP3_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 124)).setLabel("card-MC-Rmt-SFP3-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Inserted.setDescription('Remote SFP3 inserted')
card_MC_Rmt_SFP3_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 125)).setLabel("card-MC-Rmt-SFP3-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Removed.setDescription('Remote SFP3 removed')
card_MC_Rmt_SFP2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 126)).setLabel("card-MC-Rmt-SFP2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Up.setDescription('The SFP2 link of mc in customer side is up')
card_MC_Rmt_SFP2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 127)).setLabel("card-MC-Rmt-SFP2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Down.setDescription('The SFP2 link of mc in customer side is broken')
card_MC_Rmt_SFP3_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 128)).setLabel("card-MC-Rmt-SFP3-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Up.setDescription('The SFP3 link of mc in customer side is up')
card_MC_Rmt_SFP3_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 129)).setLabel("card-MC-Rmt-SFP3-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Down.setDescription('The SFP3 link of mc in customer side is broken')
card_MC_E1_Co_Port1_LOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 130)).setLabel("card-MC-E1-Co-Port1-LOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_LOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_LOS_Alarm.setDescription('Port1 LOS alarm in center side')
card_MC_E1_Co_Port1_LOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 131)).setLabel("card-MC-E1-Co-Port1-LOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_LOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_LOS_Normal.setDescription('Port1 LOS normal in center side')
card_MC_E1_Co_Port1_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 132)).setLabel("card-MC-E1-Co-Port1-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_AIS_Alarm.setDescription('Port1 AIS alarm in center side')
card_MC_E1_Co_Port1_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 133)).setLabel("card-MC-E1-Co-Port1-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_AIS_Normal.setDescription('Port1 AIS normal in center side')
card_MC_E1_Co_Port1_CV_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 134)).setLabel("card-MC-E1-Co-Port1-CV-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_CV_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_CV_Alarm.setDescription('Port1 CV alarm in center side')
card_MC_E1_Co_Port1_CV_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 135)).setLabel("card-MC-E1-Co-Port1-CV-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_CV_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_CV_Normal.setDescription('Port1 CV normal in center side')
card_MC_E1_Co_Port2_LOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 136)).setLabel("card-MC-E1-Co-Port2-LOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_LOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_LOS_Alarm.setDescription('Port2 LOS alarm in center side')
card_MC_E1_Co_Port2_LOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 137)).setLabel("card-MC-E1-Co-Port2-LOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_LOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_LOS_Normal.setDescription('Port2 LOS normal in center side')
card_MC_E1_Co_Port2_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 138)).setLabel("card-MC-E1-Co-Port2-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_AIS_Alarm.setDescription('Port2 AIS alarm in center side')
card_MC_E1_Co_Port2_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 139)).setLabel("card-MC-E1-Co-Port2-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_AIS_Normal.setDescription('Port2 AIS normal in center side')
card_MC_E1_Co_Port2_CV_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 140)).setLabel("card-MC-E1-Co-Port2-CV-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_CV_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_CV_Alarm.setDescription('Port2 CV alarm in center side')
card_MC_E1_Co_Port2_CV_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 141)).setLabel("card-MC-E1-Co-Port2-CV-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_CV_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_CV_Normal.setDescription('Port2 CV normal in center side')
card_MC_E1_Rmt_Port1_LOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 142)).setLabel("card-MC-E1-Rmt-Port1-LOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_LOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_LOS_Alarm.setDescription('Port1 LOS alarm in customer side')
card_MC_E1_Rmt_Port1_LOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 143)).setLabel("card-MC-E1-Rmt-Port1-LOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_LOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_LOS_Normal.setDescription('Port1 LOS normal in customer side')
card_MC_E1_Rmt_Port1_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 144)).setLabel("card-MC-E1-Rmt-Port1-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_AIS_Alarm.setDescription('Port1 AIS alarm in customer side')
card_MC_E1_Rmt_Port1_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 145)).setLabel("card-MC-E1-Rmt-Port1-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_AIS_Normal.setDescription('Port1 AIS normal in customer side')
card_MC_E1_Rmt_Port1_CV_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 146)).setLabel("card-MC-E1-Rmt-Port1-CV-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_CV_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_CV_Alarm.setDescription('Port1 CV alarm in customer side')
card_MC_E1_Rmt_Port1_CV_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 147)).setLabel("card-MC-E1-Rmt-Port1-CV-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_CV_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_CV_Normal.setDescription('Port1 CV normal in customer side')
card_MC_E1_Rmt_Port2_LOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 148)).setLabel("card-MC-E1-Rmt-Port2-LOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_LOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_LOS_Alarm.setDescription('Port2 LOS alarm in customer side')
card_MC_E1_Rmt_Port2_LOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 149)).setLabel("card-MC-E1-Rmt-Port2-LOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_LOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_LOS_Normal.setDescription('Port2 LOS normal in customer side')
card_MC_E1_Rmt_Port2_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 150)).setLabel("card-MC-E1-Rmt-Port2-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_AIS_Alarm.setDescription('Port2 AIS alarm in customer side')
card_MC_E1_Rmt_Port2_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 151)).setLabel("card-MC-E1-Rmt-Port2-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_AIS_Normal.setDescription('Port2 AIS normal in customer side')
card_MC_E1_Rmt_Port2_CV_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 152)).setLabel("card-MC-E1-Rmt-Port2-CV-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_CV_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_CV_Alarm.setDescription('Port2 CV alarm in customer side')
card_MC_E1_Rmt_Port2_CV_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 153)).setLabel("card-MC-E1-Rmt-Port2-CV-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_CV_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_CV_Normal.setDescription('Port2 CV normal in customer side')
card_MC_Co_SFP3_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 154)).setLabel("card-MC-Co-SFP3-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP3_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP3_Up.setDescription('The SFP3 link of mc in center side is up')
card_MC_Co_SFP3_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 155)).setLabel("card-MC-Co-SFP3-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP3_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP3_Down.setDescription('The SFP3 link of mc in center side is broken')
card_MC_E1T1_Co_TXLOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 156)).setLabel("card-MC-E1T1-Co-TXLOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_TXLOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_TXLOS_Alarm.setDescription('Tx LOS alarm in center side')
card_MC_E1T1_Co_TXLOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 157)).setLabel("card-MC-E1T1-Co-TXLOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_TXLOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_TXLOS_Normal.setDescription('Tx LOS normal in center side')
card_MC_E1T1_Co_FXLOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 158)).setLabel("card-MC-E1T1-Co-FXLOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_FXLOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_FXLOS_Alarm.setDescription('Fx LOS alarm in center side')
card_MC_E1T1_Co_FXLOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 159)).setLabel("card-MC-E1T1-Co-FXLOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_FXLOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_FXLOS_Normal.setDescription('Fx LOS normal in center side')
card_MC_E1T1_Co_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 160)).setLabel("card-MC-E1T1-Co-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_AIS_Alarm.setDescription('AIS alarm in center side')
card_MC_E1T1_Co_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 161)).setLabel("card-MC-E1T1-Co-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_AIS_Normal.setDescription('AIS normal in center side')
card_MC_E1T1_Rmt_TXLOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 162)).setLabel("card-MC-E1T1-Rmt-TXLOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_TXLOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_TXLOS_Alarm.setDescription('Tx LOS alarm in customer side')
card_MC_E1T1_Rmt_TXLOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 163)).setLabel("card-MC-E1T1-Rmt-TXLOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_TXLOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_TXLOS_Normal.setDescription('Tx LOS normal in customer side')
card_MC_E1T1_Rmt_FXLOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 164)).setLabel("card-MC-E1T1-Rmt-FXLOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_FXLOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_FXLOS_Alarm.setDescription('Fx LOS alarm in customer side')
card_MC_E1T1_Rmt_FXLOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 165)).setLabel("card-MC-E1T1-Rmt-FXLOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_FXLOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_FXLOS_Normal.setDescription('Fx LOS normal in customer side')
card_MC_E1T1_Rmt_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 166)).setLabel("card-MC-E1T1-Rmt-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_AIS_Alarm.setDescription('AIS alarm in customer side')
card_MC_E1T1_Rmt_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 167)).setLabel("card-MC-E1T1-Rmt-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_AIS_Normal.setDescription('AIS normal in customer side')
mibBuilder.exportSymbols("XXX-MIB", mc40G_OEOLane2LoopMode=mc40G_OEOLane2LoopMode, company=company, mcFanCardObjects=mcFanCardObjects, mc10G_OEECurSpd=mc10G_OEECurSpd, card_MC_Co_QSFP1_Lane2_Up=card_MC_Co_QSFP1_Lane2_Up, mc4_25G_OEONtwPD=mc4_25G_OEONtwPD, mcCm1gSfpEntry=mcCm1gSfpEntry, card_MC_Co_QSFP1_Lane1_Down=card_MC_Co_QSFP1_Lane1_Down, mc10G_OEO2RObjects=mc10G_OEO2RObjects, mcTransceiverDist=mcTransceiverDist, mc2_5GMCCardObjects=mc2_5GMCCardObjects, mcPortState=mcPortState, mc1GE2OPort2SFPExist=mc1GE2OPort2SFPExist, shelfNum=shelfNum, mcIP175DCurWorkMode=mcIP175DCurWorkMode, card_MC_Rmt_Tx_Up1=card_MC_Rmt_Tx_Up1, mc1go2o_sfpTransCode=mc1go2o_sfpTransCode, sysName=sysName, psuB=psuB, ntwXFP2WaveLength=ntwXFP2WaveLength, mcAccXFP1WaveLengthTunability=mcAccXFP1WaveLengthTunability, mc10G_OEO_Test_Error_Counter=mc10G_OEO_Test_Error_Counter, mc2_5g_sfpWavelength=mc2_5g_sfpWavelength, mc2_5g_sfpCompliance=mc2_5g_sfpCompliance, card_MC_E1_Co_Port1_CV_Alarm=card_MC_E1_Co_Port1_CV_Alarm, mcIP175DCardTable=mcIP175DCardTable, mc4_25G_OEO_Test_Result=mc4_25G_OEO_Test_Result, mc40G_OEOHWLoopMode=mc40G_OEOHWLoopMode, mcE1T1RmtFLink=mcE1T1RmtFLink, ntwXFP2WaveLengthTunability=ntwXFP2WaveLengthTunability, card_MC_E1T1_Co_TXLOS_Normal=card_MC_E1T1_Co_TXLOS_Normal, mcCmEntry=mcCmEntry, mc1go2o_sfpCompliance=mc1go2o_sfpCompliance, shelfIdx=shelfIdx, trapHost1=trapHost1, mcCm1gSfpObjects=mcCm1gSfpObjects, volA=volA, getNtwQSfpCmd=getNtwQSfpCmd, mc1GO2OPortHWPri=mc1GO2OPortHWPri, card_MC_Co_SFP3_Up=card_MC_Co_SFP3_Up, mcE1Port2Loop=mcE1Port2Loop, xfpWaveLengthTunable=xfpWaveLengthTunable, rmtCardNum=rmtCardNum, shelf_fan_Off=shelf_fan_Off, mcDownStream=mcDownStream, mcE1SFP1Link=mcE1SFP1Link, mc10G_OEOCardEntry=mc10G_OEOCardEntry, mcAccXFP1WaveLengthTunable=mcAccXFP1WaveLengthTunable, nmuType=nmuType, mc10G_OEO_Get_Test_Rst=mc10G_OEO_Get_Test_Rst, mc1GE2ORmtPort1SFPlink=mc1GE2ORmtPort1SFPlink, slotObjects=slotObjects, mcIP175DObjects=mcIP175DObjects, card_MC_Co_Tx_Up1=card_MC_Co_Tx_Up1, mcRmtE1Port1CV=mcRmtE1Port1CV, mcRmtHWTransmitMode=mcRmtHWTransmitMode, mcIP175DTxlink=mcIP175DTxlink, mc10G_OEOCurSpdMode=mc10G_OEOCurSpdMode, mc10G_OEELoopMode=mc10G_OEELoopMode, mcE1T1FLossAlarm=mcE1T1FLossAlarm, shelf_psuA_On=shelf_psuA_On, mcQCA8334RmtCfgWorkMode=mcQCA8334RmtCfgWorkMode, shelfTable=shelfTable, ipaddr=ipaddr, mc1GE2OPort1SFPlink=mc1GE2OPort1SFPlink, mc10G_OEO_accType=mc10G_OEO_accType, card_MC_E1_Rmt_Port1_AIS_Normal=card_MC_E1_Rmt_Port1_AIS_Normal, sfpVoltage=sfpVoltage, mc10GOEO1RCardTable=mc10GOEO1RCardTable, mc10GXFP2WaveLengthTunable=mc10GXFP2WaveLengthTunable, mcRmtE1Port1Loop=mcRmtE1Port1Loop, mc4_25G_OEOCardEntry=mc4_25G_OEOCardEntry, card_MC_E1_Rmt_Port1_AIS_Alarm=card_MC_E1_Rmt_Port1_AIS_Alarm, mcRmt4_25G_OEOHWWorkMode=mcRmt4_25G_OEOHWWorkMode, mcCm1gIpEntry=mcCm1gIpEntry, cwdmWavelength5=cwdmWavelength5, mcE1Port1AIS=mcE1Port1AIS, accXFP1TunableType=accXFP1TunableType, mcRmtPwrDown=mcRmtPwrDown, mc10G_OEOObjects=mc10G_OEOObjects, card_MC_Rmt_Tx_Down=card_MC_Rmt_Tx_Down, mc10G_OEEHWLoopback=mc10G_OEEHWLoopback, card_MC_Co_Port3_Down=card_MC_Co_Port3_Down, card_MC_Co_SFP3_Down=card_MC_Co_SFP3_Down, mcQCA8334RmtTxlink=mcQCA8334RmtTxlink, card_MC_E1T1_Co_AIS_Normal=card_MC_E1T1_Co_AIS_Normal, mcRmtE1TxCurWorkMode=mcRmtE1TxCurWorkMode, qsfpAccRxPower4=qsfpAccRxPower4, mcRmt10G_OEOLoopback=mcRmt10G_OEOLoopback, card_MC_E1_Rmt_Port2_CV_Normal=card_MC_E1_Rmt_Port2_CV_Normal, card_MC_E1_Co_Port2_CV_Normal=card_MC_E1_Co_Port2_CV_Normal, mcTxByteLo=mcTxByteLo, mcCardIdx=mcCardIdx, 
card_MC_Rmt_SFP3_Up=card_MC_Rmt_SFP3_Up, card_MC_E1T1_Co_FXLOS_Alarm=card_MC_E1T1_Co_FXLOS_Alarm, mcRmt4_25G_OEOHWSpdMode=mcRmt4_25G_OEOHWSpdMode, card_MC_Co_QSFP1_Lane4_Up=card_MC_Co_QSFP1_Lane4_Up, mcUtility=mcUtility, mc4_25G_OEOObjects=mc4_25G_OEOObjects, mc10GOEO1RCardEntry=mc10GOEO1RCardEntry, getAccSfpCmd=getAccSfpCmd, mc1GO2OPort3SFPExist=mc1GO2OPort3SFPExist, mcRmtCfgWorkMode=mcRmtCfgWorkMode, mcNtwQSfpObjects=mcNtwQSfpObjects, mcShelfIdx=mcShelfIdx, mc40G_OEOCardEntry=mc40G_OEOCardEntry, mcE1T1CardTable=mcE1T1CardTable, accsfpRecvPower=accsfpRecvPower, mc10GXFP1WaveLengthTunability=mc10GXFP1WaveLengthTunability, cwdmWavelength7=cwdmWavelength7, cwdmWavelength1=cwdmWavelength1, mc40G_OEOQsfp2Lane3_link=mc40G_OEOQsfp2Lane3_link, mcE1Port2CV=mcE1Port2CV, mcAccQSfpObjects=mcAccQSfpObjects, mcRmtE1Port1AIS=mcRmtE1Port1AIS, mcIP175DCardEntry=mcIP175DCardEntry, accsfpTemperature=accsfpTemperature, mc10G_OEO2R_accTunableType=mc10G_OEO2R_accTunableType, card_MC_Co_QSFP2_Lane2_Up=card_MC_Co_QSFP2_Lane2_Up, mc4_25G_OEO_Test_Lock=mc4_25G_OEO_Test_Lock, mcRmtUtility=mcRmtUtility, mc2_5GMCSfp3Exist=mc2_5GMCSfp3Exist, mc4_25G_OEO_Start_Test=mc4_25G_OEO_Start_Test, qsfpAccTemperature=qsfpAccTemperature, card_MC_Rmt_Tx_Down2=card_MC_Rmt_Tx_Down2, sfpConnector=sfpConnector, mcFanObjects=mcFanObjects, card_MC_Co_SFP2_Inserted=card_MC_Co_SFP2_Inserted, mc1GE2OPortPri=mc1GE2OPortPri, mc1GO2OPort1SFPExist=mc1GO2OPort1SFPExist, card_MC_Rmt_SFP3_Down=card_MC_Rmt_SFP3_Down, mcRmt10G_OEOCfgSpdMode=mcRmt10G_OEOCfgSpdMode, mc1go2o_sfpWavelength=mc1go2o_sfpWavelength, card_MC_Co_SFP1_Removed=card_MC_Co_SFP1_Removed, mc40G_OEOQsfp1Lane1_link=mc40G_OEOQsfp1Lane1_link, card_MC_Rmt_XFP1_Up=card_MC_Rmt_XFP1_Up, card_MC_E1T1_Co_TXLOS_Alarm=card_MC_E1T1_Co_TXLOS_Alarm, sfpCopperLength=sfpCopperLength, mc2_5GMCCardTable=mc2_5GMCCardTable, mc2_5g_getSfpCmd=mc2_5g_getSfpCmd, mcE1T1RmtTLossAlarm=mcE1T1RmtTLossAlarm, trapHost3=trapHost3, mcRmtE1Port2LOS=mcRmtE1Port2LOS, card_MC_Co_SFPSFP1_Removed=card_MC_Co_SFPSFP1_Removed, card_MC_E1T1_Co_FXLOS_Normal=card_MC_E1T1_Co_FXLOS_Normal, mc1GO2OPort3SFPlink=mc1GO2OPort3SFPlink, mc1go2o_getSfpCmd=mc1go2o_getSfpCmd, sfpCompliance=sfpCompliance, card_MC_Co_QSFP2_Lane2_Down=card_MC_Co_QSFP2_Lane2_Down, mc10GXFP1WaveLength=mc10GXFP1WaveLength, mc10GOEO3RCardTable=mc10GOEO3RCardTable, mc1GO2ORmtPortHWPri=mc1GO2ORmtPortHWPri, card_MC_Rmt_Tx_Up2=card_MC_Rmt_Tx_Up2, mcRmtHWLFP=mcRmtHWLFP, card_MC_Co_QSFP1_Lane1_Up=card_MC_Co_QSFP1_Lane1_Up, accsfpSmLength=accsfpSmLength, mcIP175DPortObjects=mcIP175DPortObjects, mcType=mcType, accXFP1WaveLength=accXFP1WaveLength, mc40G_OEOQsfp1Lane3_link=mc40G_OEOQsfp1Lane3_link, mc10GXFP2WaveLengthTunability=mc10GXFP2WaveLengthTunability, card_MC_E1T1_Rmt_TXLOS_Normal=card_MC_E1T1_Rmt_TXLOS_Normal, mcRmtHWWorkMode=mcRmtHWWorkMode, mcQCA8334UpStream=mcQCA8334UpStream, card_MC_Rmt_SFP1_Down=card_MC_Rmt_SFP1_Down, mc4_25G_OEOHWLoopback=mc4_25G_OEOHWLoopback, mc10G_OEEFxlink=mc10G_OEEFxlink, mcE1Port1LOS=mcE1Port1LOS, mc2_5GMCPort1link=mc2_5GMCPort1link, qsfpAccRxPower3=qsfpAccRxPower3, mcNtwSfpExist=mcNtwSfpExist, mcNtwXFP2WaveLength=mcNtwXFP2WaveLength, mcRmt10G_OEOHWSpdMode=mcRmt10G_OEOHWSpdMode, cwdmWavelengthCount=cwdmWavelengthCount, card_MC_Rmt_SFPSFP1_Up=card_MC_Rmt_SFPSFP1_Up, mc10G_OEOHWSpdMode=mc10G_OEOHWSpdMode, mc40G_OEOQsfp2Lane4_link=mc40G_OEOQsfp2Lane4_link, mcNtwXFP2WaveLengthTunable=mcNtwXFP2WaveLengthTunable, mc2_5GMCSFP3Objects=mc2_5GMCSFP3Objects, mcHWTransmitMode=mcHWTransmitMode, slotIdx=slotIdx, qsfpNtwTxPower1=qsfpNtwTxPower1, 
mcHWLFP=mcHWLFP, mcE1T1RmtAISAlarm=mcE1T1RmtAISAlarm, volB=volB, mcRmtCurWorkMode=mcRmtCurWorkMode, mc1GE2OCardTable=mc1GE2OCardTable, accsfpWavelength=accsfpWavelength, mcAccQSfpEntry=mcAccQSfpEntry, mcTxByteHi=mcTxByteHi, mc10G_OEO2RHWSFP1Loopback=mc10G_OEO2RHWSFP1Loopback, sysContact=sysContact, slotEntry=slotEntry, mcCurWorkMode=mcCurWorkMode, card_MC_E1_Rmt_Port2_AIS_Alarm=card_MC_E1_Rmt_Port2_AIS_Alarm, mcIP175DUpStream=mcIP175DUpStream, mcRmtDetect=mcRmtDetect, mc10G_OEO_Test_Lock=mc10G_OEO_Test_Lock, mc2_5g_sfpTranPower=mc2_5g_sfpTranPower, mc2_5g_sfpBrSpeed=mc2_5g_sfpBrSpeed, mc40G_OEOSpeedMode=mc40G_OEOSpeedMode, mc1GO2OSfp3Table=mc1GO2OSfp3Table, mc1GE2ORmtPort2SFPlink=mc1GE2ORmtPort2SFPlink, mcCm1gAccSfpObjects=mcCm1gAccSfpObjects, rmtCardType=rmtCardType, card_MC_Co_SFP2_Removed=card_MC_Co_SFP2_Removed, card_MC_Co_Tx_Down1=card_MC_Co_Tx_Down1, card_MC_Co_QSFP1_Lane2_Down=card_MC_Co_QSFP1_Lane2_Down, card_MC_Co_QSFP2_Lane4_Down=card_MC_Co_QSFP2_Lane4_Down, mcRxByteHi=mcRxByteHi, card_MC_Rmt_SFP3_Inserted=card_MC_Rmt_SFP3_Inserted, mcE1CardEntry=mcE1CardEntry, card_Detected=card_Detected, card_MC_Co_XFP2_Up=card_MC_Co_XFP2_Up, card_MC_Rmt_SFP2_Up=card_MC_Rmt_SFP2_Up, trapHost2=trapHost2, mc2_5Cm1gSfpTable=mc2_5Cm1gSfpTable, mcCWDMCardEntry=mcCWDMCardEntry, mcE1T1TLoop=mcE1T1TLoop, mc1GO2OPortPri=mc1GO2OPortPri, mcLFPCfg=mcLFPCfg, qsfpAccRxPower1=qsfpAccRxPower1, temperature=temperature, qsfpAccTxPower3=qsfpAccTxPower3, card_MC_Co_SFP1_Down=card_MC_Co_SFP1_Down, mc4_25G_OEOCurSpdMode=mc4_25G_OEOCurSpdMode, mc10G_OEECardEntry=mc10G_OEECardEntry, mcFanStatus=mcFanStatus, mc2_5GMCPort3link=mc2_5GMCPort3link, systemMIB=systemMIB, mcTxlink=mcTxlink, mcCmTable=mcCmTable, mcRmtTxlink=mcRmtTxlink, mc40G_OEOQsfp1Lane4_link=mc40G_OEOQsfp1Lane4_link, sfpTranPower=sfpTranPower, coCardType=coCardType, mcFanCardEntry=mcFanCardEntry, cwdmWavelength3=cwdmWavelength3, card_MC_E1_Co_Port1_AIS_Normal=card_MC_E1_Co_Port1_AIS_Normal, mc1GE2OPortHWPri=mc1GE2OPortHWPri, mcE1T1TLossAlarm=mcE1T1TLossAlarm, mcQCA8334DownStream=mcQCA8334DownStream, mcIP175DPortEntry=mcIP175DPortEntry, mcE1T1Version=mcE1T1Version, card_MC_Co_QSFP1_Lane3_Down=card_MC_Co_QSFP1_Lane3_Down, card_MC_E1_Rmt_Port1_CV_Normal=card_MC_E1_Rmt_Port1_CV_Normal, card_MC_Rmt_SFP2_Removed=card_MC_Rmt_SFP2_Removed, card_MC_Co_SFPSFP1_Down=card_MC_Co_SFPSFP1_Down, accsfpBrSpeed=accsfpBrSpeed, cwdmWavelength8=cwdmWavelength8, mcRmt10G_OEOCurSpdMode=mcRmt10G_OEOCurSpdMode, mc10GOEEXFPTunableCardObjects=mc10GOEEXFPTunableCardObjects, mc1GE2ORmtPortHWPri=mc1GE2ORmtPortHWPri, mc10G_OEO_Test_Continue_Time=mc10G_OEO_Test_Continue_Time)
mibBuilder.exportSymbols("XXX-MIB", mc4_25G_OEO_Test_Error_Counter=mc4_25G_OEO_Test_Error_Counter, mc1GO2OPort1SFPlink=mc1GO2OPort1SFPlink, card_MC_E1_Rmt_Port1_LOS_Normal=card_MC_E1_Rmt_Port1_LOS_Normal, mc1GO2OSFP3Objects=mc1GO2OSFP3Objects, xfpWaveLength=xfpWaveLength, card_MC_Co_XFP1_Removed=card_MC_Co_XFP1_Removed, card_MC_Rmt_SFP2_Inserted=card_MC_Rmt_SFP2_Inserted, mc4_25G_OEOWorkMode=mc4_25G_OEOWorkMode, card_MC_Co_XFP2_Inserted=card_MC_Co_XFP2_Inserted, mc2_5g_sfpSmLength=mc2_5g_sfpSmLength, card_MC_Co_Port2_Down=card_MC_Co_Port2_Down, card_MC_Rmt_Acc_SFP_Removed=card_MC_Rmt_Acc_SFP_Removed, card_MC_Co_QSFP2_Removed=card_MC_Co_QSFP2_Removed, mc10GOEEXFPTunableCardTable=mc10GOEEXFPTunableCardTable, card_MC_Co_SFPSFP2_Down=card_MC_Co_SFPSFP2_Down, mcQCA8334PortIdx=mcQCA8334PortIdx, card_MC_E1T1_Rmt_AIS_Alarm=card_MC_E1T1_Rmt_AIS_Alarm, mcIP175DPortTable=mcIP175DPortTable, mc1go2o_sfpRecvPower=mc1go2o_sfpRecvPower, mc10G_OEO2R_ntwType=mc10G_OEO2R_ntwType, card_MC_Rmt_Tx_Up=card_MC_Rmt_Tx_Up, mcCm1gAccSfpTable=mcCm1gAccSfpTable, mcE1T1RmtFLoop=mcE1T1RmtFLoop, card_MC_E1_Co_Port1_CV_Normal=card_MC_E1_Co_Port1_CV_Normal, mc10GXFP1WaveLengthTunable=mc10GXFP1WaveLengthTunable, card_MC_Co_Tx_Up2=card_MC_Co_Tx_Up2, mcRmt10G_OEOHWLoopback=mcRmt10G_OEOHWLoopback, mc10G_OEECardObjects=mc10G_OEECardObjects, mcRmt4_25G_OEOWorkMode=mcRmt4_25G_OEOWorkMode, card_MC_Co_Port1_Up=card_MC_Co_Port1_Up, mc10GXFP2WaveLength=mc10GXFP2WaveLength, mc10G_OEOCardTable=mc10G_OEOCardTable, mc10G_OEE_ntwType=mc10G_OEE_ntwType, mcRmt10G_OEO_ntwType=mcRmt10G_OEO_ntwType, card_MC_Co_SFPSFP1_Up=card_MC_Co_SFPSFP1_Up, mc1GO2OPort2SFPlink=mc1GO2OPort2SFPlink, mcE1T1AISAlarm=mcE1T1AISAlarm, mcLoOrRmtFg=mcLoOrRmtFg, mc10G_OEOCfgSpdMode=mc10G_OEOCfgSpdMode, mcE1T1CardObjects=mcE1T1CardObjects, card_MC_Co_SFP3_Removed=card_MC_Co_SFP3_Removed, mcE1T1FLink=mcE1T1FLink, nmuObjects=nmuObjects, mc1go2o_sfpCopperLength=mc1go2o_sfpCopperLength, mc1GO2OPort3HWSpd=mc1GO2OPort3HWSpd, mc1GO2ORmtPort3SFPExist=mc1GO2ORmtPort3SFPExist, mc10G_OEO2R_ntwTunableType=mc10G_OEO2R_ntwTunableType, shelf_psuB_On=shelf_psuB_On, mcPmTable=mcPmTable, mcCm1gSpecificObjects=mcCm1gSpecificObjects, mc4_25G_OEO_Get_Test_Rst=mc4_25G_OEO_Get_Test_Rst, mcE1T1CardEntry=mcE1T1CardEntry, mc4_25G_OEO_Test_Continue_Time=mc4_25G_OEO_Test_Continue_Time, mcQsfpSpecificObjects=mcQsfpSpecificObjects, mc10GOEO3RCardObjects=mc10GOEO3RCardObjects, card_MC_E1_Co_Port2_AIS_Alarm=card_MC_E1_Co_Port2_AIS_Alarm, mc1GE2OCardObjects=mc1GE2OCardObjects, mc2_5g_sfpRecvPower=mc2_5g_sfpRecvPower, height2HU=height2HU, mc2_5g_sfpMmLength=mc2_5g_sfpMmLength, mc1GE2OObjects=mc1GE2OObjects, mc1GO2OObjects=mc1GO2OObjects, mcCm1gIpTable=mcCm1gIpTable, mcNtwQSfpEntry=mcNtwQSfpEntry, mc40G_OEOLane1LoopMode=mc40G_OEOLane1LoopMode, mc1go2o_sfpConnector=mc1go2o_sfpConnector, card_MC_Co_SFPSFP2_Up=card_MC_Co_SFPSFP2_Up, mcE1Port1CV=mcE1Port1CV, shelfEntry=shelfEntry, sfpBrSpeed=sfpBrSpeed, mcRmtE1SFP1Link=mcRmtE1SFP1Link, card_MC_Co_Acc_SFP_Removed=card_MC_Co_Acc_SFP_Removed, mc10G_OEOSFP2=mc10G_OEOSFP2, card_MC_E1_Co_Port2_LOS_Normal=card_MC_E1_Co_Port2_LOS_Normal, mcUpStream=mcUpStream, mc4_25G_OEOCardObjects=mc4_25G_OEOCardObjects, card_MC_Co_SFP2_Up=card_MC_Co_SFP2_Up, mc2_5g_sfpTransCode=mc2_5g_sfpTransCode, card_MC_E1_Rmt_Port2_LOS_Normal=card_MC_E1_Rmt_Port2_LOS_Normal, mc10G_OEO_Start_Test=mc10G_OEO_Start_Test, accXFP1WaveLengthTunable=accXFP1WaveLengthTunable, mcIpAddr=mcIpAddr, mc40G_OEOCardTable=mc40G_OEOCardTable, mc1GO2ORmtPort3SFPlink=mc1GO2ORmtPort3SFPlink, 
mcRmt4_25G_OEOLoopback=mcRmt4_25G_OEOLoopback, mcE1T1Type=mcE1T1Type, qsfpNtwRxPower3=qsfpNtwRxPower3, mc10G_OEOCardObjects=mc10G_OEOCardObjects, shelfName=shelfName, card_Lost=card_Lost, accsfpCopperLength=accsfpCopperLength, card_MC_E1_Co_Port1_LOS_Alarm=card_MC_E1_Co_Port1_LOS_Alarm, mcIP175DVlanMode=mcIP175DVlanMode, card_MC_E1T1_Rmt_TXLOS_Alarm=card_MC_E1T1_Rmt_TXLOS_Alarm, mc10G_OEESpdMode=mc10G_OEESpdMode, card_MC_Co_Fx_Up=card_MC_Co_Fx_Up, rmtCardDesc=rmtCardDesc, mcAccXFP1TunableType=mcAccXFP1TunableType, card_MC_Rmt_SFPSFP1_Removed=card_MC_Rmt_SFPSFP1_Removed, mcNtwXFP2WaveLengthTunability=mcNtwXFP2WaveLengthTunability, card_MC_Co_QSFP1_Removed=card_MC_Co_QSFP1_Removed, card_MC_Co_Ntw_SFP_Removed=card_MC_Co_Ntw_SFP_Removed, mcPmRest=mcPmRest, card_MC_Co_Acc_SFP_Inserted=card_MC_Co_Acc_SFP_Inserted, mcE1T1RmtFLossAlarm=mcE1T1RmtFLossAlarm, mc10G_OEO2RCardTable=mc10G_OEO2RCardTable, card_MC_E1_Rmt_Port2_CV_Alarm=card_MC_E1_Rmt_Port2_CV_Alarm, card_MC_Co_Port3_Up=card_MC_Co_Port3_Up, mc1GE2OPort2SFPlink=mc1GE2OPort2SFPlink, mc10GOEO3RObjects=mc10GOEO3RObjects, card_MC_E1_Co_Port1_AIS_Alarm=card_MC_E1_Co_Port1_AIS_Alarm, mc10G_OEO_Test_Result=mc10G_OEO_Test_Result, card_MC_Co_SFPSFP1_Inserted=card_MC_Co_SFPSFP1_Inserted, mc4_25G_OEOHWWorkMode=mc4_25G_OEOHWWorkMode, card_MC_Co_Tx_Down=card_MC_Co_Tx_Down, mcCWDMObjects=mcCWDMObjects, mcHWRmtCtrlMode=mcHWRmtCtrlMode, mcCfgWorkMode=mcCfgWorkMode, mcQCA8334PortEntry=mcQCA8334PortEntry, mcE1CardTable=mcE1CardTable, mc40G_OEOQsfp1Lane2_link=mc40G_OEOQsfp1Lane2_link, mc10G_OEO2RCurSpdMode=mc10G_OEO2RCurSpdMode, PYSNMP_MODULE_ID=company, mc10G_OEOLoopback=mc10G_OEOLoopback, mcE1T1FLoop=mcE1T1FLoop, card_MC_Rmt_SFP1_Up=card_MC_Rmt_SFP1_Up, mcRmt4_25G_OEOCfgSpdMode=mcRmt4_25G_OEOCfgSpdMode, card_MC_Rmt_PwrDown=card_MC_Rmt_PwrDown, mc1go2o_sfpMmLength=mc1go2o_sfpMmLength, mcQCA8334CfgWorkMode=mcQCA8334CfgWorkMode, mcRmtType=mcRmtType, mcQCA8334VlanMode=mcQCA8334VlanMode, mc4_25G_OEOHWSpdMode=mc4_25G_OEOHWSpdMode, card_MC_Co_QSFP2_Lane1_Up=card_MC_Co_QSFP2_Lane1_Up, mcE1Port2LOS=mcE1Port2LOS, mc1GO2OCardObjects=mc1GO2OCardObjects, mcRmt4_25G_OEOHWLoopback=mcRmt4_25G_OEOHWLoopback, mcRmt10G_OEO_accType=mcRmt10G_OEO_accType, mc2_5GMCObjects=mc2_5GMCObjects, mcTransceiverMode=mcTransceiverMode, mc40G_OEOObjects=mc40G_OEOObjects, mcCm1gAccSfpEntry=mcCm1gAccSfpEntry, mcIP175DCfgWorkMode=mcIP175DCfgWorkMode, mc1GE2ORmtTxlink=mc1GE2ORmtTxlink, mcE1T1RmtTLoop=mcE1T1RmtTLoop, qsfpNtwTxPower2=qsfpNtwTxPower2, mc1GO2OPort2SFPExist=mc1GO2OPort2SFPExist, ntwXFP2WaveLengthTunable=ntwXFP2WaveLengthTunable, qsfpNtwRxPower2=qsfpNtwRxPower2, card_MC_Co_XFP1_Inserted=card_MC_Co_XFP1_Inserted, mc10G_OEOSFP1=mc10G_OEOSFP1, mcQCA8334CardObjects=mcQCA8334CardObjects, card_MC_Co_QSFP1_Lane3_Up=card_MC_Co_QSFP1_Lane3_Up, mcAccQSfpTable=mcAccQSfpTable, mc10G_OEO_ntwType=mc10G_OEO_ntwType, mc40G_OEOLane3LoopMode=mc40G_OEOLane3LoopMode, card_MC_Rmt_SFP3_Removed=card_MC_Rmt_SFP3_Removed, card_MC_Co_QSFP2_Lane3_Down=card_MC_Co_QSFP2_Lane3_Down, psuA=psuA, ipProduct=ipProduct, mcFanCardTable=mcFanCardTable, mcRmtE1Port2CV=mcRmtE1Port2CV, mc40G_OEOQsfp2Lane2_link=mc40G_OEOQsfp2Lane2_link, qsfpAccRxPower2=qsfpAccRxPower2, mc10G_OEO2RCfgSpdMode=mc10G_OEO2RCfgSpdMode, card_MC_E1_Rmt_Port2_LOS_Alarm=card_MC_E1_Rmt_Port2_LOS_Alarm, card_MC_E1T1_Rmt_FXLOS_Normal=card_MC_E1T1_Rmt_FXLOS_Normal, mc2_5g_sfpCopperLength=mc2_5g_sfpCopperLength, mcCmObjects=mcCmObjects, mc10GOEEXFPTunableObjects=mc10GOEEXFPTunableObjects, mc10GOEO1RObjects=mc10GOEO1RObjects, 
mc40G_OEOHWSpeedMode=mc40G_OEOHWSpeedMode, getAccQSfpCmd=getAccQSfpCmd, mcQCA8334CardTable=mcQCA8334CardTable, mc10G_OEO2R_accType=mc10G_OEO2R_accType, card_MC_E1_Co_Port1_LOS_Normal=card_MC_E1_Co_Port1_LOS_Normal, card_MC_Rmt_SFPSFP1_Inserted=card_MC_Rmt_SFPSFP1_Inserted, mc2_5g_sfpTemperature=mc2_5g_sfpTemperature, mcRmtE1Txlink=mcRmtE1Txlink, accsfpTranPower=accsfpTranPower, mcE1T1CodeType=mcE1T1CodeType, sfpRecvPower=sfpRecvPower, card_MC_Co_Fx_Down=card_MC_Co_Fx_Down, card_MC_Co_QSFP2_Inserted=card_MC_Co_QSFP2_Inserted, mcRmtLFP=mcRmtLFP, card_MC_Rmt_SFP1_Inserted=card_MC_Rmt_SFP1_Inserted, accsfpTransCode=accsfpTransCode, sysLocation=sysLocation, qsfpNtwRxPower4=qsfpNtwRxPower4, card_MC_E1_Rmt_Port1_LOS_Alarm=card_MC_E1_Rmt_Port1_LOS_Alarm, sfpSmLength=sfpSmLength, cwdmWavelength6=cwdmWavelength6, mcE1TxCurWorkMode=mcE1TxCurWorkMode, mc1GE2ORmtPort1SFPExist=mc1GE2ORmtPort1SFPExist, mcCm1gIpObjects=mcCm1gIpObjects, mcRmtE1Port2Loop=mcRmtE1Port2Loop, mc10G_OEO2RCardEntry=mc10G_OEO2RCardEntry, card_MC_Co_SFP1_Up=card_MC_Co_SFP1_Up, qsfpAccTxPower2=qsfpAccTxPower2, card_MC_Rmt_XFP1_Removed=card_MC_Rmt_XFP1_Removed, qsfpAccTxPower1=qsfpAccTxPower1, qsfpAccConnector=qsfpAccConnector, mcRmtE1Port2AIS=mcRmtE1Port2AIS, card_MC_E1T1_Rmt_FXLOS_Alarm=card_MC_E1T1_Rmt_FXLOS_Alarm, card_MC_Co_QSFP1_Inserted=card_MC_Co_QSFP1_Inserted, card_MC_FAN_Normal=card_MC_FAN_Normal, mcNtwQSfpTable=mcNtwQSfpTable, mc10G_OEE_checkResult=mc10G_OEE_checkResult, card_MC_E1T1_Rmt_AIS_Normal=card_MC_E1T1_Rmt_AIS_Normal, mc2_5GMCCardEntry=mc2_5GMCCardEntry, mcE1Txlink=mcE1Txlink, mcQCA8334CardEntry=mcQCA8334CardEntry, mcRmtLoopback=mcRmtLoopback, mcQCA8334CurWorkMode=mcQCA8334CurWorkMode, card_MC_Rmt_SFP2_Down=card_MC_Rmt_SFP2_Down, card_MC_Rmt_SFP1_Removed=card_MC_Rmt_SFP1_Removed, mcRmtE1Port1LOS=mcRmtE1Port1LOS, card_MC_Co_SFP1_Inserted=card_MC_Co_SFP1_Inserted, qsfpNtwRxPower1=qsfpNtwRxPower1, mc10G_OEO2RSFP2=mc10G_OEO2RSFP2, slotTable=slotTable, mc10G_OEECardTable=mc10G_OEECardTable, mc40G_OEOLoopMode=mc40G_OEOLoopMode, mc1go2o_sfpTranPower=mc1go2o_sfpTranPower, mc1GO2ORmtPort2SFPlink=mc1GO2ORmtPort2SFPlink, mc10G_OEO2RCardObjects=mc10G_OEO2RCardObjects, mcCm1gSfpTable=mcCm1gSfpTable, mc1go2o_sfpTemperature=mc1go2o_sfpTemperature, mc1GO2ORmtPort2SFPExist=mc1GO2ORmtPort2SFPExist, card_MC_E1_Co_Port2_CV_Alarm=card_MC_E1_Co_Port2_CV_Alarm, mc1GE2OPort1SFPExist=mc1GE2OPort1SFPExist, accsfpConnector=accsfpConnector, coCardNum=coCardNum, mc1GO2OCardTable=mc1GO2OCardTable, gateway=gateway, qsfpNtwTemperature=qsfpNtwTemperature, card_MC_Rmt_XFP1_Inserted=card_MC_Rmt_XFP1_Inserted, mcIP175DRmtCurWorkMode=mcIP175DRmtCurWorkMode, mc2_5g_sfpConnector=mc2_5g_sfpConnector, mcE1T1RmtCodeType=mcE1T1RmtCodeType, mc1GO2ORmtPort1SFPExist=mc1GO2ORmtPort1SFPExist, mc4_25G_OEOCfgSpdMode=mc4_25G_OEOCfgSpdMode, card_MC_Co_SFP2_Down=card_MC_Co_SFP2_Down, sfpMmLength=sfpMmLength, mc10GOEEXFPTunableCardEntry=mc10GOEEXFPTunableCardEntry, mcIP175DPortIdx=mcIP175DPortIdx, sfpWavelength=sfpWavelength, shelf_psuA_Off=shelf_psuA_Off, card_MC_Co_Port1_Down=card_MC_Co_Port1_Down, mcRmtAccSfpExist=mcRmtAccSfpExist, mcAccXFP1WaveLength=mcAccXFP1WaveLength, card_MC_Co_QSFP2_Lane4_Up=card_MC_Co_QSFP2_Lane4_Up, card_MC_Co_QSFP2_Lane3_Up=card_MC_Co_QSFP2_Lane3_Up, mcE1CardObjects=mcE1CardObjects)
mibBuilder.exportSymbols("XXX-MIB", ntwXFP2TunableType=ntwXFP2TunableType, card_MC_Co_Ntw_SFP_Inserted=card_MC_Co_Ntw_SFP_Inserted, card_MC_E1_Rmt_Port2_AIS_Normal=card_MC_E1_Rmt_Port2_AIS_Normal, mc2_5GMCPort2link=mc2_5GMCPort2link, mc1GE2ORmtPort2SFPExist=mc1GE2ORmtPort2SFPExist, getSfpCmd=getSfpCmd, mc10G_OEO2RSFP1=mc10G_OEO2RSFP1, card_MC_Co_SFPSFP2_Removed=card_MC_Co_SFPSFP2_Removed, mc1GO2ORmtPort1SFPlink=mc1GO2ORmtPort1SFPlink, mcPmObjects=mcPmObjects, card_MC_Co_Tx_Up=card_MC_Co_Tx_Up, qsfpAccTxPower4=qsfpAccTxPower4, shelf_Lost=shelf_Lost, mcPmEntry=mcPmEntry, mc2_5Cm1gSfpEntry=mc2_5Cm1gSfpEntry, mc1GO2OSfp3Entry=mc1GO2OSfp3Entry, cwdmWavelength4=cwdmWavelength4, xfpWaveLengthTunability=xfpWaveLengthTunability, card_MC_FAN_Abnormal=card_MC_FAN_Abnormal, mc10G_OEO2RSFP2Loopback=mc10G_OEO2RSFP2Loopback, subnet=subnet, card_MC_Co_Port2_Up=card_MC_Co_Port2_Up, card_MC_Co_XFP2_Removed=card_MC_Co_XFP2_Removed, shelf_fan_On=shelf_fan_On, mcQCA8334PortObjects=mcQCA8334PortObjects, mcE1Port1Loop=mcE1Port1Loop, mc10G_OEETxlink=mc10G_OEETxlink, mc10G_OEOHWLoopback=mc10G_OEOHWLoopback, mcRmt4_25G_OEOCurSpdMode=mcRmt4_25G_OEOCurSpdMode, coCardDesc=coCardDesc, nmuConfig=nmuConfig, mc1GO2OCardEntry=mc1GO2OCardEntry, mc1go2o_sfpBrSpeed=mc1go2o_sfpBrSpeed, fan=fan, mc10GOEO3RCardEntry=mc10GOEO3RCardEntry, card_MC_Co_XFP1_Down=card_MC_Co_XFP1_Down, card_MC_Rmt_XFP1_Down=card_MC_Rmt_XFP1_Down, mcE1Objects=mcE1Objects, card_MC_Rmt_Tx_Down1=card_MC_Rmt_Tx_Down1, mcHWWorkMode=mcHWWorkMode, card_MC_Rmt_SFPSFP1_Down=card_MC_Rmt_SFPSFP1_Down, card_MC_Co_QSFP2_Lane1_Down=card_MC_Co_QSFP2_Lane1_Down, mc4_25G_OEOAccPD=mc4_25G_OEOAccPD, cwdmWavelength2=cwdmWavelength2, trapHost4=trapHost4, mcQCA8334PortTable=mcQCA8334PortTable, mcCWDMCardObjects=mcCWDMCardObjects, mcTransmitMode=mcTransmitMode, sfpTemperature=sfpTemperature, qsfpNtwConnector=qsfpNtwConnector, mc4_25G_OEOCardTable=mc4_25G_OEOCardTable, mc40G_OEOLane4LoopMode=mc40G_OEOLane4LoopMode, mcQCA8334Objects=mcQCA8334Objects, mc40G_OEOCardObjects=mc40G_OEOCardObjects, shelf_psuB_Off=shelf_psuB_Off, mcIP175DRmtTxlink=mcIP175DRmtTxlink, mc2_5g_sfpVoltage=mc2_5g_sfpVoltage, mcCWDMCardTable=mcCWDMCardTable, mcRmtTransmitMode=mcRmtTransmitMode, mcFxlink=mcFxlink, mcRxByteLo=mcRxByteLo, mc1GE2OTxlink=mc1GE2OTxlink, mc1go2o_sfpVoltage=mc1go2o_sfpVoltage, mc10GOEO1RCardObjects=mc10GOEO1RCardObjects, card_MC_E1_Rmt_Port1_CV_Alarm=card_MC_E1_Rmt_Port1_CV_Alarm, card_MC_Co_XFP1_Up=card_MC_Co_XFP1_Up, mc10G_OEEObjects=mc10G_OEEObjects, card_MC_E1T1_Co_AIS_Alarm=card_MC_E1T1_Co_AIS_Alarm, mc1go2o_sfpSmLength=mc1go2o_sfpSmLength, shelf_Detected=shelf_Detected, card_MC_Co_SFPSFP2_Inserted=card_MC_Co_SFPSFP2_Inserted, card_MC_Rmt_Acc_SFP_Inserted=card_MC_Rmt_Acc_SFP_Inserted, mcIP175DCardObjects=mcIP175DCardObjects, card_MC_Co_XFP2_Down=card_MC_Co_XFP2_Down, mcQCA8334Txlink=mcQCA8334Txlink, mcE1Port2AIS=mcE1Port2AIS, card_MC_Co_Tx_Down2=card_MC_Co_Tx_Down2, sfpTransCode=sfpTransCode, mc4_25G_OEOLoopback=mc4_25G_OEOLoopback, mcNtwXFP2TunableType=mcNtwXFP2TunableType, accsfpVoltage=accsfpVoltage, mcRmt10G_OEOSFP1=mcRmt10G_OEOSFP1, mc1GO2ORmtPort3HWSpd=mc1GO2ORmtPort3HWSpd, mc10G_OEO2RHWSFP2Loopback=mc10G_OEO2RHWSFP2Loopback, qsfpNtwTxPower3=qsfpNtwTxPower3, mc10G_OEO2RVersion=mc10G_OEO2RVersion, accsfpMmLength=accsfpMmLength, mc10G_OEO2RHWSpdMode=mc10G_OEO2RHWSpdMode, mc10G_OEO2RSFP1Loopback=mc10G_OEO2RSFP1Loopback, mc1GE2OCardEntry=mc1GE2OCardEntry, mcQCA8334RmtCurWorkMode=mcQCA8334RmtCurWorkMode, xfpTunableType=xfpTunableType, 
card_MC_Co_QSFP1_Lane4_Down=card_MC_Co_QSFP1_Lane4_Down, mcIP175DDownStream=mcIP175DDownStream, cardObjects=cardObjects, alarmMIB=alarmMIB, accXFP1WaveLengthTunability=accXFP1WaveLengthTunability, qsfpNtwTxPower4=qsfpNtwTxPower4, mcE1T1Objects=mcE1T1Objects, mcAccSfpExist=mcAccSfpExist, card_MC_E1_Co_Port2_AIS_Normal=card_MC_E1_Co_Port2_AIS_Normal, card_MC_E1_Co_Port2_LOS_Alarm=card_MC_E1_Co_Port2_LOS_Alarm, mcIP175DRmtCfgWorkMode=mcIP175DRmtCfgWorkMode, mc40G_OEOQsfp2Lane1_link=mc40G_OEOQsfp2Lane1_link, accsfpCompliance=accsfpCompliance, card_MC_Co_SFP3_Inserted=card_MC_Co_SFP3_Inserted)
| [] |
zhanghang1989/notedown | setup.py | b0fa1eac88d1cd7fa2261d6c454f82669e6f552b | from setuptools import setup
# create __version__
exec(open('./_version.py').read())
setup(
name="notedown",
version=__version__,
description="Convert markdown to IPython notebook.",
author="Aaron O'Leary",
author_email='dev@aaren.me',
url='http://github.com/aaren/notedown',
install_requires=['ipython', ],
entry_points={
'console_scripts': [
'notedown = notedown:cli',
],
}
)
| [((87, 387), 'setuptools.setup', 'setup', ([], {'name': '"""notedown"""', 'version': '__version__', 'description': '"""Convert markdown to IPython notebook."""', 'author': '"""Aaron O\'Leary"""', 'author_email': '"""dev@aaren.me"""', 'url': '"""http://github.com/aaren/notedown"""', 'install_requires': "['ipython']", 'entry_points': "{'console_scripts': ['notedown = notedown:cli']}"}), '(name=\'notedown\', version=__version__, description=\n \'Convert markdown to IPython notebook.\', author="Aaron O\'Leary",\n author_email=\'dev@aaren.me\', url=\'http://github.com/aaren/notedown\',\n install_requires=[\'ipython\'], entry_points={\'console_scripts\': [\n \'notedown = notedown:cli\']})\n', (92, 387), False, 'from setuptools import setup\n')] |
the-muses-ltd/Multithreaded-Webcrawler-Cassandra- | multithreaded_webcrawler.py | eee68faf3c6ecb548edd0e96ce445dcd366fb735 | # This is a reusable webcraawler architecture that can be adapted to scrape any webstie.
# RESULTS:
# Roughly 24 seconds per thousand courses scraped for ThreadPoolExecutor vs 63s for unthreaded script.
# This is a very basic implementation of multithreading, intended as a proof of concept, but it is a good base to build on.
import requests
from bs4 import BeautifulSoup
import csv
from concurrent.futures import ProcessPoolExecutor, as_completed, ThreadPoolExecutor
import time
import logging
from mitopencourseware_crawler_worker import mit_crawler
def courses_spider(max_pages):
data_to_csv = [] #holds all data to send to csv
print("Webcrawler workers have started, please wait while we finish crawling...")
    # TODO: remove the max_pages loop (unnecessary, since the URL never changes)
page = 1
while page <= max_pages:
url = 'https://ocw.mit.edu/courses/'
source_code = requests.get(url)
plain_text = source_code.text
soup = BeautifulSoup(plain_text, 'html.parser')
# Multithread only the work:
        # Tuning is required to find the most efficient number of workers in the thread pool.
with ThreadPoolExecutor(max_workers=30) as executor:
start = time.time()
futures = [ executor.submit(work, link) for link in soup.findAll('h4', {'class': 'course_title'}, limit=100) ]
data_to_csv = []
for result in as_completed(futures):
data_to_csv.append(result.result())
end = time.time()
print("Time Taken to complete: {:.6f}s".format(end-start))
print("Courses extracted: ", len(data_to_csv))
page += 1
export_to_csv(data_to_csv)
def work(link):
    # Replace this function with the specific crawler you want to use:
return mit_crawler(link)
# Exports data to a formatted csv file. This will be replaced with multithreaded API calls to the Cassandra Prisma database,
# or, in production on the cloud, the data will be sent to temporary S3 storage to be picked up by the AWS Lambda function which pushes it to the Cassandra database.
def export_to_csv(csv_data):
    with open('web_crawl_data.csv', mode='w', newline='') as csv_file:
field_names = ['Title','URL extension','External Website Logo','URL(href)','Description','Course logo URL']
        csv_writer = csv.DictWriter(csv_file, fieldnames=field_names)
csv_writer.writeheader()
for course in csv_data:
course_data = {
'Title':course[0],
'URL extension':course[1],
'External Website Logo':course[2],
'URL(href)':course[3],
'Description':course[4],
'Course logo URL':course[5],
}
csv_writer.writerow(course_data)
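# Added usage sketch (not part of the original script): run a single-page crawl
# when this module is executed directly instead of being imported.
if __name__ == '__main__':
    courses_spider(1)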
| [((1782, 1799), 'mitopencourseware_crawler_worker.mit_crawler', 'mit_crawler', (['link'], {}), '(link)\n', (1793, 1799), False, 'from mitopencourseware_crawler_worker import mit_crawler\n'), ((885, 902), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (897, 902), False, 'import requests\n'), ((956, 996), 'bs4.BeautifulSoup', 'BeautifulSoup', (['plain_text', '"""html.parser"""'], {}), "(plain_text, 'html.parser')\n", (969, 996), False, 'from bs4 import BeautifulSoup\n'), ((2316, 2364), 'csv.DictWriter', 'csv.DictWriter', (['csv_file'], {'fieldnames': 'field_names'}), '(csv_file, fieldnames=field_names)\n', (2330, 2364), False, 'import csv\n'), ((1141, 1175), 'concurrent.futures.ThreadPoolExecutor', 'ThreadPoolExecutor', ([], {'max_workers': '(30)'}), '(max_workers=30)\n', (1159, 1175), False, 'from concurrent.futures import ProcessPoolExecutor, as_completed, ThreadPoolExecutor\n'), ((1209, 1220), 'time.time', 'time.time', ([], {}), '()\n', (1218, 1220), False, 'import time\n'), ((1399, 1420), 'concurrent.futures.as_completed', 'as_completed', (['futures'], {}), '(futures)\n', (1411, 1420), False, 'from concurrent.futures import ProcessPoolExecutor, as_completed, ThreadPoolExecutor\n'), ((1492, 1503), 'time.time', 'time.time', ([], {}), '()\n', (1501, 1503), False, 'import time\n')] |
jumblesale/genyrator | genyrator/entities/Template.py | c4429f689e92e8447b0b944e7d9b434f99cae51d | from typing import List, Optional, NewType, Tuple, NamedTuple, Type
import attr
from jinja2 import Template as JinjaTemplate, StrictUndefined
from genyrator.entities.Entity import Entity
from genyrator.path import create_relative_path
OutPath = NewType('OutPath', Tuple[List[str], str])
Import = NamedTuple('Import',
[('module_name', str),
('imports', List[str]), ])
@attr.s
class Template(object):
template_name: str = attr.ib()
template_file_name: str = attr.ib()
template_file_path: List[str] = attr.ib()
relative_path: List[str] = attr.ib()
out_path: Optional[OutPath] = attr.ib()
def create_template(self):
path = create_relative_path(
[*self.template_file_path, self.template_file_name]
)
with open(path) as f:
template = JinjaTemplate(f.read(), undefined=StrictUndefined)
return template
def render(self):
return self.create_template().render(template=self)
def create_template(
constructor,
template_path: Optional[List[str]] = None,
out_path: Optional[OutPath] = None,
**kwargs,
) -> Template:
relative_path = template_path[0:-1]
path = ['genyrator', 'templates'] + relative_path
template_name = template_path[-1]
return constructor(
template_name=template_name,
template_file_name='{}.j2'.format(template_name),
template_file_path=path,
out_path=out_path,
relative_path=relative_path,
**kwargs,
)
@attr.s
class RootInit(Template):
db_import_path: str = attr.ib()
module_name: str = attr.ib()
@attr.s
class RootSchema(Template):
module_name: str = attr.ib()
entities: List[Entity] = attr.ib()
@attr.s
class ConvertDict(Template):
module_name: str = attr.ib()
@attr.s
class SQLAlchemyModel(Template):
module_name: str = attr.ib()
db_import_path: str = attr.ib()
entity: Entity = attr.ib()
@attr.s
class ModelToDict(Template):
module_name: str = attr.ib()
@attr.s
class Config(Template):
module_name: str = attr.ib()
@attr.s
class SQLAlchemyModelInit(Template):
module_name: str = attr.ib()
db_import_path: str = attr.ib()
imports: List[Import] = attr.ib()
@attr.s
class RestplusModel(Template):
entity: Entity = attr.ib()
@attr.s
class Resource(Template):
module_name: str = attr.ib()
db_import_path: str = attr.ib()
entity: Entity = attr.ib()
restplus_template: str = attr.ib()
TypeOption: Type = attr.ib()
@attr.s
class ResourcesInit(Template):
entities: List[Entity] = attr.ib()
module_name: str = attr.ib()
api_name: str = attr.ib()
api_description: str = attr.ib()
@attr.s
class DomainModel(Template):
entity: Entity = attr.ib()
module_name: str = attr.ib()
def sqlalchemy_model_imports(self):
return list(set([
rel.target_entity_class_name
for rel in self.entity.relationships
]))
@attr.s
class ConvertProperties(Template):
module_name: str = attr.ib()
@attr.s
class ConvertModels(Template):
module_name: str = attr.ib()
@attr.s
class JoinEntities(Template):
module_name: str = attr.ib()
@attr.s
class ConvertDictToMarshmallow(Template):
module_name: str = attr.ib()
db_import_path: str = attr.ib()
@attr.s
class Fixture(Template):
db_import_path: str = attr.ib()
module_name: str = attr.ib()
entity: Entity = attr.ib()
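# Added illustrative usage (not taken from the genyrator sources): build a
# RootInit template via the factory. 'root_init' and the other arguments are
# assumed values for demonstration only, so .render() is not called here,
# because it would look for that .j2 file under genyrator/templates/ on disk.
if __name__ == '__main__':
    example = create_template(
        RootInit,
        template_path=['root_init'],
        out_path=None,
        db_import_path='example_app.db',
        module_name='example_app',
    )
    print(example.template_file_name)  # -> 'root_init.j2'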
| [((247, 288), 'typing.NewType', 'NewType', (['"""OutPath"""', 'Tuple[List[str], str]'], {}), "('OutPath', Tuple[List[str], str])\n", (254, 288), False, 'from typing import List, Optional, NewType, Tuple, NamedTuple, Type\n'), ((298, 366), 'typing.NamedTuple', 'NamedTuple', (['"""Import"""', "[('module_name', str), ('imports', List[str])]"], {}), "('Import', [('module_name', str), ('imports', List[str])])\n", (308, 366), False, 'from typing import List, Optional, NewType, Tuple, NamedTuple, Type\n'), ((492, 501), 'attr.ib', 'attr.ib', ([], {}), '()\n', (499, 501), False, 'import attr\n'), ((546, 555), 'attr.ib', 'attr.ib', ([], {}), '()\n', (553, 555), False, 'import attr\n'), ((600, 609), 'attr.ib', 'attr.ib', ([], {}), '()\n', (607, 609), False, 'import attr\n'), ((654, 663), 'attr.ib', 'attr.ib', ([], {}), '()\n', (661, 663), False, 'import attr\n'), ((708, 717), 'attr.ib', 'attr.ib', ([], {}), '()\n', (715, 717), False, 'import attr\n'), ((1683, 1692), 'attr.ib', 'attr.ib', ([], {}), '()\n', (1690, 1692), False, 'import attr\n'), ((1719, 1728), 'attr.ib', 'attr.ib', ([], {}), '()\n', (1726, 1728), False, 'import attr\n'), ((1799, 1808), 'attr.ib', 'attr.ib', ([], {}), '()\n', (1806, 1808), False, 'import attr\n'), ((1841, 1850), 'attr.ib', 'attr.ib', ([], {}), '()\n', (1848, 1850), False, 'import attr\n'), ((1913, 1922), 'attr.ib', 'attr.ib', ([], {}), '()\n', (1920, 1922), False, 'import attr\n'), ((1992, 2001), 'attr.ib', 'attr.ib', ([], {}), '()\n', (1999, 2001), False, 'import attr\n'), ((2028, 2037), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2035, 2037), False, 'import attr\n'), ((2064, 2073), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2071, 2073), False, 'import attr\n'), ((2136, 2145), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2143, 2145), False, 'import attr\n'), ((2203, 2212), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2210, 2212), False, 'import attr\n'), ((2295, 2304), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2302, 2304), False, 'import attr\n'), ((2340, 2349), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2347, 2349), False, 'import attr\n'), ((2385, 2394), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2392, 2394), False, 'import attr\n'), ((2457, 2466), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2464, 2466), False, 'import attr\n'), ((2535, 2544), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2542, 2544), False, 'import attr\n'), ((2577, 2586), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2584, 2586), False, 'import attr\n'), ((2619, 2628), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2626, 2628), False, 'import attr\n'), ((2661, 2670), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2668, 2670), False, 'import attr\n'), ((2703, 2712), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2710, 2712), False, 'import attr\n'), ((2790, 2799), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2797, 2799), False, 'import attr\n'), ((2836, 2845), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2843, 2845), False, 'import attr\n'), ((2882, 2891), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2889, 2891), False, 'import attr\n'), ((2928, 2937), 'attr.ib', 'attr.ib', ([], {}), '()\n', (2935, 2937), False, 'import attr\n'), ((3003, 3012), 'attr.ib', 'attr.ib', ([], {}), '()\n', (3010, 3012), False, 'import attr\n'), ((3039, 3048), 'attr.ib', 'attr.ib', ([], {}), '()\n', (3046, 3048), False, 'import attr\n'), ((3286, 3295), 'attr.ib', 'attr.ib', ([], {}), '()\n', (3293, 3295), False, 'import attr\n'), ((3360, 3369), 'attr.ib', 'attr.ib', ([], {}), '()\n', (3367, 3369), False, 'import attr\n'), ((3433, 3442), 'attr.ib', 'attr.ib', ([], {}), '()\n', 
(3440, 3442), False, 'import attr\n'), ((3521, 3530), 'attr.ib', 'attr.ib', ([], {}), '()\n', (3528, 3530), False, 'import attr\n'), ((3557, 3566), 'attr.ib', 'attr.ib', ([], {}), '()\n', (3564, 3566), False, 'import attr\n'), ((3628, 3637), 'attr.ib', 'attr.ib', ([], {}), '()\n', (3635, 3637), False, 'import attr\n'), ((3664, 3673), 'attr.ib', 'attr.ib', ([], {}), '()\n', (3671, 3673), False, 'import attr\n'), ((3700, 3709), 'attr.ib', 'attr.ib', ([], {}), '()\n', (3707, 3709), False, 'import attr\n'), ((765, 838), 'genyrator.path.create_relative_path', 'create_relative_path', (['[*self.template_file_path, self.template_file_name]'], {}), '([*self.template_file_path, self.template_file_name])\n', (785, 838), False, 'from genyrator.path import create_relative_path\n')] |
aarkwright/ableton_devices | MIDI Remote Scripts/Push2/mode_collector.py | fe5df3bbd64ccbc136bba722ba1e131a02969798 | # uncompyle6 version 3.3.5
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.7.3 (default, Apr 24 2019, 15:29:51) [MSC v.1915 64 bit (AMD64)]
# Embedded file name: c:\Jenkins\live\output\win_64_static\Release\python-bundle\MIDI Remote Scripts\Push2\mode_collector.py
# Compiled at: 2018-11-30 15:48:11
from __future__ import absolute_import, print_function, unicode_literals
from ableton.v2.base import listenable_property, listens, EventObject
class ModeCollector(EventObject):
def __init__(self, main_modes=None, mix_modes=None, global_mix_modes=None, device_modes=None, *a, **k):
super(ModeCollector, self).__init__(*a, **k)
self._main_modes = main_modes
self._mix_modes = mix_modes
self._global_mix_modes = global_mix_modes
self._device_modes = device_modes
self._on_selected_main_mode_changed.subject = main_modes
self._on_selected_mix_mode_changed.subject = mix_modes
self._on_selected_global_mix_mode_changed.subject = global_mix_modes
self._on_selected_device_mode_changed.subject = device_modes
@listenable_property
def main_mode(self):
return self._main_modes.selected_mode
@listens(b'selected_mode')
def _on_selected_main_mode_changed(self, mode):
self.notify_main_mode()
@listenable_property
def mix_mode(self):
return self._mix_modes.selected_mode
@listens(b'selected_mode')
def _on_selected_mix_mode_changed(self, mode):
self.notify_mix_mode()
@listenable_property
def global_mix_mode(self):
return self._global_mix_modes.selected_mode
@listens(b'selected_mode')
def _on_selected_global_mix_mode_changed(self, mode):
self.notify_global_mix_mode()
@listenable_property
def device_mode(self):
return self._device_modes.selected_mode
@listens(b'selected_mode')
def _on_selected_device_mode_changed(self, mode):
self.notify_device_mode() | [((1193, 1218), 'ableton.v2.base.listens', 'listens', (["b'selected_mode'"], {}), "(b'selected_mode')\n", (1200, 1218), False, 'from ableton.v2.base import listenable_property, listens, EventObject\n'), ((1404, 1429), 'ableton.v2.base.listens', 'listens', (["b'selected_mode'"], {}), "(b'selected_mode')\n", (1411, 1429), False, 'from ableton.v2.base import listenable_property, listens, EventObject\n'), ((1627, 1652), 'ableton.v2.base.listens', 'listens', (["b'selected_mode'"], {}), "(b'selected_mode')\n", (1634, 1652), False, 'from ableton.v2.base import listenable_property, listens, EventObject\n'), ((1856, 1881), 'ableton.v2.base.listens', 'listens', (["b'selected_mode'"], {}), "(b'selected_mode')\n", (1863, 1881), False, 'from ableton.v2.base import listenable_property, listens, EventObject\n')] |
daidaotong/SingleView | src/topicModel.py | db3249ca5afba97f750495cccbc185de88bf2287 | from gensim import corpora, models, similarities, matutils,utils
from gensim.models import KeyedVectors
import numpy as np
#Word2vec Experiment
testString = ['PAST_MEDICAL_HISTORY','PAST_SURGICAL_HISTORY','PHYSICAL_EXAMINATION']
'''
word_vectors = KeyedVectors.load_word2vec_format('~/Downloads/GoogleNews-vectors-negative300.bin', binary=True)
#model.save("file.txt")
print word_vectors.most_similar(positive=['woman', 'king'], negative=['man'])
print "******************************************************"
print word_vectors.similarity('woman', 'man')
#print word_vectors.most_similar(positive=['san_francisco'])
print word_vectors.most_similar(positive=['SURGICAL'])
#word_vectors.similarity(testString[0],testString[1])
'''
a=[1,4,3,6,3,6]
print a[:-1]
#print zip(a[:-1],a[1:])
print np.random.randn(3, 2)
| [] |
drewbitt/lightnovel-crawler | src/bots/test/test_inputs.py | fa9546ad9dcff49c75296b0b8772f6578689adcc | from base64 import decodestring as b64decode
allowed_failures = [
'https://ranobelib.me/',
'https://www.aixdzs.com/',
'https://webnovelindonesia.com/',
b64decode("aHR0cHM6Ly9jb21yYWRlbWFvLmNvbS8=".encode()).decode()
]
test_user_inputs = {
b64decode("aHR0cHM6Ly9jb21yYWRlbWFvLmNvbS8=".encode()).decode(): [
b64decode(
"aHR0cHM6Ly9jb21yYWRlbWFvLmNvbS9ub3ZlbC90c3VydWdpLW5vLWpvb3UtdG8tcmFrdWluLW5vLWtvLw==".encode()).decode()
],
'https://novelsrock.com/': [
'https://novelsrock.com/novel/the-returner/',
'kuro'
],
'http://gravitytales.com/': [
'http://gravitytales.com/posts/novel/a-dragons-curiosity'
],
'http://novelfull.com/': [
'http://novelfull.com/dungeon-defense.html',
'Sinister Ex Girlfriend',
],
'http://www.machinenoveltranslation.com/': [
'http://www.machinenoveltranslation.com/a-thought-through-eternity',
],
'http://zenithnovels.com/': [
'http://zenithnovels.com/infinity-armament/',
],
'https://anythingnovel.com/': [
'https://anythingnovel.com/novel/king-of-gods/',
],
'https://boxnovel.com/': [
'https://boxnovel.com/novel/the-rest-of-my-life-is-for-you/',
'cultivation chat',
],
'https://crescentmoon.blog/': [
'https://crescentmoon.blog/dark-blue-and-moonlight/',
],
'https://litnet.com/': [
'https://litnet.com/en/book/candy-lips-1-b106232',
'candy lips',
],
'https://lnmtl.com/': [
'https://lnmtl.com/novel/the-strongest-dan-god',
],
'https://m.chinesefantasynovels.com/': [
'https://m.chinesefantasynovels.com/3838/',
],
'https://m.novelspread.com/': [
'https://m.novelspread.com/novel/the-legend-of-the-concubine-s-daughter-minglan',
],
'https://m.romanticlovebooks.com/': [
'https://m.romanticlovebooks.com/xuanhuan/207.html',
],
'http://www.tiknovel.com/': [
'http://www.tiknovel.com/book/index?id=717',
],
'https://m.wuxiaworld.co/': [
'https://m.wuxiaworld.co/Reincarnation-Of-The-Strongest-Sword-God/',
],
'https://meionovel.id/': [
'https://meionovel.id/novel/the-legendary-mechanic/',
],
'https://mtled-novels.com/': [
'https://mtled-novels.com/novels/great-ruler/',
'great ruler'
],
'https://bestlightnovel.com/': [
'https://bestlightnovel.com/novel_888103800',
'martial'
],
'https://novelplanet.com/': [
'https://novelplanet.com/Novel/Returning-from-the-Immortal-World',
'immortal'
],
'https://www.volarenovels.com/': [
'https://www.volarenovels.com/novel/adorable-creature-attacks',
],
'https://webnovel.online/': [
'https://webnovel.online/full-marks-hidden-marriage-pick-up-a-son-get-a-free-husband',
],
'https://www.idqidian.us/': [
'https://www.idqidian.us/novel/peerless-martial-god/'
],
'https://www.novelall.com/': [
'https://www.novelall.com/novel/Virtual-World-Close-Combat-Mage.html',
'combat'
],
'https://www.novelspread.com/': [
'https://www.novelspread.com/novel/the-legend-of-the-concubine-s-daughter-minglan'
],
'https://www.readlightnovel.org/': [
'https://www.readlightnovel.org/top-furious-doctor-soldier'
],
'https://www.romanticlovebooks.com/': [
'https://www.romanticlovebooks.com/xianxia/251.html'
],
'https://www.royalroad.com/': [
'https://www.royalroad.com/fiction/21220/mother-of-learning',
'mother'
],
'https://www.scribblehub.com/': [
'https://www.scribblehub.com/series/73550/modern-life-of-the-exalted-immortal/',
'cultivation'
],
'https://www.webnovel.com/': [
'https://www.webnovel.com/book/8212987205006305/Trial-Marriage-Husband%3A-Need-to-Work-Hard',
'martial',
],
'https://www.worldnovel.online/': [
'https://www.worldnovel.online/novel/solo-leveling/',
],
'https://www.wuxiaworld.co/': [
'https://www.wuxiaworld.co/Reincarnation-Of-The-Strongest-Sword-God/',
'sword'
],
'https://rewayat.club/': [
'https://rewayat.club/novel/almighty-sword-domain/'
],
'https://www.wuxiaworld.com/': [
'https://www.wuxiaworld.com/novel/martial-god-asura',
'martial',
],
'https://creativenovels.com/': [
'https://creativenovels.com/novel/eternal-reverence/',
],
'https://www.tapread.com/': [
'https://www.tapread.com/book/detail/80',
],
'http://www.tapread.com/': [
'http://www.tapread.com/book/detail/80',
],
'https://readnovelfull.com/': [
'https://readnovelfull.com/lord-of-all-realms.html',
'cultivation'
],
'https://myoniyonitranslations.com/': [
'https://myoniyonitranslations.com/top-management/',
'https://myoniyonitranslations.com/category/god-of-tennis',
],
'https://babelnovel.com/': [
'https://babelnovel.com/books/ceo-let-me-go',
'dazzle Good'
],
'https://wuxiaworld.online/': [
'https://wuxiaworld.online/trial-marriage-husband-need-to-work-hard',
'cultivation',
],
'https://www.novelv.com/': [
'https://www.novelv.com/0/349/'
],
'http://fullnovel.live/': [
'http://fullnovel.live/novel-a-will-eternal',
'will eternal',
],
'https://www.noveluniverse.com/': [
'https://www.noveluniverse.com/index/novel/info/id/15.html'
],
'https://novelraw.blogspot.com/': [
'https://novelraw.blogspot.com/2019/03/dragon-king-son-in-law-mtl.html'
],
'https://light-novel.online/': [
'https://light-novel.online/great-tyrannical-deity',
'tyrannical'
],
'https://www.rebirth.online/': [
'https://www.rebirth.online/novel/upside-down'
],
'https://www.jieruihao.cn/': [
'https://www.jieruihao.cn/novel/against-the-gods/',
],
'https://www.wattpad.com/': [
'https://www.wattpad.com/story/87505567-loving-mr-jerkface-%E2%9C%94%EF%B8%8F'
],
'https://novelgo.id/': [
'https://novelgo.id/novel/the-mightiest-leveling-system/'
],
'https://yukinovel.me/': [
'https://yukinovel.me/novel/the-second-coming-of-avarice/',
],
'https://www.asianhobbyist.com/': [
'https://www.asianhobbyist.com/series/that-time-i-got-reincarnated-as-a-slime/'
],
'https://kisslightnovels.info/': [
'https://kisslightnovels.info/novel/solo-leveling/'
],
'https://novelonlinefull.com/': [
'https://novelonlinefull.com/novel/abo1520855001564322110'
],
    'https://www.machine-translation.org/': [
        'https://www.machine-translation.org/novel/bace21c9b10d34e9/world-of-cultivation.html',
        'https://www.machine-translation.org/novel/a5eee127d75da0d2/long-live-summons.html'
    ],
'https://www.fanfiction.net/': [
'https://www.fanfiction.net/s/7268451/1/Facebook-For-wizards'
],
'https://www.mtlnovel.com/': [
'https://www.mtlnovel.com/trapped-in-a-typical-idol-drama/'
],
'https://wordexcerpt.com/': [
'https://wordexcerpt.com/series/transmigration-raising-the-child-of-the-male-lead-boss/'
],
'https://www.translateindo.com/': [
'https://www.translateindo.com/demon-wang-golden-status-favoured-fei/'
],
'https://ranobelib.me/': [
'https://ranobelib.me/sozvezdie-klinka'
],
'https://novelringan.com/': [
'https://novelringan.com/series/the-most-loving-marriage-in-history-master-mus-pampered-wife/'
],
'https://wuxiaworld.site/': [
'https://wuxiaworld.site/novel/only-i-level-up/'
],
'https://id.mtlnovel.com/': [
'https://id.mtlnovel.com/the-strongest-plane-becomes-god/'
],
'https://www.shinsori.com/': [
'https://www.shinsori.com/akuyaku-reijou-ni-nanka-narimasen/'
],
'https://www.flying-lines.com/': [
'https://www.flying-lines.com/novel/one-useless-rebirth'
],
'https://book.qidian.com/': [
'https://book.qidian.com/info/1016597088'
],
'https://kiss-novel.com/': [
'https://kiss-novel.com/the-first-order'
],
'https://www.aixdzs.com/': [
'https://www.aixdzs.com/d/66/66746/'
],
'https://webnovelonline.com/': [
'https://webnovelonline.com/novel/the_anarchic_consort'
],
'https://4scanlation.com/': [
'https://4scanlation.com/tensei-shitara-slime-datta-ken-wn/'
],
'https://listnovel.com/': [
'https://listnovel.com/novel/my-sassy-crown-princess/'
],
'https://tomotranslations.com/': [
'https://tomotranslations.com/this-hero-is-invincible-but-too-cautious/'
],
'https://www.wuxialeague.com/': [
'https://www.wuxialeague.com/novel/245/'
],
'http://liberspark.com/': [
'http://liberspark.com/novel/black-irons-glory'
],
    'https://webnovelindonesia.com/': [
        'https://webnovelindonesia.com/nv/almighty-student'
    ],
'http://tiknovel.com/': [
'http://tiknovel.com/book/index?id=717'
],
'http://boxnovel.org/': [
'http://boxnovel.org/novel/martial-god-asura'
]
}
| [] |
ostropunk/wikisourcesort | wikisourcesort.py | 3af2d086df0818a75b3e6c34550e2cc1382911a5 | #!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
import re
# In[2]:
def get_excel_dict(excelfile, key=None, index_col=0, header=0):
dataframe = pd.read_excel(excelfile, index_col=index_col, header=header)
dictionary = dataframe.to_dict()
if key is None:
return dictionary
else:
return dictionary[key]
# In[3]:
def textreader(text):
'''Opens textfile and returns the content as a string'''
with open(text, 'rt', encoding="utf8") as wiki:
txtstring = wiki.read()
return txtstring
# In[44]:
def replace_from_dict(text, dictionary):
'''Replaces words in text with new words in dictionary'''
for word in dictionary:
text = text.replace(word, dictionary[word])
return text
# In[172]:
def get_ref(text):
'''
Finds references between the <ref>- and </ref>-tags
and returns them as a list of strings
'''
ref = re.findall("\<ref.+?\<\/ref\>", text)
return ref
# In[171]:
def getrefurl(ref):
'''Finds the reference url in references and returns it as a string'''
url = re.search("http.+?(?=\s|\|title=|\|titel|\}\})", ref)
url = url.group()
return url
# In[30]:
def get_domain_name(url):
'''
Finds the domain name of the reference url and
returns that name as a string.
'''
domain_name = re.search('(?<=\/\/).+?(?=\/)', url)
domain_name = domain_name.group()
if domain_name.startswith('www.'):
domain_name = domain_name.replace('www.', '')
return domain_name
# In[32]:
def update_ref_dict(ref, ref_dict, ref_counts):
refurl = getrefurl(ref)
domain_name = get_domain_name(refurl)
if refurl not in ref_dict:
if domain_name not in ref_counts:
ref_counts.update({domain_name:1})
refname = domain_name + '.' + str(ref_counts[domain_name])
else:
ref_counts[domain_name] = ref_counts[domain_name] + 1
refname = domain_name + '.' + str(ref_counts[domain_name])
ref_dict.update({refurl:{'refs': [ref], 'refname': refname, 'refurl': refurl}})
else:
if ref not in ref_dict[refurl]['refs']:
ref_dict[refurl]['refs'].append(ref)
return ref_dict, ref_counts
# In[36]:
def create_ref_dict(refs):
'''
    Takes a list of references, extracts the reference URL and a generated
    reference name for each, and returns a dictionary keyed on the reference URL.
'''
ref_dict = {}
ref_counts = {}
for ref in refs:
ref_dict, ref_counts = update_ref_dict(ref, ref_dict, ref_counts)
return ref_dict
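# Added illustrative example (not part of the original notebook): feed one line
# of wiki markup through the reference helpers to show how a citation is reduced
# to its URL, domain name and generated reference name.
_example_ref = get_ref('Fact.<ref>{{cite web|url=http://example.com/page |title=Page}}</ref>')[0]
# getrefurl(_example_ref)                   -> 'http://example.com/page'
# get_domain_name(getrefurl(_example_ref))  -> 'example.com'
# create_ref_dict([_example_ref]) names this reference 'example.com.1'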
# In[79]:
def get_ref_tag(text):
'''
    Finds self-closing named reference tags (<ref name=... />)
    and returns them as a set of strings
'''
ref = re.findall("\<ref name\=.+?\/\>", text)
#ref = re.findall("\<ref.+?\<\/ref\>|\<ref name\=.+?\/\>", text)
#ref = re.findall("\<ref.+?(?!\"\s\/\>)\<\/ref>", text)
#ref = re.findall("\<ref.+?\<\/ref\>", text)
return set(ref)
# In[130]:
def get_spec_ref(text, ref_tag):
'''
    Finds the full reference named ref_tag between the <ref name="...">-
    and </ref>-tags and returns it as a string
'''
#ref = re.findall("\<ref name\=.+?\/\>", text)
#ref = re.findall("\<ref.+?\<\/ref\>|\<ref name\=.+?\/\>", text)
#ref = re.findall("\<ref.+?(?!\"\s\/\>)\<\/ref>", text)
ref = re.findall(f'\<ref name\=\"{ref_tag}\"\>.+?\<\/ref\>', text)
ref = ref[0]
return ref
# In[115]:
def get_ref_tag_name(ref_tag):
ref_tag_name = re.findall('\".+\"', ref_tag)
ref_tag_name = ref_tag_name[0].replace('"', '')
return ref_tag_name
# In[136]:
def replace_tags(text):
ref_tags = get_ref_tag(text)
for tag in ref_tags:
name = get_ref_tag_name(tag)
spec_ref = get_spec_ref(text, name)
text = text.replace(tag, spec_ref)
return text
# In[49]:
def replace_countries(text):
countries = get_excel_dict('countries2.xlsx', 'Länder')
text = replace_from_dict(text, countries)
return text
# In[66]:
def replace_headers(text):
headers = {'English title':'Engelsk titel',
'Original title':'Originaltitel',
'Director(s)':'Regissör(er)',
'Country':'Land',
'School':'Skola'}
text = replace_from_dict(text, headers)
return text
# In[169]:
def reference_sorter(text):
'''
    Rewrites every inline reference as a named <ref /> tag and appends a
    consolidated reference list under '== Referenser =='.
    (Could still be broken out into smaller functions.)
'''
references = get_ref(text)
reference_dict = create_ref_dict(references)
reference_list = []
reference_text = '== Referenser ==\n<references>\n'
text = text.replace('== Källor ==', '== Referenser ==')
text = text.replace('<references/>', '')
for entry in reference_dict:
for reference in reference_dict[entry]['refs']:
text = text.replace(reference, '<ref name="{}" />'.format(reference_dict[entry]['refname']))
reference_list.append('<ref name="{}">{}</ref>'.format(reference_dict[entry]['refname'], entry))
for reference in reference_list:
reference_text += reference +'\n'
reference_text += '</references>'
text = re.split('== Referenser ==', text)
text = text[0] + reference_text + text[-1]
return text
# In[134]:
def fix_wiki_entry(textfile):
with open(textfile, 'r', encoding="utf8") as txt:
text = txt.read()
text = replace_tags(text)
text = reference_sorter(text)
text = replace_countries(text)
text = replace_headers(text)
with open('new_' + textfile, 'w', encoding='utf8') as new_text:
new_text.write(text)
return text
# In[173]:
def main():
fix_wiki_entry(input('Please enter input textfile:'))
if __name__ == "__main__":
main()
| [((173, 233), 'pandas.read_excel', 'pd.read_excel', (['excelfile'], {'index_col': 'index_col', 'header': 'header'}), '(excelfile, index_col=index_col, header=header)\n', (186, 233), True, 'import pandas as pd\n'), ((947, 988), 're.findall', 're.findall', (['"""\\\\<ref.+?\\\\<\\\\/ref\\\\>"""', 'text'], {}), "('\\\\<ref.+?\\\\<\\\\/ref\\\\>', text)\n", (957, 988), False, 'import re\n'), ((1120, 1178), 're.search', 're.search', (['"""http.+?(?=\\\\s|\\\\|title=|\\\\|titel|\\\\}\\\\})"""', 'ref'], {}), "('http.+?(?=\\\\s|\\\\|title=|\\\\|titel|\\\\}\\\\})', ref)\n", (1129, 1178), False, 'import re\n'), ((1372, 1411), 're.search', 're.search', (['"""(?<=\\\\/\\\\/).+?(?=\\\\/)"""', 'url'], {}), "('(?<=\\\\/\\\\/).+?(?=\\\\/)', url)\n", (1381, 1411), False, 'import re\n'), ((2782, 2825), 're.findall', 're.findall', (['"""\\\\<ref name\\\\=.+?\\\\/\\\\>"""', 'text'], {}), "('\\\\<ref name\\\\=.+?\\\\/\\\\>', text)\n", (2792, 2825), False, 'import re\n'), ((3379, 3443), 're.findall', 're.findall', (['f"""\\\\<ref name\\\\="{ref_tag}"\\\\>.+?\\\\<\\\\/ref\\\\>"""', 'text'], {}), '(f\'\\\\<ref name\\\\="{ref_tag}"\\\\>.+?\\\\<\\\\/ref\\\\>\', text)\n', (3389, 3443), False, 'import re\n'), ((3537, 3564), 're.findall', 're.findall', (['"""".+\\""""', 'ref_tag'], {}), '(\'".+"\', ref_tag)\n', (3547, 3564), False, 'import re\n'), ((5188, 5222), 're.split', 're.split', (['"""== Referenser =="""', 'text'], {}), "('== Referenser ==', text)\n", (5196, 5222), False, 'import re\n')] |
me-anton/radar-app | backend/radar/engine/body_objects.py | cc7d1e876e0ce9b6173b6d7b484d5553e247166e | import logging
import json
from dataclasses import dataclass
from redis import Redis
from typing import Iterable, Tuple, List, Iterator, Union, Dict
from typing_extensions import TypedDict
from backend import settings
from caching.scripts import RedisScriptsPool
from share.metaclasses import Singleton
from radar.models import AlienBody
from radar.validation import validate_body_str_profile
logger = logging.getLogger(__name__)
BodiesUpdate = TypedDict('BodiesUpdate', {'dropped_keys': List[str],
'new_records': Dict[str, str]})
@dataclass(frozen=True)
class BodyObject:
key: str
matrix: List[List[str]]
width: int
height: int
@staticmethod
def generate(key: str, body: str) -> 'BodyObject':
line_list = body.splitlines()
matrix = [list(line) for line in line_list]
return BodyObject(key=key, matrix=matrix,
width=len(matrix[0]), height=len(matrix))
class BodyObjectsPool(metaclass=Singleton):
"""
An object for getting BodyObject instances from database or cache
"""
body_key_prefix = 'body:'
body_lookup_pattern = body_key_prefix + '*'
body_expiration = 10 # in seconds
def __init__(self, num_of_default_bodies=3):
self.num_of_default_bodies = num_of_default_bodies
self.__default_bodies: Tuple[BodyObject, ...] = \
self._generate_defaults(num_of_default_bodies)
self._redis = Redis(host=settings.REDIS_HOSTNAME)
self._scripts = RedisScriptsPool()
def add_body(self, body: Union[str, bytes], body_id: str) -> None:
"""Cache the requested body string in Redis db"""
validate_body_str_profile(body)
key = self.make_body_key(body_id)
self._redis.set(key, body, self.body_expiration)
def ping_body(self, body_id: str):
"""Reset expiration time of a body"""
key = self.make_body_key(body_id)
self._redis.expire(key, self.body_expiration)
def update_bodies(self, known_bodies_keys: Iterable[str],
max_capacity: int) -> BodiesUpdate:
"""
        Report the current state of the body object records stored in the Redis db
:param known_bodies_keys: redis keys of already known bodies
:param max_capacity: maximum relevant for requester number of bodies
including already known ones
"""
return json.loads(
self._scripts.update_records(keys=known_bodies_keys,
args=[max_capacity,
self.body_lookup_pattern])
)
def make_body_key(self, body_id: str):
return self.body_key_prefix + body_id
@property
def first(self):
return self._get_default(0)
@property
def second(self):
return self._get_default(1)
@property
def third(self):
return self._get_default(2)
def _get_default(self, index) -> BodyObject:
return self.__default_bodies[index]
@staticmethod
def _generate_defaults(num_of_defaults):
logger.info('Generating default bodies')
query = AlienBody.objects.filter(id__lte=num_of_defaults)
return tuple(BodyObject.generate(str(body.id), body.body_str)
for body in query)
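# Rough usage sketch (added for illustration only; the values are examples and it
# assumes a reachable Redis instance at settings.REDIS_HOSTNAME plus a body string
# that passes validate_body_str_profile):
#
#   pool = BodyObjectsPool()
#   pool.add_body(body=some_body_str, body_id="42")   # cached under key "body:42"
#   pool.ping_body("42")                               # refresh the 10 s expiration
#   update = pool.update_bodies(known_bodies_keys=[], max_capacity=5)
#   # update == {"dropped_keys": [...], "new_records": {...}}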
| [((405, 432), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (422, 432), False, 'import logging\n'), ((448, 538), 'typing_extensions.TypedDict', 'TypedDict', (['"""BodiesUpdate"""', "{'dropped_keys': List[str], 'new_records': Dict[str, str]}"], {}), "('BodiesUpdate', {'dropped_keys': List[str], 'new_records': Dict[\n str, str]})\n", (457, 538), False, 'from typing_extensions import TypedDict\n'), ((579, 601), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (588, 601), False, 'from dataclasses import dataclass\n'), ((1474, 1509), 'redis.Redis', 'Redis', ([], {'host': 'settings.REDIS_HOSTNAME'}), '(host=settings.REDIS_HOSTNAME)\n', (1479, 1509), False, 'from redis import Redis\n'), ((1534, 1552), 'caching.scripts.RedisScriptsPool', 'RedisScriptsPool', ([], {}), '()\n', (1550, 1552), False, 'from caching.scripts import RedisScriptsPool\n'), ((1691, 1722), 'radar.validation.validate_body_str_profile', 'validate_body_str_profile', (['body'], {}), '(body)\n', (1716, 1722), False, 'from radar.validation import validate_body_str_profile\n'), ((3165, 3214), 'radar.models.AlienBody.objects.filter', 'AlienBody.objects.filter', ([], {'id__lte': 'num_of_defaults'}), '(id__lte=num_of_defaults)\n', (3189, 3214), False, 'from radar.models import AlienBody\n')] |
benzkji/djangocms-baseplugins | djangocms_baseplugins/spacer/cms_plugins.py | 7f041a030ed93dcdec70e4ca777b841846b8f2f2 | # coding: utf-8
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django import forms
from django.utils.translation import ugettext_lazy as _
from djangocms_baseplugins.baseplugin import defaults
from djangocms_baseplugins.baseplugin.cms_plugins import BasePluginMixin
from djangocms_baseplugins.baseplugin.utils import get_fields_from_fieldsets, get_baseplugin_widgets
from . import conf
from .models import Spacer
class SpacerPluginForm(forms.ModelForm):
class Meta:
model = Spacer
fields = get_fields_from_fieldsets(conf.FIELDSETS)
# exclude = []
widgets = get_baseplugin_widgets(conf)
class SpacerPlugin(BasePluginMixin, CMSPluginBase):
model = Spacer
form = SpacerPluginForm
module = defaults.SPECIAL_LABEL
name = _(u'Spacer')
render_template = "djangocms_baseplugins/spacer.html"
fieldsets = conf.FIELDSETS
plugin_pool.register_plugin(SpacerPlugin)
| [((917, 958), 'cms.plugin_pool.plugin_pool.register_plugin', 'plugin_pool.register_plugin', (['SpacerPlugin'], {}), '(SpacerPlugin)\n', (944, 958), False, 'from cms.plugin_pool import plugin_pool\n'), ((813, 825), 'django.utils.translation.ugettext_lazy', '_', (['u"""Spacer"""'], {}), "(u'Spacer')\n", (814, 825), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((553, 594), 'djangocms_baseplugins.baseplugin.utils.get_fields_from_fieldsets', 'get_fields_from_fieldsets', (['conf.FIELDSETS'], {}), '(conf.FIELDSETS)\n', (578, 594), False, 'from djangocms_baseplugins.baseplugin.utils import get_fields_from_fieldsets, get_baseplugin_widgets\n'), ((636, 664), 'djangocms_baseplugins.baseplugin.utils.get_baseplugin_widgets', 'get_baseplugin_widgets', (['conf'], {}), '(conf)\n', (658, 664), False, 'from djangocms_baseplugins.baseplugin.utils import get_fields_from_fieldsets, get_baseplugin_widgets\n')] |
sdaxen/python_utilities | python_utilities/plotting/util.py | 7b9d6cc21bfc31be83629d2ac02b27e886ebc2bb | """Utility functions for plotting.
Author: Seth Axen
E-mail: seth.axen@gmail.com"""
from collections import deque
import numpy as np
def rgb_to_hsv(rgb):
"""Convert RGB colors to HSV colors."""
r, g, b = tuple(map(float, rgb))
if any([r > 1, g > 1, b > 1]):
r /= 255.
g /= 255.
b /= 255.
mmax = max(r, g, b)
mmin = min(r, g, b)
c = mmax - mmin
if (c == 0.):
hp = 0.
elif (mmax == r):
hp = ((g - b) / c) % 6
elif (mmax == g):
hp = ((b - r) / c) + 2
elif (mmax == b):
hp = ((r - g) / c) + 4
h = 60 * hp
v = mmax
if (c == 0):
s = 0
else:
s = c / v
return (h, s, v)
def hsv_to_rgb(hsv):
"""Convert HSV colors to RGB colors."""
h, s, v = tuple(map(float, hsv))
c = v * s
m = v - c
hp = h / 60.
x = c * (1. - abs((hp % 2) - 1.))
hp = int(hp)
rgb = deque((c + m, x + m, m))
    if (hp % 2):
        rgb.reverse()
        # floor division keeps the rotation amount an int for deque.rotate()
        rgb.rotate((hp - 3) // 2)
    else:
        rgb.rotate(hp // 2)
return tuple(rgb)
def rgb_to_yuv(rgb):
"""Convert RGB colors to Y'UV colors, useful for comparison."""
rgbv = np.array(rgb).reshape(3, 1)
if np.any(rgbv > 1.):
rgbv = rgbv / 255.
yuv = np.dot(np.array([[ .299, .587, .114],
[-.14713, -.28886, .436],
[ .615, -.51499, -.10001]], dtype=np.double),
rgbv)
return list(yuv)
def yuv_to_rgb(yuv):
"""Convert Y'UV colors to RGB colors."""
yuvv = np.array(yuv).reshape(3, 1)
rgb = np.dot(np.array([[1., 0., 1.13983],
[1., -.39465, -.58060],
[1., 2.03211, 0.]], dtype=np.double),
yuvv)
return list(rgb)
def compute_yuv_dist(rgb1, rgb2):
"""Compute Euclidean Y'UV distance between RGB colors."""
yuv1 = rgb_to_yuv(rgb1)
yuv2 = rgb_to_yuv(rgb2)
return float(sum((np.array(yuv1) - np.array(yuv2))**2)**.5)
def lighten_rgb(rgb, p=0.):
"""Lighten RGB colors by percentage p of total."""
h, s, v = rgb_to_hsv(rgb)
hsv = (h, s, min(1, v + p))
return hsv_to_rgb(hsv)
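# Minimal self-check added for illustration; the expected values below were
# worked out by hand from the conversion functions above.
if __name__ == "__main__":
    assert rgb_to_hsv((0, 0, 255)) == (240.0, 1.0, 1.0)    # pure blue
    assert hsv_to_rgb((240.0, 1.0, 1.0)) == (0.0, 0.0, 1.0)
    print(lighten_rgb((0.5, 0.0, 0.0), p=0.25))             # -> (0.75, 0.0, 0.0)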
| [((912, 936), 'collections.deque', 'deque', (['(c + m, x + m, m)'], {}), '((c + m, x + m, m))\n', (917, 936), False, 'from collections import deque\n'), ((1205, 1223), 'numpy.any', 'np.any', (['(rgbv > 1.0)'], {}), '(rgbv > 1.0)\n', (1211, 1223), True, 'import numpy as np\n'), ((1268, 1381), 'numpy.array', 'np.array', (['[[0.299, 0.587, 0.114], [-0.14713, -0.28886, 0.436], [0.615, -0.51499, -\n 0.10001]]'], {'dtype': 'np.double'}), '([[0.299, 0.587, 0.114], [-0.14713, -0.28886, 0.436], [0.615, -\n 0.51499, -0.10001]], dtype=np.double)\n', (1276, 1381), True, 'import numpy as np\n'), ((1602, 1702), 'numpy.array', 'np.array', (['[[1.0, 0.0, 1.13983], [1.0, -0.39465, -0.5806], [1.0, 2.03211, 0.0]]'], {'dtype': 'np.double'}), '([[1.0, 0.0, 1.13983], [1.0, -0.39465, -0.5806], [1.0, 2.03211, 0.0\n ]], dtype=np.double)\n', (1610, 1702), True, 'import numpy as np\n'), ((1170, 1183), 'numpy.array', 'np.array', (['rgb'], {}), '(rgb)\n', (1178, 1183), True, 'import numpy as np\n'), ((1557, 1570), 'numpy.array', 'np.array', (['yuv'], {}), '(yuv)\n', (1565, 1570), True, 'import numpy as np\n'), ((1972, 1986), 'numpy.array', 'np.array', (['yuv1'], {}), '(yuv1)\n', (1980, 1986), True, 'import numpy as np\n'), ((1989, 2003), 'numpy.array', 'np.array', (['yuv2'], {}), '(yuv2)\n', (1997, 2003), True, 'import numpy as np\n')] |
confluentinc/utils-core | tests/test_process.py | 6001b4c61f7d923d273a23dc5a1580e0fa277d2c | import pytest
from utils.process import run, silent_run, RunError
from utils.fs import in_temp_dir
def test_run(capsys):
with in_temp_dir():
assert run('echo hello > hello.txt; echo world >> hello.txt', shell=True)
out = run('ls', return_output=True)
assert out == 'hello.txt\n'
out = run(['cat', 'hello.txt'], return_output=True)
assert out == 'hello\nworld\n'
with pytest.raises(RunError):
run('blah')
assert not run('blah', raises=False)
assert silent_run('ls -l')
out, _ = capsys.readouterr()
assert out == ''
| [((133, 146), 'utils.fs.in_temp_dir', 'in_temp_dir', ([], {}), '()\n', (144, 146), False, 'from utils.fs import in_temp_dir\n'), ((163, 229), 'utils.process.run', 'run', (['"""echo hello > hello.txt; echo world >> hello.txt"""'], {'shell': '(True)'}), "('echo hello > hello.txt; echo world >> hello.txt', shell=True)\n", (166, 229), False, 'from utils.process import run, silent_run, RunError\n'), ((245, 274), 'utils.process.run', 'run', (['"""ls"""'], {'return_output': '(True)'}), "('ls', return_output=True)\n", (248, 274), False, 'from utils.process import run, silent_run, RunError\n'), ((326, 371), 'utils.process.run', 'run', (["['cat', 'hello.txt']"], {'return_output': '(True)'}), "(['cat', 'hello.txt'], return_output=True)\n", (329, 371), False, 'from utils.process import run, silent_run, RunError\n'), ((536, 555), 'utils.process.silent_run', 'silent_run', (['"""ls -l"""'], {}), "('ls -l')\n", (546, 555), False, 'from utils.process import run, silent_run, RunError\n'), ((425, 448), 'pytest.raises', 'pytest.raises', (['RunError'], {}), '(RunError)\n', (438, 448), False, 'import pytest\n'), ((462, 473), 'utils.process.run', 'run', (['"""blah"""'], {}), "('blah')\n", (465, 473), False, 'from utils.process import run, silent_run, RunError\n'), ((494, 519), 'utils.process.run', 'run', (['"""blah"""'], {'raises': '(False)'}), "('blah', raises=False)\n", (497, 519), False, 'from utils.process import run, silent_run, RunError\n')] |
areaweb/bokeh | bokeh/client/util.py | 9d131e45d626a912e85aee5b2647139c194dc893 | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2017, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Internal utility functions used by ``bokeh.client``
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
from bokeh.util.api import public, internal ; public, internal
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
# External imports
# Bokeh imports
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Public API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Internal API
#-----------------------------------------------------------------------------
@internal((1,0,0))
def server_url_for_websocket_url(url):
    ''' Convert a ``ws(s)`` URL for a Bokeh server websocket endpoint into the
    corresponding ``http(s)`` server URL.
Args:
url (str):
An ``ws(s)`` URL ending in ``/ws``
Returns:
str:
The corresponding ``http(s)`` URL.
Raises:
ValueError:
If the input URL is not of the proper form.
'''
if url.startswith("ws:"):
reprotocoled = "http" + url[2:]
elif url.startswith("wss:"):
reprotocoled = "https" + url[3:]
else:
raise ValueError("URL has non-websocket protocol " + url)
if not reprotocoled.endswith("/ws"):
raise ValueError("websocket URL does not end in /ws")
return reprotocoled[:-2]
@internal((1,0,0))
def websocket_url_for_server_url(url):
    ''' Convert an ``http(s)`` URL for a Bokeh server into the corresponding
    ``ws(s)`` URL for its websocket endpoint.
Args:
url (str):
An ``http(s)`` URL
Returns:
str:
The corresponding ``ws(s)`` URL ending in ``/ws``
Raises:
ValueError:
If the input URL is not of the proper form.
'''
if url.startswith("http:"):
reprotocoled = "ws" + url[4:]
elif url.startswith("https:"):
reprotocoled = "wss" + url[5:]
else:
raise ValueError("URL has unknown protocol " + url)
if reprotocoled.endswith("/"):
return reprotocoled + "ws"
else:
return reprotocoled + "/ws"
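# Illustrative examples (added; doctest-style comments, not part of the original module):
#
#   >>> server_url_for_websocket_url("ws://localhost:5006/ws")
#   'http://localhost:5006/'
#   >>> websocket_url_for_server_url("https://demo.bokeh.org")
#   'wss://demo.bokeh.org/ws'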
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
| [((686, 713), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (703, 713), False, 'import logging\n'), ((1543, 1562), 'bokeh.util.api.internal', 'internal', (['(1, 0, 0)'], {}), '((1, 0, 0))\n', (1551, 1562), False, 'from bokeh.util.api import public, internal\n'), ((2324, 2343), 'bokeh.util.api.internal', 'internal', (['(1, 0, 0)'], {}), '((1, 0, 0))\n', (2332, 2343), False, 'from bokeh.util.api import public, internal\n')] |
PPTMiao/mtl-ssl | slim/nets/inception_resnet_v2.py | b61449c3f902414304657de6ec217077e441a6b9 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains the definition of the Inception Resnet V2 architecture.
As described in http://arxiv.org/abs/1602.07261.
Inception-v4, Inception-ResNet and the Impact of Residual Connections
on Learning
Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
slim = tf.contrib.slim
def block35(net, scale=1.0, activation_fn=tf.nn.relu, scope=None, reuse=None):
"""Builds the 35x35 resnet block."""
with tf.variable_scope(scope, 'Block35', [net], reuse=reuse):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 32, 1, scope='Conv2d_1x1')
with tf.variable_scope('Branch_1'):
tower_conv1_0 = slim.conv2d(net, 32, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1_0, 32, 3, scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_2'):
tower_conv2_0 = slim.conv2d(net, 32, 1, scope='Conv2d_0a_1x1')
tower_conv2_1 = slim.conv2d(tower_conv2_0, 48, 3, scope='Conv2d_0b_3x3')
tower_conv2_2 = slim.conv2d(tower_conv2_1, 64, 3, scope='Conv2d_0c_3x3')
mixed = tf.concat(axis=3, values=[tower_conv, tower_conv1_1, tower_conv2_2])
up = slim.conv2d(mixed, net.get_shape()[3], 1, normalizer_fn=None,
activation_fn=None, scope='Conv2d_1x1')
net += scale * up
if activation_fn:
net = activation_fn(net)
return net
def block17(net, scale=1.0, activation_fn=tf.nn.relu, scope=None, reuse=None):
"""Builds the 17x17 resnet block."""
with tf.variable_scope(scope, 'Block17', [net], reuse=reuse):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 192, 1, scope='Conv2d_1x1')
with tf.variable_scope('Branch_1'):
tower_conv1_0 = slim.conv2d(net, 128, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1_0, 160, [1, 7],
scope='Conv2d_0b_1x7')
tower_conv1_2 = slim.conv2d(tower_conv1_1, 192, [7, 1],
scope='Conv2d_0c_7x1')
mixed = tf.concat(axis=3, values=[tower_conv, tower_conv1_2])
up = slim.conv2d(mixed, net.get_shape()[3], 1, normalizer_fn=None,
activation_fn=None, scope='Conv2d_1x1')
net += scale * up
if activation_fn:
net = activation_fn(net)
return net
def block8(net, scale=1.0, activation_fn=tf.nn.relu, scope=None, reuse=None):
"""Builds the 8x8 resnet block."""
with tf.variable_scope(scope, 'Block8', [net], reuse=reuse):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 192, 1, scope='Conv2d_1x1')
with tf.variable_scope('Branch_1'):
tower_conv1_0 = slim.conv2d(net, 192, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1_0, 224, [1, 3],
scope='Conv2d_0b_1x3')
tower_conv1_2 = slim.conv2d(tower_conv1_1, 256, [3, 1],
scope='Conv2d_0c_3x1')
mixed = tf.concat(axis=3, values=[tower_conv, tower_conv1_2])
up = slim.conv2d(mixed, net.get_shape()[3], 1, normalizer_fn=None,
activation_fn=None, scope='Conv2d_1x1')
net += scale * up
if activation_fn:
net = activation_fn(net)
return net
def inception_resnet_v2_base(inputs,
final_endpoint='Conv2d_7b_1x1',
output_stride=16,
align_feature_maps=False,
scope=None):
"""Inception model from http://arxiv.org/abs/1602.07261.
Constructs an Inception Resnet v2 network from inputs to the given final
endpoint. This method can construct the network up to the final inception
block Conv2d_7b_1x1.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
final_endpoint: specifies the endpoint to construct the network up to. It
can be one of ['Conv2d_1a_3x3', 'Conv2d_2a_3x3', 'Conv2d_2b_3x3',
'MaxPool_3a_3x3', 'Conv2d_3b_1x1', 'Conv2d_4a_3x3', 'MaxPool_5a_3x3',
'Mixed_5b', 'Mixed_6a', 'PreAuxLogits', 'Mixed_7a', 'Conv2d_7b_1x1']
output_stride: A scalar that specifies the requested ratio of input to
output spatial resolution. Only supports 8 and 16.
align_feature_maps: When true, changes all the VALID paddings in the network
to SAME padding so that the feature maps are aligned.
scope: Optional variable_scope.
Returns:
tensor_out: output tensor corresponding to the final_endpoint.
end_points: a set of activations for external use, for example summaries or
losses.
Raises:
ValueError: if final_endpoint is not set to one of the predefined values,
or if the output_stride is not 8 or 16, or if the output_stride is 8 and
we request an end point after 'PreAuxLogits'.
"""
if output_stride != 8 and output_stride != 16:
raise ValueError('output_stride must be 8 or 16.')
padding = 'SAME' if align_feature_maps else 'VALID'
end_points = {}
def add_and_check_final(name, net):
end_points[name] = net
return name == final_endpoint
with tf.variable_scope(scope, 'InceptionResnetV2', [inputs]):
with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
stride=1, padding='SAME'):
# 149 x 149 x 32
net = slim.conv2d(inputs, 32, 3, stride=2, padding=padding,
scope='Conv2d_1a_3x3')
if add_and_check_final('Conv2d_1a_3x3', net): return net, end_points
# 147 x 147 x 32
net = slim.conv2d(net, 32, 3, padding=padding,
scope='Conv2d_2a_3x3')
if add_and_check_final('Conv2d_2a_3x3', net): return net, end_points
# 147 x 147 x 64
net = slim.conv2d(net, 64, 3, scope='Conv2d_2b_3x3')
if add_and_check_final('Conv2d_2b_3x3', net): return net, end_points
# 73 x 73 x 64
net = slim.max_pool2d(net, 3, stride=2, padding=padding,
scope='MaxPool_3a_3x3')
if add_and_check_final('MaxPool_3a_3x3', net): return net, end_points
# 73 x 73 x 80
net = slim.conv2d(net, 80, 1, padding=padding,
scope='Conv2d_3b_1x1')
if add_and_check_final('Conv2d_3b_1x1', net): return net, end_points
# 71 x 71 x 192
net = slim.conv2d(net, 192, 3, padding=padding,
scope='Conv2d_4a_3x3')
if add_and_check_final('Conv2d_4a_3x3', net): return net, end_points
# 35 x 35 x 192
net = slim.max_pool2d(net, 3, stride=2, padding=padding,
scope='MaxPool_5a_3x3')
if add_and_check_final('MaxPool_5a_3x3', net): return net, end_points
# 35 x 35 x 320
with tf.variable_scope('Mixed_5b'):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 96, 1, scope='Conv2d_1x1')
with tf.variable_scope('Branch_1'):
tower_conv1_0 = slim.conv2d(net, 48, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1_0, 64, 5,
scope='Conv2d_0b_5x5')
with tf.variable_scope('Branch_2'):
tower_conv2_0 = slim.conv2d(net, 64, 1, scope='Conv2d_0a_1x1')
tower_conv2_1 = slim.conv2d(tower_conv2_0, 96, 3,
scope='Conv2d_0b_3x3')
tower_conv2_2 = slim.conv2d(tower_conv2_1, 96, 3,
scope='Conv2d_0c_3x3')
with tf.variable_scope('Branch_3'):
tower_pool = slim.avg_pool2d(net, 3, stride=1, padding='SAME',
scope='AvgPool_0a_3x3')
tower_pool_1 = slim.conv2d(tower_pool, 64, 1,
scope='Conv2d_0b_1x1')
net = tf.concat(
[tower_conv, tower_conv1_1, tower_conv2_2, tower_pool_1], 3)
if add_and_check_final('Mixed_5b', net): return net, end_points
# TODO(alemi): Register intermediate endpoints
net = slim.repeat(net, 10, block35, scale=0.17)
# 17 x 17 x 1088 if output_stride == 8,
# 33 x 33 x 1088 if output_stride == 16
use_atrous = output_stride == 8
with tf.variable_scope('Mixed_6a'):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 384, 3, stride=1 if use_atrous else 2,
padding=padding,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
tower_conv1_0 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1_0, 256, 3,
scope='Conv2d_0b_3x3')
tower_conv1_2 = slim.conv2d(tower_conv1_1, 384, 3,
stride=1 if use_atrous else 2,
padding=padding,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_2'):
tower_pool = slim.max_pool2d(net, 3, stride=1 if use_atrous else 2,
padding=padding,
scope='MaxPool_1a_3x3')
net = tf.concat([tower_conv, tower_conv1_2, tower_pool], 3)
if add_and_check_final('Mixed_6a', net): return net, end_points
# TODO(alemi): register intermediate endpoints
with slim.arg_scope([slim.conv2d], rate=2 if use_atrous else 1):
net = slim.repeat(net, 20, block17, scale=0.10)
if add_and_check_final('PreAuxLogits', net): return net, end_points
if output_stride == 8:
# TODO(gpapan): Properly support output_stride for the rest of the net.
raise ValueError('output_stride==8 is only supported up to the '
                         'PreAuxLogits end_point for now.')
# 8 x 8 x 2080
with tf.variable_scope('Mixed_7a'):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
tower_conv_1 = slim.conv2d(tower_conv, 384, 3, stride=2,
padding=padding,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
tower_conv1 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1, 288, 3, stride=2,
padding=padding,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_2'):
tower_conv2 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
tower_conv2_1 = slim.conv2d(tower_conv2, 288, 3,
scope='Conv2d_0b_3x3')
tower_conv2_2 = slim.conv2d(tower_conv2_1, 320, 3, stride=2,
padding=padding,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_3'):
tower_pool = slim.max_pool2d(net, 3, stride=2,
padding=padding,
scope='MaxPool_1a_3x3')
net = tf.concat(
[tower_conv_1, tower_conv1_1, tower_conv2_2, tower_pool], 3)
if add_and_check_final('Mixed_7a', net): return net, end_points
# TODO(alemi): register intermediate endpoints
net = slim.repeat(net, 9, block8, scale=0.20)
net = block8(net, activation_fn=None)
# 8 x 8 x 1536
net = slim.conv2d(net, 1536, 1, scope='Conv2d_7b_1x1')
if add_and_check_final('Conv2d_7b_1x1', net): return net, end_points
raise ValueError('final_endpoint (%s) not recognized', final_endpoint)
def inception_resnet_v2(inputs, num_classes=1001, is_training=True,
dropout_keep_prob=0.8,
reuse=None,
scope='InceptionResnetV2',
create_aux_logits=True):
"""Creates the Inception Resnet V2 model.
Args:
inputs: a 4-D tensor of size [batch_size, height, width, 3].
num_classes: number of predicted classes.
is_training: whether is training or not.
dropout_keep_prob: float, the fraction to keep before final layer.
reuse: whether or not the network and its variables should be reused. To be
able to reuse 'scope' must be given.
scope: Optional variable_scope.
    create_aux_logits: Whether to include the auxiliary logits.
Returns:
logits: the logits outputs of the model.
end_points: the set of end_points from the inception model.
"""
end_points = {}
with tf.variable_scope(scope, 'InceptionResnetV2', [inputs, num_classes],
reuse=reuse) as scope:
with slim.arg_scope([slim.batch_norm, slim.dropout],
is_training=is_training):
net, end_points = inception_resnet_v2_base(inputs, scope=scope)
if create_aux_logits:
with tf.variable_scope('AuxLogits'):
aux = end_points['PreAuxLogits']
aux = slim.avg_pool2d(aux, 5, stride=3, padding='VALID',
scope='Conv2d_1a_3x3')
aux = slim.conv2d(aux, 128, 1, scope='Conv2d_1b_1x1')
aux = slim.conv2d(aux, 768, aux.get_shape()[1:3],
padding='VALID', scope='Conv2d_2a_5x5')
aux = slim.flatten(aux)
aux = slim.fully_connected(aux, num_classes, activation_fn=None,
scope='Logits')
end_points['AuxLogits'] = aux
with tf.variable_scope('Logits'):
net = slim.avg_pool2d(net, net.get_shape()[1:3], padding='VALID',
scope='AvgPool_1a_8x8')
net = slim.flatten(net)
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='Dropout')
end_points['PreLogitsFlatten'] = net
logits = slim.fully_connected(net, num_classes, activation_fn=None,
scope='Logits')
end_points['Logits'] = logits
end_points['Predictions'] = tf.nn.softmax(logits, name='Predictions')
return logits, end_points
inception_resnet_v2.default_image_size = 299
def inception_resnet_v2_arg_scope(weight_decay=0.00004,
batch_norm_decay=0.9997,
batch_norm_epsilon=0.001,
trainable=True):
"""Returns the scope with the default parameters for inception_resnet_v2.
Args:
weight_decay: the weight decay for weights variables.
batch_norm_decay: decay for the moving average of batch_norm momentums.
batch_norm_epsilon: small float added to variance to avoid dividing by zero.
Returns:
a arg_scope with the parameters needed for inception_resnet_v2.
"""
# Set weight_decay for weights in conv2d and fully_connected layers.
with slim.arg_scope([slim.conv2d, slim.fully_connected],
weights_regularizer=slim.l2_regularizer(weight_decay),
biases_regularizer=slim.l2_regularizer(weight_decay),
trainable=trainable):
batch_norm_params = {
'decay': batch_norm_decay,
'epsilon': batch_norm_epsilon,
'trainable': trainable
}
# Set activation_fn and parameters for batch_norm.
with slim.arg_scope([slim.conv2d], activation_fn=tf.nn.relu,
normalizer_fn=slim.batch_norm,
normalizer_params=batch_norm_params) as scope:
return scope
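# Minimal usage sketch (added; assumes a TF 1.x environment where tf.contrib.slim
# is available, as imported at the top of this module):
#
#   images = tf.placeholder(tf.float32, [None, 299, 299, 3])
#   with slim.arg_scope(inception_resnet_v2_arg_scope()):
#       logits, end_points = inception_resnet_v2(images, num_classes=1001,
#                                                 is_training=False)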
| [((1247, 1302), 'tensorflow.variable_scope', 'tf.variable_scope', (['scope', '"""Block35"""', '[net]'], {'reuse': 'reuse'}), "(scope, 'Block35', [net], reuse=reuse)\n", (1264, 1302), True, 'import tensorflow as tf\n'), ((1874, 1942), 'tensorflow.concat', 'tf.concat', ([], {'axis': '(3)', 'values': '[tower_conv, tower_conv1_1, tower_conv2_2]'}), '(axis=3, values=[tower_conv, tower_conv1_1, tower_conv2_2])\n', (1883, 1942), True, 'import tensorflow as tf\n'), ((2290, 2345), 'tensorflow.variable_scope', 'tf.variable_scope', (['scope', '"""Block17"""', '[net]'], {'reuse': 'reuse'}), "(scope, 'Block17', [net], reuse=reuse)\n", (2307, 2345), True, 'import tensorflow as tf\n'), ((2811, 2864), 'tensorflow.concat', 'tf.concat', ([], {'axis': '(3)', 'values': '[tower_conv, tower_conv1_2]'}), '(axis=3, values=[tower_conv, tower_conv1_2])\n', (2820, 2864), True, 'import tensorflow as tf\n'), ((3209, 3263), 'tensorflow.variable_scope', 'tf.variable_scope', (['scope', '"""Block8"""', '[net]'], {'reuse': 'reuse'}), "(scope, 'Block8', [net], reuse=reuse)\n", (3226, 3263), True, 'import tensorflow as tf\n'), ((3729, 3782), 'tensorflow.concat', 'tf.concat', ([], {'axis': '(3)', 'values': '[tower_conv, tower_conv1_2]'}), '(axis=3, values=[tower_conv, tower_conv1_2])\n', (3738, 3782), True, 'import tensorflow as tf\n'), ((5864, 5919), 'tensorflow.variable_scope', 'tf.variable_scope', (['scope', '"""InceptionResnetV2"""', '[inputs]'], {}), "(scope, 'InceptionResnetV2', [inputs])\n", (5881, 5919), True, 'import tensorflow as tf\n'), ((13314, 13400), 'tensorflow.variable_scope', 'tf.variable_scope', (['scope', '"""InceptionResnetV2"""', '[inputs, num_classes]'], {'reuse': 'reuse'}), "(scope, 'InceptionResnetV2', [inputs, num_classes], reuse=\n reuse)\n", (13331, 13400), True, 'import tensorflow as tf\n'), ((1313, 1342), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_0"""'], {}), "('Branch_0')\n", (1330, 1342), True, 'import tensorflow as tf\n'), ((1416, 1445), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_1"""'], {}), "('Branch_1')\n", (1433, 1445), True, 'import tensorflow as tf\n'), ((1604, 1633), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_2"""'], {}), "('Branch_2')\n", (1621, 1633), True, 'import tensorflow as tf\n'), ((2356, 2385), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_0"""'], {}), "('Branch_0')\n", (2373, 2385), True, 'import tensorflow as tf\n'), ((2460, 2489), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_1"""'], {}), "('Branch_1')\n", (2477, 2489), True, 'import tensorflow as tf\n'), ((3274, 3303), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_0"""'], {}), "('Branch_0')\n", (3291, 3303), True, 'import tensorflow as tf\n'), ((3378, 3407), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_1"""'], {}), "('Branch_1')\n", (3395, 3407), True, 'import tensorflow as tf\n'), ((7465, 7494), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Mixed_5b"""'], {}), "('Mixed_5b')\n", (7482, 7494), True, 'import tensorflow as tf\n'), ((8514, 8584), 'tensorflow.concat', 'tf.concat', (['[tower_conv, tower_conv1_1, tower_conv2_2, tower_pool_1]', '(3)'], {}), '([tower_conv, tower_conv1_1, tower_conv2_2, tower_pool_1], 3)\n', (8523, 8584), True, 'import tensorflow as tf\n'), ((8919, 8948), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Mixed_6a"""'], {}), "('Mixed_6a')\n", (8936, 8948), True, 'import tensorflow as tf\n'), ((9924, 9977), 'tensorflow.concat', 'tf.concat', (['[tower_conv, 
tower_conv1_2, tower_pool]', '(3)'], {}), '([tower_conv, tower_conv1_2, tower_pool], 3)\n', (9933, 9977), True, 'import tensorflow as tf\n'), ((10580, 10609), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Mixed_7a"""'], {}), "('Mixed_7a')\n", (10597, 10609), True, 'import tensorflow as tf\n'), ((11865, 11935), 'tensorflow.concat', 'tf.concat', (['[tower_conv_1, tower_conv1_1, tower_conv2_2, tower_pool]', '(3)'], {}), '([tower_conv_1, tower_conv1_1, tower_conv2_2, tower_pool], 3)\n', (11874, 11935), True, 'import tensorflow as tf\n'), ((14254, 14281), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Logits"""'], {}), "('Logits')\n", (14271, 14281), True, 'import tensorflow as tf\n'), ((14814, 14855), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['logits'], {'name': '"""Predictions"""'}), "(logits, name='Predictions')\n", (14827, 14855), True, 'import tensorflow as tf\n'), ((7509, 7538), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_0"""'], {}), "('Branch_0')\n", (7526, 7538), True, 'import tensorflow as tf\n'), ((7620, 7649), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_1"""'], {}), "('Branch_1')\n", (7637, 7649), True, 'import tensorflow as tf\n'), ((7858, 7887), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_2"""'], {}), "('Branch_2')\n", (7875, 7887), True, 'import tensorflow as tf\n'), ((8217, 8246), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_3"""'], {}), "('Branch_3')\n", (8234, 8246), True, 'import tensorflow as tf\n'), ((8963, 8992), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_0"""'], {}), "('Branch_0')\n", (8980, 8992), True, 'import tensorflow as tf\n'), ((9196, 9225), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_1"""'], {}), "('Branch_1')\n", (9213, 9225), True, 'import tensorflow as tf\n'), ((9682, 9711), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_2"""'], {}), "('Branch_2')\n", (9699, 9711), True, 'import tensorflow as tf\n'), ((10624, 10653), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_0"""'], {}), "('Branch_0')\n", (10641, 10653), True, 'import tensorflow as tf\n'), ((10920, 10949), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_1"""'], {}), "('Branch_1')\n", (10937, 10949), True, 'import tensorflow as tf\n'), ((11221, 11250), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_2"""'], {}), "('Branch_2')\n", (11238, 11250), True, 'import tensorflow as tf\n'), ((11644, 11673), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Branch_3"""'], {}), "('Branch_3')\n", (11661, 11673), True, 'import tensorflow as tf\n'), ((13651, 13681), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""AuxLogits"""'], {}), "('AuxLogits')\n", (13668, 13681), True, 'import tensorflow as tf\n')] |
LedgerHQ/ledger-app-neo3 | tests/boilerplate_client/boilerplate_cmd.py | 48e1e0dec3e4801fc3ab1b07c4fe4ed86735a642 | import struct
from typing import Tuple
from ledgercomm import Transport
from boilerplate_client.boilerplate_cmd_builder import BoilerplateCommandBuilder, InsType
from boilerplate_client.button import Button
from boilerplate_client.exception import DeviceException
from boilerplate_client.transaction import Transaction
from neo3.network import payloads
class BoilerplateCommand:
def __init__(self,
transport: Transport,
debug: bool = False) -> None:
self.transport = transport
self.builder = BoilerplateCommandBuilder(debug=debug)
self.debug = debug
def get_app_and_version(self) -> Tuple[str, str]:
sw, response = self.transport.exchange_raw(
self.builder.get_app_and_version()
) # type: int, bytes
if sw != 0x9000:
raise DeviceException(error_code=sw, ins=0x01)
# response = format_id (1) ||
# app_name_len (1) ||
# app_name (var) ||
# version_len (1) ||
# version (var) ||
offset: int = 0
format_id: int = response[offset]
offset += 1
app_name_len: int = response[offset]
offset += 1
app_name: str = response[offset:offset + app_name_len].decode("ascii")
offset += app_name_len
version_len: int = response[offset]
offset += 1
version: str = response[offset:offset + version_len].decode("ascii")
offset += version_len
return app_name, version
def get_version(self) -> Tuple[int, int, int]:
sw, response = self.transport.exchange_raw(
self.builder.get_version()
) # type: int, bytes
if sw != 0x9000:
raise DeviceException(error_code=sw, ins=InsType.INS_GET_VERSION)
# response = MAJOR (1) || MINOR (1) || PATCH (1)
assert len(response) == 3
major, minor, patch = struct.unpack(
"BBB",
response
) # type: int, int, int
return major, minor, patch
def get_app_name(self) -> str:
sw, response = self.transport.exchange_raw(
self.builder.get_app_name()
) # type: int, bytes
if sw != 0x9000:
raise DeviceException(error_code=sw, ins=InsType.INS_GET_APP_NAME)
return response.decode("ascii")
def get_public_key(self, bip44_path: str, display: bool = False) -> bytes:
sw, response = self.transport.exchange_raw(
self.builder.get_public_key(bip44_path=bip44_path)
) # type: int, bytes
if sw != 0x9000:
raise DeviceException(error_code=sw, ins=InsType.INS_GET_PUBLIC_KEY)
assert len(response) == 65 # 04 + 64 bytes of uncompressed key
return response
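    # Hypothetical usage sketch (added): drive the app through the Speculos emulator
    # over TCP; the port and BIP44 path below are example values only.
    #
    #   transport = Transport(interface="tcp", server="127.0.0.1", port=9999)
    #   cmd = BoilerplateCommand(transport=transport, debug=True)
    #   print(cmd.get_version())                        # e.g. (0, 1, 0)
    #   pubkey = cmd.get_public_key("m/44'/888'/0'/0/0")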
def sign_tx(self, bip44_path: str, transaction: payloads.Transaction, network_magic: int, button: Button) -> Tuple[int, bytes]:
sw: int
response: bytes = b""
for is_last, chunk in self.builder.sign_tx(bip44_path=bip44_path,
transaction=transaction,
network_magic=network_magic):
self.transport.send_raw(chunk)
if is_last:
# Review Transaction
button.right_click()
# Destination address
button.right_click()
button.right_click()
button.right_click()
# Token Amount
button.right_click()
# Target network
button.right_click()
# System fee
button.right_click()
# Network fee
button.right_click()
# Total fees
button.right_click()
# Valid until
button.right_click()
# Signer 1 of 1
button.right_click()
# Account 1/3, 2/3, 3/3
button.right_click()
button.right_click()
button.right_click()
# Scope
button.right_click()
# custom contracts
if (len(transaction.signers) > 0 and
payloads.WitnessScope.CUSTOM_CONTRACTS in transaction.signers[0].scope):
for _ in range(len(transaction.signers[0].allowed_contracts)):
button.right_click()
button.right_click()
button.right_click()
# Approve
button.both_click()
sw, response = self.transport.recv() # type: int, bytes
if sw != 0x9000:
raise DeviceException(error_code=sw, ins=InsType.INS_SIGN_TX)
return response
def sign_vote_tx(self, bip44_path: str, transaction: Transaction, network_magic: int, button: Button) -> Tuple[int, bytes]:
sw: int
response: bytes = b""
for is_last, chunk in self.builder.sign_tx(bip44_path=bip44_path,
transaction=transaction,
network_magic=network_magic):
self.transport.send_raw(chunk)
if is_last:
# Review Transaction
button.right_click()
# Vote to public key
button.right_click()
button.right_click()
button.right_click()
button.right_click()
# Target network
button.right_click()
# System fee
button.right_click()
# Network fee
button.right_click()
# Total fees
button.right_click()
# Valid until
button.right_click()
# Signer 1 of 1
button.right_click()
# Account 1/3, 2/3, 3/3
button.right_click()
button.right_click()
button.right_click()
# Scope
button.right_click()
# Approve
button.both_click()
sw, response = self.transport.recv() # type: int, bytes
if sw != 0x9000:
raise DeviceException(error_code=sw, ins=InsType.INS_SIGN_TX)
return response | [((550, 588), 'boilerplate_client.boilerplate_cmd_builder.BoilerplateCommandBuilder', 'BoilerplateCommandBuilder', ([], {'debug': 'debug'}), '(debug=debug)\n', (575, 588), False, 'from boilerplate_client.boilerplate_cmd_builder import BoilerplateCommandBuilder, InsType\n'), ((1949, 1979), 'struct.unpack', 'struct.unpack', (['"""BBB"""', 'response'], {}), "('BBB', response)\n", (1962, 1979), False, 'import struct\n'), ((844, 881), 'boilerplate_client.exception.DeviceException', 'DeviceException', ([], {'error_code': 'sw', 'ins': '(1)'}), '(error_code=sw, ins=1)\n', (859, 881), False, 'from boilerplate_client.exception import DeviceException\n'), ((1766, 1825), 'boilerplate_client.exception.DeviceException', 'DeviceException', ([], {'error_code': 'sw', 'ins': 'InsType.INS_GET_VERSION'}), '(error_code=sw, ins=InsType.INS_GET_VERSION)\n', (1781, 1825), False, 'from boilerplate_client.exception import DeviceException\n'), ((2275, 2335), 'boilerplate_client.exception.DeviceException', 'DeviceException', ([], {'error_code': 'sw', 'ins': 'InsType.INS_GET_APP_NAME'}), '(error_code=sw, ins=InsType.INS_GET_APP_NAME)\n', (2290, 2335), False, 'from boilerplate_client.exception import DeviceException\n'), ((2646, 2708), 'boilerplate_client.exception.DeviceException', 'DeviceException', ([], {'error_code': 'sw', 'ins': 'InsType.INS_GET_PUBLIC_KEY'}), '(error_code=sw, ins=InsType.INS_GET_PUBLIC_KEY)\n', (2661, 2708), False, 'from boilerplate_client.exception import DeviceException\n'), ((4783, 4838), 'boilerplate_client.exception.DeviceException', 'DeviceException', ([], {'error_code': 'sw', 'ins': 'InsType.INS_SIGN_TX'}), '(error_code=sw, ins=InsType.INS_SIGN_TX)\n', (4798, 4838), False, 'from boilerplate_client.exception import DeviceException\n'), ((6399, 6454), 'boilerplate_client.exception.DeviceException', 'DeviceException', ([], {'error_code': 'sw', 'ins': 'InsType.INS_SIGN_TX'}), '(error_code=sw, ins=InsType.INS_SIGN_TX)\n', (6414, 6454), False, 'from boilerplate_client.exception import DeviceException\n')] |
fixstars/clpy | clpy/sparse/util.py | 693485f85397cc110fa45803c36c30c24c297df0 | import clpy
import clpy.sparse.base
_preamble_atomic_add = '''
#if __CUDA_ARCH__ < 600
__device__ double atomicAdd(double* address, double val) {
unsigned long long* address_as_ull =
(unsigned long long*)address;
unsigned long long old = *address_as_ull, assumed;
do {
assumed = old;
old = atomicCAS(address_as_ull, assumed,
__double_as_longlong(val +
__longlong_as_double(assumed)));
} while (assumed != old);
return __longlong_as_double(old);
}
#endif
'''
def isintlike(x):
try:
return bool(int(x) == x)
except (TypeError, ValueError):
return False
def isscalarlike(x):
return clpy.isscalar(x) or (clpy.sparse.base.isdense(x) and x.ndim == 0)
def isshape(x):
if not isinstance(x, tuple) or len(x) != 2:
return False
m, n = x
return isintlike(m) and isintlike(n)
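# Examples (added for clarity):
#   isintlike(3.0) -> True      isintlike(3.5) -> False
#   isscalarlike(2.5) -> True   isscalarlike([2.5]) -> False
#   isshape((2, 3)) -> True     isshape([2, 3]) -> False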
| [((739, 755), 'clpy.isscalar', 'clpy.isscalar', (['x'], {}), '(x)\n', (752, 755), False, 'import clpy\n'), ((760, 787), 'clpy.sparse.base.isdense', 'clpy.sparse.base.isdense', (['x'], {}), '(x)\n', (784, 787), False, 'import clpy\n')] |
hwazni/discopy | test/test_cartesian.py | 812a4c77de4c766591bad74306720b518cdc54fc | from pytest import raises
from discopy.cartesian import *
def test_Box_repr():
f = Box('f', 1, 2, lambda x: (x, x))
assert "Box('f', 1, 2" in repr(f)
def test_Function_str():
f = Function(2, 1, lambda x, y: x + y)
assert 'Function(dom=2, cod=1,' in str(f)
def test_Function_call():
f = Swap(2, 1)
values = (2, 3)
with raises(TypeError) as err:
f(*values)
assert str(err.value) == messages.expected_input_length(f, values)
def test_Function_then():
f, g = Function(2, 1, lambda x, y: x + y), Function(1, 1, lambda x: x + 1)
assert Function.id(2).then(*(f, g))(20, 21) == 42
def test_Function_then_err():
f = Function(2, 1, lambda x, y: x + y)
g = (lambda x: x, )
with raises(TypeError) as err:
f >> g
assert str(err.value) == messages.type_err(Function, g)
g = Function.id(2)
with raises(AxiomError) as err:
f >> g
assert str(err.value) == messages.does_not_compose(f, g)
def test_Function_tensor():
assert Function.id(3)(1, 2, 3)\
== Function.id(0).tensor(*(3 * [Function.id(1)]))(1, 2, 3)
def test_Function_tensor_err():
f = Function(2, 1, lambda x, y: x + y)
g = (lambda x: x, )
with raises(TypeError) as err:
f @ g
assert str(err.value) == messages.type_err(Function, g)
| [((352, 369), 'pytest.raises', 'raises', (['TypeError'], {}), '(TypeError)\n', (358, 369), False, 'from pytest import raises\n'), ((737, 754), 'pytest.raises', 'raises', (['TypeError'], {}), '(TypeError)\n', (743, 754), False, 'from pytest import raises\n'), ((870, 888), 'pytest.raises', 'raises', (['AxiomError'], {}), '(AxiomError)\n', (876, 888), False, 'from pytest import raises\n'), ((1216, 1233), 'pytest.raises', 'raises', (['TypeError'], {}), '(TypeError)\n', (1222, 1233), False, 'from pytest import raises\n')] |
neal-hub/nvda-test | source/browseMode.py | 4c3a67b2eafa9721c5de3f671d10e60ab2d43865 | # A part of NonVisual Desktop Access (NVDA)
# Copyright (C) 2007-2021 NV Access Limited, Babbage B.V., James Teh, Leonard de Ruijter,
# Thomas Stivers, Accessolutions, Julien Cochuyt
# This file is covered by the GNU General Public License.
# See the file COPYING for more details.
from typing import Any, Callable, Union
import os
import itertools
import collections
import winsound
import time
import weakref
import wx
import core
from logHandler import log
import documentBase
import review
import scriptHandler
import eventHandler
import nvwave
import queueHandler
import gui
import ui
import cursorManager
from scriptHandler import script, isScriptWaiting, willSayAllResume
import aria
import controlTypes
from controlTypes import OutputReason
import config
import textInfos
import braille
import vision
import speech
from speech import sayAll
import treeInterceptorHandler
import inputCore
import api
import gui.guiHelper
from gui.dpiScalingHelper import DpiScalingHelperMixinWithoutInit
from NVDAObjects import NVDAObject
import gui.contextHelp
from abc import ABCMeta, abstractmethod
import globalVars
from typing import Optional
def reportPassThrough(treeInterceptor,onlyIfChanged=True):
"""Reports the pass through mode if it has changed.
@param treeInterceptor: The current Browse Mode treeInterceptor.
@type treeInterceptor: L{BrowseModeTreeInterceptor}
@param onlyIfChanged: if true reporting will not happen if the last reportPassThrough reported the same thing.
@type onlyIfChanged: bool
"""
if not onlyIfChanged or treeInterceptor.passThrough != reportPassThrough.last:
if config.conf["virtualBuffers"]["passThroughAudioIndication"]:
sound = "focusMode.wav" if treeInterceptor.passThrough else "browseMode.wav"
nvwave.playWaveFile(os.path.join(globalVars.appDir, "waves", sound))
else:
if treeInterceptor.passThrough:
# Translators: The mode to interact with controls in documents
ui.message(_("Focus mode"))
else:
# Translators: The mode that presents text in a flat representation
# that can be navigated with the cursor keys like in a text document
ui.message(_("Browse mode"))
reportPassThrough.last = treeInterceptor.passThrough
reportPassThrough.last = False
def mergeQuickNavItemIterators(iterators,direction="next"):
"""
Merges multiple iterators that emit L{QuickNavItem} objects, yielding them from first to last.
They are sorted using min or max (__lt__ should be implemented on the L{QuickNavItem} objects).
	@param iterators: the iterators you want to merge.
	@type iterators: sequence of iterators that emit L{QuickNavItem} objects.
@param direction: the direction these iterators are searching (e.g. next, previous)
@type direction: string
"""
finder=min if direction=="next" else max
curValues=[]
	# Populate a list with all iterators and their corresponding first value
for it in iterators:
try:
val=next(it)
except StopIteration:
continue
curValues.append((it,val))
# Until all iterators have been used up,
# Find the first (minimum or maximum) of all the values,
# emit that, and update the list with the next available value for the iterator whose value was emitted.
while len(curValues)>0:
first=finder(curValues,key=lambda x: x[1])
curValues.remove(first)
it,val=first
yield val
try:
newVal=next(it)
except StopIteration:
continue
curValues.append((it,newVal))
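# Illustration (added): the merge only relies on "<" comparisons via min/max, so
# plain numbers demonstrate the ordering behaviour:
#   list(mergeQuickNavItemIterators([iter([1, 4, 7]), iter([2, 3, 9])], "next"))
#   -> [1, 2, 3, 4, 7, 9]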
class QuickNavItem(object, metaclass=ABCMeta):
""" Emitted by L{BrowseModeTreeInterceptor._iterNodesByType}, this represents one of many positions in a browse mode document, based on the type of item being searched for (e.g. link, heading, table etc)."""
itemType=None #: The type of items searched for (e.g. link, heading, table etc)
label=None #: The label that should represent this item in the Elements list.
isAfterSelection=False #: Is this item positioned after the caret in the document? Used by the elements list to place its own selection.
def __init__(self,itemType,document):
"""
@param itemType: the type that was searched for (e.g. link, heading, table etc)
@type itemType: string
@param document: the browse mode document this item is a part of.
@type document: L{BrowseModeTreeInterceptor}
"""
self.itemType=itemType
self.document=document
@abstractmethod
def isChild(self,parent):
"""
Is this item a child of the given parent?
This is used when representing items in a hierarchical tree structure, such as the Elements List.
@param parent: the item of whom this item may be a child of.
@type parent: L{QuickNavItem}
@return: True if this item is a child, false otherwise.
@rtype: bool
"""
raise NotImplementedError
@abstractmethod
def report(self,readUnit=None):
"""
Reports the contents of this item.
		@param readUnit: the optional unit (e.g. line, paragraph) that should be used to announce the item position when moved to. If not given, then the full size of the item is used.
@type readUnit: a L{textInfos}.UNIT_* constant.
"""
raise NotImplementedError
@abstractmethod
def moveTo(self):
"""
Moves the browse mode caret or focus to this item.
"""
raise NotImplementedError
def activate(self):
"""
Activates this item's position. E.g. follows a link, presses a button etc.
"""
raise NotImplementedError
def rename(self,newName):
"""
Renames this item with the new name.
"""
raise NotImplementedError
@property
def isRenameAllowed(self):
return False
class TextInfoQuickNavItem(QuickNavItem):
""" Represents a quick nav item in a browse mode document who's positions are represented by a L{textInfos.TextInfo}. """
def __init__(self,itemType,document,textInfo):
"""
See L{QuickNavItem.__init__} for itemType and document argument definitions.
@param textInfo: the textInfo position this item represents.
@type textInfo: L{textInfos.TextInfo}
"""
self.textInfo=textInfo
super(TextInfoQuickNavItem,self).__init__(itemType,document)
def __lt__(self,other):
return self.textInfo.compareEndPoints(other.textInfo,"startToStart")<0
@property
def obj(self):
return self.textInfo.basePosition if isinstance(self.textInfo.basePosition,NVDAObject) else None
@property
def label(self):
return self.textInfo.text.strip()
def isChild(self,parent):
if parent.textInfo.isOverlapping(self.textInfo):
return True
return False
def report(self,readUnit=None):
info=self.textInfo
# If we are dealing with a form field, ensure we don't read the whole content if it's an editable text.
if self.itemType == "formField":
if self.obj.role == controlTypes.Role.EDITABLETEXT:
readUnit = textInfos.UNIT_LINE
if readUnit:
fieldInfo = info.copy()
info.collapse()
info.move(readUnit, 1, endPoint="end")
if info.compareEndPoints(fieldInfo, "endToEnd") > 0:
# We've expanded past the end of the field, so limit to the end of the field.
info.setEndPoint(fieldInfo, "endToEnd")
speech.speakTextInfo(info, reason=OutputReason.QUICKNAV)
def activate(self):
self.textInfo.obj._activatePosition(info=self.textInfo)
def moveTo(self):
if self.document.passThrough and getattr(self, "obj", False):
if controlTypes.State.FOCUSABLE in self.obj.states:
self.obj.setFocus()
return
self.document.passThrough = False
reportPassThrough(self.document)
info = self.textInfo.copy()
info.collapse()
self.document._set_selection(info, reason=OutputReason.QUICKNAV)
@property
def isAfterSelection(self):
caret=self.document.makeTextInfo(textInfos.POSITION_CARET)
return self.textInfo.compareEndPoints(caret, "startToStart") > 0
def _getLabelForProperties(self, labelPropertyGetter: Callable[[str], Optional[Any]]):
"""
Fetches required properties for this L{TextInfoQuickNavItem} and constructs a label to be shown in an elements list.
This can be used by subclasses to implement the L{label} property.
		@param labelPropertyGetter: A callable taking 1 argument, specifying the property to fetch.
For example, if L{itemType} is landmark, the callable must return the landmark type when "landmark" is passed as the property argument.
Alternative property names might be name or value.
The callable must return None if the property doesn't exist.
An expected callable might be get method on a L{Dict},
or "lambda property: getattr(self.obj, property, None)" for an L{NVDAObject}.
"""
content = self.textInfo.text.strip()
if self.itemType == "heading":
# Output: displayed text of the heading.
return content
labelParts = None
name = labelPropertyGetter("name")
if self.itemType == "landmark":
landmark = aria.landmarkRoles.get(labelPropertyGetter("landmark"))
# Example output: main menu; navigation
labelParts = (name, landmark)
else:
role: Union[controlTypes.Role, int] = labelPropertyGetter("role")
role = controlTypes.Role(role)
roleText = role.displayString
# Translators: Reported label in the elements list for an element which which has no name and value
unlabeled = _("Unlabeled")
realStates = labelPropertyGetter("states")
labeledStates = " ".join(controlTypes.processAndLabelStates(role, realStates, OutputReason.FOCUS))
if self.itemType == "formField":
if role in (
controlTypes.Role.BUTTON,
controlTypes.Role.DROPDOWNBUTTON,
controlTypes.Role.TOGGLEBUTTON,
controlTypes.Role.SPLITBUTTON,
controlTypes.Role.MENUBUTTON,
controlTypes.Role.DROPDOWNBUTTONGRID,
controlTypes.Role.TREEVIEWBUTTON
):
# Example output: Mute; toggle button; pressed
labelParts = (content or name or unlabeled, roleText, labeledStates)
else:
# Example output: Find a repository...; edit; has auto complete; NVDA
labelParts = (name or unlabeled, roleText, labeledStates, content)
elif self.itemType in ("link", "button"):
# Example output: You have unread notifications; visited
labelParts = (content or name or unlabeled, labeledStates)
if labelParts:
label = "; ".join(lp for lp in labelParts if lp)
else:
label = content
return label
class BrowseModeTreeInterceptor(treeInterceptorHandler.TreeInterceptor):
scriptCategory = inputCore.SCRCAT_BROWSEMODE
_disableAutoPassThrough = False
APPLICATION_ROLES = (controlTypes.Role.APPLICATION, controlTypes.Role.DIALOG)
def _get_currentNVDAObject(self):
raise NotImplementedError
def _get_currentFocusableNVDAObject(self):
return self.makeTextInfo(textInfos.POSITION_CARET).focusableNVDAObjectAtStart
def event_treeInterceptor_gainFocus(self):
"""Triggered when this browse mode interceptor gains focus.
This event is only fired upon entering this treeInterceptor when it was not the current treeInterceptor before.
This is different to L{event_gainFocus}, which is fired when an object inside this treeInterceptor gains focus, even if that object is in the same treeInterceptor.
"""
reportPassThrough(self)
ALWAYS_SWITCH_TO_PASS_THROUGH_ROLES = frozenset({
controlTypes.Role.COMBOBOX,
controlTypes.Role.EDITABLETEXT,
controlTypes.Role.LIST,
controlTypes.Role.LISTITEM,
controlTypes.Role.SLIDER,
controlTypes.Role.TABCONTROL,
controlTypes.Role.MENUBAR,
controlTypes.Role.POPUPMENU,
controlTypes.Role.TREEVIEW,
controlTypes.Role.TREEVIEWITEM,
controlTypes.Role.SPINBUTTON,
controlTypes.Role.TABLEROW,
controlTypes.Role.TABLECELL,
controlTypes.Role.TABLEROWHEADER,
controlTypes.Role.TABLECOLUMNHEADER,
})
SWITCH_TO_PASS_THROUGH_ON_FOCUS_ROLES = frozenset({
controlTypes.Role.LISTITEM,
controlTypes.Role.RADIOBUTTON,
controlTypes.Role.TAB,
controlTypes.Role.MENUITEM,
controlTypes.Role.RADIOMENUITEM,
controlTypes.Role.CHECKMENUITEM,
})
IGNORE_DISABLE_PASS_THROUGH_WHEN_FOCUSED_ROLES = frozenset({
controlTypes.Role.MENUITEM,
controlTypes.Role.RADIOMENUITEM,
controlTypes.Role.CHECKMENUITEM,
controlTypes.Role.TABLECELL,
})
def shouldPassThrough(self, obj, reason: Optional[OutputReason] = None):
"""Determine whether pass through mode should be enabled (focus mode) or disabled (browse mode) for a given object.
@param obj: The object in question.
@type obj: L{NVDAObjects.NVDAObject}
@param reason: The reason for this query;
one of the output reasons, or C{None} for manual pass through mode activation by the user.
@return: C{True} if pass through mode (focus mode) should be enabled, C{False} if it should be disabled (browse mode).
"""
if reason and (
self.disableAutoPassThrough
or (reason == OutputReason.FOCUS and not config.conf["virtualBuffers"]["autoPassThroughOnFocusChange"])
or (reason == OutputReason.CARET and not config.conf["virtualBuffers"]["autoPassThroughOnCaretMove"])
):
# This check relates to auto pass through and auto pass through is disabled, so don't change the pass through state.
return self.passThrough
if reason == OutputReason.QUICKNAV:
return False
states = obj.states
role = obj.role
if controlTypes.State.EDITABLE in states and controlTypes.State.UNAVAILABLE not in states:
return True
# Menus sometimes get focus due to menuStart events even though they don't report as focused/focusable.
if not obj.isFocusable and controlTypes.State.FOCUSED not in states and role != controlTypes.Role.POPUPMENU:
return False
# many controls that are read-only should not switch to passThrough.
# However, there are exceptions.
if controlTypes.State.READONLY in states:
# #13221: For Slack message lists, and the MS Edge downloads window, switch to passthrough
# even though the list item and list are read-only, but focusable.
if (
role == controlTypes.Role.LISTITEM and controlTypes.State.FOCUSED in states
and obj.parent.role == controlTypes.Role.LIST and controlTypes.State.FOCUSABLE in obj.parent.states
):
return True
# Certain controls such as combo boxes and readonly edits are read-only but still interactive.
# #5118: read-only ARIA grids should also be allowed (focusable table cells, rows and headers).
if role not in (
controlTypes.Role.EDITABLETEXT, controlTypes.Role.COMBOBOX, controlTypes.Role.TABLEROW,
controlTypes.Role.TABLECELL, controlTypes.Role.TABLEROWHEADER, controlTypes.Role.TABLECOLUMNHEADER
):
return False
# Any roles or states for which we always switch to passThrough
if role in self.ALWAYS_SWITCH_TO_PASS_THROUGH_ROLES or controlTypes.State.EDITABLE in states:
return True
# focus is moving to this control. Perhaps after pressing tab or clicking a button that brings up a menu (via javascript)
if reason == OutputReason.FOCUS:
if role in self.SWITCH_TO_PASS_THROUGH_ON_FOCUS_ROLES:
return True
# If this is a focus change, pass through should be enabled for certain ancestor containers.
			# This is done last for performance reasons: walking up through the parents could be costly.
while obj and obj != self.rootNVDAObject:
if obj.role == controlTypes.Role.TOOLBAR:
return True
obj = obj.parent
return False
def _get_shouldTrapNonCommandGestures(self):
return config.conf['virtualBuffers']['trapNonCommandGestures']
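	# Bound to unhandled character keys in browse mode (see getAlternativeScript below);
	# it simply plays the default system sound to indicate the key did nothing.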
def script_trapNonCommandGesture(self,gesture):
winsound.PlaySound("default",1)
	singleLetterNavEnabled=True #: Whether single letter navigation scripts should be active (True) or whether these letters should fall through to the application.
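	# getAlternativeScript lets browse mode adjust how character gestures are handled:
	# in focus mode (or for non-character gestures) the bound script is returned unchanged,
	# with single letter navigation off the key falls through to the application,
	# and otherwise unbound character keys are trapped (see script_trapNonCommandGesture) if configured.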
def getAlternativeScript(self,gesture,script):
if self.passThrough or not gesture.isCharacter:
return script
if not self.singleLetterNavEnabled:
return None
if not script and self.shouldTrapNonCommandGestures:
script=self.script_trapNonCommandGesture
return script
def script_toggleSingleLetterNav(self,gesture):
if self.singleLetterNavEnabled:
self.singleLetterNavEnabled=False
# Translators: Reported when single letter navigation in browse mode is turned off.
ui.message(_("Single letter navigation off"))
else:
self.singleLetterNavEnabled=True
# Translators: Reported when single letter navigation in browse mode is turned on.
ui.message(_("Single letter navigation on"))
# Translators: the description for the toggleSingleLetterNavigation command in browse mode.
script_toggleSingleLetterNav.__doc__=_("Toggles single letter navigation on and off. When on, single letter keys in browse mode jump to various kinds of elements on the page. When off, these keys are passed to the application")
def _get_ElementsListDialog(self):
return ElementsListDialog
def _iterNodesByType(self,itemType,direction="next",pos=None):
"""
Yields L{QuickNavItem} objects representing the ordered positions in this document according to the type being searched for (e.g. link, heading, table etc).
@param itemType: the type being searched for (e.g. link, heading, table etc)
@type itemType: string
@param direction: the direction in which to search (next, previous, up)
@type direction: string
@param pos: the position in the document from where to start the search.
@type pos: Usually an L{textInfos.TextInfo}
@raise NotImplementedError: This type is not supported by this BrowseMode implementation
"""
raise NotImplementedError
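	# Yields QuickNavItem objects for text which is not within a block of links,
	# used by the "notLinkBlock" quick navigation scripts. Subclasses may implement this.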
def _iterNotLinkBlock(self, direction="next", pos=None):
raise NotImplementedError
def _quickNavScript(self,gesture, itemType, direction, errorMessage, readUnit):
if itemType=="notLinkBlock":
iterFactory=self._iterNotLinkBlock
else:
iterFactory=lambda direction,info: self._iterNodesByType(itemType,direction,info)
info=self.selection
try:
item = next(iterFactory(direction, info))
except NotImplementedError:
# Translators: a message when a particular quick nav command is not supported in the current document.
ui.message(_("Not supported in this document"))
return
except StopIteration:
ui.message(errorMessage)
return
# #8831: Report before moving because moving might change the focus, which
# might mutate the document, potentially invalidating info if it is
# offset-based.
if not gesture or not willSayAllResume(gesture):
item.report(readUnit=readUnit)
item.moveTo()
@classmethod
def addQuickNav(
cls,
itemType: str,
key: Optional[str],
nextDoc: str,
nextError: str,
prevDoc: str,
prevError: str,
readUnit: Optional[str] = None
):
"""Adds a script for the given quick nav item.
		@param itemType: The type of item, e.g. "heading", "link", etc.
@param key: The quick navigation key to bind to the script.
		Shift is automatically added for the previous item gesture, e.g. h for heading.
If C{None} is provided, the script is unbound by default.
@param nextDoc: The command description to bind to the script that yields the next quick nav item.
@param nextError: The error message if there are no more quick nav items of type itemType in this direction.
@param prevDoc: The command description to bind to the script that yields the previous quick nav item.
		@param prevError: The error message if there are no previous quick nav items of type itemType.
@param readUnit: The unit (one of the textInfos.UNIT_* constants) to announce when moving to this type of item.
For example, only the line is read when moving to tables to avoid reading a potentially massive table.
If None, the entire item will be announced.
"""
scriptSuffix = itemType[0].upper() + itemType[1:]
scriptName = "next%s" % scriptSuffix
funcName = "script_%s" % scriptName
script = lambda self,gesture: self._quickNavScript(gesture, itemType, "next", nextError, readUnit)
script.__doc__ = nextDoc
script.__name__ = funcName
script.resumeSayAllMode = sayAll.CURSOR.CARET
setattr(cls, funcName, script)
if key is not None:
cls.__gestures["kb:%s" % key] = scriptName
scriptName = "previous%s" % scriptSuffix
funcName = "script_%s" % scriptName
script = lambda self,gesture: self._quickNavScript(gesture, itemType, "previous", prevError, readUnit)
script.__doc__ = prevDoc
script.__name__ = funcName
script.resumeSayAllMode = sayAll.CURSOR.CARET
setattr(cls, funcName, script)
if key is not None:
cls.__gestures["kb:shift+%s" % key] = scriptName
def script_elementsList(self, gesture):
# We need this to be a modal dialog, but it mustn't block this script.
def run():
gui.mainFrame.prePopup()
d = self.ElementsListDialog(self)
d.ShowModal()
d.Destroy()
gui.mainFrame.postPopup()
wx.CallAfter(run)
# Translators: the description for the Elements List command in browse mode.
script_elementsList.__doc__ = _("Lists various types of elements in this document")
script_elementsList.ignoreTreeInterceptorPassThrough = True
def _activateNVDAObject(self, obj):
"""Activate an object in response to a user request.
This should generally perform the default action or click on the object.
@param obj: The object to activate.
@type obj: L{NVDAObjects.NVDAObject}
"""
try:
obj.doAction()
except NotImplementedError:
log.debugWarning("doAction not implemented")
def _activatePosition(self, obj=None):
if not obj:
obj=self.currentNVDAObject
if not obj:
return
if obj.role == controlTypes.Role.MATH:
import mathPres
try:
return mathPres.interactWithMathMl(obj.mathMl)
except (NotImplementedError, LookupError):
pass
return
if self.shouldPassThrough(obj):
obj.setFocus()
self.passThrough = True
reportPassThrough(self)
elif obj.role == controlTypes.Role.EMBEDDEDOBJECT or obj.role in self.APPLICATION_ROLES:
obj.setFocus()
speech.speakObject(obj, reason=OutputReason.FOCUS)
else:
self._activateNVDAObject(obj)
def script_activatePosition(self,gesture):
if config.conf["virtualBuffers"]["autoFocusFocusableElements"]:
self._activatePosition()
else:
self._focusLastFocusableObject(activatePosition=True)
# Translators: the description for the activatePosition script on browseMode documents.
script_activatePosition.__doc__ = _("Activates the current object in the document")
def _focusLastFocusableObject(self, activatePosition=False):
"""Used when auto focus focusable elements is disabled to sync the focus
to the browse mode cursor.
When auto focus focusable elements is disabled, NVDA doesn't focus elements
as the user moves the browse mode cursor. However, there are some cases
where the user always wants to interact with the focus; e.g. if they press
the applications key to open the context menu. In these cases, this method
is called first to sync the focus to the browse mode cursor.
"""
obj = self.currentFocusableNVDAObject
if obj!=self.rootNVDAObject and self._shouldSetFocusToObj(obj) and obj!= api.getFocusObject():
obj.setFocus()
# We might be about to activate or pass through a key which will cause
# this object to change (e.g. checking a check box). However, we won't
# actually get the focus event until after the change has occurred.
# Therefore, we must cache properties for speech before the change occurs.
speech.speakObject(obj, OutputReason.ONLYCACHE)
self._objPendingFocusBeforeActivate = obj
if activatePosition:
# Make sure we activate the object at the caret, which is not necessarily focusable.
self._activatePosition()
def script_passThrough(self,gesture):
if not config.conf["virtualBuffers"]["autoFocusFocusableElements"]:
self._focusLastFocusableObject()
gesture.send()
# Translators: the description for the passThrough script on browseMode documents.
script_passThrough.__doc__ = _("Passes gesture through to the application")
def script_disablePassThrough(self, gesture):
if not self.passThrough or self.disableAutoPassThrough:
return gesture.send()
# #3215 ARIA menus should get the Escape key unconditionally so they can handle it without invoking browse mode first
obj = api.getFocusObject()
if obj and obj.role in self.IGNORE_DISABLE_PASS_THROUGH_WHEN_FOCUSED_ROLES:
return gesture.send()
self.passThrough = False
self.disableAutoPassThrough = False
reportPassThrough(self)
script_disablePassThrough.ignoreTreeInterceptorPassThrough = True
def _set_disableAutoPassThrough(self, state):
# If the user manually switches to focus mode with NVDA+space, that enables
# pass-through and disables auto pass-through. If auto focusing of focusable
# elements is disabled, NVDA won't have synced the focus to the browse mode
# cursor. However, since the user is switching to focus mode, they probably
# want to interact with the focus, so sync the focus here.
if (
state
and not config.conf["virtualBuffers"]["autoFocusFocusableElements"]
and self.passThrough
):
self._focusLastFocusableObject()
self._disableAutoPassThrough = state
def _get_disableAutoPassThrough(self):
return self._disableAutoPassThrough
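	# Default gesture bindings for browse mode scripts.
	# Quick navigation bindings are added below via addQuickNav.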
__gestures={
"kb:NVDA+f7": "elementsList",
"kb:enter": "activatePosition",
"kb:numpadEnter": "activatePosition",
"kb:space": "activatePosition",
"kb:NVDA+shift+space":"toggleSingleLetterNav",
"kb:escape": "disablePassThrough",
"kb:control+enter": "passThrough",
"kb:control+numpadEnter": "passThrough",
"kb:shift+enter": "passThrough",
"kb:shift+numpadEnter": "passThrough",
"kb:control+shift+enter": "passThrough",
"kb:control+shift+numpadEnter": "passThrough",
"kb:alt+enter": "passThrough",
"kb:alt+numpadEnter": "passThrough",
"kb:applications": "passThrough",
"kb:shift+applications": "passThrough",
"kb:shift+f10": "passThrough",
}
# Add quick navigation scripts.
qn = BrowseModeTreeInterceptor.addQuickNav
qn("heading", key="h",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading"))
qn("heading1", key="1",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 1"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 1"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 1"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 1"))
qn("heading2", key="2",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 2"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 2"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 2"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 2"))
qn("heading3", key="3",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 3"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 3"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 3"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 3"))
qn("heading4", key="4",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 4"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 4"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 4"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 4"))
qn("heading5", key="5",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 5"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 5"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 5"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 5"))
qn("heading6", key="6",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 6"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 6"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 6"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 6"))
qn("table", key="t",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next table"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next table"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous table"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous table"),
readUnit=textInfos.UNIT_LINE)
qn("link", key="k",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next link"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next link"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous link"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous link"))
qn("visitedLink", key="v",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next visited link"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next visited link"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous visited link"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous visited link"))
qn("unvisitedLink", key="u",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next unvisited link"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next unvisited link"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous unvisited link"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous unvisited link"))
qn("formField", key="f",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next form field"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next form field"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous form field"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous form field"))
qn("list", key="l",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next list"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next list"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous list"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous list"),
readUnit=textInfos.UNIT_LINE)
qn("listItem", key="i",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next list item"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next list item"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous list item"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous list item"))
qn("button", key="b",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next button"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next button"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous button"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous button"))
qn("edit", key="e",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next edit field"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next edit field"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous edit field"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous edit field"),
readUnit=textInfos.UNIT_LINE)
qn("frame", key="m",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next frame"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next frame"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous frame"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous frame"),
readUnit=textInfos.UNIT_LINE)
qn("separator", key="s",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next separator"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next separator"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous separator"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous separator"))
qn("radioButton", key="r",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next radio button"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next radio button"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous radio button"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous radio button"))
qn("comboBox", key="c",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next combo box"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next combo box"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous combo box"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous combo box"))
qn("checkBox", key="x",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next check box"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next check box"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous check box"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous check box"))
qn("graphic", key="g",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next graphic"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next graphic"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous graphic"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous graphic"))
qn("blockQuote", key="q",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next block quote"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next block quote"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous block quote"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous block quote"))
qn("notLinkBlock", key="n",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("skips forward past a block of links"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no more text after a block of links"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("skips backward past a block of links"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no more text before a block of links"),
readUnit=textInfos.UNIT_LINE)
qn("landmark", key="d",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next landmark"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next landmark"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous landmark"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous landmark"),
readUnit=textInfos.UNIT_LINE)
qn("embeddedObject", key="o",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next embedded object"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next embedded object"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous embedded object"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous embedded object"))
qn("annotation", key="a",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next annotation"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next annotation"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous annotation"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous annotation"))
qn("error", key="w",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next error"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next error"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous error"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous error"))
qn(
"article", key=None,
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next article"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next article"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous article"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous article")
)
qn(
"grouping", key=None,
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next grouping"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next grouping"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous grouping"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous grouping")
)
del qn
class ElementsListDialog(
DpiScalingHelperMixinWithoutInit,
gui.contextHelp.ContextHelpMixin,
wx.Dialog # wxPython does not seem to call base class initializer, put last in MRO
):
helpId = "ElementsList"
ELEMENT_TYPES = (
# Translators: The label of a radio button to select the type of element
# in the browse mode Elements List dialog.
("link", _("Lin&ks")),
# Translators: The label of a radio button to select the type of element
# in the browse mode Elements List dialog.
("heading", _("&Headings")),
# Translators: The label of a radio button to select the type of element
# in the browse mode Elements List dialog.
("formField", _("&Form fields")),
# Translators: The label of a radio button to select the type of element
# in the browse mode Elements List dialog.
("button", _("&Buttons")),
# Translators: The label of a radio button to select the type of element
# in the browse mode Elements List dialog.
("landmark", _("Lan&dmarks")),
)
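	# Each tree entry pairs a QuickNavItem with its parent Element (if any),
	# so that the hierarchy can be rebuilt when the list is filtered.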
Element = collections.namedtuple("Element", ("item", "parent"))
lastSelectedElementType=0
def __init__(self, document):
super().__init__(
parent=gui.mainFrame,
# Translators: The title of the browse mode Elements List dialog.
title=_("Elements List")
)
self.document = document
mainSizer = wx.BoxSizer(wx.VERTICAL)
contentsSizer = wx.BoxSizer(wx.VERTICAL)
# Translators: The label of a group of radio buttons to select the type of element
# in the browse mode Elements List dialog.
child = wx.RadioBox(self, wx.ID_ANY, label=_("Type:"), choices=tuple(et[1] for et in self.ELEMENT_TYPES))
child.SetSelection(self.lastSelectedElementType)
child.Bind(wx.EVT_RADIOBOX, self.onElementTypeChange)
contentsSizer.Add(child, flag=wx.EXPAND)
contentsSizer.AddSpacer(gui.guiHelper.SPACE_BETWEEN_VERTICAL_DIALOG_ITEMS)
self.tree = wx.TreeCtrl(
self,
size=self.scaleSize((500, 300)), # height is chosen to ensure the dialog will fit on an 800x600 screen
style=wx.TR_HAS_BUTTONS | wx.TR_HIDE_ROOT | wx.TR_LINES_AT_ROOT | wx.TR_SINGLE | wx.TR_EDIT_LABELS
)
self.tree.Bind(wx.EVT_SET_FOCUS, self.onTreeSetFocus)
self.tree.Bind(wx.EVT_CHAR, self.onTreeChar)
self.tree.Bind(wx.EVT_TREE_BEGIN_LABEL_EDIT, self.onTreeLabelEditBegin)
self.tree.Bind(wx.EVT_TREE_END_LABEL_EDIT, self.onTreeLabelEditEnd)
self.treeRoot = self.tree.AddRoot("root")
contentsSizer.Add(self.tree,flag=wx.EXPAND)
contentsSizer.AddSpacer(gui.guiHelper.SPACE_BETWEEN_VERTICAL_DIALOG_ITEMS)
# Translators: The label of an editable text field to filter the elements
# in the browse mode Elements List dialog.
filterText = _("Filter b&y:")
labeledCtrl = gui.guiHelper.LabeledControlHelper(self, filterText, wx.TextCtrl)
self.filterEdit = labeledCtrl.control
self.filterEdit.Bind(wx.EVT_TEXT, self.onFilterEditTextChange)
contentsSizer.Add(labeledCtrl.sizer)
contentsSizer.AddSpacer(gui.guiHelper.SPACE_BETWEEN_VERTICAL_DIALOG_ITEMS)
bHelper = gui.guiHelper.ButtonHelper(wx.HORIZONTAL)
# Translators: The label of a button to activate an element in the browse mode Elements List dialog.
# Beware not to set an accelerator that would collide with other controls in this dialog, such as an
# element type radio label.
self.activateButton = bHelper.addButton(self, label=_("Activate"))
self.activateButton.Bind(wx.EVT_BUTTON, lambda evt: self.onAction(True))
# Translators: The label of a button to move to an element
# in the browse mode Elements List dialog.
self.moveButton = bHelper.addButton(self, label=_("&Move to"))
self.moveButton.Bind(wx.EVT_BUTTON, lambda evt: self.onAction(False))
bHelper.addButton(self, id=wx.ID_CANCEL)
contentsSizer.Add(bHelper.sizer, flag=wx.ALIGN_RIGHT)
mainSizer.Add(contentsSizer, border=gui.guiHelper.BORDER_FOR_DIALOGS, flag=wx.ALL)
mainSizer.Fit(self)
self.SetSizer(mainSizer)
self.tree.SetFocus()
self.initElementType(self.ELEMENT_TYPES[self.lastSelectedElementType][0])
self.CentreOnScreen()
def onElementTypeChange(self, evt):
elementType=evt.GetInt()
# We need to make sure this gets executed after the focus event.
# Otherwise, NVDA doesn't seem to get the event.
queueHandler.queueFunction(queueHandler.eventQueue, self.initElementType, self.ELEMENT_TYPES[elementType][0])
self.lastSelectedElementType=elementType
def initElementType(self, elType):
if elType in ("link","button"):
# Links and buttons can be activated.
self.activateButton.Enable()
self.SetAffirmativeId(self.activateButton.GetId())
else:
# No other element type can be activated.
self.activateButton.Disable()
self.SetAffirmativeId(self.moveButton.GetId())
# Gather the elements of this type.
self._elements = []
self._initialElement = None
parentElements = []
isAfterSelection=False
for item in self.document._iterNodesByType(elType):
# Find the parent element, if any.
for parent in reversed(parentElements):
if item.isChild(parent.item):
break
else:
# We're not a child of this parent, so this parent has no more children and can be removed from the stack.
parentElements.pop()
else:
# No parent found, so we're at the root.
# Note that parentElements will be empty at this point, as all parents are no longer relevant and have thus been removed from the stack.
parent = None
element=self.Element(item,parent)
self._elements.append(element)
if not isAfterSelection:
isAfterSelection=item.isAfterSelection
if not isAfterSelection:
# The element immediately preceding or overlapping the caret should be the initially selected element.
# Since we have not yet passed the selection, use this as the initial element.
try:
self._initialElement = self._elements[-1]
except IndexError:
# No previous element.
pass
# This could be the parent of a subsequent element, so add it to the parents stack.
parentElements.append(element)
# Start with no filtering.
self.filterEdit.ChangeValue("")
self.filter("", newElementType=True)
def filter(self, filterText, newElementType=False):
# If this is a new element type, use the element nearest the cursor.
# Otherwise, use the currently selected element.
# #8753: wxPython 4 returns "invalid tree item" when the tree view is empty, so use initial element if appropriate.
try:
defaultElement = self._initialElement if newElementType else self.tree.GetItemData(self.tree.GetSelection())
except:
defaultElement = self._initialElement
# Clear the tree.
self.tree.DeleteChildren(self.treeRoot)
# Populate the tree with elements matching the filter text.
elementsToTreeItems = {}
defaultItem = None
matched = False
#Do case-insensitive matching by lowering both filterText and each element's text.
filterText=filterText.lower()
for element in self._elements:
label=element.item.label
if filterText and filterText not in label.lower():
continue
matched = True
parent = element.parent
if parent:
parent = elementsToTreeItems.get(parent)
item = self.tree.AppendItem(parent or self.treeRoot, label)
self.tree.SetItemData(item, element)
elementsToTreeItems[element] = item
if element == defaultElement:
defaultItem = item
self.tree.ExpandAll()
if not matched:
# No items, so disable the buttons.
self.activateButton.Disable()
self.moveButton.Disable()
return
# If there's no default item, use the first item in the tree.
self.tree.SelectItem(defaultItem or self.tree.GetFirstChild(self.treeRoot)[0])
# Enable the button(s).
# If the activate button isn't the default button, it is disabled for this element type and shouldn't be enabled here.
if self.AffirmativeId == self.activateButton.Id:
self.activateButton.Enable()
self.moveButton.Enable()
def onTreeSetFocus(self, evt):
# Start with no search.
self._searchText = ""
self._searchCallLater = None
evt.Skip()
def onTreeChar(self, evt):
key = evt.KeyCode
if key == wx.WXK_RETURN:
# The enter key should be propagated to the dialog and thus activate the default button,
# but this is broken (wx ticket #3725).
# Therefore, we must catch the enter key here.
# Activate the current default button.
evt = wx.CommandEvent(wx.wxEVT_COMMAND_BUTTON_CLICKED, wx.ID_ANY)
button = self.FindWindowById(self.AffirmativeId)
if button.Enabled:
button.ProcessEvent(evt)
else:
wx.Bell()
elif key == wx.WXK_F2:
item=self.tree.GetSelection()
if item:
selectedItemType=self.tree.GetItemData(item).item
self.tree.EditLabel(item)
evt.Skip()
elif key >= wx.WXK_START or key == wx.WXK_BACK:
# Non-printable character.
self._searchText = ""
evt.Skip()
else:
# Search the list.
# We have to implement this ourselves, as tree views don't accept space as a search character.
char = chr(evt.UnicodeKey).lower()
			# If the same character is typed twice, do the same search.
if self._searchText != char:
self._searchText += char
if self._searchCallLater:
self._searchCallLater.Restart()
else:
self._searchCallLater = wx.CallLater(1000, self._clearSearchText)
self.search(self._searchText)
def onTreeLabelEditBegin(self,evt):
item=self.tree.GetSelection()
selectedItemType = self.tree.GetItemData(item).item
if not selectedItemType.isRenameAllowed:
evt.Veto()
def onTreeLabelEditEnd(self,evt):
selectedItemNewName=evt.GetLabel()
item=self.tree.GetSelection()
selectedItemType = self.tree.GetItemData(item).item
selectedItemType.rename(selectedItemNewName)
def _clearSearchText(self):
self._searchText = ""
def search(self, searchText):
item = self.tree.GetSelection()
if not item:
# No items.
return
# First try searching from the current item.
# Failing that, search from the first item.
items = itertools.chain(self._iterReachableTreeItemsFromItem(item), self._iterReachableTreeItemsFromItem(self.tree.GetFirstChild(self.treeRoot)[0]))
if len(searchText) == 1:
# If only a single character has been entered, skip (search after) the current item.
next(items)
for item in items:
if self.tree.GetItemText(item).lower().startswith(searchText):
self.tree.SelectItem(item)
return
# Not found.
wx.Bell()
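	# Iterates over the given tree item, its descendants (only where the parent is expanded)
	# and its following siblings, in the order they appear in the tree.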
def _iterReachableTreeItemsFromItem(self, item):
while item:
yield item
childItem = self.tree.GetFirstChild(item)[0]
if childItem and self.tree.IsExpanded(item):
# Has children and is reachable, so recurse.
for childItem in self._iterReachableTreeItemsFromItem(childItem):
yield childItem
item = self.tree.GetNextSibling(item)
def onFilterEditTextChange(self, evt):
self.filter(self.filterEdit.GetValue())
evt.Skip()
def onAction(self, activate):
prevFocus = gui.mainFrame.prevFocus
self.Close()
		# Save off the last selected element type onto the class so it's used in initialization next time.
self.__class__.lastSelectedElementType=self.lastSelectedElementType
item = self.tree.GetSelection()
item = self.tree.GetItemData(item).item
if activate:
item.activate()
else:
def move():
speech.cancelSpeech()
# Avoid double announce if item.obj is about to gain focus.
if not (
self.document.passThrough
and getattr(item, "obj", False)
and item.obj != prevFocus
and controlTypes.State.FOCUSABLE in item.obj.states
):
# #8831: Report before moving because moving might change the focus, which
# might mutate the document, potentially invalidating info if it is
# offset-based.
item.report()
item.moveTo()
# We must use core.callLater rather than wx.CallLater to ensure that the callback runs within NVDA's core pump.
# If it didn't, and it directly or indirectly called wx.Yield, it could start executing NVDA's core pump from within the yield, causing recursion.
core.callLater(100, move)
class BrowseModeDocumentTextInfo(textInfos.TextInfo):
def _get_focusableNVDAObjectAtStart(self):
try:
item = next(self.obj._iterNodesByType("focusable", "up", self))
except StopIteration:
return self.obj.rootNVDAObject
if not item:
return self.obj.rootNVDAObject
return item.obj
class BrowseModeDocumentTreeInterceptor(documentBase.DocumentWithTableNavigation,cursorManager.CursorManager,BrowseModeTreeInterceptor,treeInterceptorHandler.DocumentTreeInterceptor):
programmaticScrollMayFireEvent = False
def __init__(self,obj):
super(BrowseModeDocumentTreeInterceptor,self).__init__(obj)
self._lastProgrammaticScrollTime = None
self.documentConstantIdentifier = self.documentConstantIdentifier
self._lastFocusObj = None
self._objPendingFocusBeforeActivate = None
self._hadFirstGainFocus = False
self._enteringFromOutside = True
# We need to cache this because it will be unavailable once the document dies.
if not hasattr(self.rootNVDAObject.appModule, "_browseModeRememberedCaretPositions"):
self.rootNVDAObject.appModule._browseModeRememberedCaretPositions = {}
self._lastCaretPosition = None
#: True if the last caret move was due to a focus change.
self._lastCaretMoveWasFocus = False
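	# On termination, remember the last caret position for this document (keyed by its constant identifier)
	# so it can be restored if the document is loaded again.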
def terminate(self):
if self.shouldRememberCaretPositionAcrossLoads and self._lastCaretPosition:
try:
self.rootNVDAObject.appModule._browseModeRememberedCaretPositions[self.documentConstantIdentifier] = self._lastCaretPosition
except AttributeError:
# The app module died.
pass
def _get_currentNVDAObject(self):
return self.makeTextInfo(textInfos.POSITION_CARET).NVDAObjectAtStart
def event_treeInterceptor_gainFocus(self):
doSayAll=False
hadFirstGainFocus=self._hadFirstGainFocus
if not hadFirstGainFocus:
# This treeInterceptor is gaining focus for the first time.
# Fake a focus event on the focus object, as the treeInterceptor may have missed the actual focus event.
focus = api.getFocusObject()
self.event_gainFocus(focus, lambda: focus.event_gainFocus())
if not self.passThrough:
# We only set the caret position if in browse mode.
# If in focus mode, the document must have forced the focus somewhere,
# so we don't want to override it.
initialPos = self._getInitialCaretPos()
if initialPos:
self.selection = self.makeTextInfo(initialPos)
reportPassThrough(self)
doSayAll=config.conf['virtualBuffers']['autoSayAllOnPageLoad']
self._hadFirstGainFocus = True
if not self.passThrough:
if doSayAll:
speech.speakObjectProperties(self.rootNVDAObject, name=True, states=True, reason=OutputReason.FOCUS)
sayAll.SayAllHandler.readText(sayAll.CURSOR.CARET)
else:
# Speak it like we would speak focus on any other document object.
# This includes when entering the treeInterceptor for the first time:
if not hadFirstGainFocus:
speech.speakObject(self.rootNVDAObject, reason=OutputReason.FOCUS)
else:
# And when coming in from an outside object
# #4069 But not when coming up from a non-rendered descendant.
ancestors=api.getFocusAncestors()
fdl=api.getFocusDifferenceLevel()
try:
tl=ancestors.index(self.rootNVDAObject)
except ValueError:
tl=len(ancestors)
if fdl<=tl:
speech.speakObject(self.rootNVDAObject, reason=OutputReason.FOCUS)
info = self.selection
if not info.isCollapsed:
speech.speakPreselectedText(info.text)
else:
info.expand(textInfos.UNIT_LINE)
speech.speakTextInfo(info, reason=OutputReason.CARET, unit=textInfos.UNIT_LINE)
reportPassThrough(self)
braille.handler.handleGainFocus(self)
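	# Caret events are only passed on in focus mode; in browse mode the interceptor manages its own virtual caret.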
def event_caret(self, obj, nextHandler):
if self.passThrough:
nextHandler()
def _activateLongDesc(self,controlField):
"""
Activates (presents) the long description for a particular field (usually a graphic).
		@param controlField: the field whose long description should be activated. This field is guaranteed to have the HASLONGDESC state.
@type controlField: dict
"""
raise NotImplementedError
def _activatePosition(self, obj=None, info=None):
if info:
obj=info.NVDAObjectAtStart
if not obj:
return
super(BrowseModeDocumentTreeInterceptor,self)._activatePosition(obj=obj)
def _set_selection(self, info, reason=OutputReason.CARET):
super(BrowseModeDocumentTreeInterceptor, self)._set_selection(info)
if isScriptWaiting() or not info.isCollapsed:
return
# Save the last caret position for use in terminate().
# This must be done here because the buffer might be cleared just before terminate() is called,
# causing the last caret position to be lost.
caret = info.copy()
caret.collapse()
self._lastCaretPosition = caret.bookmark
review.handleCaretMove(caret)
if reason == OutputReason.FOCUS:
self._lastCaretMoveWasFocus = True
focusObj = api.getFocusObject()
if focusObj==self.rootNVDAObject:
return
else:
self._lastCaretMoveWasFocus = False
focusObj=info.focusableNVDAObjectAtStart
obj=info.NVDAObjectAtStart
if not obj:
log.debugWarning("Invalid NVDAObjectAtStart")
return
if obj==self.rootNVDAObject:
return
obj.scrollIntoView()
if self.programmaticScrollMayFireEvent:
self._lastProgrammaticScrollTime = time.time()
if focusObj:
self.passThrough = self.shouldPassThrough(focusObj, reason=reason)
if (
not eventHandler.isPendingEvents("gainFocus")
and focusObj != self.rootNVDAObject
and focusObj != api.getFocusObject()
and self._shouldSetFocusToObj(focusObj)
):
followBrowseModeFocus = config.conf["virtualBuffers"]["autoFocusFocusableElements"]
if followBrowseModeFocus or self.passThrough:
focusObj.setFocus()
# Queue the reporting of pass through mode so that it will be spoken after the actual content.
queueHandler.queueFunction(queueHandler.eventQueue, reportPassThrough, self)
def _shouldSetFocusToObj(self, obj):
"""Determine whether an object should receive focus.
Subclasses may extend or override this method.
@param obj: The object in question.
@type obj: L{NVDAObjects.NVDAObject}
"""
return obj.role not in self.APPLICATION_ROLES and obj.isFocusable and obj.role!=controlTypes.Role.EMBEDDEDOBJECT
def script_activateLongDesc(self,gesture):
info=self.makeTextInfo(textInfos.POSITION_CARET)
info.expand("character")
for field in reversed(info.getTextWithFields()):
if isinstance(field,textInfos.FieldCommand) and field.command=="controlStart":
states=field.field.get('states')
if states and controlTypes.State.HASLONGDESC in states:
self._activateLongDesc(field.field)
break
else:
# Translators: the message presented when the activateLongDescription script cannot locate a long description to activate.
ui.message(_("No long description"))
# Translators: the description for the activateLongDescription script on browseMode documents.
script_activateLongDesc.__doc__=_("Shows the long description at this position if one is found.")
def event_caretMovementFailed(self, obj, nextHandler, gesture=None):
if not self.passThrough or not gesture or not config.conf["virtualBuffers"]["autoPassThroughOnCaretMove"]:
return nextHandler()
if gesture.mainKeyName in ("home", "end"):
# Home, end, control+home and control+end should not disable pass through.
return nextHandler()
script = self.getScript(gesture)
if not script:
return nextHandler()
# We've hit the edge of the focused control.
# Therefore, move the virtual caret to the same edge of the field.
info = self.makeTextInfo(textInfos.POSITION_CARET)
info.expand(textInfos.UNIT_CONTROLFIELD)
if gesture.mainKeyName in ("leftArrow", "upArrow", "pageUp"):
info.collapse()
else:
info.collapse(end=True)
info.move(textInfos.UNIT_CHARACTER, -1)
info.updateCaret()
scriptHandler.queueScript(script, gesture)
currentExpandedControl=None #: an NVDAObject representing the control that has just been expanded with the collapseOrExpandControl script.
def script_collapseOrExpandControl(self, gesture):
if not config.conf["virtualBuffers"]["autoFocusFocusableElements"]:
self._focusLastFocusableObject()
oldFocus = api.getFocusObject()
oldFocusStates = oldFocus.states
gesture.send()
if controlTypes.State.COLLAPSED in oldFocusStates:
self.passThrough = True
# When a control (such as a combo box) is expanded, we expect that its descendants will be classed as being outside the browseMode document.
# We save off the expanded control so that the next focus event within the browseMode document can see if it is for the control,
# and if so, it disables passthrough, as the control has obviously been collapsed again.
self.currentExpandedControl=oldFocus
elif not self.disableAutoPassThrough:
self.passThrough = False
reportPassThrough(self)
def _tabOverride(self, direction):
"""Override the tab order if the virtual caret is not within the currently focused node.
This is done because many nodes are not focusable and it is thus possible for the virtual caret to be unsynchronised with the focus.
In this case, we want tab/shift+tab to move to the next/previous focusable node relative to the virtual caret.
If the virtual caret is within the focused node, the tab/shift+tab key should be passed through to allow normal tab order navigation.
Note that this method does not pass the key through itself if it is not overridden. This should be done by the calling script if C{False} is returned.
@param direction: The direction in which to move.
@type direction: str
@return: C{True} if the tab order was overridden, C{False} if not.
@rtype: bool
"""
if self._lastCaretMoveWasFocus:
# #5227: If the caret was last moved due to a focus change, don't override tab.
# This ensures that tabbing behaves as expected after tabbing hits an iframe document.
return False
focus = api.getFocusObject()
try:
focusInfo = self.makeTextInfo(focus)
except:
return False
# We only want to override the tab order if the caret is not within the focused node.
caretInfo=self.makeTextInfo(textInfos.POSITION_CARET)
		# Only check that the caret is within the focus for things that are not documents,
		# as for documents we should always override.
if focus.role!=controlTypes.Role.DOCUMENT or controlTypes.State.EDITABLE in focus.states:
# Expand to one character, as isOverlapping() doesn't yield the desired results with collapsed ranges.
caretInfo.expand(textInfos.UNIT_CHARACTER)
if focusInfo.isOverlapping(caretInfo):
return False
# If we reach here, we do want to override tab/shift+tab if possible.
# Find the next/previous focusable node.
try:
item = next(self._iterNodesByType("focusable", direction, caretInfo))
except StopIteration:
return False
obj=item.obj
newInfo=item.textInfo
if obj == api.getFocusObject():
# This node is already focused, so we need to move to and speak this node here.
newCaret = newInfo.copy()
newCaret.collapse()
self._set_selection(newCaret, reason=OutputReason.FOCUS)
if self.passThrough:
obj.event_gainFocus()
else:
speech.speakTextInfo(newInfo, reason=OutputReason.FOCUS)
else:
# This node doesn't have the focus, so just set focus to it. The gainFocus event will handle the rest.
obj.setFocus()
return True
def script_tab(self, gesture):
if not self._tabOverride("next"):
gesture.send()
def script_shiftTab(self, gesture):
if not self._tabOverride("previous"):
gesture.send()
def event_focusEntered(self,obj,nextHandler):
if obj==self.rootNVDAObject:
self._enteringFromOutside = True
# Even if passThrough is enabled, we still completely drop focusEntered events here.
# In order to get them back when passThrough is enabled, we replay them with the _replayFocusEnteredEvents method in event_gainFocus.
		# The reason for this is to ensure that focusEntered events are delayed until a focus event has had a chance to disable pass through mode,
		# as in that case we would not want them at all.
def _shouldIgnoreFocus(self, obj):
"""Determines whether focus on a given object should be ignored.
@param obj: The object in question.
@type obj: L{NVDAObjects.NVDAObject}
@return: C{True} if focus on L{obj} should be ignored, C{False} otherwise.
@rtype: bool
"""
return False
def _postGainFocus(self, obj):
"""Executed after a gainFocus within the browseMode document.
This will not be executed if L{event_gainFocus} determined that it should abort and call nextHandler.
@param obj: The object that gained focus.
@type obj: L{NVDAObjects.NVDAObject}
"""
def _replayFocusEnteredEvents(self):
# We blocked the focusEntered events because we were in browse mode,
# but now that we've switched to focus mode, we need to fire them.
for parent in api.getFocusAncestors()[api.getFocusDifferenceLevel():]:
try:
parent.event_focusEntered()
except:
log.exception("Error executing focusEntered event: %s" % parent)
def event_gainFocus(self, obj, nextHandler):
enteringFromOutside=self._enteringFromOutside
self._enteringFromOutside=False
if not self.isReady:
if self.passThrough:
self._replayFocusEnteredEvents()
nextHandler()
return
# If a control has been expanded by the collapseOrExpandControl script, and this focus event is for it,
# disable passThrough and report the control, as the control has obviously been collapsed again.
# Note that whether or not this focus event was for that control, the last expanded control is forgotten, so that only the next focus event for the browseMode document can handle the collapsed control.
lastExpandedControl=self.currentExpandedControl
self.currentExpandedControl=None
if self.passThrough and obj==lastExpandedControl:
self.passThrough=False
reportPassThrough(self)
nextHandler()
return
if enteringFromOutside and not self.passThrough and self._lastFocusObj==obj:
# We're entering the document from outside (not returning from an inside object/application; #3145)
# and this was the last non-root node with focus, so ignore this focus event.
# Otherwise, if the user switches away and back to this document, the cursor will jump to this node.
# This is not ideal if the user was positioned over a node which cannot receive focus.
return
if obj==self.rootNVDAObject:
if self.passThrough:
self._replayFocusEnteredEvents()
return nextHandler()
return
if not self.passThrough and self._shouldIgnoreFocus(obj):
return
# If the previous focus object was removed, we might hit a false positive for overlap detection.
# Track the previous focus target so that we can account for this scenario.
previousFocusObjIsDefunct = False
if self._lastFocusObj:
try:
states = self._lastFocusObj.states
previousFocusObjIsDefunct = controlTypes.State.DEFUNCT in states
except Exception:
log.debugWarning(
"Error fetching states when checking for defunct object. Treating object as defunct anyway.",
exc_info=True
)
previousFocusObjIsDefunct = True
self._lastFocusObj=obj
try:
focusInfo = self.makeTextInfo(obj)
except:
# This object is not in the treeInterceptor, even though it resides beneath the document.
# Automatic pass through should be enabled in certain circumstances where this occurs.
if not self.passThrough and self.shouldPassThrough(obj, reason=OutputReason.FOCUS):
self.passThrough=True
reportPassThrough(self)
self._replayFocusEnteredEvents()
return nextHandler()
#We only want to update the caret and speak the field if we're not in the same one as before
caretInfo=self.makeTextInfo(textInfos.POSITION_CARET)
# Expand to one character, as isOverlapping() doesn't treat, for example, (4,4) and (4,5) as overlapping.
caretInfo.expand(textInfos.UNIT_CHARACTER)
isOverlapping = focusInfo.isOverlapping(caretInfo)
if not self._hadFirstGainFocus or not isOverlapping or (isOverlapping and previousFocusObjIsDefunct):
# The virtual caret is not within the focus node.
oldPassThrough=self.passThrough
passThrough = self.shouldPassThrough(obj, reason=OutputReason.FOCUS)
if not oldPassThrough and (passThrough or sayAll.SayAllHandler.isRunning()):
# If pass-through is disabled, cancel speech, as a focus change should cause page reading to stop.
# This must be done before auto-pass-through occurs, as we want to stop page reading even if pass-through will be automatically enabled by this focus change.
speech.cancelSpeech()
self.passThrough=passThrough
if not self.passThrough:
# We read the info from the browseMode document instead of the control itself.
speech.speakTextInfo(focusInfo, reason=OutputReason.FOCUS)
# However, we still want to update the speech property cache so that property changes will be spoken properly.
speech.speakObject(obj, controlTypes.OutputReason.ONLYCACHE)
# As we do not call nextHandler which would trigger the vision framework to handle gain focus,
# we need to call it manually here.
vision.handler.handleGainFocus(obj)
else:
# Although we are going to speak the object rather than textInfo content, we still need to silently speak the textInfo content so that the textInfo speech cache is updated correctly.
# Not doing this would cause later browseMode speaking to either not speak controlFields it had entered, or speak controlField exits after having already exited.
# See #7435 for a discussion on this.
speech.speakTextInfo(focusInfo, reason=OutputReason.ONLYCACHE)
self._replayFocusEnteredEvents()
nextHandler()
focusInfo.collapse()
self._set_selection(focusInfo, reason=OutputReason.FOCUS)
else:
# The virtual caret was already at the focused node.
if not self.passThrough:
# This focus change was caused by a virtual caret movement, so don't speak the focused node to avoid double speaking.
# However, we still want to update the speech property cache so that property changes will be spoken properly.
speech.speakObject(obj, OutputReason.ONLYCACHE)
if config.conf["virtualBuffers"]["autoFocusFocusableElements"]:
# As we do not call nextHandler which would trigger the vision framework to handle gain focus,
# we need to call it manually here.
# Note: this is usually called after the caret movement.
vision.handler.handleGainFocus(obj)
elif (
self._objPendingFocusBeforeActivate
and obj == self._objPendingFocusBeforeActivate
and obj is not self._objPendingFocusBeforeActivate
):
# With auto focus focusable elements disabled, when the user activates
# an element (e.g. by pressing enter) or presses a key which we pass
# through (e.g. control+enter), we call _focusLastFocusableObject.
# However, the activation/key press might cause a property change
# before we get the focus event, so NVDA's normal reporting of
# changes to the focus won't pick it up.
# The speech property cache on _objPendingFocusBeforeActivate reflects
# the properties before the activation/key, so use that to speak any
# changes.
speech.speakObject(
self._objPendingFocusBeforeActivate,
OutputReason.CHANGE
)
self._objPendingFocusBeforeActivate = None
else:
self._replayFocusEnteredEvents()
return nextHandler()
self._postGainFocus(obj)
event_gainFocus.ignoreIsReady=True
def _handleScrollTo(
self,
obj: Union[NVDAObject, textInfos.TextInfo],
) -> bool:
"""Handle scrolling the browseMode document to a given object in response to an event.
Subclasses should call this from an event which indicates that the document has scrolled.
@postcondition: The virtual caret is moved to L{obj} and the buffer content for L{obj} is reported.
@param obj: The object to which the document should scroll.
@return: C{True} if the document was scrolled, C{False} if not.
@note: If C{False} is returned, calling events should probably call their nextHandler.
"""
if self.programmaticScrollMayFireEvent and self._lastProgrammaticScrollTime and time.time() - self._lastProgrammaticScrollTime < 0.4:
# This event was probably caused by this browseMode document's call to scrollIntoView().
# Therefore, ignore it. Otherwise, the cursor may bounce back to the scroll point.
# However, pretend we handled it, as we don't want it to be passed on to the object either.
return True
if isinstance(obj, NVDAObject):
try:
scrollInfo = self.makeTextInfo(obj)
except (NotImplementedError, RuntimeError):
return False
elif isinstance(obj, textInfos.TextInfo):
scrollInfo = obj.copy()
else:
raise ValueError(f"{obj} is not a supported type")
#We only want to update the caret and speak the field if we're not in the same one as before
caretInfo=self.makeTextInfo(textInfos.POSITION_CARET)
# Expand to one character, as isOverlapping() doesn't treat, for example, (4,4) and (4,5) as overlapping.
caretInfo.expand(textInfos.UNIT_CHARACTER)
if not scrollInfo.isOverlapping(caretInfo):
if scrollInfo.isCollapsed:
scrollInfo.expand(textInfos.UNIT_LINE)
speech.speakTextInfo(scrollInfo, reason=OutputReason.CARET)
scrollInfo.collapse()
self.selection = scrollInfo
return True
return False
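	# Illustrative sketch, not part of the original module: a subclass would
	# typically call _handleScrollTo from an event handler that fires when the
	# document scrolls, falling back to the next handler when the scroll was not
	# handled. The event name below is an assumption for the example only.
	#
	# 	def event_scrollingStart(self, obj, nextHandler):
	# 		if not self._handleScrollTo(obj):
	# 			nextHandler()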
def _isNVDAObjectInApplication_noWalk(self, obj):
"""Determine whether a given object is within an application without walking ancestors.
		The base implementation checks whether the object has an application role,
		or is a non-editable-text object directly inside a combo box.
Subclasses can override this if they can provide a definite answer without needing to walk.
For example, for virtual buffers, if the object is in the buffer,
it definitely isn't in an application.
L{_isNVDAObjectInApplication} calls this and walks to the next ancestor if C{None} is returned.
@return: C{True} if definitely in an application,
C{False} if definitely not in an application,
C{None} if this can't be determined without walking ancestors.
"""
if (
			# roles such as application and dialog should be treated as being within an "application" and therefore outside of the browseMode document.
obj.role in self.APPLICATION_ROLES
# Anything other than an editable text box inside a combo box should be
# treated as being outside a browseMode document.
or (
obj.role != controlTypes.Role.EDITABLETEXT and obj.container
and obj.container.role == controlTypes.Role.COMBOBOX
)
):
return True
return None
def _isNVDAObjectInApplication(self, obj):
"""Determine whether a given object is within an application.
The object is considered to be within an application if it or one of its ancestors has an application role.
This should only be called on objects beneath the treeInterceptor's root NVDAObject.
@param obj: The object in question.
@type obj: L{NVDAObjects.NVDAObject}
@return: C{True} if L{obj} is within an application, C{False} otherwise.
@rtype: bool
"""
# We cache the result for each object we walk.
# There can be browse mode documents within other documents and the result might be different between these,
# so the cache must be maintained on the TreeInterceptor rather than the object itself.
try:
cache = self._isInAppCache
except AttributeError:
# Create this lazily, as this method isn't used by all browse mode implementations.
cache = self._isInAppCache = weakref.WeakKeyDictionary()
objs = []
def doResult(result):
# Cache this on descendants we've walked over.
for obj in objs:
cache[obj] = result
return result
while obj and obj != self.rootNVDAObject:
inApp = cache.get(obj)
if inApp is not None:
# We found a cached result.
return doResult(inApp)
objs.append(obj)
inApp = self._isNVDAObjectInApplication_noWalk(obj)
if inApp is not None:
return doResult(inApp)
# We must walk ancestors.
# Cache container.
container = obj.container
obj.container = container
obj = container
return doResult(False)
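	# Note added for clarity (not in the original source): the walk above caches
	# the final True/False answer on every object it passed over, keyed per
	# TreeInterceptor in self._isInAppCache, so later queries for objects in the
	# same subtree are answered without walking the ancestry again.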
def _get_documentConstantIdentifier(self):
"""Get the constant identifier for this document.
This identifier should uniquely identify all instances (not just one instance) of a document for at least the current session of the hosting application.
Generally, the document URL should be used.
@return: The constant identifier for this document, C{None} if there is none.
"""
return None
def _get_shouldRememberCaretPositionAcrossLoads(self):
"""Specifies whether the position of the caret should be remembered when this document is loaded again.
This is useful when the browser remembers the scroll position for the document,
but does not communicate this information via APIs.
The remembered caret position is associated with this document using L{documentConstantIdentifier}.
@return: C{True} if the caret position should be remembered, C{False} if not.
@rtype: bool
"""
docConstId = self.documentConstantIdentifier
# Return True if the URL indicates that this is probably a web browser document.
# We do this check because we don't want to remember caret positions for email messages, etc.
if isinstance(docConstId, str):
protocols=("http", "https", "ftp", "ftps", "file")
protocol=docConstId.split("://", 1)[0]
return protocol in protocols
return False
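	# Illustrative behaviour of the protocol check above (example identifiers are
	# made up, not from the original source):
	# 	"https://example.com/doc" -> protocol "https" -> position is remembered
	# 	"about:blank" -> no "://", so the whole string is treated as the protocol
	# 	-> position is not remembered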
def _getInitialCaretPos(self):
"""Retrieve the initial position of the caret after the buffer has been loaded.
This position, if any, will be passed to L{makeTextInfo}.
Subclasses should extend this method.
@return: The initial position of the caret, C{None} if there isn't one.
@rtype: TextInfo position
"""
if self.shouldRememberCaretPositionAcrossLoads:
try:
return self.rootNVDAObject.appModule._browseModeRememberedCaretPositions[self.documentConstantIdentifier]
except KeyError:
pass
return None
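	# Assumed shape of the cache read above (illustrative only, not from the
	# original source): a plain mapping on the app module from
	# documentConstantIdentifier to the previously saved caret position, e.g.
	# 	appModule._browseModeRememberedCaretPositions = {
	# 		"https://example.com/doc": <bookmark of the last caret position>,
	# 	}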
def getEnclosingContainerRange(self, textRange):
textRange = textRange.copy()
textRange.collapse()
try:
item = next(self._iterNodesByType("container", "up", textRange))
except (NotImplementedError,StopIteration):
try:
item = next(self._iterNodesByType("landmark", "up", textRange))
except (NotImplementedError,StopIteration):
return
return item.textInfo
def script_moveToStartOfContainer(self,gesture):
info=self.makeTextInfo(textInfos.POSITION_CARET)
info.expand(textInfos.UNIT_CHARACTER)
container=self.getEnclosingContainerRange(info)
if not container:
# Translators: Reported when the user attempts to move to the start or end of a container
# (list, table, etc.) but there is no container.
ui.message(_("Not in a container"))
return
container.collapse()
self._set_selection(container, reason=OutputReason.QUICKNAV)
if not willSayAllResume(gesture):
container.expand(textInfos.UNIT_LINE)
speech.speakTextInfo(container, reason=OutputReason.FOCUS)
script_moveToStartOfContainer.resumeSayAllMode = sayAll.CURSOR.CARET
# Translators: Description for the Move to start of container command in browse mode.
script_moveToStartOfContainer.__doc__=_("Moves to the start of the container element, such as a list or table")
def script_movePastEndOfContainer(self,gesture):
info=self.makeTextInfo(textInfos.POSITION_CARET)
info.expand(textInfos.UNIT_CHARACTER)
container=self.getEnclosingContainerRange(info)
if not container:
# Translators: Reported when the user attempts to move to the start or end of a container
# (list, table, etc.) but there is no container.
ui.message(_("Not in a container"))
return
container.collapse(end=True)
docEnd=container.obj.makeTextInfo(textInfos.POSITION_LAST)
if container.compareEndPoints(docEnd,"endToEnd")>=0:
container=docEnd
# Translators: a message reported when:
# Review cursor is at the bottom line of the current navigator object.
# Landing at the end of a browse mode document when trying to jump to the end of the current container.
ui.message(_("Bottom"))
self._set_selection(container, reason=OutputReason.QUICKNAV)
if not willSayAllResume(gesture):
container.expand(textInfos.UNIT_LINE)
speech.speakTextInfo(container, reason=OutputReason.FOCUS)
script_movePastEndOfContainer.resumeSayAllMode = sayAll.CURSOR.CARET
# Translators: Description for the Move past end of container command in browse mode.
script_movePastEndOfContainer.__doc__=_("Moves past the end of the container element, such as a list or table")
NOT_LINK_BLOCK_MIN_LEN = 30
def _isSuitableNotLinkBlock(self, textRange):
return len(textRange.text) >= self.NOT_LINK_BLOCK_MIN_LEN
def _iterNotLinkBlock(self, direction="next", pos=None):
links = self._iterNodesByType("link", direction=direction, pos=pos)
# We want to compare each link against the next link.
item1 = next(links, None)
if item1 is None:
return
for item2 in links:
# If the distance between the links is small, this is probably just a piece of non-link text within a block of links; e.g. an inactive link of a nav bar.
if direction=="previous":
textRange=item1.textInfo.copy()
textRange.collapse()
textRange.setEndPoint(item2.textInfo,"startToEnd")
else:
textRange=item2.textInfo.copy()
textRange.collapse()
textRange.setEndPoint(item1.textInfo,"startToEnd")
if self._isSuitableNotLinkBlock(textRange):
yield TextInfoQuickNavItem("notLinkBlock", self, textRange)
item1=item2
__gestures={
"kb:NVDA+d": "activateLongDesc",
"kb:alt+upArrow": "collapseOrExpandControl",
"kb:alt+downArrow": "collapseOrExpandControl",
"kb:tab": "tab",
"kb:shift+tab": "shiftTab",
"kb:shift+,": "moveToStartOfContainer",
"kb:,": "movePastEndOfContainer",
}
@script(
description=_(
# Translators: the description for the toggleScreenLayout script.
"Toggles on and off if the screen layout is preserved while rendering the document content"
),
gesture="kb:NVDA+v",
)
def script_toggleScreenLayout(self, gesture):
# Translators: The message reported for not supported toggling of screen layout
ui.message(_("Not supported in this document."))
| [((43000, 43053), 'collections.namedtuple', 'collections.namedtuple', (['"""Element"""', "('item', 'parent')"], {}), "('Element', ('item', 'parent'))\n", (43022, 43053), False, 'import collections\n'), ((7130, 7186), 'speech.speakTextInfo', 'speech.speakTextInfo', (['info'], {'reason': 'OutputReason.QUICKNAV'}), '(info, reason=OutputReason.QUICKNAV)\n', (7150, 7186), False, 'import speech\n'), ((15523, 15555), 'winsound.PlaySound', 'winsound.PlaySound', (['"""default"""', '(1)'], {}), "('default', 1)\n", (15541, 15555), False, 'import winsound\n'), ((20842, 20859), 'wx.CallAfter', 'wx.CallAfter', (['run'], {}), '(run)\n', (20854, 20859), False, 'import wx\n'), ((24315, 24335), 'api.getFocusObject', 'api.getFocusObject', ([], {}), '()\n', (24333, 24335), False, 'import api\n'), ((43312, 43336), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (43323, 43336), False, 'import wx\n'), ((43356, 43380), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (43367, 43380), False, 'import wx\n'), ((44707, 44772), 'gui.guiHelper.LabeledControlHelper', 'gui.guiHelper.LabeledControlHelper', (['self', 'filterText', 'wx.TextCtrl'], {}), '(self, filterText, wx.TextCtrl)\n', (44741, 44772), False, 'import gui\n'), ((45013, 45054), 'gui.guiHelper.ButtonHelper', 'gui.guiHelper.ButtonHelper', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (45039, 45054), False, 'import gui\n'), ((46252, 46365), 'queueHandler.queueFunction', 'queueHandler.queueFunction', (['queueHandler.eventQueue', 'self.initElementType', 'self.ELEMENT_TYPES[elementType][0]'], {}), '(queueHandler.eventQueue, self.initElementType,\n self.ELEMENT_TYPES[elementType][0])\n', (46278, 46365), False, 'import queueHandler\n'), ((52530, 52539), 'wx.Bell', 'wx.Bell', ([], {}), '()\n', (52537, 52539), False, 'import wx\n'), ((57899, 57936), 'braille.handler.handleGainFocus', 'braille.handler.handleGainFocus', (['self'], {}), '(self)\n', (57930, 57936), False, 'import braille\n'), ((59070, 59099), 'review.handleCaretMove', 'review.handleCaretMove', (['caret'], {}), '(caret)\n', (59092, 59099), False, 'import review\n'), ((62245, 62287), 'scriptHandler.queueScript', 'scriptHandler.queueScript', (['script', 'gesture'], {}), '(script, gesture)\n', (62270, 62287), False, 'import scriptHandler\n'), ((62606, 62626), 'api.getFocusObject', 'api.getFocusObject', ([], {}), '()\n', (62624, 62626), False, 'import api\n'), ((64353, 64373), 'api.getFocusObject', 'api.getFocusObject', ([], {}), '()\n', (64371, 64373), False, 'import api\n'), ((9080, 9103), 'controlTypes.Role', 'controlTypes.Role', (['role'], {}), '(role)\n', (9097, 9103), False, 'import controlTypes\n'), ((20712, 20736), 'gui.mainFrame.prePopup', 'gui.mainFrame.prePopup', ([], {}), '()\n', (20734, 20736), False, 'import gui\n'), ((20813, 20838), 'gui.mainFrame.postPopup', 'gui.mainFrame.postPopup', ([], {}), '()\n', (20836, 20838), False, 'import gui\n'), ((23483, 23530), 'speech.speakObject', 'speech.speakObject', (['obj', 'OutputReason.ONLYCACHE'], {}), '(obj, OutputReason.ONLYCACHE)\n', (23501, 23530), False, 'import speech\n'), ((50446, 50505), 'wx.CommandEvent', 'wx.CommandEvent', (['wx.wxEVT_COMMAND_BUTTON_CLICKED', 'wx.ID_ANY'], {}), '(wx.wxEVT_COMMAND_BUTTON_CLICKED, wx.ID_ANY)\n', (50461, 50505), False, 'import wx\n'), ((54172, 54197), 'core.callLater', 'core.callLater', (['(100)', 'move'], {}), '(100, move)\n', (54186, 54197), False, 'import core\n'), ((56211, 56231), 'api.getFocusObject', 'api.getFocusObject', ([], {}), '()\n', (56229, 56231), 
False, 'import api\n'), ((58720, 58737), 'scriptHandler.isScriptWaiting', 'isScriptWaiting', ([], {}), '()\n', (58735, 58737), False, 'from scriptHandler import script, isScriptWaiting, willSayAllResume\n'), ((59190, 59210), 'api.getFocusObject', 'api.getFocusObject', ([], {}), '()\n', (59208, 59210), False, 'import api\n'), ((60182, 60258), 'queueHandler.queueFunction', 'queueHandler.queueFunction', (['queueHandler.eventQueue', 'reportPassThrough', 'self'], {}), '(queueHandler.eventQueue, reportPassThrough, self)\n', (60208, 60258), False, 'import queueHandler\n'), ((65330, 65350), 'api.getFocusObject', 'api.getFocusObject', ([], {}), '()\n', (65348, 65350), False, 'import api\n'), ((67348, 67371), 'api.getFocusAncestors', 'api.getFocusAncestors', ([], {}), '()\n', (67369, 67371), False, 'import api\n'), ((75868, 75927), 'speech.speakTextInfo', 'speech.speakTextInfo', (['scrollInfo'], {'reason': 'OutputReason.CARET'}), '(scrollInfo, reason=OutputReason.CARET)\n', (75888, 75927), False, 'import speech\n'), ((81579, 81604), 'scriptHandler.willSayAllResume', 'willSayAllResume', (['gesture'], {}), '(gesture)\n', (81595, 81604), False, 'from scriptHandler import script, isScriptWaiting, willSayAllResume\n'), ((81652, 81710), 'speech.speakTextInfo', 'speech.speakTextInfo', (['container'], {'reason': 'OutputReason.FOCUS'}), '(container, reason=OutputReason.FOCUS)\n', (81672, 81710), False, 'import speech\n'), ((82903, 82928), 'scriptHandler.willSayAllResume', 'willSayAllResume', (['gesture'], {}), '(gesture)\n', (82919, 82928), False, 'from scriptHandler import script, isScriptWaiting, willSayAllResume\n'), ((82976, 83034), 'speech.speakTextInfo', 'speech.speakTextInfo', (['container'], {'reason': 'OutputReason.FOCUS'}), '(container, reason=OutputReason.FOCUS)\n', (82996, 83034), False, 'import speech\n'), ((1824, 1871), 'os.path.join', 'os.path.join', (['globalVars.appDir', '"""waves"""', 'sound'], {}), "(globalVars.appDir, 'waves', sound)\n", (1836, 1871), False, 'import os\n'), ((9349, 9421), 'controlTypes.processAndLabelStates', 'controlTypes.processAndLabelStates', (['role', 'realStates', 'OutputReason.FOCUS'], {}), '(role, realStates, OutputReason.FOCUS)\n', (9383, 9421), False, 'import controlTypes\n'), ((18179, 18203), 'ui.message', 'ui.message', (['errorMessage'], {}), '(errorMessage)\n', (18189, 18203), False, 'import ui\n'), ((18408, 18433), 'scriptHandler.willSayAllResume', 'willSayAllResume', (['gesture'], {}), '(gesture)\n', (18424, 18433), False, 'from scriptHandler import script, isScriptWaiting, willSayAllResume\n'), ((21409, 21453), 'logHandler.log.debugWarning', 'log.debugWarning', (['"""doAction not implemented"""'], {}), "('doAction not implemented')\n", (21425, 21453), False, 'from logHandler import log\n'), ((21654, 21693), 'mathPres.interactWithMathMl', 'mathPres.interactWithMathMl', (['obj.mathMl'], {}), '(obj.mathMl)\n', (21681, 21693), False, 'import mathPres\n'), ((21987, 22037), 'speech.speakObject', 'speech.speakObject', (['obj'], {'reason': 'OutputReason.FOCUS'}), '(obj, reason=OutputReason.FOCUS)\n', (22005, 22037), False, 'import speech\n'), ((23137, 23157), 'api.getFocusObject', 'api.getFocusObject', ([], {}), '()\n', (23155, 23157), False, 'import api\n'), ((50627, 50636), 'wx.Bell', 'wx.Bell', ([], {}), '()\n', (50634, 50636), False, 'import wx\n'), ((53416, 53437), 'speech.cancelSpeech', 'speech.cancelSpeech', ([], {}), '()\n', (53435, 53437), False, 'import speech\n'), ((56801, 56905), 'speech.speakObjectProperties', 'speech.speakObjectProperties', 
(['self.rootNVDAObject'], {'name': '(True)', 'states': '(True)', 'reason': 'OutputReason.FOCUS'}), '(self.rootNVDAObject, name=True, states=True,\n reason=OutputReason.FOCUS)\n', (56829, 56905), False, 'import speech\n'), ((56907, 56957), 'speech.sayAll.SayAllHandler.readText', 'sayAll.SayAllHandler.readText', (['sayAll.CURSOR.CARET'], {}), '(sayAll.CURSOR.CARET)\n', (56936, 56957), False, 'from speech import sayAll\n'), ((59407, 59452), 'logHandler.log.debugWarning', 'log.debugWarning', (['"""Invalid NVDAObjectAtStart"""'], {}), "('Invalid NVDAObjectAtStart')\n", (59423, 59452), False, 'from logHandler import log\n'), ((59619, 59630), 'time.time', 'time.time', ([], {}), '()\n', (59628, 59630), False, 'import time\n'), ((65618, 65674), 'speech.speakTextInfo', 'speech.speakTextInfo', (['newInfo'], {'reason': 'OutputReason.FOCUS'}), '(newInfo, reason=OutputReason.FOCUS)\n', (65638, 65674), False, 'import speech\n'), ((67372, 67401), 'api.getFocusDifferenceLevel', 'api.getFocusDifferenceLevel', ([], {}), '()\n', (67399, 67401), False, 'import api\n'), ((71131, 71152), 'speech.cancelSpeech', 'speech.cancelSpeech', ([], {}), '()\n', (71150, 71152), False, 'import speech\n'), ((71305, 71363), 'speech.speakTextInfo', 'speech.speakTextInfo', (['focusInfo'], {'reason': 'OutputReason.FOCUS'}), '(focusInfo, reason=OutputReason.FOCUS)\n', (71325, 71363), False, 'import speech\n'), ((71485, 71545), 'speech.speakObject', 'speech.speakObject', (['obj', 'controlTypes.OutputReason.ONLYCACHE'], {}), '(obj, controlTypes.OutputReason.ONLYCACHE)\n', (71503, 71545), False, 'import speech\n'), ((71692, 71727), 'vision.handler.handleGainFocus', 'vision.handler.handleGainFocus', (['obj'], {}), '(obj)\n', (71722, 71727), False, 'import vision\n'), ((72142, 72204), 'speech.speakTextInfo', 'speech.speakTextInfo', (['focusInfo'], {'reason': 'OutputReason.ONLYCACHE'}), '(focusInfo, reason=OutputReason.ONLYCACHE)\n', (72162, 72204), False, 'import speech\n'), ((72688, 72735), 'speech.speakObject', 'speech.speakObject', (['obj', 'OutputReason.ONLYCACHE'], {}), '(obj, OutputReason.ONLYCACHE)\n', (72706, 72735), False, 'import speech\n'), ((78158, 78185), 'weakref.WeakKeyDictionary', 'weakref.WeakKeyDictionary', ([], {}), '()\n', (78183, 78185), False, 'import weakref\n'), ((57152, 57218), 'speech.speakObject', 'speech.speakObject', (['self.rootNVDAObject'], {'reason': 'OutputReason.FOCUS'}), '(self.rootNVDAObject, reason=OutputReason.FOCUS)\n', (57170, 57218), False, 'import speech\n'), ((57365, 57388), 'api.getFocusAncestors', 'api.getFocusAncestors', ([], {}), '()\n', (57386, 57388), False, 'import api\n'), ((57399, 57428), 'api.getFocusDifferenceLevel', 'api.getFocusDifferenceLevel', ([], {}), '()\n', (57426, 57428), False, 'import api\n'), ((57692, 57730), 'speech.speakPreselectedText', 'speech.speakPreselectedText', (['info.text'], {}), '(info.text)\n', (57719, 57730), False, 'import speech\n'), ((57787, 57866), 'speech.speakTextInfo', 'speech.speakTextInfo', (['info'], {'reason': 'OutputReason.CARET', 'unit': 'textInfos.UNIT_LINE'}), '(info, reason=OutputReason.CARET, unit=textInfos.UNIT_LINE)\n', (57807, 57866), False, 'import speech\n'), ((59736, 59777), 'eventHandler.isPendingEvents', 'eventHandler.isPendingEvents', (['"""gainFocus"""'], {}), "('gainFocus')\n", (59764, 59777), False, 'import eventHandler\n'), ((59840, 59860), 'api.getFocusObject', 'api.getFocusObject', ([], {}), '()\n', (59858, 59860), False, 'import api\n'), ((67464, 67528), 'logHandler.log.exception', 'log.exception', (["('Error executing 
focusEntered event: %s' % parent)"], {}), "('Error executing focusEntered event: %s' % parent)\n", (67477, 67528), False, 'from logHandler import log\n'), ((69483, 69618), 'logHandler.log.debugWarning', 'log.debugWarning', (['"""Error fetching states when checking for defunct object. Treating object as defunct anyway."""'], {'exc_info': '(True)'}), "(\n 'Error fetching states when checking for defunct object. Treating object as defunct anyway.'\n , exc_info=True)\n", (69499, 69618), False, 'from logHandler import log\n'), ((70824, 70856), 'speech.sayAll.SayAllHandler.isRunning', 'sayAll.SayAllHandler.isRunning', ([], {}), '()\n', (70854, 70856), False, 'from speech import sayAll\n'), ((73017, 73052), 'vision.handler.handleGainFocus', 'vision.handler.handleGainFocus', (['obj'], {}), '(obj)\n', (73047, 73052), False, 'import vision\n'), ((74796, 74807), 'time.time', 'time.time', ([], {}), '()\n', (74805, 74807), False, 'import time\n'), ((51344, 51385), 'wx.CallLater', 'wx.CallLater', (['(1000)', 'self._clearSearchText'], {}), '(1000, self._clearSearchText)\n', (51356, 51385), False, 'import wx\n'), ((57562, 57628), 'speech.speakObject', 'speech.speakObject', (['self.rootNVDAObject'], {'reason': 'OutputReason.FOCUS'}), '(self.rootNVDAObject, reason=OutputReason.FOCUS)\n', (57580, 57628), False, 'import speech\n'), ((73813, 73889), 'speech.speakObject', 'speech.speakObject', (['self._objPendingFocusBeforeActivate', 'OutputReason.CHANGE'], {}), '(self._objPendingFocusBeforeActivate, OutputReason.CHANGE)\n', (73831, 73889), False, 'import speech\n')] |
PatrickSJacobs/qiskit-metal | qiskit_metal/qlibrary/qubits/Transmon_Interdigitated.py | 9628369c4b880d1e13199e559f898c5e0b96eecb | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
#from math import *
from math import sin, cos
from qiskit_metal import draw, Dict
from qiskit_metal.qlibrary.core.base import QComponent
import numpy as np
#from ... import config
#if not config.is_building_docs():
# from qiskit_metal import is_true
class TransmonInterdigitated(QComponent):
"""
The base "TransmonInterdigitated" inherits the "QComponent" class.
This creates a transmon pocket with two large pads connected by a Josephson
junction. Both pads have four interdigitated "fingers" which increase the
capacitance of the structure. There are three coupling capacitor pads with qpins
defined; these can be connected to other structures in a design using CPWs.
Default Options:
        * pad_width: '1000um' -- width of the large rectangular pads on either side
of the junction
        * pad_height: '300um' -- height of the large rectangular pads on either side
of the junction
* finger_width: '50um' -- width of the "finger" on either side of the junction
* finger_height: '100um' -- height of the "finger" on the side of the junction
        * finger_space: '50um' -- height of the Josephson Junction (equivalently, the
          space between two fingers)
* pad_pos_x: '0um' -- the internal coordinate defining the center of the bottom
rectangular pad
* pad_pos_y: '0um' -- the internal coordinate defining the center of the bottom
rectangular pad
* comb_width: '50um' -- the width of the four interdigitated combs connected to
either pad
* comb_space_vert: '50um' -- the space between the edge of a comb and the edge of
the opposite rectangular pad
* comb_space_hor: '50um' -- the space between adjacent interdigitated comb structures
* jj_width: '20um' -- the width of the Josephson Junction located between the two
fingers of the device
* cc_space: '50um' -- the space between the lower rectangular pad and the coupling
capacitor below it
* cc_width: '100um' -- the width of the coupling capacitor located below the bottom
rectangular pad
* cc_height: '100um' -- the height of the coupling capacitor located below the bottom
rectangular pad
* cc_topleft_space: '50um' -- the space between the upper rectangular pad and the top
left coupling capacitor
* cc_topleft_width: '100um' -- the width of the top left coupling capacitor pad
* cc_topleft_height: '100um' -- the height of the top left coupling capacitor pad
* cc_topright_space: '50um' -- the space between the upper rectangular pad and the
top right coupling capacitor
* cc_topright_width: '100um' -- the width of the top right coupling capacitor pad
* cc_topright_height: '100um' -- the height of the top right coupling capacitor pad
* position_x: '0um' -- the x-coordinate defining the center of the transmon pocket
on the chip
* position_y: '0um' -- the y-coordinate defining the center of the transmon pocket
on the chip
* rotation: '0.0' -- the angle at which the entire structure is rotated
* rotation_top_pad: '180' -- internal coordinate defining the angle of rotation
between top and bottom pads
        * layer: '1' -- all objects are drawn assuming they are part of the same layer on
          the chip
"""
# Default drawing options
default_options = Dict(pad_width='1000um',
pad_height='300um',
finger_width='50um',
finger_height='100um',
finger_space='50um',
pad_pos_x='0um',
pad_pos_y='0um',
comb_width='50um',
comb_space_vert='50um',
comb_space_hor='50um',
jj_width='20um',
cc_space='50um',
cc_width='100um',
cc_height='100um',
cc_topleft_space='50um',
cc_topleft_width='100um',
cc_topleft_height='100um',
cc_topright_space='50um',
cc_topright_width='100um',
cc_topright_height='100um',
position_x='0um',
position_y='0um',
rotation='0.0',
rotation_top_pad='180',
layer='1')
"""Default drawing options"""
# Name prefix of component, if user doesn't provide name
component_metadata = Dict(short_name='component')
"""Component metadata"""
def make(self):
"""Convert self.options into QGeometry."""
p = self.parse_options() # Parse the string options into numbers
# draw the lower pad as a rectangle
pad_lower = draw.rectangle(p.pad_width, p.pad_height, p.pad_pos_x,
p.pad_pos_y)
# draw the lower finger as a rectangle
finger_lower = draw.rectangle(
p.finger_width, p.finger_height, p.pad_pos_x, p.pad_pos_y +
0.49999 * (p.pad_height) + 0.49999 * (p.finger_height))
# draw the Josephson Junction
rect_jj = draw.rectangle(
p.jj_width, p.finger_space, p.pad_pos_x,
0.5 * (p.pad_height) + p.finger_height + 0.5 * (p.finger_space))
# draw the first comb to the right of the lower finger as a rectangle
comb1_lower = draw.rectangle(
p.comb_width,
(2 * p.finger_height + p.finger_space - p.comb_space_vert),
(0.5 * p.finger_width + p.comb_space_hor + 0.5 * p.comb_width),
(0.5 * p.pad_height + 0.5 *
(p.pad_pos_y + 0.5 * (p.pad_height) + 0.5 * (p.finger_height))))
# draw the second comb to the right of the lower finger by translating the first comb
comb2_lower = draw.translate(comb1_lower,
2.0 * (p.comb_space_hor + p.comb_width),
0.0)
# draw the first comb to the left of the lower finger
comb3_lower = draw.rectangle(
p.comb_width,
(2 * p.finger_height + p.finger_space - p.comb_space_vert),
(-0.5 * p.finger_width - 2.0 * p.comb_space_hor -
1.5 * p.comb_width),
(0.5 * p.pad_height + 0.5 *
(p.pad_pos_y + 0.5 * (p.pad_height) + 0.5 * (p.finger_height))))
# draw the second comb to the left of the lower finger
comb4_lower = draw.translate(comb3_lower,
-2.0 * (p.comb_space_hor + p.comb_width),
0.0)
coupling_capacitor = draw.rectangle(
p.cc_width, p.cc_height, p.pad_pos_x,
p.pad_pos_y - 0.5 * (p.pad_height) - p.cc_space - 0.5 * p.cc_height)
cc_topleft = draw.rectangle(
p.cc_topleft_width, p.cc_topleft_height,
p.pad_pos_x - 0.5 * p.pad_width + 0.5 * p.cc_topleft_width,
p.pad_pos_y + 1.5 * p.pad_height + 2.0 * p.finger_height +
p.finger_space + p.cc_topleft_space + 0.5 * p.cc_topleft_height)
cc_topright = draw.translate(
cc_topleft,
p.pad_width - 0.5 * p.cc_topleft_width - 0.5 * p.cc_topright_width,
0.0)
# merge the bottom elements
bottom = draw.union(pad_lower, finger_lower, comb1_lower, comb2_lower,
comb3_lower, comb4_lower)
# create the top portion of the comb by translating and rotating
# the bottom portion of the comb
top = draw.translate(bottom, 0.0, p.pad_height + p.finger_space)
top = draw.rotate(top, p.rotation_top_pad)
# merge everything into a single design
design = draw.union(bottom, top, rect_jj, coupling_capacitor,
cc_topleft, cc_topright)
# draw the transmon pocket bounding box
pocket = draw.rectangle(1.5 * p.pad_width, 5.0 * p.pad_height)
# the origin is originally set to the middle of the lower pad.
# Let's move it to the center of the JJ.
design = draw.translate(
design, 0.0,
-0.5 * p.pad_height - p.finger_height - 0.5 * p.finger_space)
# now translate the final structure according to the user input
design = draw.rotate(design, p.rotation, origin=(0, 0))
design = draw.translate(design, p.position_x, p.position_y)
pocket = draw.rotate(pocket, p.rotation, origin=(0, 0))
pocket = draw.translate(pocket, p.position_x, p.position_y)
geom = {'design': design}
geom_pocket = {'pocket': pocket}
self.add_qgeometry('poly', geom, layer=p.layer, subtract=False)
self.add_qgeometry('poly', geom_pocket, layer=p.layer, subtract=True)
###################################################################
# Add Qpin connections for coupling capacitors
# define a function that both rotates and translates the
# qpin coordinates
def qpin_rotate_translate(x):
""" This function rotates the coordinates of the three qpins
according to the user inputs for "position_x", "position_y"
and "rotation".
"""
y = list(x)
z = [0.0, 0.0]
z[0] = y[0] * cos(p.rotation * 3.14159 / 180) - y[1] * sin(
p.rotation * 3.14159 / 180)
z[1] = y[0] * sin(p.rotation * 3.14159 / 180) + y[1] * cos(
p.rotation * 3.14159 / 180)
z[0] = z[0] + p.position_x
z[1] = z[1] + p.position_y
x = (z[0], z[1])
return x
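        # Worked example (illustrative, not in the original file): with
        # rotation='90', position_x='1mm' and position_y='0mm', the local point
        # (0.5, 0.0) rotates to roughly (0.0, 0.5) and is then translated to
        # roughly (1.0, 0.5).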
# Add Qpin connections for the bottom coupling capacitor
qp1a = (0.0,
-0.5 * p.pad_height - p.finger_height - 0.5 * p.finger_space)
qp1b = (0.0, -0.5 * p.pad_height - p.cc_space - p.cc_height -
0.5 * p.pad_height - p.finger_height - 0.5 * p.finger_space)
# rotate and translate the qpin coordinates
qp1a = qpin_rotate_translate(qp1a)
qp1b = qpin_rotate_translate(qp1b)
self.add_pin('pin1',
points=np.array([qp1a, qp1b]),
width=0.01,
input_as_norm=True)
# Add Qpin connections for top left coupling capacitor
qp2a = (p.pad_pos_x - 0.5 * p.pad_width + 0.5 * p.cc_topleft_width,
p.pad_pos_y + 1.5 * p.pad_height + 2.0 * p.finger_height +
p.finger_space + p.cc_topleft_space +
0.5 * p.cc_topleft_height - 0.5 * p.pad_height -
p.finger_height - 0.5 * p.finger_space)
qp2b = (p.pad_pos_x - 0.5 * p.pad_width, p.pad_pos_y +
1.5 * p.pad_height + 2.0 * p.finger_height + p.finger_space +
p.cc_topleft_space + 0.5 * p.cc_topleft_height -
0.5 * p.pad_height - p.finger_height - 0.5 * p.finger_space)
qp2a = qpin_rotate_translate(qp2a)
qp2b = qpin_rotate_translate(qp2b)
self.add_pin('pin2',
points=np.array([qp2a, qp2b]),
width=0.01,
input_as_norm=True)
# Add Qpin connections for top right coupling capacitor
qp3a = (p.pad_pos_x + 0.5 * p.pad_width - 0.5 * p.cc_topleft_width,
p.pad_pos_y + 1.5 * p.pad_height + 2.0 * p.finger_height +
p.finger_space + p.cc_topleft_space +
0.5 * p.cc_topleft_height - 0.5 * p.pad_height -
p.finger_height - 0.5 * p.finger_space)
qp3b = (p.pad_pos_x + 0.5 * p.pad_width, p.pad_pos_y +
1.5 * p.pad_height + 2.0 * p.finger_height + p.finger_space +
p.cc_topleft_space + 0.5 * p.cc_topleft_height -
0.5 * p.pad_height - p.finger_height - 0.5 * p.finger_space)
qp3a = qpin_rotate_translate(qp3a)
qp3b = qpin_rotate_translate(qp3b)
self.add_pin('pin3',
points=np.array([qp3a, qp3b]),
width=0.01,
input_as_norm=True)
| [((4010, 4566), 'qiskit_metal.Dict', 'Dict', ([], {'pad_width': '"""1000um"""', 'pad_height': '"""300um"""', 'finger_width': '"""50um"""', 'finger_height': '"""100um"""', 'finger_space': '"""50um"""', 'pad_pos_x': '"""0um"""', 'pad_pos_y': '"""0um"""', 'comb_width': '"""50um"""', 'comb_space_vert': '"""50um"""', 'comb_space_hor': '"""50um"""', 'jj_width': '"""20um"""', 'cc_space': '"""50um"""', 'cc_width': '"""100um"""', 'cc_height': '"""100um"""', 'cc_topleft_space': '"""50um"""', 'cc_topleft_width': '"""100um"""', 'cc_topleft_height': '"""100um"""', 'cc_topright_space': '"""50um"""', 'cc_topright_width': '"""100um"""', 'cc_topright_height': '"""100um"""', 'position_x': '"""0um"""', 'position_y': '"""0um"""', 'rotation': '"""0.0"""', 'rotation_top_pad': '"""180"""', 'layer': '"""1"""'}), "(pad_width='1000um', pad_height='300um', finger_width='50um',\n finger_height='100um', finger_space='50um', pad_pos_x='0um', pad_pos_y=\n '0um', comb_width='50um', comb_space_vert='50um', comb_space_hor='50um',\n jj_width='20um', cc_space='50um', cc_width='100um', cc_height='100um',\n cc_topleft_space='50um', cc_topleft_width='100um', cc_topleft_height=\n '100um', cc_topright_space='50um', cc_topright_width='100um',\n cc_topright_height='100um', position_x='0um', position_y='0um',\n rotation='0.0', rotation_top_pad='180', layer='1')\n", (4014, 4566), False, 'from qiskit_metal import draw, Dict\n'), ((5306, 5334), 'qiskit_metal.Dict', 'Dict', ([], {'short_name': '"""component"""'}), "(short_name='component')\n", (5310, 5334), False, 'from qiskit_metal import draw, Dict\n'), ((5576, 5643), 'qiskit_metal.draw.rectangle', 'draw.rectangle', (['p.pad_width', 'p.pad_height', 'p.pad_pos_x', 'p.pad_pos_y'], {}), '(p.pad_width, p.pad_height, p.pad_pos_x, p.pad_pos_y)\n', (5590, 5643), False, 'from qiskit_metal import draw, Dict\n'), ((5750, 5881), 'qiskit_metal.draw.rectangle', 'draw.rectangle', (['p.finger_width', 'p.finger_height', 'p.pad_pos_x', '(p.pad_pos_y + 0.49999 * p.pad_height + 0.49999 * p.finger_height)'], {}), '(p.finger_width, p.finger_height, p.pad_pos_x, p.pad_pos_y + \n 0.49999 * p.pad_height + 0.49999 * p.finger_height)\n', (5764, 5881), False, 'from qiskit_metal import draw, Dict\n'), ((5963, 6083), 'qiskit_metal.draw.rectangle', 'draw.rectangle', (['p.jj_width', 'p.finger_space', 'p.pad_pos_x', '(0.5 * p.pad_height + p.finger_height + 0.5 * p.finger_space)'], {}), '(p.jj_width, p.finger_space, p.pad_pos_x, 0.5 * p.pad_height +\n p.finger_height + 0.5 * p.finger_space)\n', (5977, 6083), False, 'from qiskit_metal import draw, Dict\n'), ((6210, 6460), 'qiskit_metal.draw.rectangle', 'draw.rectangle', (['p.comb_width', '(2 * p.finger_height + p.finger_space - p.comb_space_vert)', '(0.5 * p.finger_width + p.comb_space_hor + 0.5 * p.comb_width)', '(0.5 * p.pad_height + 0.5 * (p.pad_pos_y + 0.5 * p.pad_height + 0.5 * p.\n finger_height))'], {}), '(p.comb_width, 2 * p.finger_height + p.finger_space - p.\n comb_space_vert, 0.5 * p.finger_width + p.comb_space_hor + 0.5 * p.\n comb_width, 0.5 * p.pad_height + 0.5 * (p.pad_pos_y + 0.5 * p.\n pad_height + 0.5 * p.finger_height))\n', (6224, 6460), False, 'from qiskit_metal import draw, Dict\n'), ((6635, 6708), 'qiskit_metal.draw.translate', 'draw.translate', (['comb1_lower', '(2.0 * (p.comb_space_hor + p.comb_width))', '(0.0)'], {}), '(comb1_lower, 2.0 * (p.comb_space_hor + p.comb_width), 0.0)\n', (6649, 6708), False, 'from qiskit_metal import draw, Dict\n'), ((6868, 7124), 'qiskit_metal.draw.rectangle', 'draw.rectangle', (['p.comb_width', '(2 * 
p.finger_height + p.finger_space - p.comb_space_vert)', '(-0.5 * p.finger_width - 2.0 * p.comb_space_hor - 1.5 * p.comb_width)', '(0.5 * p.pad_height + 0.5 * (p.pad_pos_y + 0.5 * p.pad_height + 0.5 * p.\n finger_height))'], {}), '(p.comb_width, 2 * p.finger_height + p.finger_space - p.\n comb_space_vert, -0.5 * p.finger_width - 2.0 * p.comb_space_hor - 1.5 *\n p.comb_width, 0.5 * p.pad_height + 0.5 * (p.pad_pos_y + 0.5 * p.\n pad_height + 0.5 * p.finger_height))\n', (6882, 7124), False, 'from qiskit_metal import draw, Dict\n'), ((7282, 7356), 'qiskit_metal.draw.translate', 'draw.translate', (['comb3_lower', '(-2.0 * (p.comb_space_hor + p.comb_width))', '(0.0)'], {}), '(comb3_lower, -2.0 * (p.comb_space_hor + p.comb_width), 0.0)\n', (7296, 7356), False, 'from qiskit_metal import draw, Dict\n'), ((7461, 7585), 'qiskit_metal.draw.rectangle', 'draw.rectangle', (['p.cc_width', 'p.cc_height', 'p.pad_pos_x', '(p.pad_pos_y - 0.5 * p.pad_height - p.cc_space - 0.5 * p.cc_height)'], {}), '(p.cc_width, p.cc_height, p.pad_pos_x, p.pad_pos_y - 0.5 * p.\n pad_height - p.cc_space - 0.5 * p.cc_height)\n', (7475, 7585), False, 'from qiskit_metal import draw, Dict\n'), ((7630, 7883), 'qiskit_metal.draw.rectangle', 'draw.rectangle', (['p.cc_topleft_width', 'p.cc_topleft_height', '(p.pad_pos_x - 0.5 * p.pad_width + 0.5 * p.cc_topleft_width)', '(p.pad_pos_y + 1.5 * p.pad_height + 2.0 * p.finger_height + p.finger_space +\n p.cc_topleft_space + 0.5 * p.cc_topleft_height)'], {}), '(p.cc_topleft_width, p.cc_topleft_height, p.pad_pos_x - 0.5 *\n p.pad_width + 0.5 * p.cc_topleft_width, p.pad_pos_y + 1.5 * p.\n pad_height + 2.0 * p.finger_height + p.finger_space + p.\n cc_topleft_space + 0.5 * p.cc_topleft_height)\n', (7644, 7883), False, 'from qiskit_metal import draw, Dict\n'), ((7942, 8046), 'qiskit_metal.draw.translate', 'draw.translate', (['cc_topleft', '(p.pad_width - 0.5 * p.cc_topleft_width - 0.5 * p.cc_topright_width)', '(0.0)'], {}), '(cc_topleft, p.pad_width - 0.5 * p.cc_topleft_width - 0.5 * p\n .cc_topright_width, 0.0)\n', (7956, 8046), False, 'from qiskit_metal import draw, Dict\n'), ((8133, 8224), 'qiskit_metal.draw.union', 'draw.union', (['pad_lower', 'finger_lower', 'comb1_lower', 'comb2_lower', 'comb3_lower', 'comb4_lower'], {}), '(pad_lower, finger_lower, comb1_lower, comb2_lower, comb3_lower,\n comb4_lower)\n', (8143, 8224), False, 'from qiskit_metal import draw, Dict\n'), ((8378, 8436), 'qiskit_metal.draw.translate', 'draw.translate', (['bottom', '(0.0)', '(p.pad_height + p.finger_space)'], {}), '(bottom, 0.0, p.pad_height + p.finger_space)\n', (8392, 8436), False, 'from qiskit_metal import draw, Dict\n'), ((8451, 8487), 'qiskit_metal.draw.rotate', 'draw.rotate', (['top', 'p.rotation_top_pad'], {}), '(top, p.rotation_top_pad)\n', (8462, 8487), False, 'from qiskit_metal import draw, Dict\n'), ((8554, 8631), 'qiskit_metal.draw.union', 'draw.union', (['bottom', 'top', 'rect_jj', 'coupling_capacitor', 'cc_topleft', 'cc_topright'], {}), '(bottom, top, rect_jj, coupling_capacitor, cc_topleft, cc_topright)\n', (8564, 8631), False, 'from qiskit_metal import draw, Dict\n'), ((8726, 8779), 'qiskit_metal.draw.rectangle', 'draw.rectangle', (['(1.5 * p.pad_width)', '(5.0 * p.pad_height)'], {}), '(1.5 * p.pad_width, 5.0 * p.pad_height)\n', (8740, 8779), False, 'from qiskit_metal import draw, Dict\n'), ((8918, 9012), 'qiskit_metal.draw.translate', 'draw.translate', (['design', '(0.0)', '(-0.5 * p.pad_height - p.finger_height - 0.5 * p.finger_space)'], {}), '(design, 0.0, -0.5 * p.pad_height - p.finger_height - 
0.5 * p\n .finger_space)\n', (8932, 9012), False, 'from qiskit_metal import draw, Dict\n'), ((9123, 9169), 'qiskit_metal.draw.rotate', 'draw.rotate', (['design', 'p.rotation'], {'origin': '(0, 0)'}), '(design, p.rotation, origin=(0, 0))\n', (9134, 9169), False, 'from qiskit_metal import draw, Dict\n'), ((9187, 9237), 'qiskit_metal.draw.translate', 'draw.translate', (['design', 'p.position_x', 'p.position_y'], {}), '(design, p.position_x, p.position_y)\n', (9201, 9237), False, 'from qiskit_metal import draw, Dict\n'), ((9256, 9302), 'qiskit_metal.draw.rotate', 'draw.rotate', (['pocket', 'p.rotation'], {'origin': '(0, 0)'}), '(pocket, p.rotation, origin=(0, 0))\n', (9267, 9302), False, 'from qiskit_metal import draw, Dict\n'), ((9320, 9370), 'qiskit_metal.draw.translate', 'draw.translate', (['pocket', 'p.position_x', 'p.position_y'], {}), '(pocket, p.position_x, p.position_y)\n', (9334, 9370), False, 'from qiskit_metal import draw, Dict\n'), ((10971, 10993), 'numpy.array', 'np.array', (['[qp1a, qp1b]'], {}), '([qp1a, qp1b])\n', (10979, 10993), True, 'import numpy as np\n'), ((11888, 11910), 'numpy.array', 'np.array', (['[qp2a, qp2b]'], {}), '([qp2a, qp2b])\n', (11896, 11910), True, 'import numpy as np\n'), ((12806, 12828), 'numpy.array', 'np.array', (['[qp3a, qp3b]'], {}), '([qp3a, qp3b])\n', (12814, 12828), True, 'import numpy as np\n'), ((10127, 10158), 'math.cos', 'cos', (['(p.rotation * 3.14159 / 180)'], {}), '(p.rotation * 3.14159 / 180)\n', (10130, 10158), False, 'from math import sin, cos\n'), ((10168, 10199), 'math.sin', 'sin', (['(p.rotation * 3.14159 / 180)'], {}), '(p.rotation * 3.14159 / 180)\n', (10171, 10199), False, 'from math import sin, cos\n'), ((10243, 10274), 'math.sin', 'sin', (['(p.rotation * 3.14159 / 180)'], {}), '(p.rotation * 3.14159 / 180)\n', (10246, 10274), False, 'from math import sin, cos\n'), ((10284, 10315), 'math.cos', 'cos', (['(p.rotation * 3.14159 / 180)'], {}), '(p.rotation * 3.14159 / 180)\n', (10287, 10315), False, 'from math import sin, cos\n')] |
guotong1988/Rule-SQL | sqlova/model/nl2sql/wikisql_models.py | e826c0d659c8b35a72b64aa2b50d4d943fdd70f1 | # Copyright 2019-present NAVER Corp.
# Apache License v2.0
# Wonseok Hwang
import os, json
from copy import deepcopy
from matplotlib.pylab import *
import torch
import torch.nn as nn
import torch.nn.functional as F
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
from sqlova.utils.utils import topk_multi_dim
from sqlova.utils.utils_wikisql import *
class Seq2SQL_v1(nn.Module):
def __init__(self, input_size, hidden_size, num_layer, dropout,
number_cond_ops, number_agg_ops, old=False):
super(Seq2SQL_v1, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.num_layer = num_layer
self.dropout = dropout
self.max_where_number = 4
self.number_cond_ops = number_cond_ops
self.number_agg_ops = number_agg_ops
self.select_column_predict = SelectColumnPredict(input_size, hidden_size, num_layer, dropout)
self.select_agg_predict = SelectAggPredict(input_size, hidden_size, num_layer, dropout, number_agg_ops, old=old)
self.where_number_predict = WhereNumberPredict(input_size, hidden_size, num_layer, dropout)
self.wcp = WhereColumnPredict(input_size, hidden_size, num_layer, dropout)
self.wop = WhereOpPredict(input_size, hidden_size, num_layer, dropout, number_cond_ops)
self.wvp = WhereValuePredict_startend(input_size, hidden_size, num_layer, dropout, number_cond_ops, old=old) # start-end-search-discriminative model
# emb_question, [16,26,1536]
# len_question, [16]
# emb_header, [102,12,1536]
# len_header_token, [102]
# number_header, [16]
def forward(self, emb_question, len_question, emb_header, len_header_token, number_header,
g_sc=None, g_sa=None, g_wn=None, g_wc=None, g_wo=None, g_wvi=None,
show_p_sc=False, show_p_sa=False,
show_p_wn=False, show_p_wc=False, show_p_wo=False, show_p_wv=False):
# sc
s_sc,s_sc_softmax = self.select_column_predict(emb_question, len_question, emb_header, len_header_token, number_header, show_p_sc=show_p_sc)
if g_sc:
pr_sc = g_sc
else:
pr_sc = pred_sc(s_sc)
# sa
s_sa,s_sa_softmax = self.select_agg_predict(emb_question, len_question, emb_header, len_header_token, number_header, pr_sc, show_p_sa=show_p_sa)
if g_sa:
# it's not necessary though.
pr_sa = g_sa
else:
pr_sa = pred_sa(s_sa)
# wn
s_wn,s_wn_softmax = self.where_number_predict(emb_question, len_question, emb_header, len_header_token, number_header, show_p_wn=show_p_wn)
if g_wn:
pr_wn = g_wn
else:
pr_wn = pred_wn(s_wn)
# wc
s_wc,s_wc_softmax = self.wcp(emb_question, len_question, emb_header, len_header_token, number_header, show_p_wc=show_p_wc, penalty=True)
if g_wc:
pr_wc = g_wc
else:
pr_wc = pred_wherecolumn(pr_wn, s_wc)
# wo
s_wo,s_wo_softmax = self.wop(emb_question, len_question, emb_header, len_header_token, number_header, wn=pr_wn, wc=pr_wc, show_p_wo=show_p_wo)
if g_wo:
pr_wo = g_wo
else:
pr_wo = pred_wo(pr_wn, s_wo)
# wv
s_wv,s_wv_softmax = self.wvp(emb_question, len_question, emb_header, len_header_token, number_header, wn=pr_wn, wc=pr_wc, wo=pr_wo, show_p_wv=show_p_wv)
return s_sc, s_sa, s_wn, s_wc, s_wo, s_wv, s_sc_softmax, s_sa_softmax, s_wn_softmax, s_wc_softmax, s_wo_softmax, s_wv_softmax
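    # Illustrative decoding sketch (not in the original file): the raw scores
    # returned by forward() are typically turned into a SQL sketch with the same
    # pred_* helpers already used above (they come from the star imports at the
    # top of this file). The tensor names on the left are placeholders:
    #
    #   s_sc, s_sa, s_wn, s_wc, s_wo, s_wv, *_ = model(emb_q, len_q, emb_h, len_h_tok, n_h)
    #   pr_sc = pred_sc(s_sc)                  # SELECT column per example
    #   pr_sa = pred_sa(s_sa)                  # aggregation operator per example
    #   pr_wn = pred_wn(s_wn)                  # number of WHERE conditions
    #   pr_wc = pred_wherecolumn(pr_wn, s_wc)  # WHERE columns
    #   pr_wo = pred_wo(pr_wn, s_wo)           # WHERE operators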
def beam_forward(self, emb_question, len_question, emb_header, len_header_token, l_header, engine, tb,
nlu_t, nlu_wp_t, wp_to_wh_index, nlu,
beam_size=4,
show_p_sc=False, show_p_sa=False,
show_p_wn=False, show_p_wc=False, show_p_wo=False, show_p_wv=False):
"""
Execution-guided beam decoding.
"""
# sc
s_sc,_ = self.select_column_predict(emb_question, len_question, emb_header, len_header_token, l_header, show_p_sc=show_p_sc)
prob_sc = F.softmax(s_sc, dim=-1)
bS, mcL = s_sc.shape
# minimum_header_length = min(l_header)
# beam_size = minimum_header_length if beam_size > minimum_header_length else beam_size
# sa
# Construct all possible sc_sa_score
prob_sc_sa = torch.zeros([bS, beam_size, self.number_agg_ops]).to(device)
prob_sca = torch.zeros_like(prob_sc_sa).to(device)
# get the top-k indices. pr_sc_beam = [B, beam_size]
pr_sc_beam = pred_sc_beam(s_sc, beam_size)
# calculate and predict s_sa.
for i_beam in range(beam_size):
pr_sc = list( array(pr_sc_beam)[:,i_beam] )
s_sa,_ = self.select_agg_predict(emb_question, len_question, emb_header, len_header_token, l_header, pr_sc, show_p_sa=show_p_sa)
prob_sa = F.softmax(s_sa, dim=-1)
prob_sc_sa[:, i_beam, :] = prob_sa
prob_sc_selected = prob_sc[range(bS), pr_sc] # [B]
prob_sca[:,i_beam,:] = (prob_sa.t() * prob_sc_selected).t()
# [mcL, B] * [B] -> [mcL, B] (element-wise multiplication)
# [mcL, B] -> [B, mcL]
# Calculate the dimension of tensor
# tot_dim = len(prob_sca.shape)
# First flatten to 1-d
idxs = topk_multi_dim(torch.tensor(prob_sca), n_topk=beam_size, batch_exist=True)
# Now as sc_idx is already sorted, re-map them properly.
idxs = remap_sc_idx(idxs, pr_sc_beam) # [sc_beam_idx, sa_idx] -> [sc_idx, sa_idx]
idxs_arr = array(idxs)
# [B, beam_size, remainig dim]
# idxs[b][0] gives first probable [sc_idx, sa_idx] pairs.
# idxs[b][1] gives of second.
# Calculate prob_sca, a joint probability
beam_idx_sca = [0] * bS
beam_meet_the_final = [False] * bS
while True:
pr_sc = idxs_arr[range(bS),beam_idx_sca,0]
pr_sa = idxs_arr[range(bS),beam_idx_sca,1]
# map index properly
check = check_sc_sa_pairs(tb, pr_sc, pr_sa)
if sum(check) == bS:
break
else:
for b, check1 in enumerate(check):
if not check1: # wrong pair
beam_idx_sca[b] += 1
if beam_idx_sca[b] >= beam_size:
beam_meet_the_final[b] = True
beam_idx_sca[b] -= 1
else:
beam_meet_the_final[b] = True
if sum(beam_meet_the_final) == bS:
break
# Now pr_sc, pr_sa are properly predicted.
pr_sc_best = list(pr_sc)
pr_sa_best = list(pr_sa)
# Now, Where-clause beam search.
s_wn,_ = self.where_number_predict(emb_question, len_question, emb_header, len_header_token, l_header, show_p_wn=show_p_wn)
prob_wn = F.softmax(s_wn, dim=-1).detach().to('cpu').numpy()
# Found "executable" most likely 4(=max_num_of_conditions) where-clauses.
# wc
s_wc,_ = self.wcp(emb_question, len_question, emb_header, len_header_token, l_header, show_p_wc=show_p_wc, penalty=True)
prob_wc = F.sigmoid(s_wc).detach().to('cpu').numpy()
# pr_wc_sorted_by_prob = pred_wc_sorted_by_prob(s_wc)
# get max_wn # of most probable columns & their prob.
pr_wn_max = [self.max_where_number] * bS
        pr_wc_max = pred_wherecolumn(pr_wn_max, s_wc) # if some column does not have an executable where-clause, omit that column
prob_wc_max = zeros([bS, self.max_where_number])
for b, pr_wc_max1 in enumerate(pr_wc_max):
prob_wc_max[b,:] = prob_wc[b,pr_wc_max1]
# get most probable max_wn where-clouses
# wo
s_wo_max,_ = self.wop(emb_question, len_question, emb_header, len_header_token, l_header, wn=pr_wn_max, wc=pr_wc_max, show_p_wo=show_p_wo)
prob_wo_max = F.softmax(s_wo_max, dim=-1).detach().to('cpu').numpy()
# [B, max_wn, n_cond_op]
pr_wvi_beam_op_list = []
prob_wvi_beam_op_list = []
for i_op in range(self.number_cond_ops - 1):
pr_wo_temp = [[i_op] * self.max_where_number] * bS
# wv
s_wv,_ = self.wvp(emb_question, len_question, emb_header, len_header_token, l_header, wn=pr_wn_max, wc=pr_wc_max, wo=pr_wo_temp, show_p_wv=show_p_wv)
prob_wv = F.softmax(s_wv, dim=-2).detach().to('cpu').numpy()
# prob_wv
pr_wvi_beam, prob_wvi_beam = pred_wvi_se_beam(self.max_where_number, s_wv, beam_size)
pr_wvi_beam_op_list.append(pr_wvi_beam)
prob_wvi_beam_op_list.append(prob_wvi_beam)
# pr_wvi_beam = [B, max_wn, k_logit**2 [st, ed] paris]
# pred_wv_beam
# Calculate joint probability of where-clause
# prob_w = [batch, wc, wo, wv] = [B, max_wn, n_cond_op, n_pairs]
n_wv_beam_pairs = prob_wvi_beam.shape[2]
prob_w = zeros([bS, self.max_where_number, self.number_cond_ops - 1, n_wv_beam_pairs])
for b in range(bS):
for i_wn in range(self.max_where_number):
for i_op in range(self.number_cond_ops - 1): # do not use final one
for i_wv_beam in range(n_wv_beam_pairs):
# i_wc = pr_wc_max[b][i_wn] # already done
p_wc = prob_wc_max[b, i_wn]
p_wo = prob_wo_max[b, i_wn, i_op]
p_wv = prob_wvi_beam_op_list[i_op][b, i_wn, i_wv_beam]
prob_w[b, i_wn, i_op, i_wv_beam] = p_wc * p_wo * p_wv
# Perform execution guided decoding
conds_max = []
prob_conds_max = []
# while len(conds_max) < self.max_wn:
idxs = topk_multi_dim(torch.tensor(prob_w), n_topk=beam_size, batch_exist=True)
# idxs = [B, i_wc_beam, i_op, i_wv_pairs]
# Construct conds1
for b, idxs1 in enumerate(idxs):
conds_max1 = []
prob_conds_max1 = []
for i_wn, idxs11 in enumerate(idxs1):
i_wc = pr_wc_max[b][idxs11[0]]
i_op = idxs11[1]
wvi = pr_wvi_beam_op_list[i_op][b][idxs11[0]][idxs11[2]]
# get wv_str
temp_pr_wv_str, _ = convert_pred_wvi_to_string([[wvi]], [nlu_t[b]], [nlu_wp_t[b]], [wp_to_wh_index[b]], [nlu[b]])
merged_wv11 = merge_wv_t1_eng(temp_pr_wv_str[0][0], nlu[b])
conds11 = [i_wc, i_op, merged_wv11]
prob_conds11 = prob_w[b, idxs11[0], idxs11[1], idxs11[2] ]
# test execution
# print(nlu[b])
# print(tb[b]['id'], tb[b]['types'], pr_sc[b], pr_sa[b], [conds11])
pr_ans = engine.execute(tb[b]['id'], pr_sc[b], pr_sa[b], [conds11])
if bool(pr_ans):
# pr_ans is not empty!
conds_max1.append(conds11)
prob_conds_max1.append(prob_conds11)
conds_max.append(conds_max1)
prob_conds_max.append(prob_conds_max1)
        # May need to do a more exhaustive search?
        # i.e. up to getting all executable cases.
# Calculate total probability to decide the number of where-clauses
pr_sql_i = []
prob_wn_w = []
pr_wn_based_on_prob = []
for b, prob_wn1 in enumerate(prob_wn):
max_executable_wn1 = len( conds_max[b] )
prob_wn_w1 = []
prob_wn_w1.append(prob_wn1[0]) # wn=0 case.
for i_wn in range(max_executable_wn1):
prob_wn_w11 = prob_wn1[i_wn+1] * prob_conds_max[b][i_wn]
prob_wn_w1.append(prob_wn_w11)
pr_wn_based_on_prob.append(argmax(prob_wn_w1))
prob_wn_w.append(prob_wn_w1)
pr_sql_i1 = {'agg': pr_sa_best[b], 'sel': pr_sc_best[b], 'conds': conds_max[b][:pr_wn_based_on_prob[b]]}
pr_sql_i.append(pr_sql_i1)
# s_wv = [B, max_wn, max_nlu_tokens, 2]
return prob_sca, prob_w, prob_wn_w, pr_sc_best, pr_sa_best, pr_wn_based_on_prob, pr_sql_i
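    # Note added for clarity (not in the original file): pr_sql_i returned above
    # is a list with one dict per example of the form
    #   {'agg': <agg op index>, 'sel': <select column index>,
    #    'conds': [[where column, where op, where value string], ...]}
    # i.e. the executable WikiSQL query chosen by execution-guided decoding.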
class SelectColumnPredict(nn.Module):
def __init__(self, input_size=300, hidden_size=100, num_layer=2, dropout=0.3):
super(SelectColumnPredict, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.num_layer = num_layer
self.dropout = dropout
self.enc_h = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2),
num_layers=num_layer, batch_first=True,
dropout=dropout, bidirectional=True)
self.enc_n = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2),
num_layers=num_layer, batch_first=True,
dropout=dropout, bidirectional=True)
self.W_att = nn.Linear(hidden_size, hidden_size)
self.W_c = nn.Linear(hidden_size, hidden_size)
self.W_header = nn.Linear(hidden_size, hidden_size)
self.sc_out = nn.Sequential(nn.Tanh(), nn.Linear(2 * hidden_size, 1))
self.softmax_dim1 = nn.Softmax(dim=1)
self.softmax_dim2 = nn.Softmax(dim=2)
self.softmax_dim_1 = nn.Softmax(dim=-1)
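    # Summary added for clarity (not in the original file): this module scores
    # every header column for the SELECT clause. Question tokens and headers are
    # encoded with bidirectional LSTMs, header-to-question attention builds one
    # context vector per column, and sc_out maps [context; header encoding] to a
    # scalar score per column.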
# emb_question, [16,26,1536]
# len_question, [16]
# emb_header, [102,12,1536]
# len_header_token, [102]
# number_header, [16]
def forward(self, emb_question, len_question, emb_header, len_header_token, number_header, show_p_sc=False):
# Encode
encoded_question = encode(self.enc_n, emb_question, len_question,
return_hidden=False,
hc0=None,
last_only=False) # [b, n, dim]
encoded_header = encode_header(self.enc_h, emb_header, len_header_token, number_header) # [b, header, dim]
bS = len(number_header)
mL_n = max(len_question)
# [bS, max_len_header, 100] * [bS, 100, mL_n] -> [bS, max_len_header, mL_n]
att_h = torch.bmm(encoded_header, self.W_att(encoded_question).transpose(1, 2))
# Penalty on blank parts
for b, l_n1 in enumerate(len_question):
if l_n1 < mL_n:
att_h[b, :, l_n1:] = -10000000000
p_n = self.softmax_dim2(att_h)
if show_p_sc:
# p = [b, header, n]
if p_n.shape[0] != 1:
raise Exception("Batch size should be 1.")
fig=figure(2001, figsize=(12,3.5))
# subplot(6,2,7)
subplot2grid((7,2), (3, 0), rowspan=2)
cla()
_color='rgbkcm'
_symbol='.......'
for i_h in range(number_header[0]):
color_idx = i_h % len(_color)
plot(p_n[0][i_h][:].data.numpy() - i_h, '--'+_symbol[color_idx]+_color[color_idx], ms=7)
title('sc: p_n for each h')
grid(True)
fig.tight_layout()
fig.canvas.draw()
show()
# p_n [ bS, max_len_header, mL_n] -> [ bS, max_len_header, mL_n, 1]
# wenc_n [ bS, mL_n, 100] -> [ bS, 1, mL_n, 100]
# -> [bS, max_len_header, mL_n, 100] -> [bS, max_len_header, 100]
c_n = torch.mul(p_n.unsqueeze(3), encoded_question.unsqueeze(1)).sum(dim=2)
vec = torch.cat([self.W_c(c_n), self.W_header(encoded_header)], dim=2)
score_select_column = self.sc_out(vec).squeeze(2) # [bS, max_len_header, 1] -> [bS, max_len_header]
score_select_column_softmax = self.softmax_dim_1(score_select_column)
# Penalty
max_len_header = max(number_header)
for b, l_header1 in enumerate(number_header):
if l_header1 < max_len_header:
score_select_column[b, l_header1:] = -10000000000
for b, l_header1 in enumerate(number_header):
if l_header1 < max_len_header:
score_select_column_softmax[b, l_header1:] = 0
return score_select_column,score_select_column_softmax
class SelectAggPredict(nn.Module):
def __init__(self, input_size=300, hidden_size=100, num_layer=2, dropout=0.3, n_agg_ops=-1, old=False):
super(SelectAggPredict, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.num_layer = num_layer
self.dropout = dropout
self.enc_h = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2),
num_layers=num_layer, batch_first=True,
dropout=dropout, bidirectional=True)
self.enc_n = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2),
num_layers=num_layer, batch_first=True,
dropout=dropout, bidirectional=True)
self.W_att = nn.Linear(hidden_size, hidden_size)
self.sa_out = nn.Sequential(nn.Linear(hidden_size, hidden_size),
nn.Tanh(),
nn.Linear(hidden_size, n_agg_ops)) # Fixed number of aggregation operator.
self.softmax_dim1 = nn.Softmax(dim=1)
self.softmax_dim2 = nn.Softmax(dim=2)
self.softmax_dim_1 = nn.Softmax(dim=-1)
if old:
            # for backward compatibility
self.W_c = nn.Linear(hidden_size, hidden_size)
self.W_header = nn.Linear(hidden_size, hidden_size)
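    # Summary added for clarity (not in the original file): given the predicted
    # SELECT column, this module attends from that column's encoding over the
    # question tokens and maps the resulting context vector to scores over the
    # aggregation operators.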
def forward(self, emb_question, len_question, emb_header, len_header_token, l_header, pr_sc, show_p_sa=False):
# Encode
encoded_question = encode(self.enc_n, emb_question, len_question,
return_hidden=False,
hc0=None,
last_only=False) # [b, n, dim]
encoded_header = encode_header(self.enc_h, emb_header, len_header_token, l_header) # [b, header, dim]
bS = len(l_header)
mL_n = max(len_question)
wenc_header_ob = encoded_header[list(range(bS)), pr_sc] # list, so one sample for each batch.
# [bS, question_len, 100] * [bS, 100, 1] -> [bS, question_len]
att = torch.bmm(self.W_att(encoded_question), wenc_header_ob.unsqueeze(2)).squeeze(2)
# Penalty on blank parts
for b, l_n1 in enumerate(len_question):
if l_n1 < mL_n:
att[b, l_n1:] = -10000000000
# [bS, question_len]
p = self.softmax_dim1(att)
if show_p_sa:
if p.shape[0] != 1:
raise Exception("Batch size should be 1.")
fig=figure(2001);
subplot(7,2,3)
cla()
plot(p[0].data.numpy(), '--rs', ms=7)
title('sa: nlu_weight')
grid(True)
fig.tight_layout()
fig.canvas.draw()
show()
# [bS, question_len, 100] * ( [bS, question_len, 1] -> [bS, question_len, 100])
# -> [bS, question_len, 100] -> [bS, 100]
c_n = torch.mul(encoded_question, p.unsqueeze(2).expand_as(encoded_question)).sum(dim=1)
s_sa = self.sa_out(c_n)
s_sa_softmax = self.softmax_dim_1(s_sa)
return s_sa,s_sa_softmax
class WhereNumberPredict(nn.Module):
def __init__(self, input_size=300, hidden_size=100, num_layer=2, dropout=0.3, ):
super(WhereNumberPredict, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.num_layer = num_layer
self.dropout = dropout
self.mL_w = 4 # max where condition number
self.enc_h = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2),
num_layers=num_layer, batch_first=True,
dropout=dropout, bidirectional=True)
self.enc_n = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2),
num_layers=num_layer, batch_first=True,
dropout=dropout, bidirectional=True)
self.W_att_h = nn.Linear(hidden_size, 1)
self.W_hidden = nn.Linear(hidden_size, num_layer * hidden_size)
self.W_cell = nn.Linear(hidden_size, num_layer * hidden_size)
self.W_att_n = nn.Linear(hidden_size, 1)
self.wn_out = nn.Sequential(nn.Linear(hidden_size, hidden_size),
nn.Tanh(),
nn.Linear(hidden_size, self.mL_w + 1)) # max number (4 + 1)
self.softmax_dim1 = nn.Softmax(dim=1)
self.softmax_dim2 = nn.Softmax(dim=2)
self.softmax_dim_1 = nn.Softmax(dim=-1)
def forward(self, emb_question, len_question, emb_header, len_header_token, l_header, show_p_wn=False):
# Encode
encoded_header = encode_header(self.enc_h, emb_header, len_header_token, l_header) # [b, max_len_header, dim]
bS = len(l_header)
max_len_question = max(len_question)
max_len_header = max(l_header)
# mL_h = max(len_header_token)
# (self-attention?) column Embedding?
# [B, max_len_header, 100] -> [B, max_len_header, 1] -> [B, max_len_header]
att_h = self.W_att_h(encoded_header).squeeze(2)
# Penalty
for b, l_header1 in enumerate(l_header):
if l_header1 < max_len_header:
att_h[b, l_header1:] = -10000000000
p_h = self.softmax_dim1(att_h)
if show_p_wn:
if p_h.shape[0] != 1:
raise Exception("Batch size should be 1.")
fig=figure(2001);
subplot(7,2,5)
cla()
plot(p_h[0].data.numpy(), '--rs', ms=7)
title('wn: header_weight')
grid(True)
fig.canvas.draw()
show()
            # input('Type Enter to continue.')
# [B, max_len_header, 100] * [ B, max_len_header, 1] -> [B, max_len_header, 100] -> [B, 100]
c_header = torch.mul(encoded_header, p_h.unsqueeze(2)).sum(1)
# [B, 100] --> [B, 2*100] Enlarge because there are two layers.
hidden = self.W_hidden(c_header) # [B, 4, 200/2]
hidden = hidden.view(bS, self.num_layer * 2, int(
            self.hidden_size / 2)) # [4, B, 100/2] # num_layer * 2 (bi-direction) # lstm input convention.
hidden = hidden.transpose(0, 1).contiguous()
cell = self.W_cell(c_header) # [B, 4, 100/2]
cell = cell.view(bS, self.num_layer * 2, int(self.hidden_size / 2)) # [4, B, 100/2]
cell = cell.transpose(0, 1).contiguous()
wenc_n = encode(self.enc_n, emb_question, len_question,
return_hidden=False,
hc0=(hidden, cell),
last_only=False) # [b, n, dim]
att_n = self.W_att_n(wenc_n).squeeze(2) # [B, max_len, 100] -> [B, max_len, 1] -> [B, max_len]
# Penalty
for b, l_n1 in enumerate(len_question):
if l_n1 < max_len_question:
att_n[b, l_n1:] = -10000000000
p_n = self.softmax_dim1(att_n)
if show_p_wn:
if p_n.shape[0] != 1:
raise Exception("Batch size should be 1.")
fig=figure(2001);
subplot(7,2,6)
cla()
plot(p_n[0].data.numpy(), '--rs', ms=7)
title('wn: nlu_weight')
grid(True)
fig.canvas.draw()
show()
# input('Type Enter to continue.')
# [B, mL_n, 100] *([B, mL_n] -> [B, mL_n, 1] -> [B, mL_n, 100] ) -> [B, 100]
c_n = torch.mul(wenc_n, p_n.unsqueeze(2).expand_as(wenc_n)).sum(dim=1)
s_wn = self.wn_out(c_n)
s_wn_softmax = self.softmax_dim_1(s_wn)
return s_wn,s_wn_softmax
# where column predict
class WhereColumnPredict(nn.Module):
def __init__(self, input_size=300, hidden_size=100, num_layer=2, dropout=0.3):
super(WhereColumnPredict, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.num_layer = num_layer
self.dropout = dropout
self.enc_h = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2),
num_layers=num_layer, batch_first=True,
dropout=dropout, bidirectional=True)
self.enc_n = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2),
num_layers=num_layer, batch_first=True,
dropout=dropout, bidirectional=True)
self.W_att = nn.Linear(hidden_size, hidden_size)
self.W_c = nn.Linear(hidden_size, hidden_size)
self.W_header = nn.Linear(hidden_size, hidden_size)
self.W_out = nn.Sequential(
nn.Tanh(), nn.Linear(2 * hidden_size, 1)
)
self.softmax_dim1 = nn.Softmax(dim=1)
self.softmax_dim2 = nn.Softmax(dim=2)
self.softmax_dim_1 = nn.Softmax(dim=-1)
def forward(self, emb_question, len_question, emb_header, len_header_token,
l_header, show_p_wc, penalty=True):
# Encode
encoded_question = encode(self.enc_n, emb_question, len_question,
return_hidden=False,
hc0=None,
last_only=False) # [b, n, dim]
encoded_header = encode_header(self.enc_h, emb_header, len_header_token, l_header) # [b, header, dim]
# attention
# wenc = [bS, mL, hidden_size]
# att = [bS, max_len_header, mL_n]
# att[b, i_h, j_n] = p(j_n| i_h)
att = torch.bmm(encoded_header, self.W_att(encoded_question).transpose(1, 2))
# penalty to blank part.
mL_n = max(len_question)
for b_n, l_n1 in enumerate(len_question):
if l_n1 < mL_n:
att[b_n, :, l_n1:] = -10000000000
# make p(j_n | i_h)
p = self.softmax_dim2(att)
if show_p_wc:
# p = [b, header, n]
if p.shape[0] != 1:
raise Exception("Batch size should be 1.")
fig=figure(2001);
# subplot(6,2,7)
subplot2grid((7,2), (3, 1), rowspan=2)
cla()
_color='rgbkcm'
_symbol='.......'
for i_h in range(l_header[0]):
color_idx = i_h % len(_color)
plot(p[0][i_h][:].data.numpy() - i_h, '--'+_symbol[color_idx]+_color[color_idx], ms=7)
title('wc: p_n for each h')
grid(True)
fig.tight_layout()
fig.canvas.draw()
show()
# max nlu context vectors
# [bS, max_len_header, mL_n]*[bS, max_len_header, mL_n]
encoded_question = encoded_question.unsqueeze(1) # [ b, n, dim] -> [b, 1, n, dim]
p = p.unsqueeze(3) # [b, header, n] -> [b, header, n, 1]
c_n = torch.mul(encoded_question, p).sum(2) # -> [b, header, dim], c_n for each header.
y = torch.cat([self.W_c(c_n), self.W_header(encoded_header)], dim=2) # [b, header, 2*dim]
score = self.W_out(y).squeeze(2) # [b, header]
score[torch.isnan(score)] = 0
score_softmax = self.softmax_dim_1(score)
if penalty:
for b, l_header1 in enumerate(l_header):
score[b, l_header1:] = -1e+10
for b, l_header1 in enumerate(l_header):
score_softmax[b, l_header1:] = 0
return score,score_softmax
# where op predict
class WhereOpPredict(nn.Module):
def __init__(self, input_size=300, hidden_size=100, num_layer=2, dropout=0.3, n_cond_ops=3):
super(WhereOpPredict, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.num_layer = num_layer
self.dropout = dropout
self.mL_w = 4 # max where condition number
self.enc_h = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2),
num_layers=num_layer, batch_first=True,
dropout=dropout, bidirectional=True)
self.enc_n = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2),
num_layers=num_layer, batch_first=True,
dropout=dropout, bidirectional=True)
self.W_att = nn.Linear(hidden_size, hidden_size)
self.W_c = nn.Linear(hidden_size, hidden_size)
self.W_header = nn.Linear(hidden_size, hidden_size)
self.wo_out = nn.Sequential(
nn.Linear(2*hidden_size, hidden_size),
nn.Tanh(),
nn.Linear(hidden_size, n_cond_ops)
)
self.softmax_dim1 = nn.Softmax(dim=1)
self.softmax_dim2 = nn.Softmax(dim=2)
self.softmax_dim_1 = nn.Softmax(dim=-1)
def forward(self, emb_question, len_question, emb_header, len_header_token,
l_header, wn, wc, wenc_n=None, show_p_wo=False):
# Encode
        if wenc_n is None:
wenc_n = encode(self.enc_n, emb_question, len_question,
return_hidden=False,
hc0=None,
last_only=False) # [b, n, dim]
encoded_header = encode_header(self.enc_h, emb_header, len_header_token, l_header) # [b, header, dim]
bS = len(l_header)
# wn
wenc_header_ob = [] # observed header
for b in range(bS):
# [[...], [...]]
# Pad list to maximum number of selections
real = [encoded_header[b, col] for col in wc[b]]
pad = (self.mL_w - wn[b]) * [encoded_header[b, 0]] # this padding could be wrong. Test with zero padding later.
wenc_header_ob1 = torch.stack(real + pad) # It is not used in the loss function.
wenc_header_ob.append(wenc_header_ob1)
# list to [B, 4, dim] tensor.
wenc_header_ob = torch.stack(wenc_header_ob) # list to tensor.
wenc_header_ob = wenc_header_ob.to(device)
# [B, 1, mL_n, dim] * [B, 4, dim, 1]
# -> [B, 4, mL_n, 1] -> [B, 4, mL_n]
        # multiplication between NLq tokens and the selected column
att = torch.matmul(self.W_att(wenc_n).unsqueeze(1),
wenc_header_ob.unsqueeze(3)
).squeeze(3)
# Penalty for blank part.
mL_n = max(len_question)
for b, l_n1 in enumerate(len_question):
if l_n1 < mL_n:
att[b, :, l_n1:] = -10000000000
p = self.softmax_dim2(att) # p( n| selected_col )
if show_p_wo:
# p = [b, header, n]
if p.shape[0] != 1:
raise Exception("Batch size should be 1.")
fig=figure(2001)
# subplot(6,2,7)
subplot2grid((7,2), (5, 0), rowspan=2)
cla()
_color='rgbkcm'
_symbol='.......'
for i_wn in range(self.mL_w):
color_idx = i_wn % len(_color)
plot(p[0][i_wn][:].data.numpy() - i_wn, '--'+_symbol[color_idx]+_color[color_idx], ms=7)
title('wo: p_n for selected h')
grid(True)
fig.tight_layout()
fig.canvas.draw()
show()
# [B, 1, mL_n, dim] * [B, 4, mL_n, 1]
# --> [B, 4, mL_n, dim]
# --> [B, 4, dim]
c_n = torch.mul(wenc_n.unsqueeze(1), p.unsqueeze(3)).sum(dim=2)
# [bS, 5-1, dim] -> [bS, 5-1, 3]
vec = torch.cat([self.W_c(c_n), self.W_header(wenc_header_ob)], dim=2)
s_wo = self.wo_out(vec)
s_wo_softmax = self.softmax_dim_1(s_wo)
return s_wo,s_wo_softmax
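# where value predict (start/end)
# For each predicted where column, score every question token as the start and the
# end of the condition-value span (see the class docstring below).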
class WhereValuePredict_startend(nn.Module):
"""
Discriminative model
Get start and end.
    Here, classifier for [ [pitcher], [team1], [team2], [year], ...]
Input: Encoded nlu & selected column.
Algorithm: Encoded nlu & selected column. -> classifier -> mask scores -> ...
"""
def __init__(self, input_size=300, hidden_size=100, num_layer=2, dropout=0.3, n_cond_ops=4, old=False):
super(WhereValuePredict_startend, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.num_layer = num_layer
self.dropout = dropout
self.n_cond_ops = n_cond_ops
self.mL_w = 4 # max where condition number
self.enc_h = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2),
num_layers=num_layer, batch_first=True,
dropout=dropout, bidirectional=True)
self.enc_n = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2),
num_layers=num_layer, batch_first=True,
dropout=dropout, bidirectional=True)
self.W_att = nn.Linear(hidden_size, hidden_size)
self.W_c = nn.Linear(hidden_size, hidden_size)
self.W_header = nn.Linear(hidden_size, hidden_size)
self.W_op = nn.Linear(n_cond_ops, hidden_size)
# self.W_n = nn.Linear(hidden_size, hidden_size)
if old:
self.wv_out = nn.Sequential(
nn.Linear(4 * hidden_size, 2)
)
else:
self.wv_out = nn.Sequential(
nn.Linear(4 * hidden_size, hidden_size),
nn.Tanh(),
nn.Linear(hidden_size, 2)
)
# self.wv_out = nn.Sequential(
# nn.Linear(3 * hidden_size, hidden_size),
# nn.Tanh(),
# nn.Linear(hidden_size, self.gdkL)
# )
self.softmax_dim1 = nn.Softmax(dim=1)
self.softmax_dim2 = nn.Softmax(dim=2)
self.softmax_dim_1 = nn.Softmax(dim=-1)
def forward(self, emb_question, len_question, emb_header, len_header_token, l_header, wn, wc, wo, wenc_n=None, show_p_wv=False):
# Encode
        if wenc_n is None:
wenc_n, hout, cout = encode(self.enc_n, emb_question, len_question,
return_hidden=True,
hc0=None,
last_only=False) # [b, n, dim]
encoded_header = encode_header(self.enc_h, emb_header, len_header_token, l_header) # [b, header, dim]
bS = len(l_header)
wenc_header_ob = [] # observed header
for b in range(bS):
# [[...], [...]]
# Pad list to maximum number of selections
real = [encoded_header[b, col] for col in wc[b]]
pad = (self.mL_w - wn[b]) * [encoded_header[b, 0]] # this padding could be wrong. Test with zero padding later.
wenc_header_ob1 = torch.stack(real + pad) # It is not used in the loss function.
wenc_header_ob.append(wenc_header_ob1)
# list to [B, 4, dim] tensor.
wenc_header_ob = torch.stack(wenc_header_ob) # list to tensor.
wenc_header_ob = wenc_header_ob.to(device)
# Column attention
# [B, 1, mL_n, dim] * [B, 4, dim, 1]
# -> [B, 4, mL_n, 1] -> [B, 4, mL_n]
        # multiplication between NLq tokens and the selected column
att = torch.matmul(self.W_att(wenc_n).unsqueeze(1),
wenc_header_ob.unsqueeze(3)
).squeeze(3)
# Penalty for blank part.
mL_n = max(len_question)
for b, l_n1 in enumerate(len_question):
if l_n1 < mL_n:
att[b, :, l_n1:] = -10000000000
p = self.softmax_dim2(att) # p( n| selected_col )
if show_p_wv:
# p = [b, header, n]
if p.shape[0] != 1:
raise Exception("Batch size should be 1.")
fig=figure(2001)
# subplot(6,2,7)
subplot2grid((7,2), (5, 1), rowspan=2)
cla()
_color='rgbkcm'
_symbol='.......'
for i_wn in range(self.mL_w):
color_idx = i_wn % len(_color)
plot(p[0][i_wn][:].data.numpy() - i_wn, '--'+_symbol[color_idx]+_color[color_idx], ms=7)
title('wv: p_n for selected h')
grid(True)
fig.tight_layout()
fig.canvas.draw()
show()
# [B, 1, mL_n, dim] * [B, 4, mL_n, 1]
# --> [B, 4, mL_n, dim]
# --> [B, 4, dim]
c_n = torch.mul(wenc_n.unsqueeze(1), p.unsqueeze(3)).sum(dim=2)
# Select observed headers only.
# Also generate one_hot vector encoding info of the operator
# [B, 4, dim]
wenc_op = []
for b in range(bS):
# [[...], [...]]
# Pad list to maximum number of selections
wenc_op1 = torch.zeros(self.mL_w, self.n_cond_ops)
wo1 = wo[b]
idx_scatter = []
l_wo1 = len(wo1)
for i_wo11 in range(self.mL_w):
if i_wo11 < l_wo1:
wo11 = wo1[i_wo11]
idx_scatter.append([int(wo11)])
else:
idx_scatter.append([0]) # not used anyway
wenc_op1 = wenc_op1.scatter(1, torch.tensor(idx_scatter), 1)
wenc_op.append(wenc_op1)
# list to [B, 4, dim] tensor.
wenc_op = torch.stack(wenc_op) # list to tensor.
wenc_op = wenc_op.to(device)
# Now after concat, calculate logits for each token
# [bS, 5-1, 3*hidden_size] = [bS, 4, 300]
vec = torch.cat([self.W_c(c_n), self.W_header(wenc_header_ob), self.W_op(wenc_op)], dim=2)
# Make extended vector based on encoded nl token containing column and operator information.
# wenc_n = [bS, mL, 100]
# vec2 = [bS, 4, mL, 400]
vec1e = vec.unsqueeze(2).expand(-1,-1, mL_n, -1) # [bS, 4, 1, 300] -> [bS, 4, mL, 300]
wenc_ne = wenc_n.unsqueeze(1).expand(-1, 4, -1, -1) # [bS, 1, mL, 100] -> [bS, 4, mL, 100]
vec2 = torch.cat( [vec1e, wenc_ne], dim=3)
# now make logits
s_wv = self.wv_out(vec2) # [bS, 4, mL, 400] -> [bS, 4, mL, 2]
s_wv_softmax = self.softmax_dim_1(s_wv)
# penalty for spurious tokens
for b, l_n1 in enumerate(len_question):
if l_n1 < mL_n:
s_wv[b, :, l_n1:, :] = -10000000000
for b, l_n1 in enumerate(len_question):
if l_n1 < mL_n:
s_wv_softmax[b, :, l_n1:, :] = 0
return s_wv,s_wv_softmax
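# Rough usage sketch (illustrative only; the s_* / g_* names below are placeholders
# for the scores produced by the modules above and the ground-truth labels):
#   loss = Loss_sw_se(s_sc, s_sa, s_wn, s_wc, s_wo, s_wv,
#                     g_sc, g_sa, g_wn, g_wc, g_wo, g_wvi)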
def Loss_selectwhere_startend_v2(score_select_column, s_sa, s_wn, s_wc, s_wo,
s_wv, ground_truth_select_column, g_sa, g_wn, g_wc, g_wo, g_wvi):
"""
:param s_wv: score [ B, n_conds, T, score]
:param g_wn: [ B ]
:param g_wvi: [B, conds, pnt], e.g. [[[0, 6, 7, 8, 15], [0, 1, 2, 3, 4, 15]], [[0, 1, 2, 3, 16], [0, 7, 8, 9, 16]]]
:return:
"""
loss = 0
# loss += Loss_sc(score_select_column, ground_truth_select_column)
# loss += Loss_sa(s_sa, g_sa)
# loss += Loss_wn(s_wn, g_wn)
# loss += Loss_wc(s_wc, g_wc)
# loss += Loss_wo(s_wo, g_wn, g_wo)
# loss += Loss_wv_se(s_wv, g_wn, g_wvi)
return loss
def Loss_sw_se(score_select_column, s_sa, s_wn, s_wc, s_wo,
s_wv, ground_truth_select_column, g_sa, g_wn, g_wc, g_wo, g_wvi):
"""
:param s_wv: score [ B, n_conds, T, score]
:param g_wn: [ B ]
:param g_wvi: [B, conds, pnt], e.g. [[[0, 6, 7, 8, 15], [0, 1, 2, 3, 4, 15]], [[0, 1, 2, 3, 16], [0, 7, 8, 9, 16]]]
:return:
"""
loss = 0
loss += Loss_sc(score_select_column, ground_truth_select_column)
loss += Loss_sa(s_sa, g_sa)
loss += Loss_wn(s_wn, g_wn)
loss += Loss_wc(s_wc, g_wc)
loss += Loss_wo(s_wo, g_wn, g_wo)
loss += Loss_wv_se(s_wv, g_wn, g_wvi)
return loss
def Loss_sc(s_sc, g_sc):
loss = F.cross_entropy(s_sc, torch.tensor(g_sc).to(device))
return loss
def Loss_sa(s_sa, g_sa):
loss = F.cross_entropy(s_sa, torch.tensor(g_sa).to(device))
return loss
def Loss_wn(s_wn, g_wn):
loss = F.cross_entropy(s_wn, torch.tensor(g_wn).to(device))
return loss
def Loss_wc(s_wc, g_wc):
# Construct index matrix
bS, max_h_len = s_wc.shape
im = torch.zeros([bS, max_h_len]).to(device)
for b, g_wc1 in enumerate(g_wc):
for g_wc11 in g_wc1:
im[b, g_wc11] = 1.0
# Construct prob.
p = F.sigmoid(s_wc)
loss = F.binary_cross_entropy(p, im)
return loss
def Loss_wo(s_wo, g_wn, g_wo):
# Construct index matrix
loss = 0
for b, g_wn1 in enumerate(g_wn):
if g_wn1 == 0:
continue
g_wo1 = g_wo[b]
s_wo1 = s_wo[b]
loss += F.cross_entropy(s_wo1[:g_wn1], torch.tensor(g_wo1).to(device))
return loss
def Loss_wv_se(s_wv, g_wn, g_wvi):
"""
    s_wv: [bS, 4, mL, 2]; 4 stands for the maximum # of conditions, 2 stands for the start & end logits.
g_wvi: [ [1, 3, 2], [4,3] ] (when B=2, wn(b=1) = 3, wn(b=2) = 2).
"""
loss = 0
# g_wvi = torch.tensor(g_wvi).to(device)
for b, g_wvi1 in enumerate(g_wvi):
# for i_wn, g_wvi11 in enumerate(g_wvi1):
        g_wn1 = len(g_wvi1) # modified
        # g_wn1 = g_wn[b] # modified
if g_wn1 == 0:
continue
        g_wvi1 = torch.tensor(g_wvi1)[:g_wn1].to(device) # modified
g_st1 = g_wvi1[:,0]
g_ed1 = g_wvi1[:,1]
# loss from the start position
loss += F.cross_entropy(s_wv[b,:g_wn1,:,0], g_st1)
# print("st_login: ", s_wv[b,:g_wn1,:,0], g_st1, loss)
# loss from the end position
loss += F.cross_entropy(s_wv[b,:g_wn1,:,1], g_ed1)
# print("ed_login: ", s_wv[b,:g_wn1,:,1], g_ed1, loss)
return loss
# ========= Decoder-Layer ===========
class FT_s2s_1(nn.Module):
""" Decoder-Layer """
def __init__(self, input_size, hidden_size, num_layer, dropout, max_seq_length, n_cond_ops, n_agg_ops, old=False):
super(FT_s2s_1, self).__init__()
self.input_size = input_size # input_size
self.hidden_size = hidden_size # hidden_size
self.ls = num_layer
self.dropout = dropout
self.n_cond_ops = n_cond_ops
self.n_agg_ops = n_agg_ops
self.n_where_num = 4
self.decoder_s2s = Decoder_s2s(input_size, hidden_size, num_layer, dropout, max_seq_length)
def forward(self, wenc_s2s, l_input, cls_vec, pnt_start_tok, g_pnt_idxs=None):
score = self.decoder_s2s(wenc_s2s, l_input, cls_vec, pnt_start_tok, g_pnt_idxs)
return score
def EG_forward(self, wenc_s2s, l_input, cls_vec,
pnt_start_tok, pnt_end_tok,
i_sql_vocab, i_nlu, i_hds, # for EG
tokens, nlu, nlu_t, hds, tt_to_t_idx, # for EG
tb, engine,
beam_size=4, beam_only=True):
""" EG-guided beam-search """
score = self.decoder_s2s.EG_forward(wenc_s2s, l_input, cls_vec,
pnt_start_tok, pnt_end_tok,
i_sql_vocab, i_nlu, i_hds, # for EG
tokens, nlu, nlu_t, hds, tt_to_t_idx, # for EG
tb, engine,
beam_size, beam_only)
return score
class Decoder_s2s(nn.Module):
def __init__(self, input_size=300, hidden_size=100, num_layer=2, dropout=0.3, max_seq_length=222, n_cond_ops=3):
super(Decoder_s2s, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.num_layer = num_layer
self.dropout = dropout
self.mL = max_seq_length
self.Tmax = 200
self.enc_h = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2),
num_layers=num_layer, batch_first=True,
dropout=dropout, bidirectional=True)
self.enc_n = nn.LSTM(input_size=input_size, hidden_size=int(hidden_size / 2),
num_layers=num_layer, batch_first=True,
dropout=dropout, bidirectional=True)
self.decode_pn = nn.LSTM(input_size=max_seq_length, hidden_size=hidden_size,
num_layers=num_layer, batch_first=True,
dropout=dropout)
self.W_s2s = nn.Linear(input_size, hidden_size)
self.W_pnt = nn.Linear(hidden_size, hidden_size)
self.wv_out = nn.Sequential(nn.Tanh(), nn.Linear(hidden_size, 1))
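    # forward: teacher forcing when g_pnt_idxs is given (training); otherwise greedy
    # step-by-step pointer decoding over the input positions (inference).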
def forward(self, wenc_s2s, l_input, cls_vec, pnt_start_tok, g_pnt_idxs=None,):
# Encode
bS, mL_input, input_size = wenc_s2s.shape
# Now, pointer network.
ipnt = wenc_s2s.new_zeros(bS, 1, mL_input).to(device) # [B, 1, 200]
        ipnt[:, 0, pnt_start_tok] = 1 # 27 is the index of the start token under the current tokenization scheme
# initial (current) pointer
cpnt = ipnt
# reshape wenc_s2s to incorporate T later
wenc_s2s = wenc_s2s.unsqueeze(1)
# h_0 and c_0 from cls_vec
# They are not bidirectional.
h_0 = torch.zeros([self.num_layer, bS, self.hidden_size]).to(device)
c_0 = torch.zeros([self.num_layer, bS, self.hidden_size]).to(device)
for i_layer in range(self.num_layer):
h_st = (2*i_layer)*self.hidden_size
h_ed = h_st + self.hidden_size
c_st = (2*i_layer+1)*self.hidden_size
c_ed = c_st + self.hidden_size
h_0[i_layer] = cls_vec[:, h_st:h_ed] # [ # of layers, batch, dim]
c_0[i_layer] = cls_vec[:, c_st:c_ed] # [ # of layers, batch, dim]
if g_pnt_idxs:
pnt_n = torch.zeros(bS, self.Tmax, mL_input).to(device) # one hot
# assign index
for b, g_pnt_idxs1 in enumerate(g_pnt_idxs):
for t, g_pnt_idx in enumerate(g_pnt_idxs1):
pnt_n[b, t, g_pnt_idx] = 1
# Encode
dec_pn, _ = self.decode_pn(pnt_n, (h_0, c_0))
dec_pn = dec_pn.contiguous()
# [bS, T, input_size]
dec_pn = dec_pn.unsqueeze(2)
# Calculate score
s_wv = self.wv_out(
self.W_s2s(wenc_s2s)
+ self.W_pnt(dec_pn)
).squeeze(3) # [B, T, mL_input, dim] -> [B, T, mL_input, 1] -> [B, T, mL_input]
# s_wv = [B, 4, T, mL_n] = [batch, conds, token idx, score]
# penalty
for b, l_input1 in enumerate(l_input):
if l_input1 < mL_input:
s_wv[b, :, l_input1:] = -10000000000
else:
t = 0
s_wv_list = []
cpnt_h = (h_0, c_0)
while t < self.Tmax:
dec_pn, cpnt_h = self.decode_pn(cpnt, cpnt_h) # lstm
# [B, 1, 100] -> [B, 1, 1, 100]
dec_pn = dec_pn.unsqueeze(2)
# [bS, T, input_size]
# get score
s_wv1 = self.wv_out(
self.W_s2s(wenc_s2s) # [B, 1, mL_input, dim]
+ self.W_pnt(dec_pn) # [B, T=1, 1, dim] Now, T=1
).squeeze(3)
# s_wv = [B, 4, 1, mL_n, 1] = [batch, conds, token idx, score]
# -> [B, 4, mL_n]
# Masking --
for b, l_input1 in enumerate(l_input):
if l_input1 < mL_input:
s_wv1[b, :, l_input1:] = -10000000000
# Collect score--
s_wv_list.append(s_wv1)
# [B, 1, mL_input] -> [B, mL_n] -> [bS*(5-1)]
# (max_val, max_indices)
_val, pnt_n = s_wv1.view(bS, -1).max(dim=1)
# formatting pnt_n as a one-hot input.
cpnt = torch.zeros(bS, mL_input).to(device)
# cpnt = cpnt.scatter_(dim=1, index=pnt_n.unsqueeze(1), src=1).to(device)
cpnt = cpnt.scatter_(1, pnt_n.unsqueeze(1), 1)
                cpnt = cpnt.unsqueeze(1) # --> [B, 1, mL_input]
t += 1
s_wv = torch.stack(s_wv_list, 1) # [B,
s_wv = s_wv.squeeze(2) #
        # # Following lines seem to be unnecessary.
# # Penalty to blank parts
# for b, l_input1 in enumerate(l_input):
# if l_input1 < mL_input:
# s_wv[b, :, l_input1:] = -10000000000
return s_wv
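    # Execution-guided (EG) beam search over pointer sequences. At each step the top
    # candidates are kept; unless beam_only=True, candidates whose partially decoded
    # SQL fails to execute against the table engine are filtered out.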
def EG_forward(self, wenc_s2s, l_input, cls_vec,
pnt_start_tok, pnt_end_tok,
i_sql_vocab, i_nlu, i_hds, # for EG
tokens, nlu, nlu_t, hds, tt_to_t_idx, # for EG
tb, engine,
beam_size, beam_only=True):
# Encode
bS, mL_input, input_size = wenc_s2s.shape
        # reshape wenc_s2s to incorporate T later
wenc_s2s = wenc_s2s.unsqueeze(1)
# h_0 and c_0 from cls_vec
# They are not bidirectional.
h_0 = torch.zeros([self.num_layer, bS, self.hidden_size]).to(device)
c_0 = torch.zeros([self.num_layer, bS, self.hidden_size]).to(device)
for i_layer in range(self.num_layer):
h_st = (2*i_layer)*self.hidden_size
h_ed = h_st + self.hidden_size
c_st = (2*i_layer+1)*self.hidden_size
c_ed = c_st + self.hidden_size
h_0[i_layer] = cls_vec[:, h_st:h_ed] # [ # of layers, batch, dim]
c_0[i_layer] = cls_vec[:, c_st:c_ed] # [ # of layers, batch, dim]
# initial (current) pointer
pnt_list_beam = []
cpnt_beam = []
cpnt_h_beam = []
for i_beam in range(beam_size):
pnt_list_beam1 = []
for b in range(bS):
pnt_list_beam1.append( [ [pnt_start_tok], 0] )
pnt_list_beam.append(pnt_list_beam1)
            # initial cpnt
# Now, initialize pointer network.
ipnt = wenc_s2s.new_zeros(bS, 1, mL_input).to(device) # [B, 1, 200]
            # Distort ipnt by i_beam on purpose to avoid initial duplication in the beam search
            ipnt[:, 0, pnt_start_tok] = 1 # 27 is the index of the start token under the current tokenization scheme
cpnt_beam.append(ipnt)
cpnt_h_beam.append( (h_0, c_0) )
t = 0
while t < self.Tmax:
# s_wv1_beam = []
candidates = [ [] for b in range(bS) ] # [bS]
# Generate beam
for i_beam, cpnt in enumerate(cpnt_beam):
cpnt_h = cpnt_h_beam[i_beam]
pnt_list_beam1 = pnt_list_beam[i_beam]
dec_pn, cpnt_h = self.decode_pn(cpnt, cpnt_h) # lstm
cpnt_h_beam[i_beam] = cpnt_h
# [B, 1, 100] -> [B, 1, 1, 100]
dec_pn = dec_pn.unsqueeze(2)
# [bS, T, input_size]
# get score
s_wv1 = self.wv_out(
self.W_s2s(wenc_s2s) # [B, 1, mL_input, dim]
+ self.W_pnt(dec_pn) # [B, T=1, 1, dim] Now, T=1
).squeeze(3)
# s_wv = [B, 4, 1, mL_n, 1] = [batch, conds, token idx, score]
# -> [B, 4, mL_n]
# Masking --
for b, l_input1 in enumerate(l_input):
if l_input1 < mL_input:
s_wv1[b, :, l_input1:] = -10000000000
# Get the candidates only among the input space.
prob, idxs = F.softmax(s_wv1.view(bS, -1), dim=1).topk(dim=1, k=max(l_input))
log_prob = torch.log(prob) # [bS, beam_size]
for b, log_prob1 in enumerate(log_prob):
pnt_list11, score = pnt_list_beam1[b]
for i_can, log_prob11 in enumerate(log_prob1):
# no update if last token was the end-token
previous_pnt = pnt_list11[-1]
if previous_pnt== pnt_end_tok:
new_seq = pnt_list11
new_score = score
else:
new_seq = pnt_list11 + [idxs[b][i_can].item()]
new_score = score + log_prob11.item()
_candidate = [new_seq, new_score]
candidates[b].append(_candidate)
# Execution-guided beam filtering
for b, candidates1 in enumerate(candidates):
new_pnt_list_batch1 = sorted(candidates1, key=lambda list1: list1[-1], reverse=True)
count = 0
selected_candidates1 = []
for new_pnt_list_batch11 in new_pnt_list_batch1:
if new_pnt_list_batch11 not in selected_candidates1:
if beam_only:
selected_candidates1.append(new_pnt_list_batch11)
pnt_list_beam[count][b] = new_pnt_list_batch11
count +=1
else:
# Need to be modified here.
executable = False
testable = False
pr_i_vg_list, pr_i_vg_sub_list = gen_i_vg_from_pnt_idxs([new_pnt_list_batch11[0]], [i_sql_vocab[b]], [i_nlu[b]],
[i_hds[b]])
pr_sql_q_s2s, pr_sql_i = gen_sql_q_from_i_vg([tokens[b]], [nlu[b]], [nlu_t[b]], [hds[b]], [tt_to_t_idx[b]],
pnt_start_tok, pnt_end_tok,
[new_pnt_list_batch11[0]], pr_i_vg_list, pr_i_vg_sub_list)
# check testability from select-clause
try:
                                # check whether the basic elements are present in pr_sql_i
# If so, it is testable.
idx_agg = pr_sql_i[0]["agg"]
idx_sel = pr_sql_i[0]["sel"]
testable = True
except:
testable = False
pass
# check the presence of conds
if testable:
try:
conds = pr_sql_i[0]["conds"]
except:
conds = []
try:
pr_ans1 = engine.execute(tb[b]['id'], idx_sel, idx_agg, conds)
executable = bool(pr_ans1)
except:
executable = False
#
if testable:
if executable:
add_candidate = True
else:
add_candidate = False
else:
add_candidate = True
if add_candidate:
selected_candidates1.append(new_pnt_list_batch11)
pnt_list_beam[count][b] = new_pnt_list_batch11
count += 1
if count == beam_size:
break
if count < beam_size:
# not executable at all..
# add junk sequence.
for i_junk in range(count, beam_size):
pnt_list_beam[i_junk][b] = [[pnt_end_tok],-9999999]
# generate cpnt
# formatting pnt_n as a one-hot input.
for i_beam in range(beam_size):
cpnt = torch.zeros(bS, mL_input).to(device)
# cpnt = cpnt.scatter_(dim=1, index=pnt_n.unsqueeze(1), src=1).to(device)
idx_batch = [seq_score[0][-1] for seq_score in pnt_list_beam[i_beam]]
pnt_n = torch.tensor(idx_batch).to(device)
cpnt = cpnt.scatter_(1, pnt_n.unsqueeze(1), 1)
cpnt = cpnt.unsqueeze(1) # --> [B, t=1, mL_input]
cpnt_beam[i_beam] = cpnt
t += 1
# Generate best pr_pnt_list, p_tot
pr_pnt_idxs = []
p_list = []
for b in range(bS):
pnt_list_beam_best = pnt_list_beam[0]
pr_pnt_idxs.append(pnt_list_beam_best[b][0])
p_list.append( pnt_list_beam_best[b][1])
return pr_pnt_idxs, p_list, pnt_list_beam
# ============= Shallow-Layer ===============
class FT_Scalar_1(nn.Module):
""" Shallow-Layer """
def __init__(self, input_size, hidden_size, num_layer, dropout, n_cond_ops, n_agg_ops, old=False):
super(FT_Scalar_1, self).__init__()
self.input_size = input_size # input_size
self.hidden_size = hidden_size
self.num_layer = num_layer
self.dropout = dropout
self.n_cond_ops = n_cond_ops
self.n_agg_ops = n_agg_ops
self.n_where_num = 4
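    # The scp/sap/wnp/wcp/wop/wvp helpers below add no parameters: they read the
    # scores directly from slices of the header / CLS / question embeddings
    # (presumably the fine-tuned encoder outputs).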
def scp(self, wemb_h, l_header):
bS, max_header_len, _ = wemb_h.shape
# s_sc
s_sc = torch.zeros(bS, max_header_len).to(device)
s_sc[:, :] = wemb_h[:, :, 0] # s_sc = [B, max_header length, 1]
# s_sc[:,:] = F.tanh(wemb_h[:,:,0]) # s_sc = [B, max_header length, 1]
# s_sc = s_sc.squeeze(2)
# masking
# print(f"s_sc {s_sc}")
for b, l_header1 in enumerate(l_header):
s_sc[b, l_header1:] = -9999999999.0
return s_sc
def sap(self, wemb_h, pr_sc, idx_st, idx_ed):
bS, max_header_len, _ = wemb_h.shape
# select of aggregation operator
s_sa = torch.zeros([bS, self.n_agg_ops]).to(device)
for b, pr_sc1 in enumerate(pr_sc):
s_sa[b,:] = wemb_h[b,pr_sc1,idx_st:idx_ed]
return s_sa
def wnp(self, cls_vec):
bS = cls_vec.shape[0]
# [B,hidden_size] -> [B, n_where_num+1]
s_wn = torch.zeros(bS, (self.n_where_num + 1)).to(device)
s_wn[:, :] = cls_vec[:, 0:(self.n_where_num + 1)]
return s_wn
def wcp(self, wemb_h, l_header, idx_st, idx_ed):
bS, max_header_len, _ = wemb_h.shape
s_wc = torch.zeros(bS, max_header_len, 1).to(device)
s_wc[:, :, :] = wemb_h[:, :, idx_st:idx_ed]
s_wc = s_wc.squeeze(2) # [B, max_header_length]
# masking
for b, l_header1 in enumerate(l_header):
s_wc[b, l_header1:] = -99999999999.0
return s_wc
def wop(self, wemb_h, pr_wc, idx_st, idx_ed):
bS, max_header_len, _ = wemb_h.shape
s_wo = torch.zeros([bS, self.n_where_num, self.n_cond_ops]).to(device)
for b, pr_wc1 in enumerate(pr_wc):
if len(pr_wc1) > 0:
s_wo[b, 0:len(pr_wc1), :] = wemb_h[b, pr_wc1, idx_st:idx_ed]
else:
pass
return s_wo
def wvp(self, emb_question, len_question, pr_wc):
bS, _, _ = emb_question.shape
s_wv = torch.zeros([bS, self.n_where_num, max(len_question), 2]).to(device)
for b, pr_wc1 in enumerate(pr_wc):
if len(pr_wc1) > 0:
# start logit
s_wv[b, 0:len(pr_wc1), :, 0] = emb_question[b, :, pr_wc1].transpose(0, 1)
# end logit
s_wv[b, 0:len(pr_wc1), :, 1] = emb_question[b, :, [pr_wc11 + 100 for pr_wc11 in pr_wc1]].transpose(0, 1)
else:
pass
# masking
# penalty for spurious tokens
for b, l_n1 in enumerate(len_question):
if l_n1 < max(len_question):
s_wv[b, :, l_n1:, :] = -1e+11
return s_wv
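    # forward: compute all six score tensors (sc, sa, wn, wc, wo, wv); any provided
    # ground-truth argument (g_*) overrides the corresponding prediction, as in
    # teacher-forced training.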
def forward(self, emb_question, len_question, wemb_h, l_header, cls_vec,
g_sc=None, g_sa=None, g_wn=None, g_wc=None, g_wo=None, g_wvi=None,
show_p_sc=False, show_p_sa=False,
show_p_wn=False, show_p_wc=False, show_p_wo=False, show_p_wv=False):
# emb_question = [B, max_nlu_token_length, hidden_size] # here, # of target_layer is fixed to 1.
# wemb_h = [B, max_header #, hidden_size]
s_sc = self.scp(wemb_h, l_header)
if g_sc:
pr_sc = g_sc
else:
pr_sc = pred_sc(s_sc)
# s_sa
idx_st = 1
idx_ed = 1 + self.n_agg_ops
s_sa = self.sap(wemb_h, pr_sc, idx_st, idx_ed)
if g_sa:
pr_sa = g_sa
else:
pr_sa = pred_sa(s_sa)
# where_number
s_wn = self.wnp(cls_vec)
if g_wn:
pr_wn = g_wn
else:
pr_wn = pred_wn(s_wn)
# wc
idx_st = idx_ed+1
idx_ed = idx_st+1
s_wc = self.wcp(wemb_h, l_header, idx_st, idx_ed)
if g_wc:
pr_wc = g_wc
else:
pr_wc = pred_wherecolumn(pr_wn, s_wc)
# wo
idx_st = idx_ed+1
idx_ed = idx_st + self.n_cond_ops
s_wo = self.wop(wemb_h, pr_wc, idx_st, idx_ed)
if g_wo:
pr_wo = g_wo
else:
pr_wo = pred_wo(pr_wn, s_wo)
# wv
# s_wv = [bS, 4, mL, 2]
s_wv = self.wvp(emb_question, len_question, pr_wc)
# print(s_wv)
# s_wv = F.tanh(s_wv)
return s_sc, s_sa, s_wn, s_wc, s_wo, s_wv
def forward_EG(self, emb_question, len_question, wemb_h, l_header, cls_vec, engine, tb,
nlu_t, nlu_tt, tt_to_t_idx, nlu,
beam_size=4):
"""
Execution-guided beam decoding.
        Essentially identical to that of the NL2SQL Layer.
"""
# Select-clause
prob_sca, pr_sc_best, pr_sa_best, \
p_sc_best, p_sa_best, p_select \
= self.EG_decoding_select(wemb_h, l_header, tb, beam_size=beam_size)
# Where-clause
prob_w, prob_wn_w, pr_wn_based_on_prob, pr_sql_i, pr_wvi_best, \
p_where, p_wn_best, p_wc_best, p_wo_best, p_wvi_best \
= self.EG_decoding_where(emb_question, len_question, wemb_h, l_header, cls_vec, engine, tb,
nlu_t, nlu_tt, tt_to_t_idx, nlu,
pr_sc_best, pr_sa_best,
beam_size=4)
p_tot = cal_prob_tot(p_select, p_where)
return pr_sc_best, pr_sa_best, pr_wn_based_on_prob, pr_wvi_best, \
pr_sql_i, p_tot, p_select, p_where, p_sc_best, p_sa_best, \
p_wn_best, p_wc_best, p_wo_best, p_wvi_best
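    # Beam search over (select column, aggregation) pairs; pairs rejected by
    # check_sc_sa_pairs (presumably aggregation/column-type compatibility) are skipped.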
def EG_decoding_select(self, wemb_h, l_header, tb,
beam_size=4, show_p_sc=False, show_p_sa=False):
# sc
s_sc = self.scp(wemb_h, l_header)
prob_sc = F.softmax(s_sc, dim=-1)
bS, mcL = s_sc.shape
# minimum_header_length = min(l_header)
# beam_size = minimum_header_length if beam_size > minimum_header_length else beam_size
# sa
# Construct all possible sc_sa_score
prob_sc_sa = torch.zeros([bS, beam_size, self.n_agg_ops]).to(device)
score_sc_sa = torch.zeros([bS, beam_size, self.n_agg_ops]).to(device)
prob_sca = torch.zeros_like(prob_sc_sa).to(device)
# get the top-k indices. pr_sc_beam = [B, beam_size]
pr_sc_beam = pred_sc_beam(s_sc, beam_size)
# calculate and predict s_sa.
idx_st = 1
idx_ed = 1 + self.n_agg_ops
for i_beam in range(beam_size):
pr_sc = list(array(pr_sc_beam)[:, i_beam])
s_sa = self.sap(wemb_h, pr_sc, idx_st, idx_ed)
prob_sa = F.softmax(s_sa, dim=-1)
prob_sc_sa[:, i_beam, :] = prob_sa
score_sc_sa[:, i_beam, :] = s_sa
prob_sc_selected = prob_sc[range(bS), pr_sc] # [B]
prob_sca[:, i_beam, :] = (prob_sa.t() * prob_sc_selected).t()
# [mcL, B] * [B] -> [mcL, B] (element-wise multiplication)
# [mcL, B] -> [B, mcL]
# Calculate the dimension of tensor
# tot_dim = len(prob_sca.shape)
idxs = topk_multi_dim(torch.tensor(prob_sca), n_topk=beam_size, batch_exist=True)
# Now as sc_idx is already sorted, re-map them properly.
idxs = remap_sc_idx(idxs, pr_sc_beam) # [sc_beam_idx, sa_idx] -> [sc_idx, sa_idx]
idxs_arr = array(idxs)
        # [B, beam_size, remaining dim]
        # idxs[b][0] gives the most probable [sc_idx, sa_idx] pair.
        # idxs[b][1] gives the second most probable one.
# Calculate prob_sca, a joint probability
beam_idx_sca = [0] * bS
beam_meet_the_final = [False] * bS
while True:
pr_sc = idxs_arr[range(bS), beam_idx_sca, 0]
pr_sa = idxs_arr[range(bS), beam_idx_sca, 1]
# map index properly
check = check_sc_sa_pairs(tb, pr_sc, pr_sa)
if sum(check) == bS:
break
else:
for b, check1 in enumerate(check):
if not check1: # wrong pair
beam_idx_sca[b] += 1
if beam_idx_sca[b] >= beam_size:
beam_meet_the_final[b] = True
beam_idx_sca[b] -= 1
else:
beam_meet_the_final[b] = True
if sum(beam_meet_the_final) == bS:
break
# Now pr_sc, pr_sa are properly predicted.
pr_sc_best = list(pr_sc)
pr_sa_best = list(pr_sa)
# output for later analysis.
p_sc_best = cal_prob_sc(s_sc, pr_sc_best)
p_sa_best = cal_prob_sa(score_sc_sa[range(bS), beam_idx_sca, :].squeeze(1), pr_sa_best)
p_select = cal_prob_select(p_sc_best, p_sa_best)
# p_select = prob_sca[range(bS),beam_idx_sca,pr_sa_best].detach().to('cpu').numpy()
return prob_sca, pr_sc_best, pr_sa_best, p_sc_best, p_sa_best, p_select
def EG_decoding_where(self, emb_question, len_question, wemb_h, l_header, cls_vec, engine, tb,
nlu_t, nlu_wp_t, tt_to_t_idx, nlu,
pr_sc_best, pr_sa_best,
beam_size=4, show_p_wn=False, show_p_wc=False, show_p_wo=False, show_p_wv=False):
bS, max_header_len, _ = wemb_h.shape
# Now, Where-clause beam search.
idx_st = 1
idx_ed = 1 + self.n_agg_ops
s_wn = self.wnp(cls_vec)
prob_wn = F.softmax(s_wn, dim=-1).detach().to('cpu').numpy()
# Found "executable" most likely 4(=max_num_of_conditions) where-clauses.
# wc
idx_st = idx_ed + 1
idx_ed = idx_st + 1
s_wc = self.wcp(wemb_h, l_header, idx_st, idx_ed)
prob_wc = torch.sigmoid(s_wc).detach().to('cpu').numpy()
# pr_wc_sorted_by_prob = pred_wc_sorted_by_prob(s_wc)
# get max_wn # of most probable columns & their prob.
pr_wn_max = [self.n_where_num] * bS
        pr_wc_max = pred_wherecolumn(pr_wn_max, s_wc) # if some column does not have an executable where-clause, omit that column
prob_wc_max = zeros([bS, self.n_where_num])
for b, pr_wc_max1 in enumerate(pr_wc_max):
prob_wc_max[b, :] = prob_wc[b, pr_wc_max1]
        # get the most probable n_where_num where-clauses
# wo
idx_st = idx_ed + 1
idx_ed = idx_st + self.n_cond_ops
s_wo_max = self.wop(wemb_h, pr_wc_max, idx_st, idx_ed)
prob_wo_max = F.softmax(s_wo_max, dim=-1).detach().to('cpu').numpy()
# [B, n_where_num, n_cond_op]
pr_wvi_beam_op_list = []
prob_wvi_beam_op_list = []
prob_wvi_beam_st_op_list = []
prob_wvi_beam_ed_op_list = []
        # To reuse code, the calculation below is repeated unnecessarily for each operator.
for i_op in range(self.n_cond_ops - 1):
pr_wo_temp = [[i_op] * self.n_where_num] * bS
# wv
s_wv = self.wvp(emb_question, len_question, pr_wc_max)
prob_wv = F.softmax(s_wv, dim=-2).detach().to('cpu').numpy()
# prob_wv
pr_wvi_beam, prob_wvi_beam, prob_wvi_beam_st, prob_wvi_beam_ed = pred_wvi_se_beam(self.n_where_num, s_wv, beam_size)
pr_wvi_beam_op_list.append(pr_wvi_beam)
prob_wvi_beam_op_list.append(prob_wvi_beam)
prob_wvi_beam_st_op_list.append(prob_wvi_beam_st)
prob_wvi_beam_ed_op_list.append(prob_wvi_beam_ed)
        # pr_wvi_beam = [B, n_where_num, k_logit**2 [st, ed] pairs]
# pred_wv_beam
# Calculate joint probability of where-clause
# prob_w = [batch, wc, wo, wv] = [B, n_where_num, n_cond_op, n_pairs]
n_wv_beam_pairs = prob_wvi_beam.shape[2]
prob_w = zeros([bS, self.n_where_num, self.n_cond_ops - 1, n_wv_beam_pairs])
prob_wc_dupl = zeros([bS, self.n_where_num, self.n_cond_ops - 1, n_wv_beam_pairs])
prob_wo_dupl = zeros([bS, self.n_where_num, self.n_cond_ops - 1, n_wv_beam_pairs])
prob_wvi_st_dupl = zeros([bS, self.n_where_num, self.n_cond_ops - 1, n_wv_beam_pairs])
prob_wvi_ed_dupl = zeros([bS, self.n_where_num, self.n_cond_ops - 1, n_wv_beam_pairs])
for b in range(bS):
for i_wn in range(self.n_where_num):
for i_op in range(self.n_cond_ops - 1): # do not use final one
p_wc = prob_wc_max[b, i_wn]
for i_wv_beam in range(n_wv_beam_pairs):
# i_wc = pr_wc_max[b][i_wn] # already done
p_wo = prob_wo_max[b, i_wn, i_op]
p_wv = prob_wvi_beam_op_list[i_op][b, i_wn, i_wv_beam]
prob_w[b, i_wn, i_op, i_wv_beam] = p_wc * p_wo * p_wv
prob_wc_dupl[b, i_wn, i_op, i_wv_beam] = p_wc
prob_wo_dupl[b, i_wn, i_op, i_wv_beam] = p_wo
p_wv_st = prob_wvi_beam_st_op_list[i_op][b, i_wn, i_wv_beam]
p_wv_ed = prob_wvi_beam_ed_op_list[i_op][b, i_wn, i_wv_beam]
prob_wvi_st_dupl[b, i_wn, i_op, i_wv_beam] = p_wv_st
prob_wvi_ed_dupl[b, i_wn, i_op, i_wv_beam] = p_wv_ed
# Perform execution guided decoding
conds_max = []
prob_conds_max = []
# while len(conds_max) < self.n_where_num:
idxs = topk_multi_dim(torch.tensor(prob_w), n_topk=beam_size, batch_exist=True)
# idxs = [B, i_wc_beam, i_op, i_wv_pairs]
        # Construct conds1. Collect only executable ones, in descending order of probability.
pr_wvi_max = []
p_wc_max = []
p_wo_max = []
p_wvi_max = []
for b, idxs1 in enumerate(idxs):
conds_max1 = []
prob_conds_max1 = []
pr_wvi1_max = []
p_wc1_max = []
p_wo1_max = []
p_wvi1_max = []
for i_wn, idxs11 in enumerate(idxs1):
i_wc = pr_wc_max[b][idxs11[0]]
i_op = idxs11[1]
wvi = pr_wvi_beam_op_list[i_op][b][idxs11[0]][idxs11[2]]
# idx11[0]
# get wv_str
temp_pr_wv_str, _ = convert_pred_wvi_to_string([[wvi]], [nlu_t[b]], [nlu_wp_t[b]], [tt_to_t_idx[b]],
[nlu[b]])
merged_wv11 = merge_wv_t1_eng(temp_pr_wv_str[0][0], nlu[b])
conds11 = [i_wc, i_op, merged_wv11]
prob_conds11 = prob_w[b, idxs11[0], idxs11[1], idxs11[2]]
p_wc11_max = prob_wc_dupl[b, idxs11[0], idxs11[1], idxs11[2]]
p_wo11_max = prob_wo_dupl[b, idxs11[0], idxs11[1], idxs11[2]]
p_wvi11_max = [ prob_wvi_st_dupl[b, idxs11[0], idxs11[1], idxs11[2]],
prob_wvi_ed_dupl[b, idxs11[0], idxs11[1], idxs11[2]] ]
# test execution
# print(nlu[b])
# print(tb[b]['id'], tb[b]['types'], pr_sc[b], pr_sa[b], [conds11])
pr_ans = engine.execute(tb[b]['id'], pr_sc_best[b], pr_sa_best[b], [conds11])
if bool(pr_ans):
# pr_ans is not empty!
conds_max1.append(conds11)
prob_conds_max1.append(prob_conds11)
pr_wvi1_max.append(wvi)
p_wc1_max.append(p_wc11_max)
p_wo1_max.append(p_wo11_max)
p_wvi1_max.append(p_wvi11_max)
conds_max.append(conds_max1)
prob_conds_max.append(prob_conds_max1)
pr_wvi_max.append(pr_wvi1_max)
p_wc_max.append(p_wc1_max)
p_wo_max.append(p_wo1_max)
p_wvi_max.append(p_wvi1_max)
        # May need to do a more exhaustive search?
# i.e. up to.. getting all executable cases.
# Calculate total probability to decide the number of where-clauses
pr_sql_i = []
prob_wn_w = [] # total where-clause probability
pr_wn_based_on_prob = []
pr_wvi_best = []
p_wc = []
p_wo = []
p_wvi = []
for b, prob_wn1 in enumerate(prob_wn):
max_executable_wn1 = len(conds_max[b])
prob_wn_w1 = []
prob_wn_w1.append(prob_wn1[0]) # wn=0 case.
for i_wn in range(max_executable_wn1):
prob_wn_w11 = prob_wn1[i_wn + 1] * prob_conds_max[b][i_wn]
prob_wn_w1.append(prob_wn_w11)
pr_wn_based_on_prob.append(argmax(prob_wn_w1))
prob_wn_w.append(prob_wn_w1)
pr_sql_i1 = {'agg': pr_sa_best[b], 'sel': pr_sc_best[b], 'conds': conds_max[b][:pr_wn_based_on_prob[b]]}
pr_wvi_best1 = pr_wvi_max[b][:pr_wn_based_on_prob[b]]
pr_sql_i.append(pr_sql_i1)
pr_wvi_best.append(pr_wvi_best1)
p_wc.append( p_wc_max[b][:pr_wn_based_on_prob[b]] )
p_wo.append( p_wo_max[b][:pr_wn_based_on_prob[b]] )
p_wvi.append( p_wvi_max[b][:pr_wn_based_on_prob[b]] )
# s_wv = [B, n_where_num, max_nlu_tokens, 2]
p_wn = cal_prob_wn(s_wn, pr_wn_based_on_prob)
p_where = cal_prob_where(p_wn, p_wc, p_wo, p_wvi)
return prob_w, prob_wn_w, pr_wn_based_on_prob, pr_sql_i, pr_wvi_best, \
p_where, p_wn, p_wc, p_wo, p_wvi
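# Sequence-to-sequence pointer loss: cross-entropy of each decoding step's scores
# against the gold pointer index of the next token (one-step shift).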
def Loss_s2s(score, g_pnt_idxs):
"""
score = [B, T, max_seq_length]
"""
# WHERE string part
loss = 0
for b, g_pnt_idxs1 in enumerate(g_pnt_idxs):
ed = len(g_pnt_idxs1) - 1
score_part = score[b, :ed]
loss += F.cross_entropy(score_part, torch.tensor(g_pnt_idxs1[1:]).to(device)) # +1 shift.
return loss
| [((40303, 40318), 'torch.nn.functional.sigmoid', 'F.sigmoid', (['s_wc'], {}), '(s_wc)\n', (40312, 40318), True, 'import torch.nn.functional as F\n'), ((40330, 40359), 'torch.nn.functional.binary_cross_entropy', 'F.binary_cross_entropy', (['p', 'im'], {}), '(p, im)\n', (40352, 40359), True, 'import torch.nn.functional as F\n'), ((252, 277), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (275, 277), False, 'import torch\n'), ((4189, 4212), 'torch.nn.functional.softmax', 'F.softmax', (['s_sc'], {'dim': '(-1)'}), '(s_sc, dim=-1)\n', (4198, 4212), True, 'import torch.nn.functional as F\n'), ((13046, 13081), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (13055, 13081), True, 'import torch.nn as nn\n'), ((13101, 13136), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (13110, 13136), True, 'import torch.nn as nn\n'), ((13161, 13196), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (13170, 13196), True, 'import torch.nn as nn\n'), ((13304, 13321), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (13314, 13321), True, 'import torch.nn as nn\n'), ((13350, 13367), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(2)'}), '(dim=2)\n', (13360, 13367), True, 'import torch.nn as nn\n'), ((13397, 13415), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(-1)'}), '(dim=-1)\n', (13407, 13415), True, 'import torch.nn as nn\n'), ((16976, 17011), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (16985, 17011), True, 'import torch.nn as nn\n'), ((17273, 17290), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (17283, 17290), True, 'import torch.nn as nn\n'), ((17319, 17336), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(2)'}), '(dim=2)\n', (17329, 17336), True, 'import torch.nn as nn\n'), ((17366, 17384), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(-1)'}), '(dim=-1)\n', (17376, 17384), True, 'import torch.nn as nn\n'), ((20158, 20183), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', '(1)'], {}), '(hidden_size, 1)\n', (20167, 20183), True, 'import torch.nn as nn\n'), ((20208, 20255), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', '(num_layer * hidden_size)'], {}), '(hidden_size, num_layer * hidden_size)\n', (20217, 20255), True, 'import torch.nn as nn\n'), ((20278, 20325), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', '(num_layer * hidden_size)'], {}), '(hidden_size, num_layer * hidden_size)\n', (20287, 20325), True, 'import torch.nn as nn\n'), ((20350, 20375), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', '(1)'], {}), '(hidden_size, 1)\n', (20359, 20375), True, 'import torch.nn as nn\n'), ((20622, 20639), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (20632, 20639), True, 'import torch.nn as nn\n'), ((20668, 20685), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(2)'}), '(dim=2)\n', (20678, 20685), True, 'import torch.nn as nn\n'), ((20715, 20733), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(-1)'}), '(dim=-1)\n', (20725, 20733), True, 'import torch.nn as nn\n'), ((24656, 24691), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (24665, 24691), True, 'import torch.nn as nn\n'), ((24711, 24746), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (24720, 24746), True, 'import torch.nn as 
nn\n'), ((24771, 24806), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (24780, 24806), True, 'import torch.nn as nn\n'), ((24935, 24952), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (24945, 24952), True, 'import torch.nn as nn\n'), ((24981, 24998), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(2)'}), '(dim=2)\n', (24991, 24998), True, 'import torch.nn as nn\n'), ((25028, 25046), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(-1)'}), '(dim=-1)\n', (25038, 25046), True, 'import torch.nn as nn\n'), ((28387, 28422), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (28396, 28422), True, 'import torch.nn as nn\n'), ((28442, 28477), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (28451, 28477), True, 'import torch.nn as nn\n'), ((28502, 28537), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (28511, 28537), True, 'import torch.nn as nn\n'), ((28735, 28752), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (28745, 28752), True, 'import torch.nn as nn\n'), ((28781, 28798), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(2)'}), '(dim=2)\n', (28791, 28798), True, 'import torch.nn as nn\n'), ((28829, 28847), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(-1)'}), '(dim=-1)\n', (28839, 28847), True, 'import torch.nn as nn\n'), ((29955, 29982), 'torch.stack', 'torch.stack', (['wenc_header_ob'], {}), '(wenc_header_ob)\n', (29966, 29982), False, 'import torch\n'), ((32871, 32906), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (32880, 32906), True, 'import torch.nn as nn\n'), ((32926, 32961), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (32935, 32961), True, 'import torch.nn as nn\n'), ((32986, 33021), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (32995, 33021), True, 'import torch.nn as nn\n'), ((33042, 33076), 'torch.nn.Linear', 'nn.Linear', (['n_cond_ops', 'hidden_size'], {}), '(n_cond_ops, hidden_size)\n', (33051, 33076), True, 'import torch.nn as nn\n'), ((33652, 33669), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (33662, 33669), True, 'import torch.nn as nn\n'), ((33698, 33715), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(2)'}), '(dim=2)\n', (33708, 33715), True, 'import torch.nn as nn\n'), ((33746, 33764), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(-1)'}), '(dim=-1)\n', (33756, 33764), True, 'import torch.nn as nn\n'), ((34862, 34889), 'torch.stack', 'torch.stack', (['wenc_header_ob'], {}), '(wenc_header_ob)\n', (34873, 34889), False, 'import torch\n'), ((37238, 37258), 'torch.stack', 'torch.stack', (['wenc_op'], {}), '(wenc_op)\n', (37249, 37258), False, 'import torch\n'), ((37904, 37938), 'torch.cat', 'torch.cat', (['[vec1e, wenc_ne]'], {'dim': '(3)'}), '([vec1e, wenc_ne], dim=3)\n', (37913, 37938), False, 'import torch\n'), ((41331, 41380), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['s_wv[(b), :g_wn1, :, (0)]', 'g_st1'], {}), '(s_wv[(b), :g_wn1, :, (0)], g_st1)\n', (41346, 41380), True, 'import torch.nn.functional as F\n'), ((41491, 41540), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['s_wv[(b), :g_wn1, :, (1)]', 'g_ed1'], {}), '(s_wv[(b), :g_wn1, :, (1)], g_ed1)\n', (41506, 41540), True, 'import 
torch.nn.functional as F\n'), ((44095, 44216), 'torch.nn.LSTM', 'nn.LSTM', ([], {'input_size': 'max_seq_length', 'hidden_size': 'hidden_size', 'num_layers': 'num_layer', 'batch_first': '(True)', 'dropout': 'dropout'}), '(input_size=max_seq_length, hidden_size=hidden_size, num_layers=\n num_layer, batch_first=True, dropout=dropout)\n', (44102, 44216), True, 'import torch.nn as nn\n'), ((44300, 44334), 'torch.nn.Linear', 'nn.Linear', (['input_size', 'hidden_size'], {}), '(input_size, hidden_size)\n', (44309, 44334), True, 'import torch.nn as nn\n'), ((44356, 44391), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (44365, 44391), True, 'import torch.nn as nn\n'), ((62905, 62928), 'torch.nn.functional.softmax', 'F.softmax', (['s_sc'], {'dim': '(-1)'}), '(s_sc, dim=-1)\n', (62914, 62928), True, 'import torch.nn.functional as F\n'), ((4999, 5022), 'torch.nn.functional.softmax', 'F.softmax', (['s_sa'], {'dim': '(-1)'}), '(s_sa, dim=-1)\n', (5008, 5022), True, 'import torch.nn.functional as F\n'), ((5460, 5482), 'torch.tensor', 'torch.tensor', (['prob_sca'], {}), '(prob_sca)\n', (5472, 5482), False, 'import torch\n'), ((9926, 9946), 'torch.tensor', 'torch.tensor', (['prob_w'], {}), '(prob_w)\n', (9938, 9946), False, 'import torch\n'), ((13233, 13242), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (13240, 13242), True, 'import torch.nn as nn\n'), ((13244, 13273), 'torch.nn.Linear', 'nn.Linear', (['(2 * hidden_size)', '(1)'], {}), '(2 * hidden_size, 1)\n', (13253, 13273), True, 'import torch.nn as nn\n'), ((17048, 17083), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (17057, 17083), True, 'import torch.nn as nn\n'), ((17121, 17130), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (17128, 17130), True, 'import torch.nn as nn\n'), ((17168, 17201), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'n_agg_ops'], {}), '(hidden_size, n_agg_ops)\n', (17177, 17201), True, 'import torch.nn as nn\n'), ((17466, 17501), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (17475, 17501), True, 'import torch.nn as nn\n'), ((17530, 17565), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (17539, 17565), True, 'import torch.nn as nn\n'), ((20412, 20447), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (20421, 20447), True, 'import torch.nn as nn\n'), ((20485, 20494), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (20492, 20494), True, 'import torch.nn as nn\n'), ((20532, 20569), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', '(self.mL_w + 1)'], {}), '(hidden_size, self.mL_w + 1)\n', (20541, 20569), True, 'import torch.nn as nn\n'), ((24855, 24864), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (24862, 24864), True, 'import torch.nn as nn\n'), ((24866, 24895), 'torch.nn.Linear', 'nn.Linear', (['(2 * hidden_size)', '(1)'], {}), '(2 * hidden_size, 1)\n', (24875, 24895), True, 'import torch.nn as nn\n'), ((28587, 28626), 'torch.nn.Linear', 'nn.Linear', (['(2 * hidden_size)', 'hidden_size'], {}), '(2 * hidden_size, hidden_size)\n', (28596, 28626), True, 'import torch.nn as nn\n'), ((28638, 28647), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (28645, 28647), True, 'import torch.nn as nn\n'), ((28661, 28695), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'n_cond_ops'], {}), '(hidden_size, n_cond_ops)\n', (28670, 28695), True, 'import torch.nn as nn\n'), 
((29777, 29800), 'torch.stack', 'torch.stack', (['(real + pad)'], {}), '(real + pad)\n', (29788, 29800), False, 'import torch\n'), ((34683, 34706), 'torch.stack', 'torch.stack', (['(real + pad)'], {}), '(real + pad)\n', (34694, 34706), False, 'import torch\n'), ((36693, 36732), 'torch.zeros', 'torch.zeros', (['self.mL_w', 'self.n_cond_ops'], {}), '(self.mL_w, self.n_cond_ops)\n', (36704, 36732), False, 'import torch\n'), ((40135, 40163), 'torch.zeros', 'torch.zeros', (['[bS, max_h_len]'], {}), '([bS, max_h_len])\n', (40146, 40163), False, 'import torch\n'), ((44429, 44438), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (44436, 44438), True, 'import torch.nn as nn\n'), ((44440, 44465), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', '(1)'], {}), '(hidden_size, 1)\n', (44449, 44465), True, 'import torch.nn as nn\n'), ((48044, 48069), 'torch.stack', 'torch.stack', (['s_wv_list', '(1)'], {}), '(s_wv_list, 1)\n', (48055, 48069), False, 'import torch\n'), ((63761, 63784), 'torch.nn.functional.softmax', 'F.softmax', (['s_sa'], {'dim': '(-1)'}), '(s_sa, dim=-1)\n', (63770, 63784), True, 'import torch.nn.functional as F\n'), ((64238, 64260), 'torch.tensor', 'torch.tensor', (['prob_sca'], {}), '(prob_sca)\n', (64250, 64260), False, 'import torch\n'), ((70437, 70457), 'torch.tensor', 'torch.tensor', (['prob_w'], {}), '(prob_w)\n', (70449, 70457), False, 'import torch\n'), ((4467, 4516), 'torch.zeros', 'torch.zeros', (['[bS, beam_size, self.number_agg_ops]'], {}), '([bS, beam_size, self.number_agg_ops])\n', (4478, 4516), False, 'import torch\n'), ((4547, 4575), 'torch.zeros_like', 'torch.zeros_like', (['prob_sc_sa'], {}), '(prob_sc_sa)\n', (4563, 4575), False, 'import torch\n'), ((26945, 26975), 'torch.mul', 'torch.mul', (['encoded_question', 'p'], {}), '(encoded_question, p)\n', (26954, 26975), False, 'import torch\n'), ((27199, 27217), 'torch.isnan', 'torch.isnan', (['score'], {}), '(score)\n', (27210, 27217), False, 'import torch\n'), ((33205, 33234), 'torch.nn.Linear', 'nn.Linear', (['(4 * hidden_size)', '(2)'], {}), '(4 * hidden_size, 2)\n', (33214, 33234), True, 'import torch.nn as nn\n'), ((33320, 33359), 'torch.nn.Linear', 'nn.Linear', (['(4 * hidden_size)', 'hidden_size'], {}), '(4 * hidden_size, hidden_size)\n', (33329, 33359), True, 'import torch.nn as nn\n'), ((33377, 33386), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (33384, 33386), True, 'import torch.nn as nn\n'), ((33404, 33429), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', '(2)'], {}), '(hidden_size, 2)\n', (33413, 33429), True, 'import torch.nn as nn\n'), ((37113, 37138), 'torch.tensor', 'torch.tensor', (['idx_scatter'], {}), '(idx_scatter)\n', (37125, 37138), False, 'import torch\n'), ((39778, 39796), 'torch.tensor', 'torch.tensor', (['g_sc'], {}), '(g_sc)\n', (39790, 39796), False, 'import torch\n'), ((39885, 39903), 'torch.tensor', 'torch.tensor', (['g_sa'], {}), '(g_sa)\n', (39897, 39903), False, 'import torch\n'), ((39991, 40009), 'torch.tensor', 'torch.tensor', (['g_wn'], {}), '(g_wn)\n', (40003, 40009), False, 'import torch\n'), ((45062, 45113), 'torch.zeros', 'torch.zeros', (['[self.num_layer, bS, self.hidden_size]'], {}), '([self.num_layer, bS, self.hidden_size])\n', (45073, 45113), False, 'import torch\n'), ((45139, 45190), 'torch.zeros', 'torch.zeros', (['[self.num_layer, bS, self.hidden_size]'], {}), '([self.num_layer, bS, self.hidden_size])\n', (45150, 45190), False, 'import torch\n'), ((48932, 48983), 'torch.zeros', 'torch.zeros', (['[self.num_layer, bS, self.hidden_size]'], {}), '([self.num_layer, bS, 
self.hidden_size])\n', (48943, 48983), False, 'import torch\n'), ((49009, 49060), 'torch.zeros', 'torch.zeros', (['[self.num_layer, bS, self.hidden_size]'], {}), '([self.num_layer, bS, self.hidden_size])\n', (49020, 49060), False, 'import torch\n'), ((51514, 51529), 'torch.log', 'torch.log', (['prob'], {}), '(prob)\n', (51523, 51529), False, 'import torch\n'), ((57343, 57374), 'torch.zeros', 'torch.zeros', (['bS', 'max_header_len'], {}), '(bS, max_header_len)\n', (57354, 57374), False, 'import torch\n'), ((57893, 57926), 'torch.zeros', 'torch.zeros', (['[bS, self.n_agg_ops]'], {}), '([bS, self.n_agg_ops])\n', (57904, 57926), False, 'import torch\n'), ((58179, 58216), 'torch.zeros', 'torch.zeros', (['bS', '(self.n_where_num + 1)'], {}), '(bS, self.n_where_num + 1)\n', (58190, 58216), False, 'import torch\n'), ((58424, 58458), 'torch.zeros', 'torch.zeros', (['bS', 'max_header_len', '(1)'], {}), '(bS, max_header_len, 1)\n', (58435, 58458), False, 'import torch\n'), ((58830, 58882), 'torch.zeros', 'torch.zeros', (['[bS, self.n_where_num, self.n_cond_ops]'], {}), '([bS, self.n_where_num, self.n_cond_ops])\n', (58841, 58882), False, 'import torch\n'), ((63183, 63227), 'torch.zeros', 'torch.zeros', (['[bS, beam_size, self.n_agg_ops]'], {}), '([bS, beam_size, self.n_agg_ops])\n', (63194, 63227), False, 'import torch\n'), ((63261, 63305), 'torch.zeros', 'torch.zeros', (['[bS, beam_size, self.n_agg_ops]'], {}), '([bS, beam_size, self.n_agg_ops])\n', (63272, 63305), False, 'import torch\n'), ((63337, 63365), 'torch.zeros_like', 'torch.zeros_like', (['prob_sc_sa'], {}), '(prob_sc_sa)\n', (63353, 63365), False, 'import torch\n'), ((40629, 40648), 'torch.tensor', 'torch.tensor', (['g_wo1'], {}), '(g_wo1)\n', (40641, 40648), False, 'import torch\n'), ((41174, 41194), 'torch.tensor', 'torch.tensor', (['g_wvi1'], {}), '(g_wvi1)\n', (41186, 41194), False, 'import torch\n'), ((45635, 45671), 'torch.zeros', 'torch.zeros', (['bS', 'self.Tmax', 'mL_input'], {}), '(bS, self.Tmax, mL_input)\n', (45646, 45671), False, 'import torch\n'), ((74707, 74736), 'torch.tensor', 'torch.tensor', (['g_pnt_idxs1[1:]'], {}), '(g_pnt_idxs1[1:])\n', (74719, 74736), False, 'import torch\n'), ((47745, 47770), 'torch.zeros', 'torch.zeros', (['bS', 'mL_input'], {}), '(bS, mL_input)\n', (47756, 47770), False, 'import torch\n'), ((55929, 55954), 'torch.zeros', 'torch.zeros', (['bS', 'mL_input'], {}), '(bS, mL_input)\n', (55940, 55954), False, 'import torch\n'), ((56166, 56189), 'torch.tensor', 'torch.tensor', (['idx_batch'], {}), '(idx_batch)\n', (56178, 56189), False, 'import torch\n'), ((7040, 7063), 'torch.nn.functional.softmax', 'F.softmax', (['s_wn'], {'dim': '(-1)'}), '(s_wn, dim=-1)\n', (7049, 7063), True, 'import torch.nn.functional as F\n'), ((7334, 7349), 'torch.nn.functional.sigmoid', 'F.sigmoid', (['s_wc'], {}), '(s_wc)\n', (7343, 7349), True, 'import torch.nn.functional as F\n'), ((8070, 8097), 'torch.nn.functional.softmax', 'F.softmax', (['s_wo_max'], {'dim': '(-1)'}), '(s_wo_max, dim=-1)\n', (8079, 8097), True, 'import torch.nn.functional as F\n'), ((66549, 66572), 'torch.nn.functional.softmax', 'F.softmax', (['s_wn'], {'dim': '(-1)'}), '(s_wn, dim=-1)\n', (66558, 66572), True, 'import torch.nn.functional as F\n'), ((66829, 66848), 'torch.sigmoid', 'torch.sigmoid', (['s_wc'], {}), '(s_wc)\n', (66842, 66848), False, 'import torch\n'), ((67553, 67580), 'torch.nn.functional.softmax', 'F.softmax', (['s_wo_max'], {'dim': '(-1)'}), '(s_wo_max, dim=-1)\n', (67562, 67580), True, 'import torch.nn.functional as F\n'), ((8545, 
8568), 'torch.nn.functional.softmax', 'F.softmax', (['s_wv'], {'dim': '(-2)'}), '(s_wv, dim=-2)\n', (8554, 8568), True, 'import torch.nn.functional as F\n'), ((68068, 68091), 'torch.nn.functional.softmax', 'F.softmax', (['s_wv'], {'dim': '(-2)'}), '(s_wv, dim=-2)\n', (68077, 68091), True, 'import torch.nn.functional as F\n')] |
leeeGreat/xlw_study_python | www/app.py | 03d8eb59f6826b4689d6598ede6393ecbb5058fb | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'Michael Liao'
'''
async web application.
'''
import logging; logging.basicConfig(level=logging.INFO)
import asyncio, os, json, time
from datetime import datetime
from aiohttp import web
def index(request):
return web.Response(body=b'<h1>Awesome</h1>')
async def init(loop):
app = web.Application(loop=loop)
app.router.add_route('GET', '/', index)
srv = await loop.create_server(app.make_handler(), '127.0.0.1', 9000)
logging.info('server started at http://127.0.0.1:9000...')
return srv
loop = asyncio.get_event_loop()
loop.run_until_complete(init(loop))
loop.run_forever()
| [((125, 164), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (144, 164), False, 'import logging\n'), ((587, 611), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (609, 611), False, 'import asyncio, os, json, time\n'), ((284, 322), 'aiohttp.web.Response', 'web.Response', ([], {'body': "b'<h1>Awesome</h1>'"}), "(body=b'<h1>Awesome</h1>')\n", (296, 322), False, 'from aiohttp import web\n'), ((356, 382), 'aiohttp.web.Application', 'web.Application', ([], {'loop': 'loop'}), '(loop=loop)\n', (371, 382), False, 'from aiohttp import web\n'), ((505, 563), 'logging.info', 'logging.info', (['"""server started at http://127.0.0.1:9000..."""'], {}), "('server started at http://127.0.0.1:9000...')\n", (517, 563), False, 'import logging\n')] |
ritchie46/flopy | examples/Testing/flopy3_plotdata.py | 8e7284dcb3aaf5c12293d442248c2c2d9959f835 | from __future__ import print_function
import os
import numpy as np
import matplotlib.pyplot as plt
import flopy
fb = flopy.modflow.Modflow.load('freyberg', version='mf2005', model_ws=os.path.join('..', 'data', 'freyberg'), verbose=True)
dis = fb.dis
top = fb.dis.top
fb.dis.top.plot(grid=True, colorbar=True)
fb.dis.botm.plot(grid=True, colorbar=True)
fb.dis.plot()
plt.show()
fb.dis.plot()
plt.show()
fig = plt.figure(figsize=(8, 8))
ax = fig.add_subplot(1,2,1, aspect='equal')
fb.dis.top.plot(grid=True, axes=ax, colorbar=True)
ax = fig.add_subplot(1,2,2, aspect='equal')
fb.dis.botm.plot(grid=True, axes=ax, colorbar=True)
plt.show()
print('this is the end my friend') | [((374, 384), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (382, 384), True, 'import matplotlib.pyplot as plt\n'), ((400, 410), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (408, 410), True, 'import matplotlib.pyplot as plt\n'), ((419, 445), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (429, 445), True, 'import matplotlib.pyplot as plt\n'), ((638, 648), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (646, 648), True, 'import matplotlib.pyplot as plt\n'), ((187, 225), 'os.path.join', 'os.path.join', (['""".."""', '"""data"""', '"""freyberg"""'], {}), "('..', 'data', 'freyberg')\n", (199, 225), False, 'import os\n')] |
cabrust/chia | chia/components/sample_transformers/__init__.py | 3eaf815b261dc8a85d64fd698e0079515ec0dde9 | from chia import components
from chia.components.sample_transformers import identity
from chia.components.sample_transformers.sample_transformer import SampleTransformer
class SampleTransformerFactory(components.Factory):
name_to_class_mapping = {"identity": identity.IdentitySampleTransformer}
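    # Illustrative note (not from the original module): a configuration naming
    # "identity" would resolve to IdentitySampleTransformer through this mapping;
    # the actual instantiation is handled by the components.Factory base class,
    # whose call signature is not shown here.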
__all__ = ["SampleTransformer", "SampleTransformerFactory"]
| [] |
singi2016cn/python-scaffold | 3/3.6/add_guest.py | 274e508d1919da67e599aa73be139800c043bce4 | # Add guests
names = []
names.append('singi')
names.append('lily')
names.append('sam')
print('I find a big dining-table, I can invite more friends.')
names.insert(0, 'xiaoling')
names.insert(2, 'fangsi')
names.append('zhangqing')
greets = ', would you like to have dinner with me?'
print(names[0]+greets)
print(names[1]+greets)
print(names[2]+greets)
print(names[3]+greets)
print(names[4]+greets)
print(names[5]+greets) | [] |
cartwheelweb/packaginator | apps/pypi/tests/test_slurper.py | f6ce11da22154bce9cba42e896989bdb0fd5e865 | from django.template.defaultfilters import slugify
from django.test import TestCase
from package.models import Package, Version
from pypi.slurper import Slurper
TEST_PACKAGE_NAME = 'Django'
TEST_PACKAGE_VERSION = '1.3'
TEST_PACKAGE_REPO_NAME = 'django-uni-form'
class SlurpAllTests(TestCase):
def test_get_latest_version_number(self):
slurper = Slurper(TEST_PACKAGE_NAME)
version = slurper.get_latest_version_number(TEST_PACKAGE_NAME)
self.assertEquals(version, TEST_PACKAGE_VERSION)
def test_get_or_create_package(self):
slurper = Slurper(TEST_PACKAGE_NAME)
version = slurper.get_latest_version_number(TEST_PACKAGE_NAME)
package, created = slurper.get_or_create_package(TEST_PACKAGE_NAME, version)
self.assertTrue(created)
self.assertTrue(isinstance(package, Package))
self.assertEquals(package.title, TEST_PACKAGE_NAME)
self.assertEquals(package.slug, slugify(TEST_PACKAGE_NAME))
def test_get_or_create_with_repo(self):
slurper = Slurper(TEST_PACKAGE_REPO_NAME)
version = slurper.get_latest_version_number(TEST_PACKAGE_REPO_NAME)
package, created = slurper.get_or_create_package(TEST_PACKAGE_REPO_NAME, version)
self.assertTrue(created)
self.assertTrue(isinstance(package, Package))
self.assertEquals(package.title, TEST_PACKAGE_REPO_NAME)
self.assertEquals(package.slug, slugify(TEST_PACKAGE_REPO_NAME))
def test_check_versions(self):
slurper = Slurper(TEST_PACKAGE_REPO_NAME)
version = slurper.get_latest_version_number(TEST_PACKAGE_REPO_NAME)
        # make me a package (Actually, make me a billionaire)
slurper.get_or_create_package(TEST_PACKAGE_REPO_NAME, version)
# fetch the package for testing
package = Package.objects.get(title=TEST_PACKAGE_REPO_NAME)
self.assertTrue(package.pypi_downloads > 1000) | [((382, 408), 'pypi.slurper.Slurper', 'Slurper', (['TEST_PACKAGE_NAME'], {}), '(TEST_PACKAGE_NAME)\n', (389, 408), False, 'from pypi.slurper import Slurper\n'), ((607, 633), 'pypi.slurper.Slurper', 'Slurper', (['TEST_PACKAGE_NAME'], {}), '(TEST_PACKAGE_NAME)\n', (614, 633), False, 'from pypi.slurper import Slurper\n'), ((1069, 1100), 'pypi.slurper.Slurper', 'Slurper', (['TEST_PACKAGE_REPO_NAME'], {}), '(TEST_PACKAGE_REPO_NAME)\n', (1076, 1100), False, 'from pypi.slurper import Slurper\n'), ((1572, 1603), 'pypi.slurper.Slurper', 'Slurper', (['TEST_PACKAGE_REPO_NAME'], {}), '(TEST_PACKAGE_REPO_NAME)\n', (1579, 1603), False, 'from pypi.slurper import Slurper\n'), ((1912, 1961), 'package.models.Package.objects.get', 'Package.objects.get', ([], {'title': 'TEST_PACKAGE_REPO_NAME'}), '(title=TEST_PACKAGE_REPO_NAME)\n', (1931, 1961), False, 'from package.models import Package, Version\n'), ((977, 1003), 'django.template.defaultfilters.slugify', 'slugify', (['TEST_PACKAGE_NAME'], {}), '(TEST_PACKAGE_NAME)\n', (984, 1003), False, 'from django.template.defaultfilters import slugify\n'), ((1475, 1506), 'django.template.defaultfilters.slugify', 'slugify', (['TEST_PACKAGE_REPO_NAME'], {}), '(TEST_PACKAGE_REPO_NAME)\n', (1482, 1506), False, 'from django.template.defaultfilters import slugify\n')] |
azuresdkci1x/azure-sdk-for-python-1722 | azure-mgmt-logic/azure/mgmt/logic/models/recurrence_schedule_occurrence.py | e08fa6606543ce0f35b93133dbb78490f8e6bcc9 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class RecurrenceScheduleOccurrence(Model):
"""RecurrenceScheduleOccurrence.
:param day: The day of the week. Possible values include: 'Sunday',
'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'
:type day: str or :class:`DayOfWeek <azure.mgmt.logic.models.DayOfWeek>`
:param occurrence: The occurrence.
:type occurrence: int
"""
_attribute_map = {
'day': {'key': 'day', 'type': 'DayOfWeek'},
'occurrence': {'key': 'occurrence', 'type': 'int'},
}
def __init__(self, day=None, occurrence=None):
self.day = day
self.occurrence = occurrence
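    # Hypothetical usage sketch (not generated by AutoRest): an occurrence can be
    # built directly from the two keyword arguments, e.g.
    #   RecurrenceScheduleOccurrence(day='Monday', occurrence=1)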
| [] |
AakankshaAshok/pandas | pandas/core/apply.py | 6498bc1e8a12003640139db4794bd5cd2462c116 | import inspect
import numpy as np
from pandas._libs import reduction as libreduction
from pandas.util._decorators import cache_readonly
from pandas.core.dtypes.common import (
is_dict_like,
is_extension_array_dtype,
is_list_like,
is_sequence,
)
from pandas.core.dtypes.generic import ABCSeries
def frame_apply(
obj,
func,
axis=0,
raw=False,
result_type=None,
ignore_failures=False,
args=None,
kwds=None,
):
""" construct and return a row or column based frame apply object """
axis = obj._get_axis_number(axis)
if axis == 0:
klass = FrameRowApply
elif axis == 1:
klass = FrameColumnApply
return klass(
obj,
func,
raw=raw,
result_type=result_type,
ignore_failures=ignore_failures,
args=args,
kwds=kwds,
)
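# Illustrative sketch, not part of the original module: DataFrame.apply is
# expected to route through this helper, roughly as
#   op = frame_apply(df, func=np.sqrt, axis=0)
#   result = op.get_result()
# where ``df`` is a hypothetical DataFrame; the supported public entry point
# remains DataFrame.apply itself.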
class FrameApply:
def __init__(self, obj, func, raw, result_type, ignore_failures, args, kwds):
self.obj = obj
self.raw = raw
self.ignore_failures = ignore_failures
self.args = args or ()
self.kwds = kwds or {}
if result_type not in [None, "reduce", "broadcast", "expand"]:
raise ValueError(
"invalid value for result_type, must be one "
"of {None, 'reduce', 'broadcast', 'expand'}"
)
self.result_type = result_type
# curry if needed
if (kwds or args) and not isinstance(func, (np.ufunc, str)):
def f(x):
return func(x, *args, **kwds)
else:
f = func
self.f = f
# results
self.result = None
self.res_index = None
self.res_columns = None
@property
def columns(self):
return self.obj.columns
@property
def index(self):
return self.obj.index
@cache_readonly
def values(self):
return self.obj.values
@cache_readonly
def dtypes(self):
return self.obj.dtypes
@property
def agg_axis(self):
return self.obj._get_agg_axis(self.axis)
def get_result(self):
""" compute the results """
# dispatch to agg
if is_list_like(self.f) or is_dict_like(self.f):
return self.obj.aggregate(self.f, axis=self.axis, *self.args, **self.kwds)
# all empty
if len(self.columns) == 0 and len(self.index) == 0:
return self.apply_empty_result()
# string dispatch
if isinstance(self.f, str):
# Support for `frame.transform('method')`
# Some methods (shift, etc.) require the axis argument, others
# don't, so inspect and insert if necessary.
func = getattr(self.obj, self.f)
sig = inspect.getfullargspec(func)
if "axis" in sig.args:
self.kwds["axis"] = self.axis
return func(*self.args, **self.kwds)
# ufunc
elif isinstance(self.f, np.ufunc):
with np.errstate(all="ignore"):
results = self.obj._data.apply("apply", func=self.f)
return self.obj._constructor(
data=results, index=self.index, columns=self.columns, copy=False
)
# broadcasting
if self.result_type == "broadcast":
return self.apply_broadcast()
# one axis empty
elif not all(self.obj.shape):
return self.apply_empty_result()
# raw
elif self.raw and not self.obj._is_mixed_type:
return self.apply_raw()
return self.apply_standard()
def apply_empty_result(self):
"""
we have an empty result; at least 1 axis is 0
we will try to apply the function to an empty
series in order to see if this is a reduction function
"""
# we are not asked to reduce or infer reduction
# so just return a copy of the existing object
if self.result_type not in ["reduce", None]:
return self.obj.copy()
# we may need to infer
should_reduce = self.result_type == "reduce"
from pandas import Series
if not should_reduce:
try:
r = self.f(Series([]))
except Exception:
pass
else:
should_reduce = not isinstance(r, Series)
if should_reduce:
if len(self.agg_axis):
r = self.f(Series([]))
else:
r = np.nan
return self.obj._constructor_sliced(r, index=self.agg_axis)
else:
return self.obj.copy()
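    # Illustrative behaviour sketch (hypothetical empty frame): for
    # ``df = DataFrame(columns=['a'])``, ``df.apply(np.sum)`` probes ``np.sum`` on
    # an empty Series, gets a scalar back and therefore reduces to a Series
    # indexed by the columns, whereas ``df.apply(lambda x: x)`` gets a Series back
    # and simply returns an empty copy of the frame.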
def apply_raw(self):
""" apply to the values as a numpy array """
try:
result = libreduction.compute_reduction(self.values, self.f, axis=self.axis)
except ValueError as err:
if "Function does not reduce" not in str(err):
# catch only ValueError raised intentionally in libreduction
raise
result = np.apply_along_axis(self.f, self.axis, self.values)
# TODO: mixed type case
if result.ndim == 2:
return self.obj._constructor(result, index=self.index, columns=self.columns)
else:
return self.obj._constructor_sliced(result, index=self.agg_axis)
def apply_broadcast(self, target):
result_values = np.empty_like(target.values)
# axis which we want to compare compliance
result_compare = target.shape[0]
for i, col in enumerate(target.columns):
res = self.f(target[col])
ares = np.asarray(res).ndim
# must be a scalar or 1d
if ares > 1:
raise ValueError("too many dims to broadcast")
elif ares == 1:
# must match return dim
if result_compare != len(res):
raise ValueError("cannot broadcast result")
result_values[:, i] = res
# we *always* preserve the original index / columns
result = self.obj._constructor(
result_values, index=target.index, columns=target.columns
)
return result
def apply_standard(self):
# try to reduce first (by default)
# this only matters if the reduction in values is of different dtype
# e.g. if we want to apply to a SparseFrame, then can't directly reduce
# we cannot reduce using non-numpy dtypes,
# as demonstrated in gh-12244
if (
self.result_type in ["reduce", None]
and not self.dtypes.apply(is_extension_array_dtype).any()
# Disallow complex_internals since libreduction shortcut
# cannot handle MultiIndex
and not self.agg_axis._has_complex_internals
):
values = self.values
index = self.obj._get_axis(self.axis)
labels = self.agg_axis
empty_arr = np.empty(len(index), dtype=values.dtype)
# Preserve subclass for e.g. test_subclassed_apply
dummy = self.obj._constructor_sliced(
empty_arr, index=index, dtype=values.dtype
)
try:
result = libreduction.compute_reduction(
values, self.f, axis=self.axis, dummy=dummy, labels=labels
)
except ValueError as err:
if "Function does not reduce" not in str(err):
# catch only ValueError raised intentionally in libreduction
raise
except TypeError:
# e.g. test_apply_ignore_failures we just ignore
if not self.ignore_failures:
raise
except ZeroDivisionError:
# reached via numexpr; fall back to python implementation
pass
else:
return self.obj._constructor_sliced(result, index=labels)
# compute the result using the series generator
self.apply_series_generator()
# wrap results
return self.wrap_results()
def apply_series_generator(self):
series_gen = self.series_generator
res_index = self.result_index
i = None
keys = []
results = {}
if self.ignore_failures:
successes = []
for i, v in enumerate(series_gen):
try:
results[i] = self.f(v)
except Exception:
pass
else:
keys.append(v.name)
successes.append(i)
# so will work with MultiIndex
if len(successes) < len(res_index):
res_index = res_index.take(successes)
else:
for i, v in enumerate(series_gen):
results[i] = self.f(v)
keys.append(v.name)
self.results = results
self.res_index = res_index
self.res_columns = self.result_columns
def wrap_results(self):
results = self.results
# see if we can infer the results
if len(results) > 0 and 0 in results and is_sequence(results[0]):
return self.wrap_results_for_axis()
# dict of scalars
result = self.obj._constructor_sliced(results)
result.index = self.res_index
return result
class FrameRowApply(FrameApply):
axis = 0
def apply_broadcast(self):
return super().apply_broadcast(self.obj)
@property
def series_generator(self):
return (self.obj._ixs(i, axis=1) for i in range(len(self.columns)))
@property
def result_index(self):
return self.columns
@property
def result_columns(self):
return self.index
def wrap_results_for_axis(self):
""" return the results for the rows """
results = self.results
result = self.obj._constructor(data=results)
if not isinstance(results[0], ABCSeries):
if len(result.index) == len(self.res_columns):
result.index = self.res_columns
if len(result.columns) == len(self.res_index):
result.columns = self.res_index
return result
class FrameColumnApply(FrameApply):
axis = 1
def apply_broadcast(self):
result = super().apply_broadcast(self.obj.T)
return result.T
@property
def series_generator(self):
constructor = self.obj._constructor_sliced
return (
constructor(arr, index=self.columns, name=name)
for i, (arr, name) in enumerate(zip(self.values, self.index))
)
@property
def result_index(self):
return self.index
@property
def result_columns(self):
return self.columns
def wrap_results_for_axis(self):
""" return the results for the columns """
results = self.results
# we have requested to expand
if self.result_type == "expand":
result = self.infer_to_same_shape()
# we have a non-series and don't want inference
elif not isinstance(results[0], ABCSeries):
from pandas import Series
result = Series(results)
result.index = self.res_index
# we may want to infer results
else:
result = self.infer_to_same_shape()
return result
def infer_to_same_shape(self):
""" infer the results to the same shape as the input object """
results = self.results
result = self.obj._constructor(data=results)
result = result.T
# set the index
result.index = self.res_index
# infer dtypes
result = result.infer_objects()
return result
| [((5381, 5409), 'numpy.empty_like', 'np.empty_like', (['target.values'], {}), '(target.values)\n', (5394, 5409), True, 'import numpy as np\n'), ((2193, 2213), 'pandas.core.dtypes.common.is_list_like', 'is_list_like', (['self.f'], {}), '(self.f)\n', (2205, 2213), False, 'from pandas.core.dtypes.common import is_dict_like, is_extension_array_dtype, is_list_like, is_sequence\n'), ((2217, 2237), 'pandas.core.dtypes.common.is_dict_like', 'is_dict_like', (['self.f'], {}), '(self.f)\n', (2229, 2237), False, 'from pandas.core.dtypes.common import is_dict_like, is_extension_array_dtype, is_list_like, is_sequence\n'), ((2764, 2792), 'inspect.getfullargspec', 'inspect.getfullargspec', (['func'], {}), '(func)\n', (2786, 2792), False, 'import inspect\n'), ((4742, 4809), 'pandas._libs.reduction.compute_reduction', 'libreduction.compute_reduction', (['self.values', 'self.f'], {'axis': 'self.axis'}), '(self.values, self.f, axis=self.axis)\n', (4772, 4809), True, 'from pandas._libs import reduction as libreduction\n'), ((9164, 9187), 'pandas.core.dtypes.common.is_sequence', 'is_sequence', (['results[0]'], {}), '(results[0])\n', (9175, 9187), False, 'from pandas.core.dtypes.common import is_dict_like, is_extension_array_dtype, is_list_like, is_sequence\n'), ((5023, 5074), 'numpy.apply_along_axis', 'np.apply_along_axis', (['self.f', 'self.axis', 'self.values'], {}), '(self.f, self.axis, self.values)\n', (5042, 5074), True, 'import numpy as np\n'), ((5610, 5625), 'numpy.asarray', 'np.asarray', (['res'], {}), '(res)\n', (5620, 5625), True, 'import numpy as np\n'), ((7224, 7318), 'pandas._libs.reduction.compute_reduction', 'libreduction.compute_reduction', (['values', 'self.f'], {'axis': 'self.axis', 'dummy': 'dummy', 'labels': 'labels'}), '(values, self.f, axis=self.axis, dummy=dummy,\n labels=labels)\n', (7254, 7318), True, 'from pandas._libs import reduction as libreduction\n'), ((11205, 11220), 'pandas.Series', 'Series', (['results'], {}), '(results)\n', (11211, 11220), False, 'from pandas import Series\n'), ((3000, 3025), 'numpy.errstate', 'np.errstate', ([], {'all': '"""ignore"""'}), "(all='ignore')\n", (3011, 3025), True, 'import numpy as np\n'), ((4222, 4232), 'pandas.Series', 'Series', (['[]'], {}), '([])\n', (4228, 4232), False, 'from pandas import Series\n'), ((4450, 4460), 'pandas.Series', 'Series', (['[]'], {}), '([])\n', (4456, 4460), False, 'from pandas import Series\n')] |
YinAoXiong/ZCls | tests/test_model/test_recognizer/test_shufflenetv1.py | 8aeea3640f8456937db35d043e37cf2c03ac9017 | # -*- coding: utf-8 -*-
"""
@date: 2021/5/16 10:22 PM
@file: test_shufflenetv1.py
@author: zj
@description:
"""
import torch
from zcls.config import cfg
from zcls.config.key_word import KEY_OUTPUT
from zcls.model.recognizers.build import build_recognizer
def test_data(model):
data = torch.randn(1, 3, 224, 224)
outputs = model(data)[KEY_OUTPUT]
print(outputs.shape)
assert outputs.shape == (1, 1000)
def test_shufflenet():
cfg.merge_from_file('configs/benchmarks/shufflenet/shufflenet_v1_3g2x_zcls_imagenet_224.yaml')
print(cfg)
model = build_recognizer(cfg, torch.device('cpu'))
print(model)
test_data(model)
if __name__ == '__main__':
test_shufflenet()
| [((293, 320), 'torch.randn', 'torch.randn', (['(1)', '(3)', '(224)', '(224)'], {}), '(1, 3, 224, 224)\n', (304, 320), False, 'import torch\n'), ((452, 551), 'zcls.config.cfg.merge_from_file', 'cfg.merge_from_file', (['"""configs/benchmarks/shufflenet/shufflenet_v1_3g2x_zcls_imagenet_224.yaml"""'], {}), "(\n 'configs/benchmarks/shufflenet/shufflenet_v1_3g2x_zcls_imagenet_224.yaml')\n", (471, 551), False, 'from zcls.config import cfg\n'), ((596, 615), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (608, 615), False, 'import torch\n')] |
jpapadakis/gdal | autotest/gcore/vsis3.py | f07aa15fd65af36b04291303cc6834c87f662814 | #!/usr/bin/env pytest
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: Test /vsis3
# Author: Even Rouault <even dot rouault at spatialys dot com>
#
###############################################################################
# Copyright (c) 2015, Even Rouault <even dot rouault at spatialys dot com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import json
import os.path
import stat
import sys
from osgeo import gdal
import gdaltest
import webserver
import pytest
def open_for_read(uri):
"""
Opens a test file for reading.
"""
return gdal.VSIFOpenExL(uri, 'rb', 1)
###############################################################################
def test_vsis3_init():
gdaltest.aws_vars = {}
for var in ('AWS_SECRET_ACCESS_KEY', 'AWS_ACCESS_KEY_ID', 'AWS_TIMESTAMP', 'AWS_HTTPS', 'AWS_VIRTUAL_HOSTING', 'AWS_S3_ENDPOINT', 'AWS_REQUEST_PAYER', 'AWS_DEFAULT_REGION', 'AWS_DEFAULT_PROFILE', 'AWS_PROFILE', 'AWS_NO_SIGN_REQUEST'):
gdaltest.aws_vars[var] = gdal.GetConfigOption(var)
if gdaltest.aws_vars[var] is not None:
gdal.SetConfigOption(var, "")
    # Prevent user AWS credentials in ~/.aws/credentials and ~/.aws/config
    # from messing up our tests
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '')
gdal.SetConfigOption('AWS_CONFIG_FILE', '')
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', '')
assert gdal.GetSignedURL('/vsis3/foo/bar') is None
###############################################################################
# Test AWS_NO_SIGN_REQUEST=YES
def test_vsis3_no_sign_request():
if not gdaltest.built_against_curl():
pytest.skip()
with gdaltest.config_option('AWS_NO_SIGN_REQUEST', 'YES'):
actual_url = gdal.GetActualURL('/vsis3/landsat-pds/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF')
assert actual_url == 'https://landsat-pds.s3.amazonaws.com/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF'
actual_url = gdal.GetActualURL('/vsis3_streaming/landsat-pds/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF')
assert actual_url == 'https://landsat-pds.s3.amazonaws.com/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF'
f = open_for_read('/vsis3/landsat-pds/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF')
if f is None:
if gdaltest.gdalurlopen('https://landsat-pds.s3.amazonaws.com/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF') is None:
pytest.skip('cannot open URL')
pytest.fail()
gdal.VSIFCloseL(f)
###############################################################################
# Test Sync() and multithreaded download
def test_vsis3_sync_multithreaded_download():
if not gdaltest.built_against_curl():
pytest.skip()
def cbk(pct, _, tab):
assert pct >= tab[0]
tab[0] = pct
return True
tab = [ -1 ]
# Use a public bucket with /test_dummy/foo and /test_dummy/bar files
with gdaltest.config_option('AWS_NO_SIGN_REQUEST', 'YES'):
assert gdal.Sync('/vsis3/cdn.proj.org/test_dummy',
'/vsimem/test_vsis3_no_sign_request_sync',
options=['NUM_THREADS=2'],
callback=cbk, callback_data=tab)
assert tab[0] == 1.0
assert gdal.VSIStatL('/vsimem/test_vsis3_no_sign_request_sync/test_dummy/foo').size == 4
assert gdal.VSIStatL('/vsimem/test_vsis3_no_sign_request_sync/test_dummy/bar').size == 4
gdal.RmdirRecursive('/vsimem/test_vsis3_no_sign_request_sync')
###############################################################################
# Test Sync() and multithreaded download and CHUNK_SIZE
def test_vsis3_sync_multithreaded_download_chunk_size():
if not gdaltest.built_against_curl():
pytest.skip()
def cbk(pct, _, tab):
assert pct >= tab[0]
tab[0] = pct
return True
tab = [ -1 ]
# Use a public bucket with /test_dummy/foo and /test_dummy/bar files
with gdaltest.config_option('AWS_NO_SIGN_REQUEST', 'YES'):
assert gdal.Sync('/vsis3/cdn.proj.org/test_dummy',
'/vsimem/test_vsis3_no_sign_request_sync',
options=['NUM_THREADS=2', 'CHUNK_SIZE=3'],
callback=cbk, callback_data=tab)
assert tab[0] == 1.0
assert gdal.VSIStatL('/vsimem/test_vsis3_no_sign_request_sync/test_dummy/foo').size == 4
assert gdal.VSIStatL('/vsimem/test_vsis3_no_sign_request_sync/test_dummy/bar').size == 4
gdal.RmdirRecursive('/vsimem/test_vsis3_no_sign_request_sync')
###############################################################################
# Error cases
def test_vsis3_1():
if not gdaltest.built_against_curl():
pytest.skip()
# Missing AWS_SECRET_ACCESS_KEY
gdal.ErrorReset()
with gdaltest.error_handler():
f = open_for_read('/vsis3/foo/bar')
assert f is None and gdal.VSIGetLastErrorMsg().find('AWS_SECRET_ACCESS_KEY') >= 0
gdal.ErrorReset()
with gdaltest.error_handler():
f = open_for_read('/vsis3_streaming/foo/bar')
assert f is None and gdal.VSIGetLastErrorMsg().find('AWS_SECRET_ACCESS_KEY') >= 0
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY')
# Missing AWS_ACCESS_KEY_ID
gdal.ErrorReset()
with gdaltest.error_handler():
f = open_for_read('/vsis3/foo/bar')
assert f is None and gdal.VSIGetLastErrorMsg().find('AWS_ACCESS_KEY_ID') >= 0
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID')
# ERROR 1: The AWS Access Key Id you provided does not exist in our records.
gdal.ErrorReset()
with gdaltest.error_handler():
f = open_for_read('/vsis3/foo/bar.baz')
if f is not None or gdal.VSIGetLastErrorMsg() == '':
if f is not None:
gdal.VSIFCloseL(f)
if gdal.GetConfigOption('APPVEYOR') is not None:
return
pytest.fail(gdal.VSIGetLastErrorMsg())
gdal.ErrorReset()
with gdaltest.error_handler():
f = open_for_read('/vsis3_streaming/foo/bar.baz')
assert f is None and gdal.VSIGetLastErrorMsg() != ''
###############################################################################
def test_vsis3_start_webserver():
gdaltest.webserver_process = None
gdaltest.webserver_port = 0
if not gdaltest.built_against_curl():
pytest.skip()
(gdaltest.webserver_process, gdaltest.webserver_port) = webserver.launch(handler=webserver.DispatcherHttpHandler)
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID')
gdal.SetConfigOption('AWS_TIMESTAMP', '20150101T000000Z')
gdal.SetConfigOption('AWS_HTTPS', 'NO')
gdal.SetConfigOption('AWS_VIRTUAL_HOSTING', 'NO')
gdal.SetConfigOption('AWS_S3_ENDPOINT', '127.0.0.1:%d' % gdaltest.webserver_port)
def get_s3_fake_bucket_resource_method(request):
request.protocol_version = 'HTTP/1.1'
if 'Authorization' not in request.headers:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
expected_authorization_8080 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=38901846b865b12ac492bc005bb394ca8d60c098b68db57c084fac686a932f9e'
expected_authorization_8081 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=9f623b7ffce76188a456c70fb4813eb31969e88d130d6b4d801b3accbf050d6c'
if request.headers['Authorization'] != expected_authorization_8080 and request.headers['Authorization'] != expected_authorization_8081:
sys.stderr.write("Bad Authorization: '%s'\n" % str(request.headers['Authorization']))
request.send_response(403)
return
request.send_response(200)
request.send_header('Content-type', 'text/plain')
request.send_header('Content-Length', 3)
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write("""foo""".encode('ascii'))
###############################################################################
# Test with a fake AWS server
def test_vsis3_2():
if gdaltest.webserver_port == 0:
pytest.skip()
signed_url = gdal.GetSignedURL('/vsis3/s3_fake_bucket/resource')
expected_url_8080 = 'http://127.0.0.1:8080/s3_fake_bucket/resource?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AWS_ACCESS_KEY_ID%2F20150101%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20150101T000000Z&X-Amz-Expires=3600&X-Amz-Signature=dca239dd95f72ff8c37c15c840afc54cd19bdb07f7aaee2223108b5b0ad35da8&X-Amz-SignedHeaders=host'
expected_url_8081 = 'http://127.0.0.1:8081/s3_fake_bucket/resource?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AWS_ACCESS_KEY_ID%2F20150101%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20150101T000000Z&X-Amz-Expires=3600&X-Amz-Signature=ef5216bc5971863414c69f6ca095276c0d62c0da97fa4f6ab80c30bd7fc146ac&X-Amz-SignedHeaders=host'
assert signed_url in (expected_url_8080, expected_url_8081)
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3_streaming/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
handler = webserver.SequentialHandler()
def method(request):
request.protocol_version = 'HTTP/1.1'
if 'Authorization' not in request.headers:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
expected_authorization_8080 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-security-token,Signature=464a21835038b4f4d292b6463b8a005b9aaa980513aa8c42fc170abb733dce85'
expected_authorization_8081 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-security-token,Signature=b10e91575186342f9f2acfc91c4c2c9938c4a9e8cdcbc043d09d59d9641ad7fb'
if request.headers['Authorization'] != expected_authorization_8080 and request.headers['Authorization'] != expected_authorization_8081:
sys.stderr.write("Bad Authorization: '%s'\n" % str(request.headers['Authorization']))
request.send_response(403)
return
request.send_response(200)
request.send_header('Content-type', 'text/plain')
request.send_header('Content-Length', 3)
request.end_headers()
request.wfile.write("""foo""".encode('ascii'))
handler.add('GET', '/s3_fake_bucket_with_session_token/resource', custom_method=method)
# Test with temporary credentials
with gdaltest.config_option('AWS_SESSION_TOKEN', 'AWS_SESSION_TOKEN'):
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket_with_session_token/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
handler = webserver.SequentialHandler()
def method(request):
request.protocol_version = 'HTTP/1.1'
if 'Range' in request.headers:
if request.headers['Range'] != 'bytes=0-16383':
sys.stderr.write("Bad Range: '%s'\n" % str(request.headers['Range']))
request.send_response(403)
return
request.send_response(206)
request.send_header('Content-type', 'text/plain')
request.send_header('Content-Range', 'bytes 0-16383/1000000')
request.send_header('Content-Length', 16384)
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write(('a' * 16384).encode('ascii'))
else:
request.send_response(200)
request.send_header('Content-type', 'text/plain')
request.send_header('Content-Length', 1000000)
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write(('a' * 1000000).encode('ascii'))
handler.add('GET', '/s3_fake_bucket/resource2.bin', custom_method=method)
with webserver.install_http_handler(handler):
# old_val = gdal.GetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN')
# gdal.SetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN', 'EMPTY_DIR')
stat_res = gdal.VSIStatL('/vsis3/s3_fake_bucket/resource2.bin')
# gdal.SetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN', old_val)
if stat_res is None or stat_res.size != 1000000:
if stat_res is not None:
print(stat_res.size)
else:
print(stat_res)
pytest.fail()
handler = webserver.SequentialHandler()
handler.add('HEAD', '/s3_fake_bucket/resource2.bin', 200,
{'Content-type': 'text/plain',
'Content-Length': 1000000,
'Connection': 'close'})
with webserver.install_http_handler(handler):
stat_res = gdal.VSIStatL('/vsis3_streaming/s3_fake_bucket/resource2.bin')
if stat_res is None or stat_res.size != 1000000:
if stat_res is not None:
print(stat_res.size)
else:
print(stat_res)
pytest.fail()
handler = webserver.SequentialHandler()
def method(request):
request.protocol_version = 'HTTP/1.1'
if request.headers['Authorization'].find('us-east-1') >= 0:
request.send_response(400)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>AuthorizationHeaderMalformed</Code><Region>us-west-2</Region></Error>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write(response.encode('ascii'))
else:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
handler.add('GET', '/s3_fake_bucket/redirect', custom_method=method)
def method(request):
request.protocol_version = 'HTTP/1.1'
if request.headers['Authorization'].find('us-west-2') >= 0 and request.headers['Host'].startswith('127.0.0.1'):
request.send_response(301)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>PermanentRedirect</Code><Endpoint>localhost:%d</Endpoint></Error>' % request.server.port
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write(response.encode('ascii'))
else:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
handler.add('GET', '/s3_fake_bucket/redirect', custom_method=method)
def method(request):
request.protocol_version = 'HTTP/1.1'
if request.headers['Authorization'].find('us-west-2') >= 0 and request.headers['Host'].startswith('localhost'):
request.send_response(200)
request.send_header('Content-type', 'text/plain')
request.send_header('Content-Length', 3)
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write("""foo""".encode('ascii'))
else:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
handler.add('GET', '/s3_fake_bucket/redirect', custom_method=method)
# Test region and endpoint 'redirects'
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/redirect')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
if data != 'foo':
if gdaltest.is_travis_branch('trusty'):
pytest.skip('Skipped on trusty branch, but should be investigated')
pytest.fail(data)
# Test region and endpoint 'redirects'
gdal.VSICurlClearCache()
handler.req_count = 0
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3_streaming/s3_fake_bucket/redirect')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
handler = webserver.SequentialHandler()
def method(request):
# /vsis3_streaming/ should have remembered the change of region and endpoint
if request.headers['Authorization'].find('us-west-2') < 0 or \
not request.headers['Host'].startswith('localhost'):
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
request.protocol_version = 'HTTP/1.1'
request.send_response(400)
response = 'bla'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write(response.encode('ascii'))
handler.add('GET', '/s3_fake_bucket/non_xml_error', custom_method=method)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3_streaming/s3_fake_bucket/non_xml_error')
assert f is None and gdal.VSIGetLastErrorMsg().find('bla') >= 0
handler = webserver.SequentialHandler()
response = '<?xml version="1.0" encoding="UTF-8"?><oops>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
handler.add('GET', '/s3_fake_bucket/invalid_xml_error', 400,
{'Content-type': 'application/xml',
'Transfer-Encoding': 'chunked',
'Connection': 'close'}, response)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3_streaming/s3_fake_bucket/invalid_xml_error')
assert f is None and gdal.VSIGetLastErrorMsg().find('<oops>') >= 0
handler = webserver.SequentialHandler()
response = '<?xml version="1.0" encoding="UTF-8"?><Error/>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
handler.add('GET', '/s3_fake_bucket/no_code_in_error', 400,
{'Content-type': 'application/xml',
'Transfer-Encoding': 'chunked',
'Connection': 'close'}, response)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3_streaming/s3_fake_bucket/no_code_in_error')
assert f is None and gdal.VSIGetLastErrorMsg().find('<Error/>') >= 0
handler = webserver.SequentialHandler()
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Code>AuthorizationHeaderMalformed</Code></Error>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
handler.add('GET', '/s3_fake_bucket/no_region_in_AuthorizationHeaderMalformed_error', 400,
{'Content-type': 'application/xml',
'Transfer-Encoding': 'chunked',
'Connection': 'close'}, response)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3_streaming/s3_fake_bucket/no_region_in_AuthorizationHeaderMalformed_error')
assert f is None and gdal.VSIGetLastErrorMsg().find('<Error>') >= 0
handler = webserver.SequentialHandler()
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Code>PermanentRedirect</Code></Error>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
handler.add('GET', '/s3_fake_bucket/no_endpoint_in_PermanentRedirect_error', 400,
{'Content-type': 'application/xml',
'Transfer-Encoding': 'chunked',
'Connection': 'close'}, response)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3_streaming/s3_fake_bucket/no_endpoint_in_PermanentRedirect_error')
assert f is None and gdal.VSIGetLastErrorMsg().find('<Error>') >= 0
handler = webserver.SequentialHandler()
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Code>bla</Code></Error>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
handler.add('GET', '/s3_fake_bucket/no_message_in_error', 400,
{'Content-type': 'application/xml',
'Transfer-Encoding': 'chunked',
'Connection': 'close'}, response)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3_streaming/s3_fake_bucket/no_message_in_error')
assert f is None and gdal.VSIGetLastErrorMsg().find('<Error>') >= 0
# Test with requester pays
handler = webserver.SequentialHandler()
def method(request):
if 'x-amz-request-payer' not in request.headers:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
expected_authorization_8080 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-request-payer,Signature=cf713a394e1b629ac0e468d60d3d4a12f5236fd72d21b6005c758b0dfc7049cd'
expected_authorization_8081 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-request-payer,Signature=4756166679008a1a40cd6ff91dbbef670a71c11bf8e3c998dd7385577c3ac4d9'
if request.headers['Authorization'] != expected_authorization_8080 and request.headers['Authorization'] != expected_authorization_8081:
sys.stderr.write("Bad Authorization: '%s'\n" % str(request.headers['Authorization']))
request.send_response(403)
return
if request.headers['x-amz-request-payer'] != 'requester':
sys.stderr.write("Bad x-amz-request-payer: '%s'\n" % str(request.headers['x-amz-request-payer']))
request.send_response(403)
return
request.send_response(200)
request.send_header('Content-type', 'text/plain')
request.send_header('Content-Length', 3)
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write("""foo""".encode('ascii'))
handler.add('GET', '/s3_fake_bucket_with_requester_pays/resource', custom_method=method)
with gdaltest.config_option('AWS_REQUEST_PAYER', 'requester'):
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3/s3_fake_bucket_with_requester_pays/resource')
assert f is not None
data = gdal.VSIFReadL(1, 3, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
# Test temporary redirect
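    # The fake server below answers the first request with a 307 TemporaryRedirect
    # pointing at "localhost", then serves the payload; the retried request must
    # carry a re-signed Authorization header, which method_req_2 checks.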
handler = webserver.SequentialHandler()
class HandlerClass(object):
def __init__(self, response_value):
self.old_authorization = None
self.response_value = response_value
def method_req_1(self, request):
if request.headers['Host'].find('127.0.0.1') < 0:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
self.old_authorization = request.headers['Authorization']
request.protocol_version = 'HTTP/1.1'
request.send_response(307)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>TemporaryRedirect</Code><Endpoint>localhost:%d</Endpoint></Error>' % request.server.port
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.end_headers()
request.wfile.write(response.encode('ascii'))
def method_req_2(self, request):
if request.headers['Host'].find('localhost') < 0:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
if self.old_authorization == request.headers['Authorization']:
                sys.stderr.write('Should have got a different Authorization. Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
request.protocol_version = 'HTTP/1.1'
request.send_response(200)
response = self.response_value
request.send_header('Content-Length', len(response))
request.end_headers()
request.wfile.write(response.encode('ascii'))
h = HandlerClass('foo')
handler.add('GET', '/s3_test_temporary_redirect_read/resource', custom_method=h.method_req_1)
handler.add('GET', '/s3_test_temporary_redirect_read/resource', custom_method=h.method_req_2)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_test_temporary_redirect_read/resource')
assert f is not None
data = gdal.VSIFReadL(1, 3, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
# Retry on the same bucket and check that the redirection was indeed temporary
handler = webserver.SequentialHandler()
h = HandlerClass('bar')
handler.add('GET', '/s3_test_temporary_redirect_read/resource2', custom_method=h.method_req_1)
handler.add('GET', '/s3_test_temporary_redirect_read/resource2', custom_method=h.method_req_2)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_test_temporary_redirect_read/resource2')
assert f is not None
data = gdal.VSIFReadL(1, 3, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'bar'
###############################################################################
# Test re-opening after changing configuration option (#2294)
def test_vsis3_open_after_config_option_chage():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/test_vsis3_change_config_options/?delimiter=%2F', 403)
handler.add('GET', '/test_vsis3_change_config_options/test.bin', 403)
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3/test_vsis3_change_config_options/test.bin')
assert f is None
# Does not attempt any network access since we didn't change significant
# parameters
f = open_for_read('/vsis3/test_vsis3_change_config_options/test.bin')
assert f is None
with gdaltest.config_option('AWS_ACCESS_KEY_ID', 'another_key_id'):
handler = webserver.SequentialHandler()
handler.add('GET', '/test_vsis3_change_config_options/?delimiter=%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix></Prefix>
<Contents>
<Key>test.bin</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>123456</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/test_vsis3_change_config_options/test.bin')
assert f is not None
gdal.VSIFCloseL(f)
###############################################################################
# Test ReadDir() with a fake AWS server
def test_vsis3_readdir():
if gdaltest.webserver_port == 0:
pytest.skip()
handler = webserver.SequentialHandler()
def method(request):
request.protocol_version = 'HTTP/1.1'
if request.headers['Authorization'].find('us-east-1') >= 0:
request.send_response(400)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>AuthorizationHeaderMalformed</Code><Region>us-west-2</Region></Error>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.end_headers()
request.wfile.write(response.encode('ascii'))
elif request.headers['Authorization'].find('us-west-2') >= 0:
if request.headers['Host'].startswith('127.0.0.1'):
request.send_response(301)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>PermanentRedirect</Code><Endpoint>localhost:%d</Endpoint></Error>' % request.server.port
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.end_headers()
request.wfile.write(response.encode('ascii'))
elif request.headers['Host'].startswith('localhost'):
request.send_response(200)
request.send_header('Content-type', 'application/xml')
response = """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>a_dir with_space/</Prefix>
<NextMarker>bla</NextMarker>
<Contents>
<Key>a_dir with_space/resource3 with_space.bin</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>123456</Size>
</Contents>
</ListBucketResult>
"""
request.send_header('Content-Length', len(response))
request.end_headers()
request.wfile.write(response.encode('ascii'))
else:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
else:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&prefix=a_dir%20with_space%2F', custom_method=method)
handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&prefix=a_dir%20with_space%2F', custom_method=method)
handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&prefix=a_dir%20with_space%2F', custom_method=method)
def method(request):
# /vsis3/ should have remembered the change of region and endpoint
if request.headers['Authorization'].find('us-west-2') < 0 or \
not request.headers['Host'].startswith('localhost'):
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
request.protocol_version = 'HTTP/1.1'
request.send_response(200)
request.send_header('Content-type', 'application/xml')
response = """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>a_dir with_space/</Prefix>
<Contents>
<Key>a_dir with_space/resource4.bin</Key>
<LastModified>2015-10-16T12:34:56.000Z</LastModified>
<Size>456789</Size>
</Contents>
<Contents>
<Key>a_dir with_space/i_am_a_glacier_file</Key>
<LastModified>2015-10-16T12:34:56.000Z</LastModified>
<Size>456789</Size>
<StorageClass>GLACIER</StorageClass>
</Contents>
<CommonPrefixes>
<Prefix>a_dir with_space/subdir/</Prefix>
</CommonPrefixes>
</ListBucketResult>
"""
request.send_header('Content-Length', len(response))
request.end_headers()
request.wfile.write(response.encode('ascii'))
handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&marker=bla&prefix=a_dir%20with_space%2F', custom_method=method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin')
if f is None:
if gdaltest.is_travis_branch('trusty'):
pytest.skip('Skipped on trusty branch, but should be investigated')
pytest.fail()
gdal.VSIFCloseL(f)
with webserver.install_http_handler(webserver.SequentialHandler()):
dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir with_space')
assert dir_contents == ['resource3 with_space.bin', 'resource4.bin', 'subdir']
assert gdal.VSIStatL('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin').size == 123456
assert gdal.VSIStatL('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin').mtime == 1
# Same as above: cached
dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir with_space')
assert dir_contents == ['resource3 with_space.bin', 'resource4.bin', 'subdir']
# ReadDir on something known to be a file shouldn't cause network access
dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin')
assert dir_contents is None
# Test unrelated partial clear of the cache
gdal.VSICurlPartialClearCache('/vsis3/s3_fake_bucket_unrelated')
assert gdal.VSIStatL('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin').size == 123456
dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir with_space')
assert dir_contents == ['resource3 with_space.bin', 'resource4.bin', 'subdir']
# Test partial clear of the cache
gdal.VSICurlPartialClearCache('/vsis3/s3_fake_bucket2/a_dir with_space')
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket2/a_dir%20with_space/resource3%20with_space.bin', 400)
handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&max-keys=100&prefix=a_dir%20with_space%2Fresource3%20with_space.bin%2F', 400)
with webserver.install_http_handler(handler):
gdal.VSIStatL('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin')
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&prefix=a_dir%2F', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>a_dir/</Prefix>
<Contents>
<Key>a_dir/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir')
assert dir_contents == ['test.txt']
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&prefix=a_dir%2F', 200, {},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>a_dir/</Prefix>
<Contents>
<Key>a_dir/resource4.bin</Key>
<LastModified>2015-10-16T12:34:56.000Z</LastModified>
<Size>456789</Size>
</Contents>
<Contents>
<Key>a_dir/i_am_a_glacier_file</Key>
<LastModified>2015-10-16T12:34:56.000Z</LastModified>
<Size>456789</Size>
<StorageClass>GLACIER</StorageClass>
</Contents>
<CommonPrefixes>
<Prefix>a_dir/subdir/</Prefix>
</CommonPrefixes>
</ListBucketResult>
""")
with gdaltest.config_option('CPL_VSIL_CURL_IGNORE_GLACIER_STORAGE', 'NO'):
with webserver.install_http_handler(handler):
dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir')
assert dir_contents == ['resource4.bin', 'i_am_a_glacier_file', 'subdir']
# Test CPL_VSIL_CURL_NON_CACHED
for config_option_value in ['/vsis3/s3_non_cached/test.txt',
'/vsis3/s3_non_cached',
'/vsis3/s3_non_cached:/vsis3/unrelated',
'/vsis3/unrelated:/vsis3/s3_non_cached',
'/vsis3/unrelated:/vsis3/s3_non_cached:/vsis3/unrelated']:
with gdaltest.config_option('CPL_VSIL_CURL_NON_CACHED', config_option_value):
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_non_cached/test.txt', 200, {}, 'foo')
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_non_cached/test.txt')
assert f is not None, config_option_value
data = gdal.VSIFReadL(1, 3, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo', config_option_value
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_non_cached/test.txt', 200, {}, 'bar2')
with webserver.install_http_handler(handler):
size = gdal.VSIStatL('/vsis3/s3_non_cached/test.txt').size
assert size == 4, config_option_value
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_non_cached/test.txt', 200, {}, 'foo')
with webserver.install_http_handler(handler):
size = gdal.VSIStatL('/vsis3/s3_non_cached/test.txt').size
if size != 3:
print(config_option_value)
pytest.fail(data)
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_non_cached/test.txt', 200, {}, 'bar2')
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_non_cached/test.txt')
assert f is not None, config_option_value
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'bar2', config_option_value
# Retry without option
for config_option_value in [None,
'/vsis3/s3_non_cached/bar.txt']:
with gdaltest.config_option('CPL_VSIL_CURL_NON_CACHED', config_option_value):
handler = webserver.SequentialHandler()
if config_option_value is None:
handler.add('GET', '/s3_non_cached/?delimiter=%2F', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix></Prefix>
<Contents>
<Key>test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
<Contents>
<Key>test2.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
</ListBucketResult>
""")
handler.add('GET', '/s3_non_cached/test.txt', 200, {}, 'foo')
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_non_cached/test.txt')
assert f is not None, config_option_value
data = gdal.VSIFReadL(1, 3, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo', config_option_value
handler = webserver.SequentialHandler()
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_non_cached/test.txt')
assert f is not None, config_option_value
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
# We should still get foo because of caching
assert data == 'foo', config_option_value
# List buckets (empty result)
handler = webserver.SequentialHandler()
handler.add('GET', '/', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListAllMyBucketsResult>
<Buckets>
</Buckets>
</ListAllMyBucketsResult>
""")
with webserver.install_http_handler(handler):
dir_contents = gdal.ReadDir('/vsis3/')
assert dir_contents == ['.']
gdal.VSICurlClearCache()
# List buckets
handler = webserver.SequentialHandler()
handler.add('GET', '/', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListAllMyBucketsResult>
<Buckets>
<Bucket>
<Name>mybucket</Name>
</Bucket>
</Buckets>
</ListAllMyBucketsResult>
""")
with webserver.install_http_handler(handler):
dir_contents = gdal.ReadDir('/vsis3/')
assert dir_contents == ['mybucket']
# Test temporary redirect
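# Simulates the AWS 307 TemporaryRedirect flow: the first request answers with an
# <Endpoint> pointing to another host, and the retried request must be re-signed
# for that host (the handler below checks that the Authorization header changed).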
handler = webserver.SequentialHandler()
class HandlerClass(object):
def __init__(self, response_value):
self.old_authorization = None
self.response_value = response_value
def method_req_1(self, request):
if request.headers['Host'].find('127.0.0.1') < 0:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
self.old_authorization = request.headers['Authorization']
request.protocol_version = 'HTTP/1.1'
request.send_response(307)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>TemporaryRedirect</Code><Endpoint>localhost:%d</Endpoint></Error>' % request.server.port
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.end_headers()
request.wfile.write(response.encode('ascii'))
def method_req_2(self, request):
if request.headers['Host'].find('localhost') < 0:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
if self.old_authorization == request.headers['Authorization']:
sys.stderr.write('Should have get a different Authorization. Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
request.protocol_version = 'HTTP/1.1'
request.send_response(200)
request.send_header('Content-type', 'application/xml')
response = self.response_value
request.send_header('Content-Length', len(response))
request.end_headers()
request.wfile.write(response.encode('ascii'))
h = HandlerClass("""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix></Prefix>
<CommonPrefixes>
<Prefix>test</Prefix>
</CommonPrefixes>
</ListBucketResult>
""")
handler.add('GET', '/s3_test_temporary_redirect_read_dir/?delimiter=%2F', custom_method=h.method_req_1)
handler.add('GET', '/s3_test_temporary_redirect_read_dir/?delimiter=%2F', custom_method=h.method_req_2)
with webserver.install_http_handler(handler):
dir_contents = gdal.ReadDir('/vsis3/s3_test_temporary_redirect_read_dir')
assert dir_contents == ['test']
# Retry on the same bucket and check that the redirection was indeed temporary
handler = webserver.SequentialHandler()
h = HandlerClass("""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>test/</Prefix>
<CommonPrefixes>
<Prefix>test/test2</Prefix>
</CommonPrefixes>
</ListBucketResult>
""")
handler.add('GET', '/s3_test_temporary_redirect_read_dir/?delimiter=%2F&prefix=test%2F', custom_method=h.method_req_1)
handler.add('GET', '/s3_test_temporary_redirect_read_dir/?delimiter=%2F&prefix=test%2F', custom_method=h.method_req_2)
with webserver.install_http_handler(handler):
dir_contents = gdal.ReadDir('/vsis3/s3_test_temporary_redirect_read_dir/test')
assert dir_contents == ['test2']
###############################################################################
# Test OpenDir() with a fake AWS server
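# OpenDir() exposes an iterator-style listing; the optional second argument is the
# recursion depth as exercised below: default = unlimited, 0 = immediate entries
# only, 1 = one extra level fetched lazily through a prefix listing.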
def test_vsis3_opendir():
if gdaltest.webserver_port == 0:
pytest.skip()
# Unlimited depth
handler = webserver.SequentialHandler()
handler.add('GET', '/vsis3_opendir/', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix/>
<Marker/>
<Contents>
<Key>test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
<Contents>
<Key>subdir/</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>0</Size>
</Contents>
<Contents>
<Key>subdir/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>5</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
d = gdal.OpenDir('/vsis3/vsis3_opendir')
assert d is not None
entry = gdal.GetNextDirEntry(d)
assert entry.name == 'test.txt'
assert entry.size == 40
assert entry.mode == 32768
assert entry.mtime == 1
entry = gdal.GetNextDirEntry(d)
assert entry.name == 'subdir'
assert entry.mode == 16384
entry = gdal.GetNextDirEntry(d)
assert entry.name == 'subdir/test.txt'
entry = gdal.GetNextDirEntry(d)
assert entry is None
gdal.CloseDir(d)
# Depth = 0
handler = webserver.SequentialHandler()
handler.add('GET', '/vsis3_opendir/?delimiter=%2F', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix/>
<Marker/>
<Contents>
<Key>test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
<CommonPrefixes>
<Prefix>subdir/</Prefix>
</CommonPrefixes>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
d = gdal.OpenDir('/vsis3/vsis3_opendir', 0)
assert d is not None
entry = gdal.GetNextDirEntry(d)
assert entry.name == 'test.txt'
assert entry.size == 40
assert entry.mode == 32768
assert entry.mtime == 1
entry = gdal.GetNextDirEntry(d)
assert entry.name == 'subdir'
assert entry.mode == 16384
entry = gdal.GetNextDirEntry(d)
assert entry is None
gdal.CloseDir(d)
# Depth = 1
handler = webserver.SequentialHandler()
handler.add('GET', '/vsis3_opendir/?delimiter=%2F', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix/>
<Marker/>
<Contents>
<Key>test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
<CommonPrefixes>
<Prefix>subdir/</Prefix>
</CommonPrefixes>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
d = gdal.OpenDir('/vsis3/vsis3_opendir', 1)
assert d is not None
entry = gdal.GetNextDirEntry(d)
assert entry.name == 'test.txt'
assert entry.size == 40
assert entry.mode == 32768
assert entry.mtime == 1
entry = gdal.GetNextDirEntry(d)
assert entry.name == 'subdir'
assert entry.mode == 16384
handler = webserver.SequentialHandler()
handler.add('GET', '/vsis3_opendir/?delimiter=%2F&prefix=subdir%2F', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>subdir/</Prefix>
<Marker/>
<Contents>
<Key>subdir/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>5</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
entry = gdal.GetNextDirEntry(d)
assert entry.name == 'subdir/test.txt'
entry = gdal.GetNextDirEntry(d)
assert entry is None
gdal.CloseDir(d)
###############################################################################
# Test simple PUT support with a fake AWS server
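# For files below the multipart threshold, /vsis3 buffers writes in memory and
# issues a single PUT of the whole object when VSIFCloseL() is called, which is
# why the handlers below only expect one PUT per created file.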
def test_vsis3_4():
if gdaltest.webserver_port == 0:
pytest.skip()
with webserver.install_http_handler(webserver.SequentialHandler()):
with gdaltest.error_handler():
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3', 'wb')
assert f is None
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket3/empty_file.bin', 200, {'Connection': 'close'}, 'foo')
with webserver.install_http_handler(handler):
assert gdal.VSIStatL('/vsis3/s3_fake_bucket3/empty_file.bin').size == 3
# Empty file
handler = webserver.SequentialHandler()
def method(request):
if request.headers['Content-Length'] != '0':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/s3_fake_bucket3/empty_file.bin', custom_method=method)
with webserver.install_http_handler(handler):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/empty_file.bin', 'wb')
assert f is not None
gdal.ErrorReset()
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() == ''
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket3/empty_file.bin', 200, {'Connection': 'close'}, '')
with webserver.install_http_handler(handler):
assert gdal.VSIStatL('/vsis3/s3_fake_bucket3/empty_file.bin').size == 0
# Invalid seek
handler = webserver.SequentialHandler()
with webserver.install_http_handler(handler):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/empty_file.bin', 'wb')
assert f is not None
with gdaltest.error_handler():
ret = gdal.VSIFSeekL(f, 1, 0)
assert ret != 0
gdal.VSIFCloseL(f)
# Invalid read
handler = webserver.SequentialHandler()
with webserver.install_http_handler(handler):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/empty_file.bin', 'wb')
assert f is not None
with gdaltest.error_handler():
ret = gdal.VSIFReadL(1, 1, f)
assert not ret
gdal.VSIFCloseL(f)
# Error case
handler = webserver.SequentialHandler()
handler.add('PUT', '/s3_fake_bucket3/empty_file_error.bin', 403)
with webserver.install_http_handler(handler):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/empty_file_error.bin', 'wb')
assert f is not None
gdal.ErrorReset()
with gdaltest.error_handler():
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() != ''
# Nominal case
gdal.NetworkStatsReset()
with gdaltest.config_option('CPL_VSIL_NETWORK_STATS_ENABLED', 'YES'):
with webserver.install_http_handler(webserver.SequentialHandler()):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/another_file.bin', 'wb')
assert f is not None
assert gdal.VSIFSeekL(f, gdal.VSIFTellL(f), 0) == 0
assert gdal.VSIFSeekL(f, 0, 1) == 0
assert gdal.VSIFSeekL(f, 0, 2) == 0
assert gdal.VSIFWriteL('foo', 1, 3, f) == 3
assert gdal.VSIFSeekL(f, gdal.VSIFTellL(f), 0) == 0
assert gdal.VSIFWriteL('bar', 1, 3, f) == 3
handler = webserver.SequentialHandler()
def method(request):
if request.headers['Content-Length'] != '6':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))
content = request.rfile.read(6).decode('ascii')
if content != 'foobar':
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/s3_fake_bucket3/another_file.bin', custom_method=method)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() == ''
j = json.loads(gdal.NetworkStatsGetAsSerializedJSON())
#print(j)
assert j == {
"methods": {
"PUT": {
"count": 1,
"uploaded_bytes": 6
}
},
"handlers": {
"vsis3": {
"files": {
"/vsis3/s3_fake_bucket3/another_file.bin": {
"methods": {
"PUT": {
"count": 1,
"uploaded_bytes": 6
}
},
"actions": {
"Write": {
"methods": {
"PUT": {
"count": 1,
"uploaded_bytes": 6
}
}
}
}
}
},
"methods": {
"PUT": {
"count": 1,
"uploaded_bytes": 6
}
}
}
}
}
gdal.NetworkStatsReset()
# Redirect case
with webserver.install_http_handler(webserver.SequentialHandler()):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/redirect', 'wb')
assert f is not None
assert gdal.VSIFWriteL('foobar', 1, 6, f) == 6
handler = webserver.SequentialHandler()
def method(request):
request.protocol_version = 'HTTP/1.1'
if request.headers['Authorization'].find('us-east-1') >= 0:
request.send_response(400)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>AuthorizationHeaderMalformed</Code><Region>us-west-2</Region></Error>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.end_headers()
request.wfile.write(response.encode('ascii'))
elif request.headers['Authorization'].find('us-west-2') >= 0:
if request.headers['Content-Length'] != '6':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))
content = request.rfile.read(6).decode('ascii')
if content != 'foobar':
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
else:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/s3_fake_bucket3/redirect', custom_method=method)
handler.add('PUT', '/s3_fake_bucket3/redirect', custom_method=method)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() == ''
###############################################################################
# Test simple PUT support with retry logic
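# GDAL_HTTP_MAX_RETRY / GDAL_HTTP_RETRY_DELAY make the PUT issued at close time be
# retried on retryable HTTP errors: the handler below answers 502 first, then
# accepts the upload.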
def test_vsis3_write_single_put_retry():
if gdaltest.webserver_port == 0:
pytest.skip()
with gdaltest.config_options({'GDAL_HTTP_MAX_RETRY': '2',
'GDAL_HTTP_RETRY_DELAY': '0.01'}):
with webserver.install_http_handler(webserver.SequentialHandler()):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/put_with_retry.bin', 'wb')
assert f is not None
assert gdal.VSIFWriteL('foo', 1, 3, f) == 3
handler = webserver.SequentialHandler()
def method(request):
if request.headers['Content-Length'] != '3':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))
content = request.rfile.read(3).decode('ascii')
if content != 'foo':
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/s3_fake_bucket3/put_with_retry.bin', 502)
handler.add('PUT', '/s3_fake_bucket3/put_with_retry.bin', custom_method=method)
with gdaltest.error_handler():
with webserver.install_http_handler(handler):
gdal.VSIFCloseL(f)
###############################################################################
# Test simple DELETE support with a fake AWS server
def test_vsis3_5():
if gdaltest.webserver_port == 0:
pytest.skip()
with webserver.install_http_handler(webserver.SequentialHandler()):
with gdaltest.error_handler():
ret = gdal.Unlink('/vsis3/foo')
assert ret != 0
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_delete_bucket/delete_file', 200, {'Connection': 'close'}, 'foo')
with webserver.install_http_handler(handler):
assert gdal.VSIStatL('/vsis3/s3_delete_bucket/delete_file').size == 3
handler = webserver.SequentialHandler()
with webserver.install_http_handler(handler):
assert gdal.VSIStatL('/vsis3/s3_delete_bucket/delete_file').size == 3
handler = webserver.SequentialHandler()
handler.add('DELETE', '/s3_delete_bucket/delete_file', 204)
with webserver.install_http_handler(handler):
ret = gdal.Unlink('/vsis3/s3_delete_bucket/delete_file')
assert ret == 0
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_delete_bucket/delete_file', 404, {'Connection': 'close'})
handler.add('GET', '/s3_delete_bucket/?delimiter=%2F&max-keys=100&prefix=delete_file%2F', 404, {'Connection': 'close'})
with webserver.install_http_handler(handler):
assert gdal.VSIStatL('/vsis3/s3_delete_bucket/delete_file') is None
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_delete_bucket/delete_file_error', 200)
handler.add('DELETE', '/s3_delete_bucket/delete_file_error', 403)
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
ret = gdal.Unlink('/vsis3/s3_delete_bucket/delete_file_error')
assert ret != 0
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_delete_bucket/redirect', 200)
def method(request):
request.protocol_version = 'HTTP/1.1'
if request.headers['Authorization'].find('us-east-1') >= 0:
request.send_response(400)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>AuthorizationHeaderMalformed</Code><Region>us-west-2</Region></Error>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.end_headers()
request.wfile.write(response.encode('ascii'))
elif request.headers['Authorization'].find('us-west-2') >= 0:
request.send_response(204)
request.send_header('Content-Length', 0)
request.end_headers()
else:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('DELETE', '/s3_delete_bucket/redirect', custom_method=method)
handler.add('DELETE', '/s3_delete_bucket/redirect', custom_method=method)
with webserver.install_http_handler(handler):
ret = gdal.Unlink('/vsis3/s3_delete_bucket/redirect')
assert ret == 0
###############################################################################
# Test DeleteObjects with a fake AWS server
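# UnlinkBatch() maps to the S3 DeleteObjects API (POST bucket?delete with an XML
# payload); CPL_VSIS3_UNLINK_BATCH_SIZE caps how many keys go into each request,
# so the three paths below are split into a batch of 2 plus a batch of 1.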
def test_vsis3_unlink_batch():
if gdaltest.webserver_port == 0:
pytest.skip()
def method(request):
if request.headers['Content-MD5'] != 'Ze0X4LdlTwCsT+WpNxD9FA==':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(403)
return
content = request.rfile.read(int(request.headers['Content-Length'])).decode('ascii')
if content != """<?xml version="1.0" encoding="UTF-8"?>
<Delete xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Object>
<Key>foo</Key>
</Object>
<Object>
<Key>bar/baz</Key>
</Object>
</Delete>
""":
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(403)
return
request.protocol_version = 'HTTP/1.1'
request.send_response(200)
response = """<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Deleted><Key>foo</Key></Deleted><Deleted><Key>bar/baz</Key></Deleted></DeleteResult>"""
request.send_header('Content-Length', len(response))
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write(response.encode('ascii'))
handler = webserver.SequentialHandler()
handler.add('POST', '/unlink_batch/?delete', custom_method=method)
handler.add('POST', '/unlink_batch/?delete', 200, {},
"""<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Deleted><Key>baw</Key></Deleted></DeleteResult>""")
with gdaltest.config_option('CPL_VSIS3_UNLINK_BATCH_SIZE', '2'):
with webserver.install_http_handler(handler):
ret = gdal.UnlinkBatch(['/vsis3/unlink_batch/foo', '/vsis3/unlink_batch/bar/baz', '/vsis3/unlink_batch/baw'])
assert ret
handler = webserver.SequentialHandler()
handler.add('POST', '/unlink_batch/?delete', 200, {},
"""<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Failed><Key>foo</Key></Failed></DeleteResult>""")
with webserver.install_http_handler(handler):
ret = gdal.UnlinkBatch(['/vsis3/unlink_batch/foo'])
assert not ret
###############################################################################
# Test RmdirRecursive() with a fake AWS server
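# RmdirRecursive() first lists the prefix, then removes the returned keys with
# batched DeleteObjects requests (batch size forced to 2 below), ending with the
# directory placeholder object itself.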
def test_vsis3_rmdir_recursive():
if gdaltest.webserver_port == 0:
pytest.skip()
handler = webserver.SequentialHandler()
handler.add('GET', '/test_rmdir_recursive/?prefix=somedir%2F', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>somedir/</Prefix>
<Marker/>
<Contents>
<Key>somedir/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
<Contents>
<Key>somedir/subdir/</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>0</Size>
</Contents>
<Contents>
<Key>somedir/subdir/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>5</Size>
</Contents>
</ListBucketResult>
""")
def method(request):
content = request.rfile.read(int(request.headers['Content-Length'])).decode('ascii')
if content != """<?xml version="1.0" encoding="UTF-8"?>
<Delete xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Object>
<Key>somedir/test.txt</Key>
</Object>
<Object>
<Key>somedir/subdir/</Key>
</Object>
</Delete>
""":
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(403)
return
request.protocol_version = 'HTTP/1.1'
request.send_response(200)
response = """<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Deleted><Key>somedir/test.txt</Key></Deleted><Deleted><Key>somedir/subdir/</Key></Deleted></DeleteResult>"""
request.send_header('Content-Length', len(response))
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write(response.encode('ascii'))
handler.add('POST', '/test_rmdir_recursive/?delete', custom_method=method)
def method(request):
content = request.rfile.read(int(request.headers['Content-Length'])).decode('ascii')
if content != """<?xml version="1.0" encoding="UTF-8"?>
<Delete xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Object>
<Key>somedir/subdir/test.txt</Key>
</Object>
<Object>
<Key>somedir/</Key>
</Object>
</Delete>
""":
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(403)
return
request.protocol_version = 'HTTP/1.1'
request.send_response(200)
response = """<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Deleted><Key>somedir/subdir/test.txt</Key></Deleted><Deleted><Key>somedir/</Key></Deleted></DeleteResult>"""
request.send_header('Content-Length', len(response))
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write(response.encode('ascii'))
handler.add('POST', '/test_rmdir_recursive/?delete', custom_method=method)
with gdaltest.config_option('CPL_VSIS3_UNLINK_BATCH_SIZE', '2'):
with webserver.install_http_handler(handler):
assert gdal.RmdirRecursive('/vsis3/test_rmdir_recursive/somedir') == 0
###############################################################################
# Test multipart upload with a fake AWS server
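# Writes larger than VSIS3_CHUNK_SIZE use the S3 multipart upload protocol:
#   1. POST object?uploads                       -> returns an UploadId
#   2. PUT object?partNumber=N&uploadId=...      -> one request per chunk, returns an ETag
#   3. POST object?uploadId=...                  -> CompleteMultipartUpload listing all ETags
# The chunk size is forced to 1 MB below, so a buffer of 1 MB + 1 byte needs two parts.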
def test_vsis3_6():
if gdaltest.webserver_port == 0:
pytest.skip()
with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'): # 1 MB
with webserver.install_http_handler(webserver.SequentialHandler()):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket4/large_file.bin', 'wb')
assert f is not None
size = 1024 * 1024 + 1
big_buffer = 'a' * size
handler = webserver.SequentialHandler()
def method(request):
request.protocol_version = 'HTTP/1.1'
if request.headers['Authorization'].find('us-east-1') >= 0:
request.send_response(400)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>AuthorizationHeaderMalformed</Code><Region>us-west-2</Region></Error>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.end_headers()
request.wfile.write(response.encode('ascii'))
elif request.headers['Authorization'].find('us-west-2') >= 0:
response = '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>'
request.send_response(200)
request.send_header('Content-type', 'application/xml')
request.send_header('Content-Length', len(response))
request.end_headers()
request.wfile.write(response.encode('ascii'))
else:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploads', custom_method=method)
handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploads', custom_method=method)
def method(request):
if request.headers['Content-Length'] != '1048576':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('ETag', '"first_etag"')
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/s3_fake_bucket4/large_file.bin?partNumber=1&uploadId=my_id', custom_method=method)
with webserver.install_http_handler(handler):
ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
assert ret == size
handler = webserver.SequentialHandler()
def method(request):
if request.headers['Content-Length'] != '1':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
return
request.send_response(200)
request.send_header('ETag', '"second_etag"')
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/s3_fake_bucket4/large_file.bin?partNumber=2&uploadId=my_id', custom_method=method)
def method(request):
if request.headers['Content-Length'] != '186':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
content = request.rfile.read(186).decode('ascii')
if content != """<CompleteMultipartUpload>
<Part>
<PartNumber>1</PartNumber><ETag>"first_etag"</ETag></Part>
<Part>
<PartNumber>2</PartNumber><ETag>"second_etag"</ETag></Part>
</CompleteMultipartUpload>
""":
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploadId=my_id', custom_method=method)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() == ''
handler = webserver.SequentialHandler()
handler.add('POST', '/s3_fake_bucket4/large_file_initiate_403_error.bin?uploads', 403)
handler.add('POST', '/s3_fake_bucket4/large_file_initiate_empty_result.bin?uploads', 200)
handler.add('POST', '/s3_fake_bucket4/large_file_initiate_invalid_xml_result.bin?uploads', 200, {}, 'foo')
handler.add('POST', '/s3_fake_bucket4/large_file_initiate_no_uploadId.bin?uploads', 200, {}, '<foo/>')
with webserver.install_http_handler(handler):
for filename in ['/vsis3/s3_fake_bucket4/large_file_initiate_403_error.bin',
'/vsis3/s3_fake_bucket4/large_file_initiate_empty_result.bin',
'/vsis3/s3_fake_bucket4/large_file_initiate_invalid_xml_result.bin',
'/vsis3/s3_fake_bucket4/large_file_initiate_no_uploadId.bin']:
with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'): # 1 MB
f = gdal.VSIFOpenL(filename, 'wb')
assert f is not None
with gdaltest.error_handler():
ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
assert ret == 0
gdal.ErrorReset()
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() == ''
handler = webserver.SequentialHandler()
handler.add('POST', '/s3_fake_bucket4/large_file_upload_part_403_error.bin?uploads', 200, {},
'<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>')
handler.add('PUT', '/s3_fake_bucket4/large_file_upload_part_403_error.bin?partNumber=1&uploadId=my_id', 403)
handler.add('DELETE', '/s3_fake_bucket4/large_file_upload_part_403_error.bin?uploadId=my_id', 204)
handler.add('POST', '/s3_fake_bucket4/large_file_upload_part_no_etag.bin?uploads', 200, {},
'<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>')
handler.add('PUT', '/s3_fake_bucket4/large_file_upload_part_no_etag.bin?partNumber=1&uploadId=my_id', 200)
handler.add('DELETE', '/s3_fake_bucket4/large_file_upload_part_no_etag.bin?uploadId=my_id', 204)
with webserver.install_http_handler(handler):
for filename in ['/vsis3/s3_fake_bucket4/large_file_upload_part_403_error.bin',
'/vsis3/s3_fake_bucket4/large_file_upload_part_no_etag.bin']:
with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'): # 1 MB
f = gdal.VSIFOpenL(filename, 'wb')
assert f is not None, filename
with gdaltest.error_handler():
ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
assert ret == 0, filename
gdal.ErrorReset()
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() == '', filename
# Simulate failure in AbortMultipart stage
handler = webserver.SequentialHandler()
handler.add('POST', '/s3_fake_bucket4/large_file_abortmultipart_403_error.bin?uploads', 200, {},
'<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>')
handler.add('PUT', '/s3_fake_bucket4/large_file_abortmultipart_403_error.bin?partNumber=1&uploadId=my_id', 403)
handler.add('DELETE', '/s3_fake_bucket4/large_file_abortmultipart_403_error.bin?uploadId=my_id', 403)
filename = '/vsis3/s3_fake_bucket4/large_file_abortmultipart_403_error.bin'
with webserver.install_http_handler(handler):
with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'): # 1 MB
f = gdal.VSIFOpenL(filename, 'wb')
assert f is not None, filename
with gdaltest.error_handler():
ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
assert ret == 0, filename
gdal.ErrorReset()
with gdaltest.error_handler():
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() != '', filename
# Simulate failure in CompleteMultipartUpload stage
handler = webserver.SequentialHandler()
handler.add('POST', '/s3_fake_bucket4/large_file_completemultipart_403_error.bin?uploads', 200, {},
'<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>')
handler.add('PUT', '/s3_fake_bucket4/large_file_completemultipart_403_error.bin?partNumber=1&uploadId=my_id', 200, {'ETag': 'first_etag'}, '')
handler.add('PUT', '/s3_fake_bucket4/large_file_completemultipart_403_error.bin?partNumber=2&uploadId=my_id', 200, {'ETag': 'second_etag'}, '')
handler.add('POST', '/s3_fake_bucket4/large_file_completemultipart_403_error.bin?uploadId=my_id', 403)
# handler.add('DELETE', '/s3_fake_bucket4/large_file_completemultipart_403_error.bin?uploadId=my_id', 204)
filename = '/vsis3/s3_fake_bucket4/large_file_completemultipart_403_error.bin'
with webserver.install_http_handler(handler):
with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'): # 1 MB
f = gdal.VSIFOpenL(filename, 'wb')
assert f is not None, filename
ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
assert ret == size, filename
gdal.ErrorReset()
with gdaltest.error_handler():
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() != '', filename
###############################################################################
# Test multipart upload with retry logic
def test_vsis3_write_multipart_retry():
if gdaltest.webserver_port == 0:
pytest.skip()
with gdaltest.config_options({'GDAL_HTTP_MAX_RETRY': '2',
'GDAL_HTTP_RETRY_DELAY': '0.01'}):
with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'): # 1 MB
with webserver.install_http_handler(webserver.SequentialHandler()):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket4/large_file.bin', 'wb')
assert f is not None
size = 1024 * 1024 + 1
big_buffer = 'a' * size
handler = webserver.SequentialHandler()
response = '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>'
handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploads', 502)
handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploads', 200,
{'Content-type': 'application/xml',
'Content-Length': len(response),
'Connection': 'close'},
response)
handler.add('PUT', '/s3_fake_bucket4/large_file.bin?partNumber=1&uploadId=my_id', 502)
handler.add('PUT', '/s3_fake_bucket4/large_file.bin?partNumber=1&uploadId=my_id', 200,
{'Content-Length': '0',
'ETag': '"first_etag"',
'Connection': 'close'}, {})
with gdaltest.error_handler():
with webserver.install_http_handler(handler):
ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
assert ret == size
handler = webserver.SequentialHandler()
handler.add('PUT', '/s3_fake_bucket4/large_file.bin?partNumber=2&uploadId=my_id', 200,
{'Content-Length': '0',
'ETag': '"second_etag"',
'Connection': 'close'}, {})
handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploadId=my_id', 502)
handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploadId=my_id', 200,
{'Content-Length': '0',
'Connection': 'close'}, {})
with gdaltest.error_handler():
with webserver.install_http_handler(handler):
gdal.VSIFCloseL(f)
###############################################################################
# Test Mkdir() / Rmdir()
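# S3 has no real directories: Mkdir() creates a zero-length object whose key ends
# with '/', and Rmdir() refuses to remove a prefix that still contains keys.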
def test_vsis3_7():
if gdaltest.webserver_port == 0:
pytest.skip()
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_bucket_test_mkdir/dir/', 404, {'Connection': 'close'})
handler.add('GET', '/s3_bucket_test_mkdir/?delimiter=%2F&max-keys=100&prefix=dir%2F', 404, {'Connection': 'close'})
handler.add('PUT', '/s3_bucket_test_mkdir/dir/', 200)
with webserver.install_http_handler(handler):
ret = gdal.Mkdir('/vsis3/s3_bucket_test_mkdir/dir', 0)
assert ret == 0
assert stat.S_ISDIR(gdal.VSIStatL('/vsis3/s3_bucket_test_mkdir/dir').mode)
dir_content = gdal.ReadDir('/vsis3/s3_bucket_test_mkdir/dir')
assert dir_content == ['.']
# Try creating already existing directory
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_bucket_test_mkdir/dir/', 416, {'Connection': 'close'})
with webserver.install_http_handler(handler):
ret = gdal.Mkdir('/vsis3/s3_bucket_test_mkdir/dir', 0)
assert ret != 0
handler = webserver.SequentialHandler()
handler.add('DELETE', '/s3_bucket_test_mkdir/dir/', 204)
with webserver.install_http_handler(handler):
ret = gdal.Rmdir('/vsis3/s3_bucket_test_mkdir/dir')
assert ret == 0
# Try deleting already deleted directory
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_bucket_test_mkdir/dir/', 404)
handler.add('GET', '/s3_bucket_test_mkdir/?delimiter=%2F&max-keys=100&prefix=dir%2F', 404, {'Connection': 'close'})
with webserver.install_http_handler(handler):
ret = gdal.Rmdir('/vsis3/s3_bucket_test_mkdir/dir')
assert ret != 0
# Try deleting non-empty directory
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_bucket_test_mkdir/dir_nonempty/', 416)
handler.add('GET', '/s3_bucket_test_mkdir/?delimiter=%2F&max-keys=100&prefix=dir_nonempty%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>dir_nonempty/</Prefix>
<Contents>
<Key>dir_nonempty/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
ret = gdal.Rmdir('/vsis3/s3_bucket_test_mkdir/dir_nonempty')
assert ret != 0
# Try stat'ing a directory not ending with slash
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_bucket_test_dir_stat/test_dir_stat', 400)
handler.add('GET', '/s3_bucket_test_dir_stat/?delimiter=%2F&max-keys=100&prefix=test_dir_stat%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>test_dir_stat/</Prefix>
<Contents>
<Key>test_dir_stat/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
assert stat.S_ISDIR(gdal.VSIStatL('/vsis3/s3_bucket_test_dir_stat/test_dir_stat').mode)
# Try ReadDir'ing a directory not ending with a slash
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_bucket_test_readdir/?delimiter=%2F&prefix=test_dirread%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>test_dirread/</Prefix>
<Contents>
<Key>test_dirread/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
assert gdal.ReadDir('/vsis3/s3_bucket_test_readdir/test_dirread') is not None
# Try stat'ing a directory ending with slash
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_bucket_test_dir_stat_2/test_dir_stat/', 400)
handler.add('GET', '/s3_bucket_test_dir_stat_2/?delimiter=%2F&max-keys=100&prefix=test_dir_stat%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>test_dir_stat/</Prefix>
<Contents>
<Key>test_dir_stat/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
assert stat.S_ISDIR(gdal.VSIStatL('/vsis3/s3_bucket_test_dir_stat_2/test_dir_stat/').mode)
# Try ReadDir'ing a directory ending with a slash
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_bucket_test_readdir2/?delimiter=%2F&prefix=test_dirread%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>test_dirread/</Prefix>
<Contents>
<Key>test_dirread/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
assert gdal.ReadDir('/vsis3/s3_bucket_test_readdir2/test_dirread') is not None
###############################################################################
# Test handling of a file and a directory with the same name
def test_vsis3_8():
if gdaltest.webserver_port == 0:
pytest.skip()
handler = webserver.SequentialHandler()
handler.add('GET', '/vsis3_8/?delimiter=%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix></Prefix>
<Contents>
<Key>test</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
<CommonPrefixes>
<Prefix>test/</Prefix>
</CommonPrefixes>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
listdir = gdal.ReadDir('/vsis3/vsis3_8', 0)
assert listdir == ['test', 'test/']
handler = webserver.SequentialHandler()
with webserver.install_http_handler(handler):
assert not stat.S_ISDIR(gdal.VSIStatL('/vsis3/vsis3_8/test').mode)
handler = webserver.SequentialHandler()
with webserver.install_http_handler(handler):
assert stat.S_ISDIR(gdal.VSIStatL('/vsis3/vsis3_8/test/').mode)
###############################################################################
# Test vsisync() with SYNC_STRATEGY=ETAG
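# With SYNC_STRATEGY=ETAG, gdal.Sync() compares the MD5 of the local file with the
# ETag reported by S3 and skips the copy when they match (the ETag used below is
# the MD5 of 'foo'). Illustrative call, paths being placeholders:
#   gdal.Sync('/vsimem/src.txt', '/vsis3/bucket/dst/', options=['SYNC_STRATEGY=ETAG'])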
def test_vsis3_sync_etag():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
options = ['SYNC_STRATEGY=ETAG']
with gdaltest.error_handler():
handler = webserver.SequentialHandler()
with webserver.install_http_handler(handler):
assert not gdal.Sync('/i_do/not/exist', '/vsis3/', options=options)
with gdaltest.error_handler():
handler = webserver.SequentialHandler()
handler.add('GET', '/do_not/exist', 404)
handler.add('GET', '/do_not/?delimiter=%2F&max-keys=100&prefix=exist%2F', 404)
handler.add('PUT', '/do_not/exist', 404)
with webserver.install_http_handler(handler):
assert not gdal.Sync('vsifile.py', '/vsis3/do_not/exist', options=options)
handler = webserver.SequentialHandler()
handler.add('GET', '/out/', 200)
handler.add('GET', '/out/testsync.txt', 404)
handler.add('GET', '/out/?delimiter=%2F&max-keys=100&prefix=testsync.txt%2F', 404)
def method(request):
if request.headers['Content-Length'] != '3':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))
content = request.rfile.read(3).decode('ascii')
if content != 'foo':
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.send_header('ETag', '"acbd18db4cc2f85cedef654fccc4a4d8"')
request.end_headers()
handler.add('PUT', '/out/testsync.txt', custom_method=method)
gdal.FileFromMemBuffer('/vsimem/testsync.txt', 'foo')
def cbk(pct, _, tab):
assert pct > tab[0]
tab[0] = pct
return True
tab = [ 0 ]
with webserver.install_http_handler(handler):
assert gdal.Sync('/vsimem/testsync.txt', '/vsis3/out', options=options,
callback=cbk, callback_data=tab)
assert tab[0] == 1.0
# Re-try with cached ETag. Should generate no network access
handler = webserver.SequentialHandler()
with webserver.install_http_handler(handler):
assert gdal.Sync('/vsimem/testsync.txt', '/vsis3/out', options=options)
assert gdal.Sync('/vsimem/testsync.txt', '/vsis3/out/testsync.txt', options=options)
gdal.VSICurlClearCache()
# Other direction: S3 to /vsimem
handler = webserver.SequentialHandler()
handler.add('GET', '/out/testsync.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'ETag' : '"acbd18db4cc2f85cedef654fccc4a4d8"' }, "foo")
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsis3/out/testsync.txt', '/vsimem/', options=options)
# Shouldn't do any copy, but hard to verify
with webserver.install_http_handler(webserver.SequentialHandler()):
assert gdal.Sync( '/vsis3/out/testsync.txt', '/vsimem/', options=options)
assert gdal.Sync( '/vsis3/out/testsync.txt', '/vsimem/testsync.txt', options=options)
# Modify target file, and redo synchronization
gdal.FileFromMemBuffer('/vsimem/testsync.txt', 'bar')
handler = webserver.SequentialHandler()
handler.add('GET', '/out/testsync.txt', 200,
{ 'Content-Length' : '3',
'ETag' : '"acbd18db4cc2f85cedef654fccc4a4d8"' }, "foo")
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsis3/out/testsync.txt', '/vsimem/', options=options)
f = gdal.VSIFOpenL('/vsimem/testsync.txt', 'rb')
data = gdal.VSIFReadL(1, 3, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
# /vsimem to S3, but after cleaning the cache
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/out/', 200)
handler.add('GET', '/out/testsync.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'ETag' : '"acbd18db4cc2f85cedef654fccc4a4d8"' }, "foo")
with webserver.install_http_handler(handler):
assert gdal.Sync('/vsimem/testsync.txt', '/vsis3/out', options=options)
gdal.Unlink('/vsimem/testsync.txt')
# Directory copying
gdal.VSICurlClearCache()
gdal.Mkdir('/vsimem/subdir', 0)
gdal.FileFromMemBuffer('/vsimem/subdir/testsync.txt', 'foo')
handler = webserver.SequentialHandler()
handler.add('GET', '/out/', 200, {},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix/>
<Marker/>
<IsTruncated>false</IsTruncated>
<Contents>
<Key>testsync.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>3</Size>
<ETag>"acbd18db4cc2f85cedef654fccc4a4d8"</ETag>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
assert gdal.Sync('/vsimem/subdir/', '/vsis3/out', options=options)
gdal.RmdirRecursive('/vsimem/subdir')
###############################################################################
# Test vsisync() with SYNC_STRATEGY=TIMESTAMP
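# With SYNC_STRATEGY=TIMESTAMP the copy only happens when the source file is more
# recent than the target (Last-Modified vs local mtime), in both directions.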
def test_vsis3_sync_timestamp():
if gdaltest.webserver_port == 0:
pytest.skip()
options = ['SYNC_STRATEGY=TIMESTAMP']
gdal.FileFromMemBuffer('/vsimem/testsync.txt', 'foo')
# S3 to local: S3 file is older -> download
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/out/testsync.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'Last-Modified': 'Mon, 01 Jan 1970 00:00:01 GMT' }, "foo")
handler.add('GET', '/out/testsync.txt', 200,
{ 'Content-Length' : '3',
'Last-Modified': 'Mon, 01 Jan 1970 00:00:01 GMT' }, "foo")
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsis3/out/testsync.txt', '/vsimem/',
options=options)
# S3 to local: S3 file is newer -> do nothing
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/out/testsync.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'Last-Modified': 'Mon, 01 Jan 2037 00:00:01 GMT' }, "foo")
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsis3/out/testsync.txt', '/vsimem/',
options=options)
# Local to S3: S3 file is older -> upload
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/out/testsync.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'Last-Modified': 'Mon, 01 Jan 1970 00:00:01 GMT' }, "foo")
handler.add('PUT', '/out/testsync.txt', 200)
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsimem/testsync.txt', '/vsis3/out/testsync.txt',
options=options)
# Local to S3: S3 file is newer -> do nothing
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/out/testsync.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'Last-Modified': 'Mon, 01 Jan 2037 00:00:01 GMT' }, "foo")
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsimem/testsync.txt', '/vsis3/out/testsync.txt',
options=options)
gdal.Unlink('/vsimem/testsync.txt')
###############################################################################
# Test vsisync() with SYNC_STRATEGY=OVERWRITE
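# SYNC_STRATEGY=OVERWRITE unconditionally copies the source over the target, even
# when the target looks newer.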
def test_vsis3_sync_overwrite():
if gdaltest.webserver_port == 0:
pytest.skip()
options = ['SYNC_STRATEGY=OVERWRITE']
gdal.FileFromMemBuffer('/vsimem/testsync.txt', 'foo')
# S3 to local: S3 file is newer
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/out/testsync.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'Last-Modified': 'Mon, 01 Jan 2037 00:00:01 GMT' }, "foo")
handler.add('GET', '/out/testsync.txt', 200,
{ 'Content-Length' : '3',
'Last-Modified': 'Mon, 01 Jan 2037 00:00:01 GMT' }, "foo")
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsis3/out/testsync.txt', '/vsimem/',
options=options)
# Local to S3: S3 file is newer
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/out/testsync.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'Last-Modified': 'Mon, 01 Jan 2037 00:00:01 GMT' }, "foo")
handler.add('PUT', '/out/testsync.txt', 200)
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsimem/testsync.txt', '/vsis3/out/testsync.txt',
options=options)
gdal.Unlink('/vsimem/testsync.txt')
###############################################################################
# Test vsisync() with source and target in /vsis3
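# When both ends are in /vsis3, the copy is done server-side with a PUT carrying an
# x-amz-copy-source header, so the object payload does not need to transit through GDAL.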
def test_vsis3_sync_source_target_in_vsis3():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/in/testsync.txt', 200,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'Last-Modified': 'Mon, 01 Jan 1970 00:00:01 GMT' }, "foo")
handler.add('GET', '/out/', 200)
handler.add('GET', '/out/testsync.txt', 200,
{ 'Content-Length' : '3',
'Last-Modified': 'Mon, 01 Jan 1970 00:00:01 GMT' }, "foo")
def method(request):
if request.headers['Content-Length'] != '0':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
return
if request.headers['x-amz-copy-source'] != '/in/testsync.txt':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/out/testsync.txt', custom_method=method)
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsis3/in/testsync.txt', '/vsis3/out/')
###############################################################################
# Test rename
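# /vsis3 has no native rename: Rename() is implemented as a server-side copy
# (PUT with x-amz-copy-source) followed by a DELETE of the source object.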
def test_vsis3_fake_rename():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/test/source.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3' }, "foo")
handler.add('GET', '/test/target.txt', 404)
handler.add('GET', '/test/?delimiter=%2F&max-keys=100&prefix=target.txt%2F', 200)
def method(request):
if request.headers['Content-Length'] != '0':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
return
if request.headers['x-amz-copy-source'] != '/test/source.txt':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/test/target.txt', custom_method=method)
handler.add('DELETE', '/test/source.txt', 204)
with webserver.install_http_handler(handler):
assert gdal.Rename( '/vsis3/test/source.txt', '/vsis3/test/target.txt') == 0
###############################################################################
# Test renaming a directory
def test_vsis3_fake_rename_dir():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/test/source_dir', 404)
handler.add('GET', '/test/?delimiter=%2F&max-keys=100&prefix=source_dir%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>source_dir/</Prefix>
<Contents>
<Key>source_dir/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>3</Size>
</Contents>
</ListBucketResult>
""")
handler.add('GET', '/test/target_dir/', 404)
handler.add('GET', '/test/?delimiter=%2F&max-keys=100&prefix=target_dir%2F', 404)
def method(request):
if request.headers['Content-Length'] != '0':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/test/target_dir/', custom_method=method)
def method(request):
if request.headers['Content-Length'] != '0':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
return
if request.headers['x-amz-copy-source'] != '/test/source_dir/test.txt':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/test/target_dir/test.txt', custom_method=method)
handler.add('DELETE', '/test/source_dir/test.txt', 204)
handler.add('GET', '/test/source_dir/', 404)
handler.add('GET', '/test/?delimiter=%2F&max-keys=100&prefix=source_dir%2F', 404)
with webserver.install_http_handler(handler):
assert gdal.Rename( '/vsis3/test/source_dir', '/vsis3/test/target_dir') == 0
###############################################################################
# Test that renaming onto an existing directory is not allowed
def test_vsis3_fake_rename_on_existing_dir():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/test/source.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3' }, "foo")
handler.add('GET', '/test_target_dir/', 200)
with webserver.install_http_handler(handler):
assert gdal.Rename( '/vsis3/test/source.txt', '/vsis3/test_target_dir') == -1
###############################################################################
# Test Sync() with multithreaded upload and CHUNK_SIZE
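# CHUNK_SIZE=3 forces the 4-byte file into a two-part multipart upload, and
# VSIS3_SIMULATE_THREADING exercises the multi-threaded code path against the
# sequential fake server; the progress callback is expected to end at 1.0.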
def test_vsis3_fake_sync_multithreaded_upload_chunk_size():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
def cbk(pct, _, tab):
assert pct >= tab[0]
tab[0] = pct
return True
gdal.Mkdir('/vsimem/test', 0)
gdal.FileFromMemBuffer('/vsimem/test/foo', 'foo\n')
tab = [ -1 ]
handler = webserver.SequentialHandler()
handler.add('GET', '/test_bucket/?prefix=test%2F', 200)
handler.add('GET', '/test_bucket/test', 404)
handler.add('GET', '/test_bucket/?delimiter=%2F&max-keys=100&prefix=test%2F', 200)
handler.add('GET', '/test_bucket/', 200)
handler.add('GET', '/test_bucket/test/', 404)
handler.add('PUT', '/test_bucket/test/', 200)
def method(request):
request.protocol_version = 'HTTP/1.1'
response = '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>'
request.send_response(200)
request.send_header('Content-type', 'application/xml')
request.send_header('Content-Length', len(response))
request.end_headers()
request.wfile.write(response.encode('ascii'))
handler.add('POST', '/test_bucket/test/foo?uploads', custom_method=method)
def method(request):
if request.headers['Content-Length'] != '3':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('ETag', '"first_etag"')
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/test_bucket/test/foo?partNumber=1&uploadId=my_id', custom_method=method)
def method(request):
if request.headers['Content-Length'] != '1':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('ETag', '"second_etag"')
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/test_bucket/test/foo?partNumber=2&uploadId=my_id', custom_method=method)
def method(request):
if request.headers['Content-Length'] != '186':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
content = request.rfile.read(186).decode('ascii')
if content != """<CompleteMultipartUpload>
<Part>
<PartNumber>1</PartNumber><ETag>"first_etag"</ETag></Part>
<Part>
<PartNumber>2</PartNumber><ETag>"second_etag"</ETag></Part>
</CompleteMultipartUpload>
""":
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('POST', '/test_bucket/test/foo?uploadId=my_id', custom_method=method)
with gdaltest.config_option('VSIS3_SIMULATE_THREADING', 'YES'):
with webserver.install_http_handler(handler):
assert gdal.Sync('/vsimem/test',
'/vsis3/test_bucket',
options=['NUM_THREADS=1', 'CHUNK_SIZE=3'],
callback=cbk, callback_data=tab)
assert tab[0] == 1.0
gdal.RmdirRecursive('/vsimem/test')
def test_vsis3_fake_sync_multithreaded_upload_chunk_size_failure():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
gdal.Mkdir('/vsimem/test', 0)
gdal.FileFromMemBuffer('/vsimem/test/foo', 'foo\n')
handler = webserver.SequentialHandler()
handler.add('GET', '/test_bucket/?prefix=test%2F', 200)
handler.add('GET', '/test_bucket/test', 404)
handler.add('GET', '/test_bucket/?delimiter=%2F&max-keys=100&prefix=test%2F', 200)
handler.add('GET', '/test_bucket/', 200)
handler.add('GET', '/test_bucket/test/', 404)
handler.add('PUT', '/test_bucket/test/', 200)
def method(request):
request.protocol_version = 'HTTP/1.1'
response = '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>'
request.send_response(200)
request.send_header('Content-type', 'application/xml')
request.send_header('Content-Length', len(response))
request.end_headers()
request.wfile.write(response.encode('ascii'))
handler.add('POST', '/test_bucket/test/foo?uploads', custom_method=method)
def method(request):
if request.headers['Content-Length'] != '3':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('ETag', '"first_etag"')
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/test_bucket/test/foo?partNumber=1&uploadId=my_id', 400)
handler.add('DELETE', '/test_bucket/test/foo?uploadId=my_id', 204)
with gdaltest.config_options({'VSIS3_SIMULATE_THREADING': 'YES',
'VSIS3_SYNC_MULTITHREADING': 'NO'}):
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
assert not gdal.Sync('/vsimem/test',
'/vsis3/test_bucket',
options=['NUM_THREADS=1', 'CHUNK_SIZE=3'])
gdal.RmdirRecursive('/vsimem/test')
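# When one part upload fails (the PUT for partNumber=1 returns 400 above),
# gdal.Sync() is expected to abort the multipart upload with a
# DELETE ...?uploadId=my_id and to report failure.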
###############################################################################
# Test reading/writing metadata
def test_vsis3_metadata():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
# Read HEADERS domain
handler = webserver.SequentialHandler()
handler.add('GET', '/test_metadata/foo.txt', 200, {'foo': 'bar'})
with webserver.install_http_handler(handler):
md = gdal.GetFileMetadata('/vsis3/test_metadata/foo.txt', 'HEADERS')
assert 'foo' in md and md['foo'] == 'bar'
# Read TAGS domain
handler = webserver.SequentialHandler()
handler.add('GET', '/test_metadata/foo.txt?tagging', 200, {},
"""<Tagging><TagSet><Tag><Key>foo</Key><Value>bar</Value></Tag></TagSet></Tagging>""")
with webserver.install_http_handler(handler):
md = gdal.GetFileMetadata('/vsis3/test_metadata/foo.txt', 'TAGS')
assert 'foo' in md and md['foo'] == 'bar'
# Write HEADERS domain
handler = webserver.SequentialHandler()
def method(request):
if request.headers['foo'] != 'bar':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.end_headers()
handler.add('PUT', '/test_metadata/foo.txt', custom_method=method)
with webserver.install_http_handler(handler):
assert gdal.SetFileMetadata('/vsis3/test_metadata/foo.txt', {'foo': 'bar'}, 'HEADERS')
# Write TAGS domain
handler = webserver.SequentialHandler()
def method(request):
request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))
content = request.rfile.read(int(request.headers['Content-Length'])).decode('ascii')
if content != """<?xml version="1.0" encoding="UTF-8"?>
<Tagging xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<TagSet>
<Tag>
<Key>foo</Key>
<Value>bar</Value>
</Tag>
</TagSet>
</Tagging>
""":
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/test_metadata/foo.txt?tagging', custom_method=method)
with webserver.install_http_handler(handler):
assert gdal.SetFileMetadata('/vsis3/test_metadata/foo.txt', {'foo': 'bar'}, 'TAGS')
# Write TAGS domain (wiping tags)
handler = webserver.SequentialHandler()
handler.add('DELETE', '/test_metadata/foo.txt?tagging', 204)
with webserver.install_http_handler(handler):
assert gdal.SetFileMetadata('/vsis3/test_metadata/foo.txt', {}, 'TAGS')
# Error case
with gdaltest.error_handler():
assert gdal.GetFileMetadata('/vsis3/test_metadata/foo.txt', 'UNSUPPORTED') == {}
# Error case
with gdaltest.error_handler():
assert not gdal.SetFileMetadata('/vsis3/test_metadata/foo.txt', {}, 'UNSUPPORTED')
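# Usage sketch of the metadata API exercised above (the bucket/key path is a
# placeholder):
#   md = gdal.GetFileMetadata('/vsis3/bucket/key', 'HEADERS')   # HTTP response headers
#   md = gdal.GetFileMetadata('/vsis3/bucket/key', 'TAGS')      # GET ?tagging document
#   gdal.SetFileMetadata('/vsis3/bucket/key', {'foo': 'bar'}, 'HEADERS')
#   gdal.SetFileMetadata('/vsis3/bucket/key', {}, 'TAGS')       # empty dict wipes the tags
# Any other domain name is rejected with an error.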
###############################################################################
# Test that we take the cached directory listing into account to avoid
# useless requests
def test_vsis3_no_useless_requests():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/no_useless_requests/?delimiter=%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix></Prefix>
<Contents>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
assert gdal.VSIFOpenL('/vsis3/no_useless_requests/foo.txt', 'rb') is None
assert gdal.VSIFOpenL('/vsis3/no_useless_requests/bar.txt', 'rb') is None
assert gdal.VSIStatL('/vsis3/no_useless_requests/baz.txt') is None
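# A single listing of the bucket prefix (returning no entries) is enough for
# the three lookups above: the missing files are resolved from the cached
# directory listing without issuing any additional HTTP request.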
###############################################################################
# Test w+ access
def test_vsis3_random_write():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
with gdaltest.error_handler():
assert gdal.VSIFOpenL('/vsis3/random_write/test.bin', 'w+b') is None
with gdaltest.config_option('CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE', 'YES'):
f = gdal.VSIFOpenL('/vsis3/random_write/test.bin', 'w+b')
assert f
assert gdal.VSIFWriteL('foo', 3, 1, f) == 1
assert gdal.VSIFSeekL(f, 0, 0) == 0
assert gdal.VSIFReadL(3, 1, f).decode('ascii') == 'foo'
assert gdal.VSIFEofL(f) == 0
assert gdal.VSIFTellL(f) == 3
handler = webserver.SequentialHandler()
handler.add('PUT', '/random_write/test.bin', 200, {}, expected_body=b'foo')
with webserver.install_http_handler(handler):
assert gdal.VSIFCloseL(f) == 0
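# With CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE=YES, w+ access is emulated by
# buffering writes in a local temporary file; the object is uploaded in a
# single PUT (expected_body=b'foo' above) when the handle is closed.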
###############################################################################
# Test w+ access
def test_vsis3_random_write_failure_1():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
with gdaltest.config_option('CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE', 'YES'):
f = gdal.VSIFOpenL('/vsis3/random_write/test.bin', 'w+b')
assert f
handler = webserver.SequentialHandler()
handler.add('PUT', '/random_write/test.bin', 400, {})
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
assert gdal.VSIFCloseL(f) != 0
###############################################################################
# Test w+ access
def test_vsis3_random_write_failure_2():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
with gdaltest.config_option('CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE', 'YES'):
with gdaltest.config_option('VSIS3_CHUNK_SIZE_BYTES', '1'):
f = gdal.VSIFOpenL('/vsis3/random_write/test.bin', 'w+b')
assert f
assert gdal.VSIFWriteL('foo', 3, 1, f) == 1
handler = webserver.SequentialHandler()
handler.add('POST', '/random_write/test.bin?uploads', 400, {})
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
assert gdal.VSIFCloseL(f) != 0
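# With VSIS3_CHUNK_SIZE_BYTES=1 the 3 bytes written require a multipart
# upload, so the failure of the initiating POST ?uploads makes
# gdal.VSIFCloseL() return a non-zero value.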
###############################################################################
# Test w+ access
def test_vsis3_random_write_gtiff_create_copy():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/random_write/test.tif', 404, {})
handler.add('GET', '/random_write/?delimiter=%2F&max-keys=100&prefix=test.tif%2F', 404, {})
handler.add('GET', '/random_write/?delimiter=%2F', 404, {})
src_ds = gdal.Open('data/byte.tif')
with gdaltest.config_option('CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE', 'YES'):
with webserver.install_http_handler(handler):
ds = gdal.GetDriverByName('GTiff').CreateCopy('/vsis3/random_write/test.tif', src_ds)
assert ds is not None
handler = webserver.SequentialHandler()
handler.add('PUT', '/random_write/test.tif', 200, {})
with webserver.install_http_handler(handler):
ds = None
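# CreateCopy() on /vsis3/ goes through the same temporary-file mechanism:
# the single PUT of test.tif is only expected once the dataset is closed
# (ds = None above).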
###############################################################################
# Read credentials from simulated ~/.aws/credentials
def test_vsis3_read_credentials_file():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '')
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '/vsimem/aws_credentials')
gdal.VSICurlClearCache()
gdal.FileFromMemBuffer('/vsimem/aws_credentials', """
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
[default]
aws_access_key_id = AWS_ACCESS_KEY_ID
aws_secret_access_key = AWS_SECRET_ACCESS_KEY
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
""")
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '')
gdal.Unlink('/vsimem/aws_credentials')
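# Only the [default] profile of the simulated credentials file may be used:
# the [unrelated] sections before and after it carry decoy values that must
# not be picked up.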
###############################################################################
# Read credentials from simulated ~/.aws/config
def test_vsis3_read_config_file():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '')
gdal.SetConfigOption('AWS_CONFIG_FILE', '/vsimem/aws_config')
gdal.VSICurlClearCache()
gdal.FileFromMemBuffer('/vsimem/aws_config', """
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
[default]
aws_access_key_id = AWS_ACCESS_KEY_ID
aws_secret_access_key = AWS_SECRET_ACCESS_KEY
region = us-east-1
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
""")
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
gdal.SetConfigOption('AWS_CONFIG_FILE', '')
gdal.Unlink('/vsimem/aws_config')
###############################################################################
# Read credentials from simulated ~/.aws/credentials and ~/.aws/config
def test_vsis3_read_credentials_config_file():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '')
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '/vsimem/aws_credentials')
gdal.SetConfigOption('AWS_CONFIG_FILE', '/vsimem/aws_config')
gdal.VSICurlClearCache()
gdal.FileFromMemBuffer('/vsimem/aws_credentials', """
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
[default]
aws_access_key_id = AWS_ACCESS_KEY_ID
aws_secret_access_key = AWS_SECRET_ACCESS_KEY
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
""")
gdal.FileFromMemBuffer('/vsimem/aws_config', """
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
[default]
aws_access_key_id = AWS_ACCESS_KEY_ID
aws_secret_access_key = AWS_SECRET_ACCESS_KEY
region = us-east-1
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
""")
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '')
gdal.Unlink('/vsimem/aws_credentials')
gdal.SetConfigOption('AWS_CONFIG_FILE', '')
gdal.Unlink('/vsimem/aws_config')
###############################################################################
# Read credentials from simulated ~/.aws/credentials and ~/.aws/config with
# a non-default profile
def test_vsis3_read_credentials_config_file_non_default_profile(tmpdir):
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '')
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', None)
gdal.SetConfigOption('AWS_CONFIG_FILE', None)
gdal.SetConfigOption('AWS_PROFILE', 'myprofile')
os_aws = tmpdir.mkdir(".aws")
gdal.VSICurlClearCache()
os_aws.join('credentials').write("""
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
[myprofile]
aws_access_key_id = AWS_ACCESS_KEY_ID
aws_secret_access_key = AWS_SECRET_ACCESS_KEY
[default]
aws_access_key_id = foo
aws_secret_access_key = bar
""")
os_aws.join('config').write("""
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
[profile myprofile]
region = us-east-1
[default]
aws_access_key_id = foo
aws_secret_access_key = bar
""")
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
with gdaltest.config_option(
'USERPROFILE' if sys.platform == 'win32' else 'HOME', str(tmpdir)
):
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
gdal.SetConfigOption('AWS_PROFILE', '')
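# AWS_PROFILE=myprofile selects the [myprofile] section of the credentials
# file and the [profile myprofile] section of the config file; HOME (or
# USERPROFILE on Windows) is pointed at the temporary directory so that
# ~/.aws is resolved there.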
###############################################################################
# Read credentials from simulated ~/.aws/credentials and ~/.aws/config with
# inconsistent values between the two files
def test_vsis3_read_credentials_config_file_inconsistent():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '')
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '/vsimem/aws_credentials')
gdal.SetConfigOption('AWS_CONFIG_FILE', '/vsimem/aws_config')
gdal.VSICurlClearCache()
gdal.FileFromMemBuffer('/vsimem/aws_credentials', """
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
[default]
aws_access_key_id = AWS_ACCESS_KEY_ID
aws_secret_access_key = AWS_SECRET_ACCESS_KEY
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
""")
gdal.FileFromMemBuffer('/vsimem/aws_config', """
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
[default]
aws_access_key_id = AWS_ACCESS_KEY_ID_inconsistent
aws_secret_access_key = AWS_SECRET_ACCESS_KEY_inconsistent
region = us-east-1
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
""")
gdal.ErrorReset()
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
assert gdal.GetLastErrorMsg() != ''
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '')
gdal.Unlink('/vsimem/aws_credentials')
gdal.SetConfigOption('AWS_CONFIG_FILE', '')
gdal.Unlink('/vsimem/aws_config')
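# When the two files disagree, a warning is emitted (GetLastErrorMsg() is
# non-empty above) and the values from ~/.aws/credentials take precedence,
# which is why the mocked request is still accepted.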
###############################################################################
# Read credentials from simulated EC2 instance
def test_vsis3_read_credentials_ec2_imdsv2():
if gdaltest.webserver_port == 0:
pytest.skip()
if sys.platform not in ('linux', 'linux2', 'win32'):
pytest.skip()
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '')
gdal.SetConfigOption('AWS_CONFIG_FILE', '')
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '')
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL',
'http://localhost:%d' % gdaltest.webserver_port)
# Disable the hypervisor-related check used to detect whether we are really on EC2
gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', 'NO')
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('PUT', '/latest/api/token', 200, {}, 'mytoken',
expected_headers={'X-aws-ec2-metadata-token-ttl-seconds': '10'})
handler.add('GET', '/latest/meta-data/iam/security-credentials/', 200, {}, 'myprofile',
expected_headers={'X-aws-ec2-metadata-token': 'mytoken'})
handler.add('GET', '/latest/meta-data/iam/security-credentials/myprofile', 200, {},
"""{
"AccessKeyId": "AWS_ACCESS_KEY_ID",
"SecretAccessKey": "AWS_SECRET_ACCESS_KEY",
"Expiration": "3000-01-01T00:00:00Z"
}""",
expected_headers={'X-aws-ec2-metadata-token': 'mytoken'})
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
# Set a fake URL to check that credentials re-use works
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', '')
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket/bar', 200, {}, 'bar')
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/bar')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'bar'
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', '')
gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', None)
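# IMDSv2 flow exercised above: a PUT to /latest/api/token (with the
# X-aws-ec2-metadata-token-ttl-seconds header) obtains a session token which
# is then sent as X-aws-ec2-metadata-token on the metadata requests. The
# retrieved credentials are cached, so the second read needs no further
# metadata traffic.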
###############################################################################
# Read credentials from simulated EC2 instance that only supports IMDSv1
def test_vsis3_read_credentials_ec2_imdsv1():
if gdaltest.webserver_port == 0:
pytest.skip()
if sys.platform not in ('linux', 'linux2', 'win32'):
pytest.skip()
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '')
gdal.SetConfigOption('AWS_CONFIG_FILE', '')
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '')
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL',
'http://localhost:%d' % gdaltest.webserver_port)
# Disable the hypervisor-related check used to detect whether we are really on EC2
gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', 'NO')
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('PUT', '/latest/api/token', 403, {},
expected_headers={'X-aws-ec2-metadata-token-ttl-seconds': '10'})
handler.add('GET', '/latest/meta-data/iam/security-credentials/', 200, {}, 'myprofile',
unexpected_headers=['X-aws-ec2-metadata-token'])
handler.add('GET', '/latest/meta-data/iam/security-credentials/myprofile', 200, {},
"""{
"AccessKeyId": "AWS_ACCESS_KEY_ID",
"SecretAccessKey": "AWS_SECRET_ACCESS_KEY",
"Expiration": "3000-01-01T00:00:00Z"
}""",
unexpected_headers=['X-aws-ec2-metadata-token'])
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', '')
gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', None)
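# If the token request is refused (403 above), the code falls back to
# IMDSv1: the metadata requests are issued without the
# X-aws-ec2-metadata-token header.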
###############################################################################
# Read credentials from simulated EC2 instance with expiration of the
# cached credentials
def test_vsis3_read_credentials_ec2_expiration():
if gdaltest.webserver_port == 0:
pytest.skip()
if sys.platform not in ('linux', 'linux2', 'win32'):
pytest.skip()
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '')
gdal.SetConfigOption('AWS_CONFIG_FILE', '')
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '')
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL',
'http://localhost:%d' % gdaltest.webserver_port)
# Disable the hypervisor-related check used to detect whether we are really on EC2
gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', 'NO')
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('PUT', '/latest/api/token', 200, {}, 'mytoken',
expected_headers={'X-aws-ec2-metadata-token-ttl-seconds': '10'})
handler.add('GET', '/latest/meta-data/iam/security-credentials/', 200, {}, 'myprofile',
expected_headers={'X-aws-ec2-metadata-token': 'mytoken'})
handler.add('GET', '/latest/meta-data/iam/security-credentials/myprofile', 200, {},
"""{
"AccessKeyId": "AWS_ACCESS_KEY_ID",
"SecretAccessKey": "AWS_SECRET_ACCESS_KEY",
"Expiration": "1970-01-01T00:00:00Z"
}""",
expected_headers={'X-aws-ec2-metadata-token': 'mytoken'})
handler.add('PUT', '/latest/api/token', 200, {}, 'mytoken2',
expected_headers={'X-aws-ec2-metadata-token-ttl-seconds': '10'})
handler.add('GET', '/latest/meta-data/iam/security-credentials/myprofile', 200, {},
"""{
"AccessKeyId": "AWS_ACCESS_KEY_ID",
"SecretAccessKey": "AWS_SECRET_ACCESS_KEY",
"Expiration": "1970-01-01T00:00:00Z"
}""",
expected_headers={'X-aws-ec2-metadata-token': 'mytoken2'})
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
# Set a fake URL to demonstrate we try to re-fetch credentials
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL',
'http://localhost:%d/invalid' % gdaltest.webserver_port)
handler = webserver.SequentialHandler()
handler.add('PUT', '/invalid/latest/api/token', 404)
handler.add('GET', '/invalid/latest/meta-data/iam/security-credentials/myprofile', 404)
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3/s3_fake_bucket/bar')
assert f is None
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', '')
gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', None)
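# Because the returned Expiration is already in the past, the cached
# credentials are not reused: the next access triggers a new
# token/credentials fetch, which fails here since the API root URL now
# points to an invalid path.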
###############################################################################
def test_vsis3_stop_webserver():
if gdaltest.webserver_port == 0:
pytest.skip()
# Clearcache needed to close all connections, since the Python server
# can only handle one connection at a time
gdal.VSICurlClearCache()
webserver.server_stop(gdaltest.webserver_process, gdaltest.webserver_port)
###############################################################################
# Nominal cases (require valid credentials)
def test_vsis3_extra_1():
if not gdaltest.built_against_curl():
pytest.skip()
credentials_filename = gdal.GetConfigOption('HOME',
gdal.GetConfigOption('USERPROFILE', '')) + '/.aws/credentials'
# Either a bucket name or bucket/filename
s3_resource = gdal.GetConfigOption('S3_RESOURCE')
if not os.path.exists(credentials_filename):
if gdal.GetConfigOption('AWS_SECRET_ACCESS_KEY') is None:
pytest.skip('Missing AWS_SECRET_ACCESS_KEY')
elif gdal.GetConfigOption('AWS_ACCESS_KEY_ID') is None:
pytest.skip('Missing AWS_ACCESS_KEY_ID')
if s3_resource is None:
pytest.skip('Missing S3_RESOURCE')
if '/' not in s3_resource:
path = '/vsis3/' + s3_resource
statres = gdal.VSIStatL(path)
assert statres is not None and stat.S_ISDIR(statres.mode), \
('%s is not a valid bucket' % path)
readdir = gdal.ReadDir(path)
assert readdir is not None, 'ReadDir() should not return empty list'
for filename in readdir:
if filename != '.':
subpath = path + '/' + filename
assert gdal.VSIStatL(subpath) is not None, \
('Stat(%s) should not return an error' % subpath)
unique_id = 'vsis3_test'
subpath = path + '/' + unique_id
ret = gdal.Mkdir(subpath, 0)
assert ret >= 0, ('Mkdir(%s) should not return an error' % subpath)
readdir = gdal.ReadDir(path)
assert unique_id in readdir, \
('ReadDir(%s) should contain %s' % (path, unique_id))
ret = gdal.Mkdir(subpath, 0)
assert ret != 0, ('Mkdir(%s) repeated should return an error' % subpath)
ret = gdal.Rmdir(subpath)
assert ret >= 0, ('Rmdir(%s) should not return an error' % subpath)
readdir = gdal.ReadDir(path)
assert unique_id not in readdir, \
('ReadDir(%s) should not contain %s' % (path, unique_id))
ret = gdal.Rmdir(subpath)
assert ret != 0, ('Rmdir(%s) repeated should return an error' % subpath)
ret = gdal.Mkdir(subpath, 0)
assert ret >= 0, ('Mkdir(%s) should not return an error' % subpath)
f = gdal.VSIFOpenL(subpath + '/test.txt', 'wb')
assert f is not None
gdal.VSIFWriteL('hello', 1, 5, f)
gdal.VSIFCloseL(f)
ret = gdal.Rmdir(subpath)
assert ret != 0, \
('Rmdir(%s) on non empty directory should return an error' % subpath)
f = gdal.VSIFOpenL(subpath + '/test.txt', 'rb')
assert f is not None
data = gdal.VSIFReadL(1, 5, f).decode('utf-8')
assert data == 'hello'
gdal.VSIFCloseL(f)
assert gdal.Rename(subpath + '/test.txt', subpath + '/test2.txt') == 0
f = gdal.VSIFOpenL(subpath + '/test2.txt', 'rb')
assert f is not None
data = gdal.VSIFReadL(1, 5, f).decode('utf-8')
assert data == 'hello'
gdal.VSIFCloseL(f)
ret = gdal.Unlink(subpath + '/test2.txt')
assert ret >= 0, \
('Unlink(%s) should not return an error' % (subpath + '/test2.txt'))
ret = gdal.Rmdir(subpath)
assert ret >= 0, ('Rmdir(%s) should not return an error' % subpath)
return
f = open_for_read('/vsis3/' + s3_resource)
assert f is not None, ('cannot open %s' % ('/vsis3/' + s3_resource))
ret = gdal.VSIFReadL(1, 1, f)
gdal.VSIFCloseL(f)
assert len(ret) == 1
# Same with /vsis3_streaming/
f = open_for_read('/vsis3_streaming/' + s3_resource)
assert f is not None
ret = gdal.VSIFReadL(1, 1, f)
gdal.VSIFCloseL(f)
assert len(ret) == 1
if False: # pylint: disable=using-constant-test
# we actually try to read at read() time and bSetError = false
# Invalid bucket : "The specified bucket does not exist"
gdal.ErrorReset()
f = open_for_read('/vsis3/not_existing_bucket/foo')
with gdaltest.error_handler():
gdal.VSIFReadL(1, 1, f)
gdal.VSIFCloseL(f)
assert gdal.VSIGetLastErrorMsg() != ''
# Invalid resource
gdal.ErrorReset()
f = open_for_read('/vsis3_streaming/' + gdal.GetConfigOption('S3_RESOURCE') + '/invalid_resource.baz')
assert f is None, gdal.VSIGetLastErrorMsg()
# Test GetSignedURL()
signed_url = gdal.GetSignedURL('/vsis3/' + s3_resource)
f = open_for_read('/vsicurl_streaming/' + signed_url)
assert f is not None
ret = gdal.VSIFReadL(1, 1, f)
gdal.VSIFCloseL(f)
assert len(ret) == 1
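# GetSignedURL() returns a pre-signed URL, i.e. a plain HTTP(S) URL that can
# be opened through /vsicurl_streaming/ without going through the /vsis3/
# credential machinery, as checked above.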
###############################################################################
def test_vsis3_cleanup():
for var in gdaltest.aws_vars:
gdal.SetConfigOption(var, gdaltest.aws_vars[var])
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', None)
gdal.SetConfigOption('AWS_CONFIG_FILE', None)
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', None)
| [((1737, 1767), 'osgeo.gdal.VSIFOpenExL', 'gdal.VSIFOpenExL', (['uri', '"""rb"""', '(1)'], {}), "(uri, 'rb', 1)\n", (1753, 1767), False, 'from osgeo import gdal\n'), ((2397, 2449), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_CREDENTIALS_FILE"""', '""""""'], {}), "('CPL_AWS_CREDENTIALS_FILE', '')\n", (2417, 2449), False, 'from osgeo import gdal\n'), ((2454, 2497), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_CONFIG_FILE"""', '""""""'], {}), "('AWS_CONFIG_FILE', '')\n", (2474, 2497), False, 'from osgeo import gdal\n'), ((2502, 2554), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_EC2_API_ROOT_URL"""', '""""""'], {}), "('CPL_AWS_EC2_API_ROOT_URL', '')\n", (2522, 2554), False, 'from osgeo import gdal\n'), ((3747, 3765), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (3762, 3765), False, 'from osgeo import gdal\n'), ((4705, 4767), 'osgeo.gdal.RmdirRecursive', 'gdal.RmdirRecursive', (['"""/vsimem/test_vsis3_no_sign_request_sync"""'], {}), "('/vsimem/test_vsis3_no_sign_request_sync')\n", (4724, 4767), False, 'from osgeo import gdal\n'), ((5750, 5812), 'osgeo.gdal.RmdirRecursive', 'gdal.RmdirRecursive', (['"""/vsimem/test_vsis3_no_sign_request_sync"""'], {}), "('/vsimem/test_vsis3_no_sign_request_sync')\n", (5769, 5812), False, 'from osgeo import gdal\n'), ((6036, 6053), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (6051, 6053), False, 'from osgeo import gdal\n'), ((6224, 6241), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (6239, 6241), False, 'from osgeo import gdal\n'), ((6422, 6492), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_SECRET_ACCESS_KEY"""', '"""AWS_SECRET_ACCESS_KEY"""'], {}), "('AWS_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY')\n", (6442, 6492), False, 'from osgeo import gdal\n'), ((6530, 6547), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (6545, 6547), False, 'from osgeo import gdal\n'), ((6714, 6776), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_ACCESS_KEY_ID"""', '"""AWS_ACCESS_KEY_ID"""'], {}), "('AWS_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID')\n", (6734, 6776), False, 'from osgeo import gdal\n'), ((6863, 6880), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (6878, 6880), False, 'from osgeo import gdal\n'), ((7206, 7223), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (7221, 7223), False, 'from osgeo import gdal\n'), ((7688, 7745), 'webserver.launch', 'webserver.launch', ([], {'handler': 'webserver.DispatcherHttpHandler'}), '(handler=webserver.DispatcherHttpHandler)\n', (7704, 7745), False, 'import webserver\n'), ((7810, 7880), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_SECRET_ACCESS_KEY"""', '"""AWS_SECRET_ACCESS_KEY"""'], {}), "('AWS_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY')\n", (7830, 7880), False, 'from osgeo import gdal\n'), ((7885, 7947), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_ACCESS_KEY_ID"""', '"""AWS_ACCESS_KEY_ID"""'], {}), "('AWS_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID')\n", (7905, 7947), False, 'from osgeo import gdal\n'), ((7952, 8009), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_TIMESTAMP"""', '"""20150101T000000Z"""'], {}), "('AWS_TIMESTAMP', '20150101T000000Z')\n", (7972, 8009), False, 'from osgeo import gdal\n'), ((8014, 8053), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_HTTPS"""', '"""NO"""'], {}), "('AWS_HTTPS', 'NO')\n", (8034, 8053), False, 'from osgeo import gdal\n'), ((8058, 8107), 
'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_VIRTUAL_HOSTING"""', '"""NO"""'], {}), "('AWS_VIRTUAL_HOSTING', 'NO')\n", (8078, 8107), False, 'from osgeo import gdal\n'), ((8112, 8198), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_S3_ENDPOINT"""', "('127.0.0.1:%d' % gdaltest.webserver_port)"], {}), "('AWS_S3_ENDPOINT', '127.0.0.1:%d' % gdaltest.\n webserver_port)\n", (8132, 8198), False, 'from osgeo import gdal\n'), ((9690, 9741), 'osgeo.gdal.GetSignedURL', 'gdal.GetSignedURL', (['"""/vsis3/s3_fake_bucket/resource"""'], {}), "('/vsis3/s3_fake_bucket/resource')\n", (9707, 9741), False, 'from osgeo import gdal\n'), ((10493, 10522), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (10520, 10522), False, 'import webserver\n'), ((10891, 10920), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (10918, 10920), False, 'import webserver\n'), ((11294, 11323), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (11321, 11323), False, 'import webserver\n'), ((13125, 13154), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (13152, 13154), False, 'import webserver\n'), ((14835, 14864), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (14862, 14864), False, 'import webserver\n'), ((15389, 15418), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (15416, 15418), False, 'import webserver\n'), ((18475, 18499), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (18497, 18499), False, 'from osgeo import gdal\n'), ((18799, 18828), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (18826, 18828), False, 'import webserver\n'), ((19712, 19729), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (19727, 19729), False, 'from osgeo import gdal\n'), ((19981, 20010), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (20008, 20010), False, 'import webserver\n'), ((20361, 20378), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (20376, 20378), False, 'from osgeo import gdal\n'), ((20637, 20666), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (20664, 20666), False, 'import webserver\n'), ((21018, 21035), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (21033, 21035), False, 'from osgeo import gdal\n'), ((21295, 21324), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (21322, 21324), False, 'import webserver\n'), ((21755, 21772), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (21770, 21772), False, 'from osgeo import gdal\n'), ((22062, 22091), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (22089, 22091), False, 'import webserver\n'), ((22502, 22519), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (22517, 22519), False, 'from osgeo import gdal\n'), ((22800, 22829), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (22827, 22829), False, 'import webserver\n'), ((23207, 23224), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (23222, 23224), False, 'from osgeo import gdal\n'), ((23517, 23546), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (23544, 23546), False, 'import webserver\n'), ((25661, 25690), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (25688, 
25690), False, 'import webserver\n'), ((28134, 28163), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (28161, 28163), False, 'import webserver\n'), ((28916, 28940), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (28938, 28940), False, 'from osgeo import gdal\n'), ((28956, 28985), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (28983, 28985), False, 'import webserver\n'), ((30578, 30607), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (30605, 30607), False, 'import webserver\n'), ((35366, 35384), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (35381, 35384), False, 'from osgeo import gdal\n'), ((35881, 35936), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', (['"""/vsis3/s3_fake_bucket2/a_dir with_space"""'], {}), "('/vsis3/s3_fake_bucket2/a_dir with_space')\n", (35893, 35936), False, 'from osgeo import gdal\n'), ((36117, 36202), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', (['"""/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin"""'], {}), "('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin'\n )\n", (36129, 36202), False, 'from osgeo import gdal\n'), ((36283, 36347), 'osgeo.gdal.VSICurlPartialClearCache', 'gdal.VSICurlPartialClearCache', (['"""/vsis3/s3_fake_bucket_unrelated"""'], {}), "('/vsis3/s3_fake_bucket_unrelated')\n", (36312, 36347), False, 'from osgeo import gdal\n'), ((36477, 36532), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', (['"""/vsis3/s3_fake_bucket2/a_dir with_space"""'], {}), "('/vsis3/s3_fake_bucket2/a_dir with_space')\n", (36489, 36532), False, 'from osgeo import gdal\n'), ((36659, 36731), 'osgeo.gdal.VSICurlPartialClearCache', 'gdal.VSICurlPartialClearCache', (['"""/vsis3/s3_fake_bucket2/a_dir with_space"""'], {}), "('/vsis3/s3_fake_bucket2/a_dir with_space')\n", (36688, 36731), False, 'from osgeo import gdal\n'), ((36747, 36776), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (36774, 36776), False, 'import webserver\n'), ((37160, 37189), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (37187, 37189), False, 'import webserver\n'), ((37854, 37878), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (37876, 37878), False, 'from osgeo import gdal\n'), ((37893, 37922), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (37920, 37922), False, 'import webserver\n'), ((43286, 43315), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (43313, 43315), False, 'import webserver\n'), ((43696, 43720), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (43718, 43720), False, 'from osgeo import gdal\n'), ((43755, 43784), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (43782, 43784), False, 'import webserver\n'), ((44293, 44322), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (44320, 44322), False, 'import webserver\n'), ((47039, 47068), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (47066, 47068), False, 'import webserver\n'), ((48030, 48059), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (48057, 48059), False, 'import webserver\n'), ((49086, 49109), 'osgeo.gdal.GetNextDirEntry', 'gdal.GetNextDirEntry', (['d'], {}), '(d)\n', (49106, 49109), False, 'from osgeo import gdal\n'), ((49246, 49269), 'osgeo.gdal.GetNextDirEntry', 
'gdal.GetNextDirEntry', (['d'], {}), '(d)\n', (49266, 49269), False, 'from osgeo import gdal\n'), ((49348, 49371), 'osgeo.gdal.GetNextDirEntry', 'gdal.GetNextDirEntry', (['d'], {}), '(d)\n', (49368, 49371), False, 'from osgeo import gdal\n'), ((49428, 49451), 'osgeo.gdal.GetNextDirEntry', 'gdal.GetNextDirEntry', (['d'], {}), '(d)\n', (49448, 49451), False, 'from osgeo import gdal\n'), ((49482, 49498), 'osgeo.gdal.CloseDir', 'gdal.CloseDir', (['d'], {}), '(d)\n', (49495, 49498), False, 'from osgeo import gdal\n'), ((49530, 49559), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (49557, 49559), False, 'import webserver\n'), ((50301, 50324), 'osgeo.gdal.GetNextDirEntry', 'gdal.GetNextDirEntry', (['d'], {}), '(d)\n', (50321, 50324), False, 'from osgeo import gdal\n'), ((50461, 50484), 'osgeo.gdal.GetNextDirEntry', 'gdal.GetNextDirEntry', (['d'], {}), '(d)\n', (50481, 50484), False, 'from osgeo import gdal\n'), ((50563, 50586), 'osgeo.gdal.GetNextDirEntry', 'gdal.GetNextDirEntry', (['d'], {}), '(d)\n', (50583, 50586), False, 'from osgeo import gdal\n'), ((50617, 50633), 'osgeo.gdal.CloseDir', 'gdal.CloseDir', (['d'], {}), '(d)\n', (50630, 50633), False, 'from osgeo import gdal\n'), ((50665, 50694), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (50692, 50694), False, 'import webserver\n'), ((51440, 51463), 'osgeo.gdal.GetNextDirEntry', 'gdal.GetNextDirEntry', (['d'], {}), '(d)\n', (51460, 51463), False, 'from osgeo import gdal\n'), ((51600, 51623), 'osgeo.gdal.GetNextDirEntry', 'gdal.GetNextDirEntry', (['d'], {}), '(d)\n', (51620, 51623), False, 'from osgeo import gdal\n'), ((51704, 51733), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (51731, 51733), False, 'import webserver\n'), ((52411, 52434), 'osgeo.gdal.GetNextDirEntry', 'gdal.GetNextDirEntry', (['d'], {}), '(d)\n', (52431, 52434), False, 'from osgeo import gdal\n'), ((52465, 52481), 'osgeo.gdal.CloseDir', 'gdal.CloseDir', (['d'], {}), '(d)\n', (52478, 52481), False, 'from osgeo import gdal\n'), ((52905, 52934), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (52932, 52934), False, 'import webserver\n'), ((53192, 53221), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (53219, 53221), False, 'import webserver\n'), ((53907, 53936), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (53934, 53936), False, 'import webserver\n'), ((54193, 54222), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (54220, 54222), False, 'import webserver\n'), ((54542, 54571), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (54569, 54571), False, 'import webserver\n'), ((54888, 54917), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (54915, 54917), False, 'import webserver\n'), ((55307, 55331), 'osgeo.gdal.NetworkStatsReset', 'gdal.NetworkStatsReset', ([], {}), '()\n', (55329, 55331), False, 'from osgeo import gdal\n'), ((58360, 58384), 'osgeo.gdal.NetworkStatsReset', 'gdal.NetworkStatsReset', ([], {}), '()\n', (58382, 58384), False, 'from osgeo import gdal\n'), ((58645, 58674), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (58672, 58674), False, 'import webserver\n'), ((60614, 60631), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (60629, 60631), False, 'from osgeo import gdal\n'), ((62992, 63021), 'webserver.SequentialHandler', 
'webserver.SequentialHandler', ([], {}), '()\n', (63019, 63021), False, 'import webserver\n'), ((63258, 63287), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (63285, 63287), False, 'import webserver\n'), ((63431, 63460), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (63458, 63460), False, 'import webserver\n'), ((63675, 63704), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (63702, 63704), False, 'import webserver\n'), ((64056, 64085), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (64083, 64085), False, 'import webserver\n'), ((64422, 64451), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (64449, 64451), False, 'import webserver\n'), ((67239, 67268), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (67266, 67268), False, 'import webserver\n'), ((67808, 67837), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (67835, 67837), False, 'import webserver\n'), ((68397, 68426), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (68424, 68426), False, 'import webserver\n'), ((72217, 72246), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (72244, 72246), False, 'import webserver\n'), ((74507, 74536), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (74534, 74536), False, 'import webserver\n'), ((76071, 76088), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (76086, 76088), False, 'from osgeo import gdal\n'), ((76221, 76250), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (76248, 76250), False, 'import webserver\n'), ((77474, 77503), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (77501, 77503), False, 'import webserver\n'), ((79139, 79168), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (79166, 79168), False, 'import webserver\n'), ((80278, 80307), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (80305, 80307), False, 'import webserver\n'), ((84238, 84267), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (84265, 84267), False, 'import webserver\n'), ((84761, 84808), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', (['"""/vsis3/s3_bucket_test_mkdir/dir"""'], {}), "('/vsis3/s3_bucket_test_mkdir/dir')\n", (84773, 84808), False, 'from osgeo import gdal\n'), ((84902, 84931), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (84929, 84931), False, 'import webserver\n'), ((85163, 85192), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (85190, 85192), False, 'import webserver\n'), ((85444, 85473), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (85471, 85473), False, 'import webserver\n'), ((85836, 85865), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (85863, 85865), False, 'import webserver\n'), ((86768, 86797), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (86795, 86797), False, 'import webserver\n'), ((87718, 87747), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (87745, 87747), False, 'import webserver\n'), ((88565, 88594), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (88592, 88594), False, 'import 
webserver\n'), ((89519, 89548), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (89546, 89548), False, 'import webserver\n'), ((90540, 90569), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (90567, 90569), False, 'import webserver\n'), ((91411, 91440), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (91438, 91440), False, 'import webserver\n'), ((91581, 91610), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (91608, 91610), False, 'import webserver\n'), ((91955, 91979), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (91977, 91979), False, 'from osgeo import gdal\n'), ((92661, 92690), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (92688, 92690), False, 'import webserver\n'), ((93825, 93878), 'osgeo.gdal.FileFromMemBuffer', 'gdal.FileFromMemBuffer', (['"""/vsimem/testsync.txt"""', '"""foo"""'], {}), "('/vsimem/testsync.txt', 'foo')\n", (93847, 93878), False, 'from osgeo import gdal\n'), ((94285, 94314), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (94312, 94314), False, 'import webserver\n'), ((94543, 94567), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (94565, 94567), False, 'from osgeo import gdal\n'), ((94620, 94649), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (94647, 94649), False, 'import webserver\n'), ((95350, 95403), 'osgeo.gdal.FileFromMemBuffer', 'gdal.FileFromMemBuffer', (['"""/vsimem/testsync.txt"""', '"""bar"""'], {}), "('/vsimem/testsync.txt', 'bar')\n", (95372, 95403), False, 'from osgeo import gdal\n'), ((95419, 95448), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (95446, 95448), False, 'import webserver\n'), ((95755, 95799), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsimem/testsync.txt"""', '"""rb"""'], {}), "('/vsimem/testsync.txt', 'rb')\n", (95769, 95799), False, 'from osgeo import gdal\n'), ((95855, 95873), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (95870, 95873), False, 'from osgeo import gdal\n'), ((95954, 95978), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (95976, 95978), False, 'from osgeo import gdal\n'), ((95994, 96023), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (96021, 96023), False, 'import webserver\n'), ((96411, 96446), 'osgeo.gdal.Unlink', 'gdal.Unlink', (['"""/vsimem/testsync.txt"""'], {}), "('/vsimem/testsync.txt')\n", (96422, 96446), False, 'from osgeo import gdal\n'), ((96476, 96500), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (96498, 96500), False, 'from osgeo import gdal\n'), ((96506, 96537), 'osgeo.gdal.Mkdir', 'gdal.Mkdir', (['"""/vsimem/subdir"""', '(0)'], {}), "('/vsimem/subdir', 0)\n", (96516, 96537), False, 'from osgeo import gdal\n'), ((96542, 96602), 'osgeo.gdal.FileFromMemBuffer', 'gdal.FileFromMemBuffer', (['"""/vsimem/subdir/testsync.txt"""', '"""foo"""'], {}), "('/vsimem/subdir/testsync.txt', 'foo')\n", (96564, 96602), False, 'from osgeo import gdal\n'), ((96617, 96646), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (96644, 96646), False, 'import webserver\n'), ((97424, 97461), 'osgeo.gdal.RmdirRecursive', 'gdal.RmdirRecursive', (['"""/vsimem/subdir"""'], {}), "('/vsimem/subdir')\n", (97443, 97461), False, 'from osgeo import gdal\n'), ((97732, 97785), 
'osgeo.gdal.FileFromMemBuffer', 'gdal.FileFromMemBuffer', (['"""/vsimem/testsync.txt"""', '"""foo"""'], {}), "('/vsimem/testsync.txt', 'foo')\n", (97754, 97785), False, 'from osgeo import gdal\n'), ((97839, 97863), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (97861, 97863), False, 'from osgeo import gdal\n'), ((97878, 97907), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (97905, 97907), False, 'import webserver\n'), ((98506, 98530), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (98528, 98530), False, 'from osgeo import gdal\n'), ((98545, 98574), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (98572, 98574), False, 'import webserver\n'), ((99001, 99025), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (99023, 99025), False, 'from osgeo import gdal\n'), ((99040, 99069), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (99067, 99069), False, 'import webserver\n'), ((99561, 99585), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (99583, 99585), False, 'from osgeo import gdal\n'), ((99600, 99629), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (99627, 99629), False, 'import webserver\n'), ((100022, 100057), 'osgeo.gdal.Unlink', 'gdal.Unlink', (['"""/vsimem/testsync.txt"""'], {}), "('/vsimem/testsync.txt')\n", (100033, 100057), False, 'from osgeo import gdal\n'), ((100328, 100381), 'osgeo.gdal.FileFromMemBuffer', 'gdal.FileFromMemBuffer', (['"""/vsimem/testsync.txt"""', '"""foo"""'], {}), "('/vsimem/testsync.txt', 'foo')\n", (100350, 100381), False, 'from osgeo import gdal\n'), ((100423, 100447), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (100445, 100447), False, 'from osgeo import gdal\n'), ((100462, 100491), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (100489, 100491), False, 'import webserver\n'), ((101076, 101100), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (101098, 101100), False, 'from osgeo import gdal\n'), ((101115, 101144), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (101142, 101144), False, 'import webserver\n'), ((101586, 101621), 'osgeo.gdal.Unlink', 'gdal.Unlink', (['"""/vsimem/testsync.txt"""'], {}), "('/vsimem/testsync.txt')\n", (101597, 101621), False, 'from osgeo import gdal\n'), ((101866, 101890), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (101888, 101890), False, 'from osgeo import gdal\n'), ((101905, 101934), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (101932, 101934), False, 'import webserver\n'), ((103294, 103318), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (103316, 103318), False, 'from osgeo import gdal\n'), ((103333, 103362), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (103360, 103362), False, 'import webserver\n'), ((104655, 104679), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (104677, 104679), False, 'from osgeo import gdal\n'), ((104694, 104723), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (104721, 104723), False, 'import webserver\n'), ((107303, 107327), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (107325, 107327), False, 'from osgeo import gdal\n'), 
((107342, 107371), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (107369, 107371), False, 'import webserver\n'), ((107972, 107996), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (107994, 107996), False, 'from osgeo import gdal\n'), ((108099, 108128), 'osgeo.gdal.Mkdir', 'gdal.Mkdir', (['"""/vsimem/test"""', '(0)'], {}), "('/vsimem/test', 0)\n", (108109, 108128), False, 'from osgeo import gdal\n'), ((108133, 108184), 'osgeo.gdal.FileFromMemBuffer', 'gdal.FileFromMemBuffer', (['"""/vsimem/test/foo"""', '"""foo\n"""'], {}), "('/vsimem/test/foo', 'foo\\n')\n", (108155, 108184), False, 'from osgeo import gdal\n'), ((108217, 108246), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (108244, 108246), False, 'import webserver\n'), ((111691, 111726), 'osgeo.gdal.RmdirRecursive', 'gdal.RmdirRecursive', (['"""/vsimem/test"""'], {}), "('/vsimem/test')\n", (111710, 111726), False, 'from osgeo import gdal\n'), ((111862, 111886), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (111884, 111886), False, 'from osgeo import gdal\n'), ((111892, 111921), 'osgeo.gdal.Mkdir', 'gdal.Mkdir', (['"""/vsimem/test"""', '(0)'], {}), "('/vsimem/test', 0)\n", (111902, 111921), False, 'from osgeo import gdal\n'), ((111926, 111977), 'osgeo.gdal.FileFromMemBuffer', 'gdal.FileFromMemBuffer', (['"""/vsimem/test/foo"""', '"""foo\n"""'], {}), "('/vsimem/test/foo', 'foo\\n')\n", (111948, 111977), False, 'from osgeo import gdal\n'), ((111993, 112022), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (112020, 112022), False, 'import webserver\n'), ((113976, 114011), 'osgeo.gdal.RmdirRecursive', 'gdal.RmdirRecursive', (['"""/vsimem/test"""'], {}), "('/vsimem/test')\n", (113995, 114011), False, 'from osgeo import gdal\n'), ((114219, 114243), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (114241, 114243), False, 'from osgeo import gdal\n'), ((114285, 114314), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (114312, 114314), False, 'import webserver\n'), ((114596, 114625), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (114623, 114625), False, 'import webserver\n'), ((115007, 115036), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (115034, 115036), False, 'import webserver\n'), ((115664, 115693), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (115691, 115693), False, 'import webserver\n'), ((116731, 116760), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (116758, 116760), False, 'import webserver\n'), ((117507, 117531), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (117529, 117531), False, 'from osgeo import gdal\n'), ((117547, 117576), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (117574, 117576), False, 'import webserver\n'), ((118364, 118388), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (118386, 118388), False, 'from osgeo import gdal\n'), ((118895, 118924), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (118922, 118924), False, 'import webserver\n'), ((119299, 119323), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (119321, 119323), False, 'from osgeo import gdal\n'), ((119502, 119531), 'webserver.SequentialHandler', 
'webserver.SequentialHandler', ([], {}), '()\n', (119529, 119531), False, 'import webserver\n'), ((119928, 119952), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (119950, 119952), False, 'from osgeo import gdal\n'), ((120251, 120280), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (120278, 120280), False, 'import webserver\n'), ((120697, 120721), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (120719, 120721), False, 'from osgeo import gdal\n'), ((120737, 120766), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (120764, 120766), False, 'import webserver\n'), ((120999, 121025), 'osgeo.gdal.Open', 'gdal.Open', (['"""data/byte.tif"""'], {}), "('data/byte.tif')\n", (121008, 121025), False, 'from osgeo import gdal\n'), ((121303, 121332), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (121330, 121332), False, 'import webserver\n'), ((121700, 121749), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_SECRET_ACCESS_KEY"""', '""""""'], {}), "('AWS_SECRET_ACCESS_KEY', '')\n", (121720, 121749), False, 'from osgeo import gdal\n'), ((121754, 121799), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_ACCESS_KEY_ID"""', '""""""'], {}), "('AWS_ACCESS_KEY_ID', '')\n", (121774, 121799), False, 'from osgeo import gdal\n'), ((121805, 121880), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_CREDENTIALS_FILE"""', '"""/vsimem/aws_credentials"""'], {}), "('CPL_AWS_CREDENTIALS_FILE', '/vsimem/aws_credentials')\n", (121825, 121880), False, 'from osgeo import gdal\n'), ((121886, 121910), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (121908, 121910), False, 'from osgeo import gdal\n'), ((121916, 122205), 'osgeo.gdal.FileFromMemBuffer', 'gdal.FileFromMemBuffer', (['"""/vsimem/aws_credentials"""', '"""\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n[default]\naws_access_key_id = AWS_ACCESS_KEY_ID\naws_secret_access_key = AWS_SECRET_ACCESS_KEY\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n"""'], {}), '(\'/vsimem/aws_credentials\',\n """\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n[default]\naws_access_key_id = AWS_ACCESS_KEY_ID\naws_secret_access_key = AWS_SECRET_ACCESS_KEY\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n"""\n )\n', (121938, 122205), False, 'from osgeo import gdal\n'), ((122212, 122241), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (122239, 122241), False, 'import webserver\n'), ((122595, 122647), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_CREDENTIALS_FILE"""', '""""""'], {}), "('CPL_AWS_CREDENTIALS_FILE', '')\n", (122615, 122647), False, 'from osgeo import gdal\n'), ((122652, 122690), 'osgeo.gdal.Unlink', 'gdal.Unlink', (['"""/vsimem/aws_credentials"""'], {}), "('/vsimem/aws_credentials')\n", (122663, 122690), False, 'from osgeo import gdal\n'), ((122923, 122972), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_SECRET_ACCESS_KEY"""', '""""""'], {}), "('AWS_SECRET_ACCESS_KEY', '')\n", (122943, 122972), False, 'from osgeo import gdal\n'), ((122977, 123022), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_ACCESS_KEY_ID"""', '""""""'], {}), "('AWS_ACCESS_KEY_ID', '')\n", (122997, 123022), False, 'from osgeo import gdal\n'), ((123028, 123089), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', 
(['"""AWS_CONFIG_FILE"""', '"""/vsimem/aws_config"""'], {}), "('AWS_CONFIG_FILE', '/vsimem/aws_config')\n", (123048, 123089), False, 'from osgeo import gdal\n'), ((123095, 123119), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (123117, 123119), False, 'from osgeo import gdal\n'), ((123125, 123428), 'osgeo.gdal.FileFromMemBuffer', 'gdal.FileFromMemBuffer', (['"""/vsimem/aws_config"""', '"""\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n[default]\naws_access_key_id = AWS_ACCESS_KEY_ID\naws_secret_access_key = AWS_SECRET_ACCESS_KEY\nregion = us-east-1\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n"""'], {}), '(\'/vsimem/aws_config\',\n """\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n[default]\naws_access_key_id = AWS_ACCESS_KEY_ID\naws_secret_access_key = AWS_SECRET_ACCESS_KEY\nregion = us-east-1\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n"""\n )\n', (123147, 123428), False, 'from osgeo import gdal\n'), ((123435, 123464), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (123462, 123464), False, 'import webserver\n'), ((123818, 123861), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_CONFIG_FILE"""', '""""""'], {}), "('AWS_CONFIG_FILE', '')\n", (123838, 123861), False, 'from osgeo import gdal\n'), ((123866, 123899), 'osgeo.gdal.Unlink', 'gdal.Unlink', (['"""/vsimem/aws_config"""'], {}), "('/vsimem/aws_config')\n", (123877, 123899), False, 'from osgeo import gdal\n'), ((124166, 124215), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_SECRET_ACCESS_KEY"""', '""""""'], {}), "('AWS_SECRET_ACCESS_KEY', '')\n", (124186, 124215), False, 'from osgeo import gdal\n'), ((124220, 124265), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_ACCESS_KEY_ID"""', '""""""'], {}), "('AWS_ACCESS_KEY_ID', '')\n", (124240, 124265), False, 'from osgeo import gdal\n'), ((124271, 124346), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_CREDENTIALS_FILE"""', '"""/vsimem/aws_credentials"""'], {}), "('CPL_AWS_CREDENTIALS_FILE', '/vsimem/aws_credentials')\n", (124291, 124346), False, 'from osgeo import gdal\n'), ((124351, 124412), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_CONFIG_FILE"""', '"""/vsimem/aws_config"""'], {}), "('AWS_CONFIG_FILE', '/vsimem/aws_config')\n", (124371, 124412), False, 'from osgeo import gdal\n'), ((124418, 124442), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (124440, 124442), False, 'from osgeo import gdal\n'), ((124448, 124737), 'osgeo.gdal.FileFromMemBuffer', 'gdal.FileFromMemBuffer', (['"""/vsimem/aws_credentials"""', '"""\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n[default]\naws_access_key_id = AWS_ACCESS_KEY_ID\naws_secret_access_key = AWS_SECRET_ACCESS_KEY\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n"""'], {}), '(\'/vsimem/aws_credentials\',\n """\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n[default]\naws_access_key_id = AWS_ACCESS_KEY_ID\naws_secret_access_key = AWS_SECRET_ACCESS_KEY\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n"""\n )\n', (124470, 124737), False, 'from osgeo import gdal\n'), ((124734, 125037), 'osgeo.gdal.FileFromMemBuffer', 'gdal.FileFromMemBuffer', (['"""/vsimem/aws_config"""', '"""\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n[default]\naws_access_key_id = 
AWS_ACCESS_KEY_ID\naws_secret_access_key = AWS_SECRET_ACCESS_KEY\nregion = us-east-1\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n"""'], {}), '(\'/vsimem/aws_config\',\n """\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n[default]\naws_access_key_id = AWS_ACCESS_KEY_ID\naws_secret_access_key = AWS_SECRET_ACCESS_KEY\nregion = us-east-1\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n"""\n )\n', (124756, 125037), False, 'from osgeo import gdal\n'), ((125044, 125073), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (125071, 125073), False, 'import webserver\n'), ((125427, 125479), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_CREDENTIALS_FILE"""', '""""""'], {}), "('CPL_AWS_CREDENTIALS_FILE', '')\n", (125447, 125479), False, 'from osgeo import gdal\n'), ((125484, 125522), 'osgeo.gdal.Unlink', 'gdal.Unlink', (['"""/vsimem/aws_credentials"""'], {}), "('/vsimem/aws_credentials')\n", (125495, 125522), False, 'from osgeo import gdal\n'), ((125527, 125570), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_CONFIG_FILE"""', '""""""'], {}), "('AWS_CONFIG_FILE', '')\n", (125547, 125570), False, 'from osgeo import gdal\n'), ((125575, 125608), 'osgeo.gdal.Unlink', 'gdal.Unlink', (['"""/vsimem/aws_config"""'], {}), "('/vsimem/aws_config')\n", (125586, 125608), False, 'from osgeo import gdal\n'), ((125930, 125979), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_SECRET_ACCESS_KEY"""', '""""""'], {}), "('AWS_SECRET_ACCESS_KEY', '')\n", (125950, 125979), False, 'from osgeo import gdal\n'), ((125984, 126029), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_ACCESS_KEY_ID"""', '""""""'], {}), "('AWS_ACCESS_KEY_ID', '')\n", (126004, 126029), False, 'from osgeo import gdal\n'), ((126034, 126088), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_CREDENTIALS_FILE"""', 'None'], {}), "('CPL_AWS_CREDENTIALS_FILE', None)\n", (126054, 126088), False, 'from osgeo import gdal\n'), ((126093, 126138), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_CONFIG_FILE"""', 'None'], {}), "('AWS_CONFIG_FILE', None)\n", (126113, 126138), False, 'from osgeo import gdal\n'), ((126143, 126191), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_PROFILE"""', '"""myprofile"""'], {}), "('AWS_PROFILE', 'myprofile')\n", (126163, 126191), False, 'from osgeo import gdal\n'), ((126232, 126256), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (126254, 126256), False, 'from osgeo import gdal\n'), ((126748, 126777), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (126775, 126777), False, 'import webserver\n'), ((127261, 127300), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_PROFILE"""', '""""""'], {}), "('AWS_PROFILE', '')\n", (127281, 127300), False, 'from osgeo import gdal\n'), ((127580, 127629), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_SECRET_ACCESS_KEY"""', '""""""'], {}), "('AWS_SECRET_ACCESS_KEY', '')\n", (127600, 127629), False, 'from osgeo import gdal\n'), ((127634, 127679), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_ACCESS_KEY_ID"""', '""""""'], {}), "('AWS_ACCESS_KEY_ID', '')\n", (127654, 127679), False, 'from osgeo import gdal\n'), ((127685, 127760), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_CREDENTIALS_FILE"""', '"""/vsimem/aws_credentials"""'], {}), "('CPL_AWS_CREDENTIALS_FILE', 
'/vsimem/aws_credentials')\n", (127705, 127760), False, 'from osgeo import gdal\n'), ((127765, 127826), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_CONFIG_FILE"""', '"""/vsimem/aws_config"""'], {}), "('AWS_CONFIG_FILE', '/vsimem/aws_config')\n", (127785, 127826), False, 'from osgeo import gdal\n'), ((127832, 127856), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (127854, 127856), False, 'from osgeo import gdal\n'), ((127862, 128151), 'osgeo.gdal.FileFromMemBuffer', 'gdal.FileFromMemBuffer', (['"""/vsimem/aws_credentials"""', '"""\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n[default]\naws_access_key_id = AWS_ACCESS_KEY_ID\naws_secret_access_key = AWS_SECRET_ACCESS_KEY\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n"""'], {}), '(\'/vsimem/aws_credentials\',\n """\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n[default]\naws_access_key_id = AWS_ACCESS_KEY_ID\naws_secret_access_key = AWS_SECRET_ACCESS_KEY\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n"""\n )\n', (127884, 128151), False, 'from osgeo import gdal\n'), ((128148, 128477), 'osgeo.gdal.FileFromMemBuffer', 'gdal.FileFromMemBuffer', (['"""/vsimem/aws_config"""', '"""\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n[default]\naws_access_key_id = AWS_ACCESS_KEY_ID_inconsistent\naws_secret_access_key = AWS_SECRET_ACCESS_KEY_inconsistent\nregion = us-east-1\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n"""'], {}), '(\'/vsimem/aws_config\',\n """\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n[default]\naws_access_key_id = AWS_ACCESS_KEY_ID_inconsistent\naws_secret_access_key = AWS_SECRET_ACCESS_KEY_inconsistent\nregion = us-east-1\n[unrelated]\naws_access_key_id = foo\naws_secret_access_key = bar\n"""\n )\n', (128170, 128477), False, 'from osgeo import gdal\n'), ((128474, 128491), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (128489, 128491), False, 'from osgeo import gdal\n'), ((128506, 128535), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (128533, 128535), False, 'import webserver\n'), ((128976, 129028), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_CREDENTIALS_FILE"""', '""""""'], {}), "('CPL_AWS_CREDENTIALS_FILE', '')\n", (128996, 129028), False, 'from osgeo import gdal\n'), ((129033, 129071), 'osgeo.gdal.Unlink', 'gdal.Unlink', (['"""/vsimem/aws_credentials"""'], {}), "('/vsimem/aws_credentials')\n", (129044, 129071), False, 'from osgeo import gdal\n'), ((129076, 129119), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_CONFIG_FILE"""', '""""""'], {}), "('AWS_CONFIG_FILE', '')\n", (129096, 129119), False, 'from osgeo import gdal\n'), ((129124, 129157), 'osgeo.gdal.Unlink', 'gdal.Unlink', (['"""/vsimem/aws_config"""'], {}), "('/vsimem/aws_config')\n", (129135, 129157), False, 'from osgeo import gdal\n'), ((129479, 129531), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_CREDENTIALS_FILE"""', '""""""'], {}), "('CPL_AWS_CREDENTIALS_FILE', '')\n", (129499, 129531), False, 'from osgeo import gdal\n'), ((129536, 129579), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_CONFIG_FILE"""', '""""""'], {}), "('AWS_CONFIG_FILE', '')\n", (129556, 129579), False, 'from osgeo import gdal\n'), ((129584, 129633), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_SECRET_ACCESS_KEY"""', '""""""'], {}), 
"('AWS_SECRET_ACCESS_KEY', '')\n", (129604, 129633), False, 'from osgeo import gdal\n'), ((129638, 129683), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_ACCESS_KEY_ID"""', '""""""'], {}), "('AWS_ACCESS_KEY_ID', '')\n", (129658, 129683), False, 'from osgeo import gdal\n'), ((129689, 129790), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_EC2_API_ROOT_URL"""', "('http://localhost:%d' % gdaltest.webserver_port)"], {}), "('CPL_AWS_EC2_API_ROOT_URL', 'http://localhost:%d' %\n gdaltest.webserver_port)\n", (129709, 129790), False, 'from osgeo import gdal\n'), ((129887, 129939), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_AUTODETECT_EC2"""', '"""NO"""'], {}), "('CPL_AWS_AUTODETECT_EC2', 'NO')\n", (129907, 129939), False, 'from osgeo import gdal\n'), ((129945, 129969), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (129967, 129969), False, 'from osgeo import gdal\n'), ((129985, 130014), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (130012, 130014), False, 'import webserver\n'), ((131110, 131162), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_EC2_API_ROOT_URL"""', '""""""'], {}), "('CPL_AWS_EC2_API_ROOT_URL', '')\n", (131130, 131162), False, 'from osgeo import gdal\n'), ((131178, 131207), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (131205, 131207), False, 'import webserver\n'), ((131517, 131569), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_EC2_API_ROOT_URL"""', '""""""'], {}), "('CPL_AWS_EC2_API_ROOT_URL', '')\n", (131537, 131569), False, 'from osgeo import gdal\n'), ((131574, 131626), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_AUTODETECT_EC2"""', 'None'], {}), "('CPL_AWS_AUTODETECT_EC2', None)\n", (131594, 131626), False, 'from osgeo import gdal\n'), ((131974, 132026), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_CREDENTIALS_FILE"""', '""""""'], {}), "('CPL_AWS_CREDENTIALS_FILE', '')\n", (131994, 132026), False, 'from osgeo import gdal\n'), ((132031, 132074), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_CONFIG_FILE"""', '""""""'], {}), "('AWS_CONFIG_FILE', '')\n", (132051, 132074), False, 'from osgeo import gdal\n'), ((132079, 132128), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_SECRET_ACCESS_KEY"""', '""""""'], {}), "('AWS_SECRET_ACCESS_KEY', '')\n", (132099, 132128), False, 'from osgeo import gdal\n'), ((132133, 132178), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_ACCESS_KEY_ID"""', '""""""'], {}), "('AWS_ACCESS_KEY_ID', '')\n", (132153, 132178), False, 'from osgeo import gdal\n'), ((132184, 132285), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_EC2_API_ROOT_URL"""', "('http://localhost:%d' % gdaltest.webserver_port)"], {}), "('CPL_AWS_EC2_API_ROOT_URL', 'http://localhost:%d' %\n gdaltest.webserver_port)\n", (132204, 132285), False, 'from osgeo import gdal\n'), ((132382, 132434), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_AUTODETECT_EC2"""', '"""NO"""'], {}), "('CPL_AWS_AUTODETECT_EC2', 'NO')\n", (132402, 132434), False, 'from osgeo import gdal\n'), ((132440, 132464), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (132462, 132464), False, 'from osgeo import gdal\n'), ((132480, 132509), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (132507, 132509), False, 'import 
webserver\n'), ((133516, 133568), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_EC2_API_ROOT_URL"""', '""""""'], {}), "('CPL_AWS_EC2_API_ROOT_URL', '')\n", (133536, 133568), False, 'from osgeo import gdal\n'), ((133573, 133625), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_AUTODETECT_EC2"""', 'None'], {}), "('CPL_AWS_AUTODETECT_EC2', None)\n", (133593, 133625), False, 'from osgeo import gdal\n'), ((133995, 134047), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_CREDENTIALS_FILE"""', '""""""'], {}), "('CPL_AWS_CREDENTIALS_FILE', '')\n", (134015, 134047), False, 'from osgeo import gdal\n'), ((134052, 134095), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_CONFIG_FILE"""', '""""""'], {}), "('AWS_CONFIG_FILE', '')\n", (134072, 134095), False, 'from osgeo import gdal\n'), ((134100, 134149), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_SECRET_ACCESS_KEY"""', '""""""'], {}), "('AWS_SECRET_ACCESS_KEY', '')\n", (134120, 134149), False, 'from osgeo import gdal\n'), ((134154, 134199), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_ACCESS_KEY_ID"""', '""""""'], {}), "('AWS_ACCESS_KEY_ID', '')\n", (134174, 134199), False, 'from osgeo import gdal\n'), ((134205, 134306), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_EC2_API_ROOT_URL"""', "('http://localhost:%d' % gdaltest.webserver_port)"], {}), "('CPL_AWS_EC2_API_ROOT_URL', 'http://localhost:%d' %\n gdaltest.webserver_port)\n", (134225, 134306), False, 'from osgeo import gdal\n'), ((134403, 134455), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_AUTODETECT_EC2"""', '"""NO"""'], {}), "('CPL_AWS_AUTODETECT_EC2', 'NO')\n", (134423, 134455), False, 'from osgeo import gdal\n'), ((134461, 134485), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (134483, 134485), False, 'from osgeo import gdal\n'), ((134501, 134530), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (134528, 134530), False, 'import webserver\n'), ((136149, 136259), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_EC2_API_ROOT_URL"""', "('http://localhost:%d/invalid' % gdaltest.webserver_port)"], {}), "('CPL_AWS_EC2_API_ROOT_URL', \n 'http://localhost:%d/invalid' % gdaltest.webserver_port)\n", (136169, 136259), False, 'from osgeo import gdal\n'), ((136295, 136324), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (136322, 136324), False, 'import webserver\n'), ((136652, 136704), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_EC2_API_ROOT_URL"""', '""""""'], {}), "('CPL_AWS_EC2_API_ROOT_URL', '')\n", (136672, 136704), False, 'from osgeo import gdal\n'), ((136709, 136761), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_AUTODETECT_EC2"""', 'None'], {}), "('CPL_AWS_AUTODETECT_EC2', None)\n", (136729, 136761), False, 'from osgeo import gdal\n'), ((137064, 137088), 'osgeo.gdal.VSICurlClearCache', 'gdal.VSICurlClearCache', ([], {}), '()\n', (137086, 137088), False, 'from osgeo import gdal\n'), ((137094, 137168), 'webserver.server_stop', 'webserver.server_stop', (['gdaltest.webserver_process', 'gdaltest.webserver_port'], {}), '(gdaltest.webserver_process, gdaltest.webserver_port)\n', (137115, 137168), False, 'import webserver\n'), ((137620, 137655), 'osgeo.gdal.GetConfigOption', 'gdal.GetConfigOption', (['"""S3_RESOURCE"""'], {}), "('S3_RESOURCE')\n", (137640, 137655), False, 'from osgeo import 
gdal\n'), ((140740, 140763), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(1)', 'f'], {}), '(1, 1, f)\n', (140754, 140763), False, 'from osgeo import gdal\n'), ((140768, 140786), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (140783, 140786), False, 'from osgeo import gdal\n'), ((140940, 140963), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(1)', 'f'], {}), '(1, 1, f)\n', (140954, 140963), False, 'from osgeo import gdal\n'), ((140968, 140986), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (140983, 140986), False, 'from osgeo import gdal\n'), ((141466, 141483), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (141481, 141483), False, 'from osgeo import gdal\n'), ((141613, 141638), 'osgeo.gdal.VSIGetLastErrorMsg', 'gdal.VSIGetLastErrorMsg', ([], {}), '()\n', (141636, 141638), False, 'from osgeo import gdal\n'), ((141683, 141725), 'osgeo.gdal.GetSignedURL', 'gdal.GetSignedURL', (["('/vsis3/' + s3_resource)"], {}), "('/vsis3/' + s3_resource)\n", (141700, 141725), False, 'from osgeo import gdal\n'), ((141819, 141842), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(1)', 'f'], {}), '(1, 1, f)\n', (141833, 141842), False, 'from osgeo import gdal\n'), ((141847, 141865), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (141862, 141865), False, 'from osgeo import gdal\n'), ((142099, 142153), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_CREDENTIALS_FILE"""', 'None'], {}), "('CPL_AWS_CREDENTIALS_FILE', None)\n", (142119, 142153), False, 'from osgeo import gdal\n'), ((142158, 142203), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""AWS_CONFIG_FILE"""', 'None'], {}), "('AWS_CONFIG_FILE', None)\n", (142178, 142203), False, 'from osgeo import gdal\n'), ((142208, 142262), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['"""CPL_AWS_EC2_API_ROOT_URL"""', 'None'], {}), "('CPL_AWS_EC2_API_ROOT_URL', None)\n", (142228, 142262), False, 'from osgeo import gdal\n'), ((2174, 2199), 'osgeo.gdal.GetConfigOption', 'gdal.GetConfigOption', (['var'], {}), '(var)\n', (2194, 2199), False, 'from osgeo import gdal\n'), ((2567, 2602), 'osgeo.gdal.GetSignedURL', 'gdal.GetSignedURL', (['"""/vsis3/foo/bar"""'], {}), "('/vsis3/foo/bar')\n", (2584, 2602), False, 'from osgeo import gdal\n'), ((2771, 2800), 'gdaltest.built_against_curl', 'gdaltest.built_against_curl', ([], {}), '()\n', (2798, 2800), False, 'import gdaltest\n'), ((2810, 2823), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (2821, 2823), False, 'import pytest\n'), ((2834, 2886), 'gdaltest.config_option', 'gdaltest.config_option', (['"""AWS_NO_SIGN_REQUEST"""', '"""YES"""'], {}), "('AWS_NO_SIGN_REQUEST', 'YES')\n", (2856, 2886), False, 'import gdaltest\n'), ((2909, 3020), 'osgeo.gdal.GetActualURL', 'gdal.GetActualURL', (['"""/vsis3/landsat-pds/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF"""'], {}), "(\n '/vsis3/landsat-pds/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF'\n )\n", (2926, 3020), False, 'from osgeo import gdal\n'), ((3163, 3284), 'osgeo.gdal.GetActualURL', 'gdal.GetActualURL', (['"""/vsis3_streaming/landsat-pds/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF"""'], {}), "(\n '/vsis3_streaming/landsat-pds/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF'\n )\n", (3180, 3284), False, 'from osgeo import gdal\n'), ((3729, 3742), 'pytest.fail', 'pytest.fail', ([], {}), '()\n', (3740, 3742), False, 'import pytest\n'), ((3949, 3978), 'gdaltest.built_against_curl', 
'gdaltest.built_against_curl', ([], {}), '()\n', (3976, 3978), False, 'import gdaltest\n'), ((3988, 4001), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (3999, 4001), False, 'import pytest\n'), ((4199, 4251), 'gdaltest.config_option', 'gdaltest.config_option', (['"""AWS_NO_SIGN_REQUEST"""', '"""YES"""'], {}), "('AWS_NO_SIGN_REQUEST', 'YES')\n", (4221, 4251), False, 'import gdaltest\n'), ((4268, 4422), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsis3/cdn.proj.org/test_dummy"""', '"""/vsimem/test_vsis3_no_sign_request_sync"""'], {'options': "['NUM_THREADS=2']", 'callback': 'cbk', 'callback_data': 'tab'}), "('/vsis3/cdn.proj.org/test_dummy',\n '/vsimem/test_vsis3_no_sign_request_sync', options=['NUM_THREADS=2'],\n callback=cbk, callback_data=tab)\n", (4277, 4422), False, 'from osgeo import gdal\n'), ((4977, 5006), 'gdaltest.built_against_curl', 'gdaltest.built_against_curl', ([], {}), '()\n', (5004, 5006), False, 'import gdaltest\n'), ((5016, 5029), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (5027, 5029), False, 'import pytest\n'), ((5227, 5279), 'gdaltest.config_option', 'gdaltest.config_option', (['"""AWS_NO_SIGN_REQUEST"""', '"""YES"""'], {}), "('AWS_NO_SIGN_REQUEST', 'YES')\n", (5249, 5279), False, 'import gdaltest\n'), ((5296, 5466), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsis3/cdn.proj.org/test_dummy"""', '"""/vsimem/test_vsis3_no_sign_request_sync"""'], {'options': "['NUM_THREADS=2', 'CHUNK_SIZE=3']", 'callback': 'cbk', 'callback_data': 'tab'}), "('/vsis3/cdn.proj.org/test_dummy',\n '/vsimem/test_vsis3_no_sign_request_sync', options=['NUM_THREADS=2',\n 'CHUNK_SIZE=3'], callback=cbk, callback_data=tab)\n", (5305, 5466), False, 'from osgeo import gdal\n'), ((5942, 5971), 'gdaltest.built_against_curl', 'gdaltest.built_against_curl', ([], {}), '()\n', (5969, 5971), False, 'import gdaltest\n'), ((5981, 5994), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (5992, 5994), False, 'import pytest\n'), ((6063, 6087), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (6085, 6087), False, 'import gdaltest\n'), ((6251, 6275), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (6273, 6275), False, 'import gdaltest\n'), ((6557, 6581), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (6579, 6581), False, 'import gdaltest\n'), ((6890, 6914), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (6912, 6914), False, 'import gdaltest\n'), ((7233, 7257), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (7255, 7257), False, 'import gdaltest\n'), ((7574, 7603), 'gdaltest.built_against_curl', 'gdaltest.built_against_curl', ([], {}), '()\n', (7601, 7603), False, 'import gdaltest\n'), ((7613, 7626), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (7624, 7626), False, 'import pytest\n'), ((7791, 7804), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (7802, 7804), False, 'import pytest\n'), ((9658, 9671), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (9669, 9671), False, 'import pytest\n'), ((10634, 10673), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (10664, 10673), False, 'import webserver\n'), ((10827, 10845), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (10842, 10845), False, 'from osgeo import gdal\n'), ((11031, 11070), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (11061, 11070), False, 'import webserver\n'), ((11234, 11252), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', 
(['f'], {}), '(f)\n', (11249, 11252), False, 'from osgeo import gdal\n'), ((12784, 12848), 'gdaltest.config_option', 'gdaltest.config_option', (['"""AWS_SESSION_TOKEN"""', '"""AWS_SESSION_TOKEN"""'], {}), "('AWS_SESSION_TOKEN', 'AWS_SESSION_TOKEN')\n", (12806, 12848), False, 'import gdaltest\n'), ((14279, 14318), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (14309, 14318), False, 'import webserver\n'), ((14488, 14540), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/s3_fake_bucket/resource2.bin"""'], {}), "('/vsis3/s3_fake_bucket/resource2.bin')\n", (14501, 14540), False, 'from osgeo import gdal\n'), ((15068, 15107), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (15098, 15107), False, 'import webserver\n'), ((15128, 15190), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3_streaming/s3_fake_bucket/resource2.bin"""'], {}), "('/vsis3_streaming/s3_fake_bucket/resource2.bin')\n", (15141, 15190), False, 'from osgeo import gdal\n'), ((15360, 15373), 'pytest.fail', 'pytest.fail', ([], {}), '()\n', (15371, 15373), False, 'import pytest\n'), ((18036, 18075), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (18066, 18075), False, 'import webserver\n'), ((18229, 18247), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (18244, 18247), False, 'from osgeo import gdal\n'), ((18283, 18318), 'gdaltest.is_travis_branch', 'gdaltest.is_travis_branch', (['"""trusty"""'], {}), "('trusty')\n", (18308, 18318), False, 'import gdaltest\n'), ((18409, 18426), 'pytest.fail', 'pytest.fail', (['data'], {}), '(data)\n', (18420, 18426), False, 'import pytest\n'), ((18536, 18575), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (18566, 18575), False, 'import webserver\n'), ((18739, 18757), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (18754, 18757), False, 'from osgeo import gdal\n'), ((19739, 19778), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (19769, 19778), False, 'import webserver\n'), ((20388, 20427), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (20418, 20427), False, 'import webserver\n'), ((21045, 21084), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (21075, 21084), False, 'import webserver\n'), ((21782, 21821), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (21812, 21821), False, 'import webserver\n'), ((22529, 22568), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (22559, 22568), False, 'import webserver\n'), ((23234, 23273), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (23264, 23273), False, 'import webserver\n'), ((25212, 25268), 'gdaltest.config_option', 'gdaltest.config_option', (['"""AWS_REQUEST_PAYER"""', '"""requester"""'], {}), "('AWS_REQUEST_PAYER', 'requester')\n", (25234, 25268), False, 'import gdaltest\n'), ((27781, 27820), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (27811, 27820), False, 'import webserver\n'), ((27991, 28009), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (28006, 28009), False, 'from osgeo import gdal\n'), ((28401, 
28440), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (28431, 28440), False, 'import webserver\n'), ((28612, 28630), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (28627, 28630), False, 'from osgeo import gdal\n'), ((28897, 28910), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (28908, 28910), False, 'import pytest\n'), ((29149, 29188), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (29179, 29188), False, 'import webserver\n'), ((29536, 29597), 'gdaltest.config_option', 'gdaltest.config_option', (['"""AWS_ACCESS_KEY_ID"""', '"""another_key_id"""'], {}), "('AWS_ACCESS_KEY_ID', 'another_key_id')\n", (29558, 29597), False, 'import gdaltest\n'), ((29617, 29646), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (29644, 29646), False, 'import webserver\n'), ((30549, 30562), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (30560, 30562), False, 'import pytest\n'), ((35057, 35096), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (35087, 35096), False, 'import webserver\n'), ((35222, 35257), 'gdaltest.is_travis_branch', 'gdaltest.is_travis_branch', (['"""trusty"""'], {}), "('trusty')\n", (35247, 35257), False, 'import gdaltest\n'), ((35348, 35361), 'pytest.fail', 'pytest.fail', ([], {}), '()\n', (35359, 35361), False, 'import pytest\n'), ((35481, 35536), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', (['"""/vsis3/s3_fake_bucket2/a_dir with_space"""'], {}), "('/vsis3/s3_fake_bucket2/a_dir with_space')\n", (35493, 35536), False, 'from osgeo import gdal\n'), ((37014, 37053), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (37044, 37053), False, 'import webserver\n'), ((37063, 37149), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin"""'], {}), "(\n '/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin')\n", (37076, 37149), False, 'from osgeo import gdal\n'), ((37700, 37739), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (37730, 37739), False, 'import webserver\n'), ((37764, 37808), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', (['"""/vsis3/s3_fake_bucket2/a_dir"""'], {}), "('/vsis3/s3_fake_bucket2/a_dir')\n", (37776, 37808), False, 'from osgeo import gdal\n'), ((38803, 38871), 'gdaltest.config_option', 'gdaltest.config_option', (['"""CPL_VSIL_CURL_IGNORE_GLACIER_STORAGE"""', '"""NO"""'], {}), "('CPL_VSIL_CURL_IGNORE_GLACIER_STORAGE', 'NO')\n", (38825, 38871), False, 'import gdaltest\n'), ((43570, 43609), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (43600, 43609), False, 'import webserver\n'), ((43634, 43657), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', (['"""/vsis3/"""'], {}), "('/vsis3/')\n", (43646, 43657), False, 'from osgeo import gdal\n'), ((44120, 44159), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (44150, 44159), False, 'import webserver\n'), ((44184, 44207), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', (['"""/vsis3/"""'], {}), "('/vsis3/')\n", (44196, 44207), False, 'from osgeo import gdal\n'), ((46782, 46821), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (46812, 46821), False, 'import webserver\n'), ((46846, 46904), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', 
(['"""/vsis3/s3_test_temporary_redirect_read_dir"""'], {}), "('/vsis3/s3_test_temporary_redirect_read_dir')\n", (46858, 46904), False, 'from osgeo import gdal\n'), ((47619, 47658), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (47649, 47658), False, 'import webserver\n'), ((47683, 47746), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', (['"""/vsis3/s3_test_temporary_redirect_read_dir/test"""'], {}), "('/vsis3/s3_test_temporary_redirect_read_dir/test')\n", (47695, 47746), False, 'from osgeo import gdal\n'), ((47979, 47992), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (47990, 47992), False, 'import pytest\n'), ((48958, 48997), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (48988, 48997), False, 'import webserver\n'), ((49011, 49047), 'osgeo.gdal.OpenDir', 'gdal.OpenDir', (['"""/vsis3/vsis3_opendir"""'], {}), "('/vsis3/vsis3_opendir')\n", (49023, 49047), False, 'from osgeo import gdal\n'), ((50170, 50209), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (50200, 50209), False, 'import webserver\n'), ((50223, 50262), 'osgeo.gdal.OpenDir', 'gdal.OpenDir', (['"""/vsis3/vsis3_opendir"""', '(0)'], {}), "('/vsis3/vsis3_opendir', 0)\n", (50235, 50262), False, 'from osgeo import gdal\n'), ((51305, 51344), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (51335, 51344), False, 'import webserver\n'), ((51358, 51397), 'osgeo.gdal.OpenDir', 'gdal.OpenDir', (['"""/vsis3/vsis3_opendir"""', '(1)'], {}), "('/vsis3/vsis3_opendir', 1)\n", (51370, 51397), False, 'from osgeo import gdal\n'), ((52270, 52309), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (52300, 52309), False, 'import webserver\n'), ((52327, 52350), 'osgeo.gdal.GetNextDirEntry', 'gdal.GetNextDirEntry', (['d'], {}), '(d)\n', (52347, 52350), False, 'from osgeo import gdal\n'), ((52680, 52693), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (52691, 52693), False, 'import pytest\n'), ((53039, 53078), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (53069, 53078), False, 'import webserver\n'), ((53655, 53694), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (53685, 53694), False, 'import webserver\n'), ((53708, 53769), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsis3/s3_fake_bucket3/empty_file.bin"""', '"""wb"""'], {}), "('/vsis3/s3_fake_bucket3/empty_file.bin', 'wb')\n", (53722, 53769), False, 'from osgeo import gdal\n'), ((53807, 53824), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (53822, 53824), False, 'from osgeo import gdal\n'), ((53833, 53851), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (53848, 53851), False, 'from osgeo import gdal\n'), ((53863, 53885), 'osgeo.gdal.GetLastErrorMsg', 'gdal.GetLastErrorMsg', ([], {}), '()\n', (53883, 53885), False, 'from osgeo import gdal\n'), ((54038, 54077), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (54068, 54077), False, 'import webserver\n'), ((54232, 54271), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (54262, 54271), False, 'import webserver\n'), ((54285, 54346), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsis3/s3_fake_bucket3/empty_file.bin"""', '"""wb"""'], {}), 
"('/vsis3/s3_fake_bucket3/empty_file.bin', 'wb')\n", (54299, 54346), False, 'from osgeo import gdal\n'), ((54489, 54507), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (54504, 54507), False, 'from osgeo import gdal\n'), ((54581, 54620), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (54611, 54620), False, 'import webserver\n'), ((54634, 54695), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsis3/s3_fake_bucket3/empty_file.bin"""', '"""wb"""'], {}), "('/vsis3/s3_fake_bucket3/empty_file.bin', 'wb')\n", (54648, 54695), False, 'from osgeo import gdal\n'), ((54837, 54855), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (54852, 54855), False, 'from osgeo import gdal\n'), ((54996, 55035), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (55026, 55035), False, 'import webserver\n'), ((55049, 55116), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsis3/s3_fake_bucket3/empty_file_error.bin"""', '"""wb"""'], {}), "('/vsis3/s3_fake_bucket3/empty_file_error.bin', 'wb')\n", (55063, 55116), False, 'from osgeo import gdal\n'), ((55154, 55171), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (55169, 55171), False, 'from osgeo import gdal\n'), ((55253, 55275), 'osgeo.gdal.GetLastErrorMsg', 'gdal.GetLastErrorMsg', ([], {}), '()\n', (55273, 55275), False, 'from osgeo import gdal\n'), ((55341, 55404), 'gdaltest.config_option', 'gdaltest.config_option', (['"""CPL_VSIL_NETWORK_STATS_ENABLED"""', '"""YES"""'], {}), "('CPL_VSIL_NETWORK_STATS_ENABLED', 'YES')\n", (55363, 55404), False, 'import gdaltest\n'), ((55950, 55979), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (55977, 55979), False, 'import webserver\n'), ((56966, 56983), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (56981, 56983), False, 'from osgeo import gdal\n'), ((57133, 57171), 'osgeo.gdal.NetworkStatsGetAsSerializedJSON', 'gdal.NetworkStatsGetAsSerializedJSON', ([], {}), '()\n', (57169, 57171), False, 'from osgeo import gdal\n'), ((58490, 58545), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsis3/s3_fake_bucket3/redirect"""', '"""wb"""'], {}), "('/vsis3/s3_fake_bucket3/redirect', 'wb')\n", (58504, 58545), False, 'from osgeo import gdal\n'), ((60641, 60680), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (60671, 60680), False, 'import webserver\n'), ((60690, 60708), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (60705, 60708), False, 'from osgeo import gdal\n'), ((60720, 60742), 'osgeo.gdal.GetLastErrorMsg', 'gdal.GetLastErrorMsg', ([], {}), '()\n', (60740, 60742), False, 'from osgeo import gdal\n'), ((60962, 60975), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (60973, 60975), False, 'import pytest\n'), ((60986, 61076), 'gdaltest.config_options', 'gdaltest.config_options', (["{'GDAL_HTTP_MAX_RETRY': '2', 'GDAL_HTTP_RETRY_DELAY': '0.01'}"], {}), "({'GDAL_HTTP_MAX_RETRY': '2',\n 'GDAL_HTTP_RETRY_DELAY': '0.01'})\n", (61009, 61076), False, 'import gdaltest\n'), ((61375, 61404), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (61402, 61404), False, 'import webserver\n'), ((62787, 62800), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (62798, 62800), False, 'import pytest\n'), ((63124, 63163), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (63154, 63163), False, 'import 
webserver\n'), ((63297, 63336), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (63327, 63336), False, 'import webserver\n'), ((63534, 63573), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (63564, 63573), False, 'import webserver\n'), ((63589, 63639), 'osgeo.gdal.Unlink', 'gdal.Unlink', (['"""/vsis3/s3_delete_bucket/delete_file"""'], {}), "('/vsis3/s3_delete_bucket/delete_file')\n", (63600, 63639), False, 'from osgeo import gdal\n'), ((63924, 63963), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (63954, 63963), False, 'import webserver\n'), ((64232, 64271), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (64262, 64271), False, 'import webserver\n'), ((65732, 65771), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (65762, 65771), False, 'import webserver\n'), ((65787, 65834), 'osgeo.gdal.Unlink', 'gdal.Unlink', (['"""/vsis3/s3_delete_bucket/redirect"""'], {}), "('/vsis3/s3_delete_bucket/redirect')\n", (65798, 65834), False, 'from osgeo import gdal\n'), ((66059, 66072), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (66070, 66072), False, 'import pytest\n'), ((67542, 67600), 'gdaltest.config_option', 'gdaltest.config_option', (['"""CPL_VSIS3_UNLINK_BATCH_SIZE"""', '"""2"""'], {}), "('CPL_VSIS3_UNLINK_BATCH_SIZE', '2')\n", (67564, 67600), False, 'import gdaltest\n'), ((68038, 68077), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (68068, 68077), False, 'import webserver\n'), ((68093, 68138), 'osgeo.gdal.UnlinkBatch', 'gdal.UnlinkBatch', (["['/vsis3/unlink_batch/foo']"], {}), "(['/vsis3/unlink_batch/foo'])\n", (68109, 68138), False, 'from osgeo import gdal\n'), ((68368, 68381), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (68379, 68381), False, 'import pytest\n'), ((71493, 71551), 'gdaltest.config_option', 'gdaltest.config_option', (['"""CPL_VSIS3_UNLINK_BATCH_SIZE"""', '"""2"""'], {}), "('CPL_VSIS3_UNLINK_BATCH_SIZE', '2')\n", (71515, 71551), False, 'import gdaltest\n'), ((71887, 71900), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (71898, 71900), False, 'import pytest\n'), ((71911, 71958), 'gdaltest.config_option', 'gdaltest.config_option', (['"""VSIS3_CHUNK_SIZE"""', '"""1"""'], {}), "('VSIS3_CHUNK_SIZE', '1')\n", (71933, 71958), False, 'import gdaltest\n'), ((74375, 74414), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (74405, 74414), False, 'import webserver\n'), ((74430, 74469), 'osgeo.gdal.VSIFWriteL', 'gdal.VSIFWriteL', (['big_buffer', '(1)', 'size', 'f'], {}), '(big_buffer, 1, size, f)\n', (74445, 74469), False, 'from osgeo import gdal\n'), ((76098, 76137), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (76128, 76137), False, 'import webserver\n'), ((76147, 76165), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (76162, 76165), False, 'from osgeo import gdal\n'), ((76177, 76199), 'osgeo.gdal.GetLastErrorMsg', 'gdal.GetLastErrorMsg', ([], {}), '()\n', (76197, 76199), False, 'from osgeo import gdal\n'), ((76663, 76702), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (76693, 76702), False, 'import webserver\n'), ((78431, 78470), 'webserver.install_http_handler', 
'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (78461, 78470), False, 'import webserver\n'), ((79729, 79768), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (79759, 79768), False, 'import webserver\n'), ((80065, 80082), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (80080, 80082), False, 'from osgeo import gdal\n'), ((81165, 81204), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (81195, 81204), False, 'import webserver\n'), ((81842, 81855), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (81853, 81855), False, 'import pytest\n'), ((81866, 81956), 'gdaltest.config_options', 'gdaltest.config_options', (["{'GDAL_HTTP_MAX_RETRY': '2', 'GDAL_HTTP_RETRY_DELAY': '0.01'}"], {}), "({'GDAL_HTTP_MAX_RETRY': '2',\n 'GDAL_HTTP_RETRY_DELAY': '0.01'})\n", (81889, 81956), False, 'import gdaltest\n'), ((82332, 82361), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (82359, 82361), False, 'import webserver\n'), ((83379, 83408), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (83406, 83408), False, 'import webserver\n'), ((84209, 84222), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (84220, 84222), False, 'import pytest\n'), ((84538, 84577), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (84568, 84577), False, 'import webserver\n'), ((84593, 84641), 'osgeo.gdal.Mkdir', 'gdal.Mkdir', (['"""/vsis3/s3_bucket_test_mkdir/dir"""', '(0)'], {}), "('/vsis3/s3_bucket_test_mkdir/dir', 0)\n", (84603, 84641), False, 'from osgeo import gdal\n'), ((85024, 85063), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (85054, 85063), False, 'import webserver\n'), ((85079, 85127), 'osgeo.gdal.Mkdir', 'gdal.Mkdir', (['"""/vsis3/s3_bucket_test_mkdir/dir"""', '(0)'], {}), "('/vsis3/s3_bucket_test_mkdir/dir', 0)\n", (85089, 85127), False, 'from osgeo import gdal\n'), ((85263, 85302), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (85293, 85302), False, 'import webserver\n'), ((85318, 85363), 'osgeo.gdal.Rmdir', 'gdal.Rmdir', (['"""/vsis3/s3_bucket_test_mkdir/dir"""'], {}), "('/vsis3/s3_bucket_test_mkdir/dir')\n", (85328, 85363), False, 'from osgeo import gdal\n'), ((85661, 85700), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (85691, 85700), False, 'import webserver\n'), ((85716, 85761), 'osgeo.gdal.Rmdir', 'gdal.Rmdir', (['"""/vsis3/s3_bucket_test_mkdir/dir"""'], {}), "('/vsis3/s3_bucket_test_mkdir/dir')\n", (85726, 85761), False, 'from osgeo import gdal\n'), ((86570, 86609), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (86600, 86609), False, 'import webserver\n'), ((86625, 86679), 'osgeo.gdal.Rmdir', 'gdal.Rmdir', (['"""/vsis3/s3_bucket_test_mkdir/dir_nonempty"""'], {}), "('/vsis3/s3_bucket_test_mkdir/dir_nonempty')\n", (86635, 86679), False, 'from osgeo import gdal\n'), ((87511, 87550), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (87541, 87550), False, 'import webserver\n'), ((88374, 88413), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (88404, 88413), False, 'import webserver\n'), ((89313, 89352), 'webserver.install_http_handler', 
'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (89343, 89352), False, 'import webserver\n'), ((90176, 90215), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (90206, 90215), False, 'import webserver\n'), ((90511, 90524), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (90522, 90524), False, 'import pytest\n'), ((91263, 91302), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (91293, 91302), False, 'import webserver\n'), ((91322, 91355), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', (['"""/vsis3/vsis3_8"""', '(0)'], {}), "('/vsis3/vsis3_8', 0)\n", (91334, 91355), False, 'from osgeo import gdal\n'), ((91450, 91489), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (91480, 91489), False, 'import webserver\n'), ((91620, 91659), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (91650, 91659), False, 'import webserver\n'), ((91936, 91949), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (91947, 91949), False, 'import pytest\n'), ((92028, 92052), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (92050, 92052), False, 'import gdaltest\n'), ((92072, 92101), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (92099, 92101), False, 'import webserver\n'), ((92246, 92270), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (92268, 92270), False, 'import gdaltest\n'), ((92290, 92319), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (92317, 92319), False, 'import webserver\n'), ((94001, 94040), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (94031, 94040), False, 'import webserver\n'), ((94057, 94159), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsimem/testsync.txt"""', '"""/vsis3/out"""'], {'options': 'options', 'callback': 'cbk', 'callback_data': 'tab'}), "('/vsimem/testsync.txt', '/vsis3/out', options=options, callback=\n cbk, callback_data=tab)\n", (94066, 94159), False, 'from osgeo import gdal\n'), ((94324, 94363), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (94354, 94363), False, 'import webserver\n'), ((94380, 94444), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsimem/testsync.txt"""', '"""/vsis3/out"""'], {'options': 'options'}), "('/vsimem/testsync.txt', '/vsis3/out', options=options)\n", (94389, 94444), False, 'from osgeo import gdal\n'), ((94460, 94537), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsimem/testsync.txt"""', '"""/vsis3/out/testsync.txt"""'], {'options': 'options'}), "('/vsimem/testsync.txt', '/vsis3/out/testsync.txt', options=options)\n", (94469, 94537), False, 'from osgeo import gdal\n'), ((94874, 94913), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (94904, 94913), False, 'import webserver\n'), ((94930, 94995), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsis3/out/testsync.txt"""', '"""/vsimem/"""'], {'options': 'options'}), "('/vsis3/out/testsync.txt', '/vsimem/', options=options)\n", (94939, 94995), False, 'from osgeo import gdal\n'), ((95133, 95198), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsis3/out/testsync.txt"""', '"""/vsimem/"""'], {'options': 'options'}), "('/vsis3/out/testsync.txt', '/vsimem/', options=options)\n", (95142, 95198), False, 'from osgeo import gdal\n'), ((95215, 95292), 'osgeo.gdal.Sync', 
'gdal.Sync', (['"""/vsis3/out/testsync.txt"""', '"""/vsimem/testsync.txt"""'], {'options': 'options'}), "('/vsis3/out/testsync.txt', '/vsimem/testsync.txt', options=options)\n", (95224, 95292), False, 'from osgeo import gdal\n'), ((95623, 95662), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (95653, 95662), False, 'import webserver\n'), ((95679, 95744), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsis3/out/testsync.txt"""', '"""/vsimem/"""'], {'options': 'options'}), "('/vsis3/out/testsync.txt', '/vsimem/', options=options)\n", (95688, 95744), False, 'from osgeo import gdal\n'), ((96285, 96324), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (96315, 96324), False, 'import webserver\n'), ((96341, 96405), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsimem/testsync.txt"""', '"""/vsis3/out"""'], {'options': 'options'}), "('/vsimem/testsync.txt', '/vsis3/out', options=options)\n", (96350, 96405), False, 'from osgeo import gdal\n'), ((97304, 97343), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (97334, 97343), False, 'import webserver\n'), ((97360, 97419), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsimem/subdir/"""', '"""/vsis3/out"""'], {'options': 'options'}), "('/vsimem/subdir/', '/vsis3/out', options=options)\n", (97369, 97419), False, 'from osgeo import gdal\n'), ((97670, 97683), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (97681, 97683), False, 'import pytest\n'), ((98303, 98342), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (98333, 98342), False, 'import webserver\n'), ((98359, 98424), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsis3/out/testsync.txt"""', '"""/vsimem/"""'], {'options': 'options'}), "('/vsis3/out/testsync.txt', '/vsimem/', options=options)\n", (98368, 98424), False, 'from osgeo import gdal\n'), ((98802, 98841), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (98832, 98841), False, 'import webserver\n'), ((98858, 98923), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsis3/out/testsync.txt"""', '"""/vsimem/"""'], {'options': 'options'}), "('/vsis3/out/testsync.txt', '/vsimem/', options=options)\n", (98867, 98923), False, 'from osgeo import gdal\n'), ((99346, 99385), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (99376, 99385), False, 'import webserver\n'), ((99402, 99479), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsimem/testsync.txt"""', '"""/vsis3/out/testsync.txt"""'], {'options': 'options'}), "('/vsimem/testsync.txt', '/vsis3/out/testsync.txt', options=options)\n", (99411, 99479), False, 'from osgeo import gdal\n'), ((99857, 99896), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (99887, 99896), False, 'import webserver\n'), ((99913, 99990), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsimem/testsync.txt"""', '"""/vsis3/out/testsync.txt"""'], {'options': 'options'}), "('/vsimem/testsync.txt', '/vsis3/out/testsync.txt', options=options)\n", (99922, 99990), False, 'from osgeo import gdal\n'), ((100266, 100279), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (100277, 100279), False, 'import pytest\n'), ((100887, 100926), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (100917, 100926), False, 'import webserver\n'), ((100943, 101008), 'osgeo.gdal.Sync', 'gdal.Sync', 
(['"""/vsis3/out/testsync.txt"""', '"""/vsimem/"""'], {'options': 'options'}), "('/vsis3/out/testsync.txt', '/vsimem/', options=options)\n", (100952, 101008), False, 'from osgeo import gdal\n'), ((101421, 101460), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (101451, 101460), False, 'import webserver\n'), ((101477, 101554), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsimem/testsync.txt"""', '"""/vsis3/out/testsync.txt"""'], {'options': 'options'}), "('/vsimem/testsync.txt', '/vsis3/out/testsync.txt', options=options)\n", (101486, 101554), False, 'from osgeo import gdal\n'), ((101847, 101860), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (101858, 101860), False, 'import pytest\n'), ((102995, 103034), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (103025, 103034), False, 'import webserver\n'), ((103051, 103101), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsis3/in/testsync.txt"""', '"""/vsis3/out/"""'], {}), "('/vsis3/in/testsync.txt', '/vsis3/out/')\n", (103060, 103101), False, 'from osgeo import gdal\n'), ((103275, 103288), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (103286, 103288), False, 'import pytest\n'), ((104334, 104373), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (104364, 104373), False, 'import webserver\n'), ((104636, 104649), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (104647, 104649), False, 'import pytest\n'), ((106937, 106976), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (106967, 106976), False, 'import webserver\n'), ((107284, 107297), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (107295, 107297), False, 'import pytest\n'), ((107580, 107619), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (107610, 107619), False, 'import webserver\n'), ((107953, 107966), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (107964, 107966), False, 'import pytest\n'), ((111318, 111375), 'gdaltest.config_option', 'gdaltest.config_option', (['"""VSIS3_SIMULATE_THREADING"""', '"""YES"""'], {}), "('VSIS3_SIMULATE_THREADING', 'YES')\n", (111340, 111375), False, 'import gdaltest\n'), ((111843, 111856), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (111854, 111856), False, 'import pytest\n'), ((113551, 113650), 'gdaltest.config_options', 'gdaltest.config_options', (["{'VSIS3_SIMULATE_THREADING': 'YES', 'VSIS3_SYNC_MULTITHREADING': 'NO'}"], {}), "({'VSIS3_SIMULATE_THREADING': 'YES',\n 'VSIS3_SYNC_MULTITHREADING': 'NO'})\n", (113574, 113650), False, 'import gdaltest\n'), ((114200, 114213), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (114211, 114213), False, 'import pytest\n'), ((114394, 114433), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (114424, 114433), False, 'import webserver\n'), ((114448, 114511), 'osgeo.gdal.GetFileMetadata', 'gdal.GetFileMetadata', (['"""/vsis3/test_metadata/foo.txt"""', '"""HEADERS"""'], {}), "('/vsis3/test_metadata/foo.txt', 'HEADERS')\n", (114468, 114511), False, 'from osgeo import gdal\n'), ((114804, 114843), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (114834, 114843), False, 'import webserver\n'), ((114858, 114918), 'osgeo.gdal.GetFileMetadata', 'gdal.GetFileMetadata', (['"""/vsis3/test_metadata/foo.txt"""', '"""TAGS"""'], {}), "('/vsis3/test_metadata/foo.txt', 
'TAGS')\n", (114878, 114918), False, 'from osgeo import gdal\n'), ((115488, 115527), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (115518, 115527), False, 'import webserver\n'), ((115544, 115623), 'osgeo.gdal.SetFileMetadata', 'gdal.SetFileMetadata', (['"""/vsis3/test_metadata/foo.txt"""', "{'foo': 'bar'}", '"""HEADERS"""'], {}), "('/vsis3/test_metadata/foo.txt', {'foo': 'bar'}, 'HEADERS')\n", (115564, 115623), False, 'from osgeo import gdal\n'), ((116545, 116584), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (116575, 116584), False, 'import webserver\n'), ((116601, 116677), 'osgeo.gdal.SetFileMetadata', 'gdal.SetFileMetadata', (['"""/vsis3/test_metadata/foo.txt"""', "{'foo': 'bar'}", '"""TAGS"""'], {}), "('/vsis3/test_metadata/foo.txt', {'foo': 'bar'}, 'TAGS')\n", (116621, 116677), False, 'from osgeo import gdal\n'), ((116835, 116874), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (116865, 116874), False, 'import webserver\n'), ((116891, 116955), 'osgeo.gdal.SetFileMetadata', 'gdal.SetFileMetadata', (['"""/vsis3/test_metadata/foo.txt"""', '{}', '"""TAGS"""'], {}), "('/vsis3/test_metadata/foo.txt', {}, 'TAGS')\n", (116911, 116955), False, 'from osgeo import gdal\n'), ((116983, 117007), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (117005, 117007), False, 'import gdaltest\n'), ((117125, 117149), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (117147, 117149), False, 'import gdaltest\n'), ((117488, 117501), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (117499, 117501), False, 'import pytest\n'), ((117889, 117928), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (117919, 117928), False, 'import webserver\n'), ((118345, 118358), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (118356, 118358), False, 'import pytest\n'), ((118399, 118423), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (118421, 118423), False, 'import gdaltest\n'), ((118512, 118584), 'gdaltest.config_option', 'gdaltest.config_option', (['"""CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE"""', '"""YES"""'], {}), "('CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE', 'YES')\n", (118534, 118584), False, 'import gdaltest\n'), ((118598, 118651), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsis3/random_write/test.bin"""', '"""w+b"""'], {}), "('/vsis3/random_write/test.bin', 'w+b')\n", (118612, 118651), False, 'from osgeo import gdal\n'), ((118676, 118707), 'osgeo.gdal.VSIFWriteL', 'gdal.VSIFWriteL', (['"""foo"""', '(3)', '(1)', 'f'], {}), "('foo', 3, 1, f)\n", (118691, 118707), False, 'from osgeo import gdal\n'), ((118724, 118747), 'osgeo.gdal.VSIFSeekL', 'gdal.VSIFSeekL', (['f', '(0)', '(0)'], {}), '(f, 0, 0)\n', (118738, 118747), False, 'from osgeo import gdal\n'), ((118824, 118840), 'osgeo.gdal.VSIFEofL', 'gdal.VSIFEofL', (['f'], {}), '(f)\n', (118837, 118840), False, 'from osgeo import gdal\n'), ((118857, 118874), 'osgeo.gdal.VSIFTellL', 'gdal.VSIFTellL', (['f'], {}), '(f)\n', (118871, 118874), False, 'from osgeo import gdal\n'), ((119014, 119053), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (119044, 119053), False, 'import webserver\n'), ((119280, 119293), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (119291, 119293), False, 'import pytest\n'), ((119334, 119406), 'gdaltest.config_option', 
'gdaltest.config_option', (['"""CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE"""', '"""YES"""'], {}), "('CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE', 'YES')\n", (119356, 119406), False, 'import gdaltest\n'), ((119420, 119473), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsis3/random_write/test.bin"""', '"""w+b"""'], {}), "('/vsis3/random_write/test.bin', 'w+b')\n", (119434, 119473), False, 'from osgeo import gdal\n'), ((119599, 119638), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (119629, 119638), False, 'import webserver\n'), ((119909, 119922), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (119920, 119922), False, 'import pytest\n'), ((119963, 120035), 'gdaltest.config_option', 'gdaltest.config_option', (['"""CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE"""', '"""YES"""'], {}), "('CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE', 'YES')\n", (119985, 120035), False, 'import gdaltest\n'), ((120199, 120230), 'osgeo.gdal.VSIFWriteL', 'gdal.VSIFWriteL', (['"""foo"""', '(3)', '(1)', 'f'], {}), "('foo', 3, 1, f)\n", (120214, 120230), False, 'from osgeo import gdal\n'), ((120357, 120396), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (120387, 120396), False, 'import webserver\n'), ((120678, 120691), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (120689, 120691), False, 'import pytest\n'), ((121036, 121108), 'gdaltest.config_option', 'gdaltest.config_option', (['"""CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE"""', '"""YES"""'], {}), "('CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE', 'YES')\n", (121058, 121108), False, 'import gdaltest\n'), ((121400, 121439), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (121430, 121439), False, 'import webserver\n'), ((121681, 121694), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (121692, 121694), False, 'import pytest\n'), ((122352, 122391), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (122382, 122391), False, 'import webserver\n'), ((122545, 122563), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (122560, 122563), False, 'from osgeo import gdal\n'), ((122904, 122917), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (122915, 122917), False, 'import pytest\n'), ((123575, 123614), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (123605, 123614), False, 'import webserver\n'), ((123768, 123786), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (123783, 123786), False, 'from osgeo import gdal\n'), ((124147, 124160), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (124158, 124160), False, 'import pytest\n'), ((125184, 125223), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (125214, 125223), False, 'import webserver\n'), ((125377, 125395), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (125392, 125395), False, 'from osgeo import gdal\n'), ((125911, 125924), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (125922, 125924), False, 'import pytest\n'), ((126888, 126927), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (126918, 126927), False, 'import webserver\n'), ((127211, 127229), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (127226, 127229), False, 'from osgeo import gdal\n'), ((127561, 127574), 'pytest.skip', 'pytest.skip', ([], {}), 
'()\n', (127572, 127574), False, 'import pytest\n'), ((128646, 128685), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (128676, 128685), False, 'import webserver\n'), ((128926, 128944), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (128941, 128944), False, 'from osgeo import gdal\n'), ((129380, 129393), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (129391, 129393), False, 'import pytest\n'), ((129460, 129473), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (129471, 129473), False, 'import pytest\n'), ((130807, 130846), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (130837, 130846), False, 'import webserver\n'), ((131000, 131018), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (131015, 131018), False, 'from osgeo import gdal\n'), ((131279, 131318), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (131309, 131318), False, 'import webserver\n'), ((131467, 131485), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (131482, 131485), False, 'from osgeo import gdal\n'), ((131875, 131888), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (131886, 131888), False, 'import pytest\n'), ((131955, 131968), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (131966, 131968), False, 'import pytest\n'), ((133273, 133312), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (133303, 133312), False, 'import webserver\n'), ((133466, 133484), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (133481, 133484), False, 'from osgeo import gdal\n'), ((133896, 133909), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (133907, 133909), False, 'import pytest\n'), ((133976, 133989), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (133987, 133989), False, 'import pytest\n'), ((135839, 135878), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (135869, 135878), False, 'import webserver\n'), ((136032, 136050), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (136047, 136050), False, 'from osgeo import gdal\n'), ((136483, 136522), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (136513, 136522), False, 'import webserver\n'), ((136924, 136937), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (136935, 136937), False, 'import pytest\n'), ((137334, 137363), 'gdaltest.built_against_curl', 'gdaltest.built_against_curl', ([], {}), '()\n', (137361, 137363), False, 'import gdaltest\n'), ((137373, 137386), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (137384, 137386), False, 'import pytest\n'), ((137983, 138017), 'pytest.skip', 'pytest.skip', (['"""Missing S3_RESOURCE"""'], {}), "('Missing S3_RESOURCE')\n", (137994, 138017), False, 'import pytest\n'), ((138107, 138126), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['path'], {}), '(path)\n', (138120, 138126), False, 'from osgeo import gdal\n'), ((138263, 138281), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', (['path'], {}), '(path)\n', (138275, 138281), False, 'from osgeo import gdal\n'), ((138692, 138714), 'osgeo.gdal.Mkdir', 'gdal.Mkdir', (['subpath', '(0)'], {}), '(subpath, 0)\n', (138702, 138714), False, 'from osgeo import gdal\n'), ((138810, 138828), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', (['path'], {}), '(path)\n', (138822, 138828), False, 'from osgeo import gdal\n'), ((138949, 138971), 
'osgeo.gdal.Mkdir', 'gdal.Mkdir', (['subpath', '(0)'], {}), '(subpath, 0)\n', (138959, 138971), False, 'from osgeo import gdal\n'), ((139068, 139087), 'osgeo.gdal.Rmdir', 'gdal.Rmdir', (['subpath'], {}), '(subpath)\n', (139078, 139087), False, 'from osgeo import gdal\n'), ((139183, 139201), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', (['path'], {}), '(path)\n', (139195, 139201), False, 'from osgeo import gdal\n'), ((139330, 139349), 'osgeo.gdal.Rmdir', 'gdal.Rmdir', (['subpath'], {}), '(subpath)\n', (139340, 139349), False, 'from osgeo import gdal\n'), ((139446, 139468), 'osgeo.gdal.Mkdir', 'gdal.Mkdir', (['subpath', '(0)'], {}), '(subpath, 0)\n', (139456, 139468), False, 'from osgeo import gdal\n'), ((139558, 139601), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (["(subpath + '/test.txt')", '"""wb"""'], {}), "(subpath + '/test.txt', 'wb')\n", (139572, 139601), False, 'from osgeo import gdal\n'), ((139639, 139672), 'osgeo.gdal.VSIFWriteL', 'gdal.VSIFWriteL', (['"""hello"""', '(1)', '(5)', 'f'], {}), "('hello', 1, 5, f)\n", (139654, 139672), False, 'from osgeo import gdal\n'), ((139681, 139699), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (139696, 139699), False, 'from osgeo import gdal\n'), ((139715, 139734), 'osgeo.gdal.Rmdir', 'gdal.Rmdir', (['subpath'], {}), '(subpath)\n', (139725, 139734), False, 'from osgeo import gdal\n'), ((139857, 139900), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (["(subpath + '/test.txt')", '"""rb"""'], {}), "(subpath + '/test.txt', 'rb')\n", (139871, 139900), False, 'from osgeo import gdal\n'), ((140024, 140042), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (140039, 140042), False, 'from osgeo import gdal\n'), ((140136, 140180), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (["(subpath + '/test2.txt')", '"""rb"""'], {}), "(subpath + '/test2.txt', 'rb')\n", (140150, 140180), False, 'from osgeo import gdal\n'), ((140304, 140322), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (140319, 140322), False, 'from osgeo import gdal\n'), ((140338, 140373), 'osgeo.gdal.Unlink', 'gdal.Unlink', (["(subpath + '/test2.txt')"], {}), "(subpath + '/test2.txt')\n", (140349, 140373), False, 'from osgeo import gdal\n'), ((140497, 140516), 'osgeo.gdal.Rmdir', 'gdal.Rmdir', (['subpath'], {}), '(subpath)\n', (140507, 140516), False, 'from osgeo import gdal\n'), ((141211, 141228), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (141226, 141228), False, 'from osgeo import gdal\n'), ((141372, 141390), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (141387, 141390), False, 'from osgeo import gdal\n'), ((142044, 142093), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['var', 'gdaltest.aws_vars[var]'], {}), '(var, gdaltest.aws_vars[var])\n', (142064, 142093), False, 'from osgeo import gdal\n'), ((2259, 2288), 'osgeo.gdal.SetConfigOption', 'gdal.SetConfigOption', (['var', '""""""'], {}), "(var, '')\n", (2279, 2288), False, 'from osgeo import gdal\n'), ((3546, 3678), 'gdaltest.gdalurlopen', 'gdaltest.gdalurlopen', (['"""https://landsat-pds.s3.amazonaws.com/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF"""'], {}), "(\n 'https://landsat-pds.s3.amazonaws.com/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF'\n )\n", (3566, 3678), False, 'import gdaltest\n'), ((3690, 3720), 'pytest.skip', 'pytest.skip', (['"""cannot open URL"""'], {}), "('cannot open URL')\n", (3701, 3720), False, 'import pytest\n'), ((4526, 4597), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', 
(['"""/vsimem/test_vsis3_no_sign_request_sync/test_dummy/foo"""'], {}), "('/vsimem/test_vsis3_no_sign_request_sync/test_dummy/foo')\n", (4539, 4597), False, 'from osgeo import gdal\n'), ((4619, 4690), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsimem/test_vsis3_no_sign_request_sync/test_dummy/bar"""'], {}), "('/vsimem/test_vsis3_no_sign_request_sync/test_dummy/bar')\n", (4632, 4690), False, 'from osgeo import gdal\n'), ((5570, 5641), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsimem/test_vsis3_no_sign_request_sync/test_dummy/foo"""'], {}), "('/vsimem/test_vsis3_no_sign_request_sync/test_dummy/foo')\n", (5583, 5641), False, 'from osgeo import gdal\n'), ((5663, 5734), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsimem/test_vsis3_no_sign_request_sync/test_dummy/bar"""'], {}), "('/vsimem/test_vsis3_no_sign_request_sync/test_dummy/bar')\n", (5676, 5734), False, 'from osgeo import gdal\n'), ((6988, 7013), 'osgeo.gdal.VSIGetLastErrorMsg', 'gdal.VSIGetLastErrorMsg', ([], {}), '()\n', (7011, 7013), False, 'from osgeo import gdal\n'), ((7059, 7077), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (7074, 7077), False, 'from osgeo import gdal\n'), ((7089, 7121), 'osgeo.gdal.GetConfigOption', 'gdal.GetConfigOption', (['"""APPVEYOR"""'], {}), "('APPVEYOR')\n", (7109, 7121), False, 'from osgeo import gdal\n'), ((7174, 7199), 'osgeo.gdal.VSIGetLastErrorMsg', 'gdal.VSIGetLastErrorMsg', ([], {}), '()\n', (7197, 7199), False, 'from osgeo import gdal\n'), ((7342, 7367), 'osgeo.gdal.VSIGetLastErrorMsg', 'gdal.VSIGetLastErrorMsg', ([], {}), '()\n', (7365, 7367), False, 'from osgeo import gdal\n'), ((12863, 12902), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (12893, 12902), False, 'import webserver\n'), ((13091, 13109), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (13106, 13109), False, 'from osgeo import gdal\n'), ((14806, 14819), 'pytest.fail', 'pytest.fail', ([], {}), '()\n', (14817, 14819), False, 'import pytest\n'), ((18332, 18399), 'pytest.skip', 'pytest.skip', (['"""Skipped on trusty branch, but should be investigated"""'], {}), "('Skipped on trusty branch, but should be investigated')\n", (18343, 18399), False, 'import pytest\n'), ((19793, 19817), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (19815, 19817), False, 'import gdaltest\n'), ((20442, 20466), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (20464, 20466), False, 'import gdaltest\n'), ((21099, 21123), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (21121, 21123), False, 'import gdaltest\n'), ((21836, 21860), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (21858, 21860), False, 'import gdaltest\n'), ((22583, 22607), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (22605, 22607), False, 'import gdaltest\n'), ((23288, 23312), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (23310, 23312), False, 'import gdaltest\n'), ((25283, 25322), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (25313, 25322), False, 'import webserver\n'), ((29203, 29227), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (29225, 29227), False, 'import gdaltest\n'), ((30166, 30205), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (30196, 30205), False, 'import webserver\n'), ((30334, 30352), 
'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (30349, 30352), False, 'from osgeo import gdal\n'), ((35271, 35338), 'pytest.skip', 'pytest.skip', (['"""Skipped on trusty branch, but should be investigated"""'], {}), "('Skipped on trusty branch, but should be investigated')\n", (35282, 35338), False, 'import pytest\n'), ((35426, 35455), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (35453, 35455), False, 'import webserver\n'), ((35632, 35718), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin"""'], {}), "(\n '/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin')\n", (35645, 35718), False, 'from osgeo import gdal\n'), ((35740, 35826), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin"""'], {}), "(\n '/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin')\n", (35753, 35826), False, 'from osgeo import gdal\n'), ((36360, 36446), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin"""'], {}), "(\n '/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin')\n", (36373, 36446), False, 'from osgeo import gdal\n'), ((38886, 38925), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (38916, 38925), False, 'import webserver\n'), ((38954, 38998), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', (['"""/vsis3/s3_fake_bucket2/a_dir"""'], {}), "('/vsis3/s3_fake_bucket2/a_dir')\n", (38966, 38998), False, 'from osgeo import gdal\n'), ((39486, 39557), 'gdaltest.config_option', 'gdaltest.config_option', (['"""CPL_VSIL_CURL_NON_CACHED"""', 'config_option_value'], {}), "('CPL_VSIL_CURL_NON_CACHED', config_option_value)\n", (39508, 39557), False, 'import gdaltest\n'), ((39582, 39611), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (39609, 39611), False, 'import webserver\n'), ((40049, 40078), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (40076, 40078), False, 'import webserver\n'), ((40361, 40390), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (40388, 40390), False, 'import webserver\n'), ((40737, 40766), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (40764, 40766), False, 'import webserver\n'), ((41327, 41398), 'gdaltest.config_option', 'gdaltest.config_option', (['"""CPL_VSIL_CURL_NON_CACHED"""', 'config_option_value'], {}), "('CPL_VSIL_CURL_NON_CACHED', config_option_value)\n", (41349, 41398), False, 'import gdaltest\n'), ((41423, 41452), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (41450, 41452), False, 'import webserver\n'), ((42807, 42836), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (42834, 42836), False, 'import webserver\n'), ((52735, 52764), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (52762, 52764), False, 'import webserver\n'), ((52780, 52804), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (52802, 52804), False, 'import gdaltest\n'), ((52822, 52868), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsis3/s3_fake_bucket3"""', '"""wb"""'], {}), "('/vsis3/s3_fake_bucket3', 'wb')\n", (52836, 52868), False, 'from osgeo import gdal\n'), ((54389, 54413), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (54411, 54413), False, 
'import gdaltest\n'), ((54433, 54456), 'osgeo.gdal.VSIFSeekL', 'gdal.VSIFSeekL', (['f', '(1)', '(0)'], {}), '(f, 1, 0)\n', (54447, 54456), False, 'from osgeo import gdal\n'), ((54738, 54762), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (54760, 54762), False, 'import gdaltest\n'), ((54782, 54805), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(1)', 'f'], {}), '(1, 1, f)\n', (54796, 54805), False, 'from osgeo import gdal\n'), ((55185, 55209), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (55207, 55209), False, 'import gdaltest\n'), ((55223, 55241), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (55238, 55241), False, 'from osgeo import gdal\n'), ((55498, 55561), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsis3/s3_fake_bucket3/another_file.bin"""', '"""wb"""'], {}), "('/vsis3/s3_fake_bucket3/another_file.bin', 'wb')\n", (55512, 55561), False, 'from osgeo import gdal\n'), ((56997, 57036), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (57027, 57036), False, 'import webserver\n'), ((57050, 57068), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (57065, 57068), False, 'from osgeo import gdal\n'), ((57084, 57106), 'osgeo.gdal.GetLastErrorMsg', 'gdal.GetLastErrorMsg', ([], {}), '()\n', (57104, 57106), False, 'from osgeo import gdal\n'), ((58446, 58475), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (58473, 58475), False, 'import webserver\n'), ((58590, 58624), 'osgeo.gdal.VSIFWriteL', 'gdal.VSIFWriteL', (['"""foobar"""', '(1)', '(6)', 'f'], {}), "('foobar', 1, 6, f)\n", (58605, 58624), False, 'from osgeo import gdal\n'), ((61201, 61266), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsis3/s3_fake_bucket3/put_with_retry.bin"""', '"""wb"""'], {}), "('/vsis3/s3_fake_bucket3/put_with_retry.bin', 'wb')\n", (61215, 61266), False, 'from osgeo import gdal\n'), ((62466, 62490), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (62488, 62490), False, 'import gdaltest\n'), ((62842, 62871), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (62869, 62871), False, 'import webserver\n'), ((62887, 62911), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (62909, 62911), False, 'import gdaltest\n'), ((62931, 62956), 'osgeo.gdal.Unlink', 'gdal.Unlink', (['"""/vsis3/foo"""'], {}), "('/vsis3/foo')\n", (62942, 62956), False, 'from osgeo import gdal\n'), ((63980, 64032), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/s3_delete_bucket/delete_file"""'], {}), "('/vsis3/s3_delete_bucket/delete_file')\n", (63993, 64032), False, 'from osgeo import gdal\n'), ((64286, 64310), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (64308, 64310), False, 'import gdaltest\n'), ((64330, 64386), 'osgeo.gdal.Unlink', 'gdal.Unlink', (['"""/vsis3/s3_delete_bucket/delete_file_error"""'], {}), "('/vsis3/s3_delete_bucket/delete_file_error')\n", (64341, 64386), False, 'from osgeo import gdal\n'), ((66650, 66714), 'sys.stderr.write', 'sys.stderr.write', (["('Did not get expected content: %s\\n' % content)"], {}), "('Did not get expected content: %s\\n' % content)\n", (66666, 66714), False, 'import sys\n'), ((67615, 67654), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (67645, 67654), False, 'import webserver\n'), ((67674, 67781), 'osgeo.gdal.UnlinkBatch', 'gdal.UnlinkBatch', (["['/vsis3/unlink_batch/foo', 
'/vsis3/unlink_batch/bar/baz',\n '/vsis3/unlink_batch/baw']"], {}), "(['/vsis3/unlink_batch/foo', '/vsis3/unlink_batch/bar/baz',\n '/vsis3/unlink_batch/baw'])\n", (67690, 67781), False, 'from osgeo import gdal\n'), ((69757, 69821), 'sys.stderr.write', 'sys.stderr.write', (["('Did not get expected content: %s\\n' % content)"], {}), "('Did not get expected content: %s\\n' % content)\n", (69773, 69821), False, 'import sys\n'), ((70808, 70872), 'sys.stderr.write', 'sys.stderr.write', (["('Did not get expected content: %s\\n' % content)"], {}), "('Did not get expected content: %s\\n' % content)\n", (70824, 70872), False, 'import sys\n'), ((71566, 71605), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (71596, 71605), False, 'import webserver\n'), ((72060, 72121), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsis3/s3_fake_bucket4/large_file.bin"""', '"""wb"""'], {}), "('/vsis3/s3_fake_bucket4/large_file.bin', 'wb')\n", (72074, 72121), False, 'from osgeo import gdal\n'), ((75644, 75708), 'sys.stderr.write', 'sys.stderr.write', (["('Did not get expected content: %s\\n' % content)"], {}), "('Did not get expected content: %s\\n' % content)\n", (75660, 75708), False, 'import sys\n'), ((77362, 77379), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (77377, 77379), False, 'from osgeo import gdal\n'), ((77392, 77410), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (77407, 77410), False, 'from osgeo import gdal\n'), ((78970, 78987), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (78985, 78987), False, 'from osgeo import gdal\n'), ((79000, 79018), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (79015, 79018), False, 'from osgeo import gdal\n'), ((79783, 79830), 'gdaltest.config_option', 'gdaltest.config_option', (['"""VSIS3_CHUNK_SIZE"""', '"""1"""'], {}), "('VSIS3_CHUNK_SIZE', '1')\n", (79805, 79830), False, 'import gdaltest\n'), ((79856, 79886), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['filename', '"""wb"""'], {}), "(filename, 'wb')\n", (79870, 79886), False, 'from osgeo import gdal\n'), ((79939, 79963), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (79961, 79963), False, 'import gdaltest\n'), ((79983, 80022), 'osgeo.gdal.VSIFWriteL', 'gdal.VSIFWriteL', (['big_buffer', '(1)', 'size', 'f'], {}), '(big_buffer, 1, size, f)\n', (79998, 80022), False, 'from osgeo import gdal\n'), ((80096, 80120), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (80118, 80120), False, 'import gdaltest\n'), ((80134, 80152), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (80149, 80152), False, 'from osgeo import gdal\n'), ((80168, 80190), 'osgeo.gdal.GetLastErrorMsg', 'gdal.GetLastErrorMsg', ([], {}), '()\n', (80188, 80190), False, 'from osgeo import gdal\n'), ((81219, 81266), 'gdaltest.config_option', 'gdaltest.config_option', (['"""VSIS3_CHUNK_SIZE"""', '"""1"""'], {}), "('VSIS3_CHUNK_SIZE', '1')\n", (81241, 81266), False, 'import gdaltest\n'), ((81292, 81322), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['filename', '"""wb"""'], {}), "(filename, 'wb')\n", (81306, 81322), False, 'from osgeo import gdal\n'), ((81384, 81423), 'osgeo.gdal.VSIFWriteL', 'gdal.VSIFWriteL', (['big_buffer', '(1)', 'size', 'f'], {}), '(big_buffer, 1, size, f)\n', (81399, 81423), False, 'from osgeo import gdal\n'), ((81477, 81494), 'osgeo.gdal.ErrorReset', 'gdal.ErrorReset', ([], {}), '()\n', (81492, 81494), False, 'from osgeo import gdal\n'), ((82002, 82049), 
'gdaltest.config_option', 'gdaltest.config_option', (['"""VSIS3_CHUNK_SIZE"""', '"""1"""'], {}), "('VSIS3_CHUNK_SIZE', '1')\n", (82024, 82049), False, 'import gdaltest\n'), ((83188, 83212), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (83210, 83212), False, 'import gdaltest\n'), ((83915, 83939), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (83937, 83939), False, 'import gdaltest\n'), ((84687, 84735), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/s3_bucket_test_mkdir/dir"""'], {}), "('/vsis3/s3_bucket_test_mkdir/dir')\n", (84700, 84735), False, 'from osgeo import gdal\n'), ((88430, 88488), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', (['"""/vsis3/s3_bucket_test_readdir/test_dirread"""'], {}), "('/vsis3/s3_bucket_test_readdir/test_dirread')\n", (88442, 88488), False, 'from osgeo import gdal\n'), ((90232, 90291), 'osgeo.gdal.ReadDir', 'gdal.ReadDir', (['"""/vsis3/s3_bucket_test_readdir2/test_dirread"""'], {}), "('/vsis3/s3_bucket_test_readdir2/test_dirread')\n", (90244, 90291), False, 'from osgeo import gdal\n'), ((92115, 92154), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (92145, 92154), False, 'import webserver\n'), ((92518, 92557), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (92548, 92557), False, 'import webserver\n'), ((93354, 93418), 'sys.stderr.write', 'sys.stderr.write', (["('Did not get expected content: %s\\n' % content)"], {}), "('Did not get expected content: %s\\n' % content)\n", (93370, 93418), False, 'import sys\n'), ((95086, 95115), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (95113, 95115), False, 'import webserver\n'), ((95811, 95834), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(3)', 'f'], {}), '(1, 3, f)\n', (95825, 95834), False, 'from osgeo import gdal\n'), ((104390, 104453), 'osgeo.gdal.Rename', 'gdal.Rename', (['"""/vsis3/test/source.txt"""', '"""/vsis3/test/target.txt"""'], {}), "('/vsis3/test/source.txt', '/vsis3/test/target.txt')\n", (104401, 104453), False, 'from osgeo import gdal\n'), ((106993, 107056), 'osgeo.gdal.Rename', 'gdal.Rename', (['"""/vsis3/test/source_dir"""', '"""/vsis3/test/target_dir"""'], {}), "('/vsis3/test/source_dir', '/vsis3/test/target_dir')\n", (107004, 107056), False, 'from osgeo import gdal\n'), ((107636, 107699), 'osgeo.gdal.Rename', 'gdal.Rename', (['"""/vsis3/test/source.txt"""', '"""/vsis3/test_target_dir"""'], {}), "('/vsis3/test/source.txt', '/vsis3/test_target_dir')\n", (107647, 107699), False, 'from osgeo import gdal\n'), ((110896, 110960), 'sys.stderr.write', 'sys.stderr.write', (["('Did not get expected content: %s\\n' % content)"], {}), "('Did not get expected content: %s\\n' % content)\n", (110912, 110960), False, 'import sys\n'), ((111390, 111429), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (111420, 111429), False, 'import webserver\n'), ((111450, 111577), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsimem/test"""', '"""/vsis3/test_bucket"""'], {'options': "['NUM_THREADS=1', 'CHUNK_SIZE=3']", 'callback': 'cbk', 'callback_data': 'tab'}), "('/vsimem/test', '/vsis3/test_bucket', options=['NUM_THREADS=1',\n 'CHUNK_SIZE=3'], callback=cbk, callback_data=tab)\n", (111459, 111577), False, 'from osgeo import gdal\n'), ((113695, 113734), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (113725, 113734), False, 'import 
webserver\n'), ((116131, 116195), 'sys.stderr.write', 'sys.stderr.write', (["('Did not get expected content: %s\\n' % content)"], {}), "('Did not get expected content: %s\\n' % content)\n", (116147, 116195), False, 'import sys\n'), ((117024, 117091), 'osgeo.gdal.GetFileMetadata', 'gdal.GetFileMetadata', (['"""/vsis3/test_metadata/foo.txt"""', '"""UNSUPPORTED"""'], {}), "('/vsis3/test_metadata/foo.txt', 'UNSUPPORTED')\n", (117044, 117091), False, 'from osgeo import gdal\n'), ((117170, 117241), 'osgeo.gdal.SetFileMetadata', 'gdal.SetFileMetadata', (['"""/vsis3/test_metadata/foo.txt"""', '{}', '"""UNSUPPORTED"""'], {}), "('/vsis3/test_metadata/foo.txt', {}, 'UNSUPPORTED')\n", (117190, 117241), False, 'from osgeo import gdal\n'), ((117945, 118003), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsis3/no_useless_requests/foo.txt"""', '"""rb"""'], {}), "('/vsis3/no_useless_requests/foo.txt', 'rb')\n", (117959, 118003), False, 'from osgeo import gdal\n'), ((118027, 118085), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsis3/no_useless_requests/bar.txt"""', '"""rb"""'], {}), "('/vsis3/no_useless_requests/bar.txt', 'rb')\n", (118041, 118085), False, 'from osgeo import gdal\n'), ((118109, 118160), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/no_useless_requests/baz.txt"""'], {}), "('/vsis3/no_useless_requests/baz.txt')\n", (118122, 118160), False, 'from osgeo import gdal\n'), ((118440, 118493), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsis3/random_write/test.bin"""', '"""w+b"""'], {}), "('/vsis3/random_write/test.bin', 'w+b')\n", (118454, 118493), False, 'from osgeo import gdal\n'), ((119070, 119088), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (119085, 119088), False, 'from osgeo import gdal\n'), ((119653, 119677), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (119675, 119677), False, 'import gdaltest\n'), ((120050, 120103), 'gdaltest.config_option', 'gdaltest.config_option', (['"""VSIS3_CHUNK_SIZE_BYTES"""', '"""1"""'], {}), "('VSIS3_CHUNK_SIZE_BYTES', '1')\n", (120072, 120103), False, 'import gdaltest\n'), ((120121, 120174), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsis3/random_write/test.bin"""', '"""w+b"""'], {}), "('/vsis3/random_write/test.bin', 'w+b')\n", (120135, 120174), False, 'from osgeo import gdal\n'), ((120411, 120435), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (120433, 120435), False, 'import gdaltest\n'), ((121123, 121162), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (121153, 121162), False, 'import webserver\n'), ((128700, 128724), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (128722, 128724), False, 'import gdaltest\n'), ((128834, 128856), 'osgeo.gdal.GetLastErrorMsg', 'gdal.GetLastErrorMsg', ([], {}), '()\n', (128854, 128856), False, 'from osgeo import gdal\n'), ((136537, 136561), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (136559, 136561), False, 'import gdaltest\n'), ((137492, 137531), 'osgeo.gdal.GetConfigOption', 'gdal.GetConfigOption', (['"""USERPROFILE"""', '""""""'], {}), "('USERPROFILE', '')\n", (137512, 137531), False, 'from osgeo import gdal\n'), ((137717, 137762), 'osgeo.gdal.GetConfigOption', 'gdal.GetConfigOption', (['"""AWS_SECRET_ACCESS_KEY"""'], {}), "('AWS_SECRET_ACCESS_KEY')\n", (137737, 137762), False, 'from osgeo import gdal\n'), ((137784, 137828), 'pytest.skip', 'pytest.skip', (['"""Missing AWS_SECRET_ACCESS_KEY"""'], {}), "('Missing 
AWS_SECRET_ACCESS_KEY')\n", (137795, 137828), False, 'import pytest\n'), ((138166, 138192), 'stat.S_ISDIR', 'stat.S_ISDIR', (['statres.mode'], {}), '(statres.mode)\n', (138178, 138192), False, 'import stat\n'), ((140059, 140117), 'osgeo.gdal.Rename', 'gdal.Rename', (["(subpath + '/test.txt')", "(subpath + '/test2.txt')"], {}), "(subpath + '/test.txt', subpath + '/test2.txt')\n", (140070, 140117), False, 'from osgeo import gdal\n'), ((141302, 141326), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (141324, 141326), False, 'import gdaltest\n'), ((141340, 141363), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(1)', 'f'], {}), '(1, 1, f)\n', (141354, 141363), False, 'from osgeo import gdal\n'), ((141406, 141431), 'osgeo.gdal.VSIGetLastErrorMsg', 'gdal.VSIGetLastErrorMsg', ([], {}), '()\n', (141429, 141431), False, 'from osgeo import gdal\n'), ((10779, 10802), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(4)', 'f'], {}), '(1, 4, f)\n', (10793, 10802), False, 'from osgeo import gdal\n'), ((11186, 11209), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(4)', 'f'], {}), '(1, 4, f)\n', (11200, 11209), False, 'from osgeo import gdal\n'), ((18181, 18204), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(4)', 'f'], {}), '(1, 4, f)\n', (18195, 18204), False, 'from osgeo import gdal\n'), ((18691, 18714), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(4)', 'f'], {}), '(1, 4, f)\n', (18705, 18714), False, 'from osgeo import gdal\n'), ((25341, 25365), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (25363, 25365), False, 'import gdaltest\n'), ((25571, 25589), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (25586, 25589), False, 'from osgeo import gdal\n'), ((27943, 27966), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(3)', 'f'], {}), '(1, 3, f)\n', (27957, 27966), False, 'from osgeo import gdal\n'), ((28564, 28587), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(3)', 'f'], {}), '(1, 3, f)\n', (28578, 28587), False, 'from osgeo import gdal\n'), ((39704, 39743), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (39734, 39743), False, 'import webserver\n'), ((39949, 39967), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (39964, 39967), False, 'from osgeo import gdal\n'), ((40172, 40211), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (40202, 40211), False, 'import webserver\n'), ((40483, 40522), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (40513, 40522), False, 'import webserver\n'), ((40860, 40899), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (40890, 40899), False, 'import webserver\n'), ((41105, 41123), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (41120, 41123), False, 'from osgeo import gdal\n'), ((42462, 42501), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (42492, 42501), False, 'import webserver\n'), ((42707, 42725), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (42722, 42725), False, 'from osgeo import gdal\n'), ((42854, 42893), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (42884, 42893), False, 'import webserver\n'), ((43099, 43117), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (43114, 
43117), False, 'from osgeo import gdal\n'), ((53095, 53149), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/s3_fake_bucket3/empty_file.bin"""'], {}), "('/vsis3/s3_fake_bucket3/empty_file.bin')\n", (53108, 53149), False, 'from osgeo import gdal\n'), ((54094, 54148), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/s3_fake_bucket3/empty_file.bin"""'], {}), "('/vsis3/s3_fake_bucket3/empty_file.bin')\n", (54107, 54148), False, 'from osgeo import gdal\n'), ((55450, 55479), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (55477, 55479), False, 'import webserver\n'), ((55678, 55701), 'osgeo.gdal.VSIFSeekL', 'gdal.VSIFSeekL', (['f', '(0)', '(1)'], {}), '(f, 0, 1)\n', (55692, 55701), False, 'from osgeo import gdal\n'), ((55726, 55749), 'osgeo.gdal.VSIFSeekL', 'gdal.VSIFSeekL', (['f', '(0)', '(2)'], {}), '(f, 0, 2)\n', (55740, 55749), False, 'from osgeo import gdal\n'), ((55774, 55805), 'osgeo.gdal.VSIFWriteL', 'gdal.VSIFWriteL', (['"""foo"""', '(1)', '(3)', 'f'], {}), "('foo', 1, 3, f)\n", (55789, 55805), False, 'from osgeo import gdal\n'), ((55894, 55925), 'osgeo.gdal.VSIFWriteL', 'gdal.VSIFWriteL', (['"""bar"""', '(1)', '(3)', 'f'], {}), "('bar', 1, 3, f)\n", (55909, 55925), False, 'from osgeo import gdal\n'), ((56517, 56581), 'sys.stderr.write', 'sys.stderr.write', (["('Did not get expected content: %s\\n' % content)"], {}), "('Did not get expected content: %s\\n' % content)\n", (56533, 56581), False, 'import sys\n'), ((61153, 61182), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (61180, 61182), False, 'import webserver\n'), ((61319, 61350), 'osgeo.gdal.VSIFWriteL', 'gdal.VSIFWriteL', (['"""foo"""', '(1)', '(3)', 'f'], {}), "('foo', 1, 3, f)\n", (61334, 61350), False, 'from osgeo import gdal\n'), ((61939, 62003), 'sys.stderr.write', 'sys.stderr.write', (["('Did not get expected content: %s\\n' % content)"], {}), "('Did not get expected content: %s\\n' % content)\n", (61955, 62003), False, 'import sys\n'), ((62509, 62548), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (62539, 62548), False, 'import webserver\n'), ((62566, 62584), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (62581, 62584), False, 'from osgeo import gdal\n'), ((63180, 63232), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/s3_delete_bucket/delete_file"""'], {}), "('/vsis3/s3_delete_bucket/delete_file')\n", (63193, 63232), False, 'from osgeo import gdal\n'), ((63353, 63405), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/s3_delete_bucket/delete_file"""'], {}), "('/vsis3/s3_delete_bucket/delete_file')\n", (63366, 63405), False, 'from osgeo import gdal\n'), ((71626, 71684), 'osgeo.gdal.RmdirRecursive', 'gdal.RmdirRecursive', (['"""/vsis3/test_rmdir_recursive/somedir"""'], {}), "('/vsis3/test_rmdir_recursive/somedir')\n", (71645, 71684), False, 'from osgeo import gdal\n'), ((72012, 72041), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (72039, 72041), False, 'import webserver\n'), ((77076, 77123), 'gdaltest.config_option', 'gdaltest.config_option', (['"""VSIS3_CHUNK_SIZE"""', '"""1"""'], {}), "('VSIS3_CHUNK_SIZE', '1')\n", (77098, 77123), False, 'import gdaltest\n'), ((77153, 77183), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['filename', '"""wb"""'], {}), "(filename, 'wb')\n", (77167, 77183), False, 'from osgeo import gdal\n'), ((77234, 77258), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (77256, 77258), False, 
'import gdaltest\n'), ((77282, 77321), 'osgeo.gdal.VSIFWriteL', 'gdal.VSIFWriteL', (['big_buffer', '(1)', 'size', 'f'], {}), '(big_buffer, 1, size, f)\n', (77297, 77321), False, 'from osgeo import gdal\n'), ((77430, 77452), 'osgeo.gdal.GetLastErrorMsg', 'gdal.GetLastErrorMsg', ([], {}), '()\n', (77450, 77452), False, 'from osgeo import gdal\n'), ((78664, 78711), 'gdaltest.config_option', 'gdaltest.config_option', (['"""VSIS3_CHUNK_SIZE"""', '"""1"""'], {}), "('VSIS3_CHUNK_SIZE', '1')\n", (78686, 78711), False, 'import gdaltest\n'), ((78741, 78771), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['filename', '"""wb"""'], {}), "(filename, 'wb')\n", (78755, 78771), False, 'from osgeo import gdal\n'), ((78832, 78856), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (78854, 78856), False, 'import gdaltest\n'), ((78880, 78919), 'osgeo.gdal.VSIFWriteL', 'gdal.VSIFWriteL', (['big_buffer', '(1)', 'size', 'f'], {}), '(big_buffer, 1, size, f)\n', (78895, 78919), False, 'from osgeo import gdal\n'), ((79038, 79060), 'osgeo.gdal.GetLastErrorMsg', 'gdal.GetLastErrorMsg', ([], {}), '()\n', (79058, 79060), False, 'from osgeo import gdal\n'), ((81512, 81536), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (81534, 81536), False, 'import gdaltest\n'), ((81554, 81572), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (81569, 81572), False, 'from osgeo import gdal\n'), ((81592, 81614), 'osgeo.gdal.GetLastErrorMsg', 'gdal.GetLastErrorMsg', ([], {}), '()\n', (81612, 81614), False, 'from osgeo import gdal\n'), ((82159, 82220), 'osgeo.gdal.VSIFOpenL', 'gdal.VSIFOpenL', (['"""/vsis3/s3_fake_bucket4/large_file.bin"""', '"""wb"""'], {}), "('/vsis3/s3_fake_bucket4/large_file.bin', 'wb')\n", (82173, 82220), False, 'from osgeo import gdal\n'), ((83231, 83270), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (83261, 83270), False, 'import webserver\n'), ((83294, 83333), 'osgeo.gdal.VSIFWriteL', 'gdal.VSIFWriteL', (['big_buffer', '(1)', 'size', 'f'], {}), '(big_buffer, 1, size, f)\n', (83309, 83333), False, 'from osgeo import gdal\n'), ((83958, 83997), 'webserver.install_http_handler', 'webserver.install_http_handler', (['handler'], {}), '(handler)\n', (83988, 83997), False, 'import webserver\n'), ((84015, 84033), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (84030, 84033), False, 'from osgeo import gdal\n'), ((87580, 87641), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/s3_bucket_test_dir_stat/test_dir_stat"""'], {}), "('/vsis3/s3_bucket_test_dir_stat/test_dir_stat')\n", (87593, 87641), False, 'from osgeo import gdal\n'), ((89382, 89446), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/s3_bucket_test_dir_stat_2/test_dir_stat/"""'], {}), "('/vsis3/s3_bucket_test_dir_stat_2/test_dir_stat/')\n", (89395, 89446), False, 'from osgeo import gdal\n'), ((91689, 91726), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/vsis3_8/test/"""'], {}), "('/vsis3/vsis3_8/test/')\n", (91702, 91726), False, 'from osgeo import gdal\n'), ((92179, 92235), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/i_do/not/exist"""', '"""/vsis3/"""'], {'options': 'options'}), "('/i_do/not/exist', '/vsis3/', options=options)\n", (92188, 92235), False, 'from osgeo import gdal\n'), ((92582, 92645), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""vsifile.py"""', '"""/vsis3/do_not/exist"""'], {'options': 'options'}), "('vsifile.py', '/vsis3/do_not/exist', options=options)\n", (92591, 92645), False, 'from osgeo import gdal\n'), ((113753, 
113777), 'gdaltest.error_handler', 'gdaltest.error_handler', ([], {}), '()\n', (113775, 113777), False, 'import gdaltest\n'), ((118764, 118787), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(3)', '(1)', 'f'], {}), '(3, 1, f)\n', (118778, 118787), False, 'from osgeo import gdal\n'), ((119698, 119716), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (119713, 119716), False, 'from osgeo import gdal\n'), ((120460, 120478), 'osgeo.gdal.VSIFCloseL', 'gdal.VSIFCloseL', (['f'], {}), '(f)\n', (120475, 120478), False, 'from osgeo import gdal\n'), ((122497, 122520), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(4)', 'f'], {}), '(1, 4, f)\n', (122511, 122520), False, 'from osgeo import gdal\n'), ((123720, 123743), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(4)', 'f'], {}), '(1, 4, f)\n', (123734, 123743), False, 'from osgeo import gdal\n'), ((125329, 125352), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(4)', 'f'], {}), '(1, 4, f)\n', (125343, 125352), False, 'from osgeo import gdal\n'), ((127163, 127186), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(4)', 'f'], {}), '(1, 4, f)\n', (127177, 127186), False, 'from osgeo import gdal\n'), ((128878, 128901), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(4)', 'f'], {}), '(1, 4, f)\n', (128892, 128901), False, 'from osgeo import gdal\n'), ((130952, 130975), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(4)', 'f'], {}), '(1, 4, f)\n', (130966, 130975), False, 'from osgeo import gdal\n'), ((131419, 131442), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(4)', 'f'], {}), '(1, 4, f)\n', (131433, 131442), False, 'from osgeo import gdal\n'), ((133418, 133441), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(4)', 'f'], {}), '(1, 4, f)\n', (133432, 133441), False, 'from osgeo import gdal\n'), ((135984, 136007), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(4)', 'f'], {}), '(1, 4, f)\n', (135998, 136007), False, 'from osgeo import gdal\n'), ((137842, 137883), 'osgeo.gdal.GetConfigOption', 'gdal.GetConfigOption', (['"""AWS_ACCESS_KEY_ID"""'], {}), "('AWS_ACCESS_KEY_ID')\n", (137862, 137883), False, 'from osgeo import gdal\n'), ((137905, 137945), 'pytest.skip', 'pytest.skip', (['"""Missing AWS_ACCESS_KEY_ID"""'], {}), "('Missing AWS_ACCESS_KEY_ID')\n", (137916, 137945), False, 'import pytest\n'), ((139945, 139968), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(5)', 'f'], {}), '(1, 5, f)\n', (139959, 139968), False, 'from osgeo import gdal\n'), ((140225, 140248), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(5)', 'f'], {}), '(1, 5, f)\n', (140239, 140248), False, 'from osgeo import gdal\n'), ((141528, 141563), 'osgeo.gdal.GetConfigOption', 'gdal.GetConfigOption', (['"""S3_RESOURCE"""'], {}), "('S3_RESOURCE')\n", (141548, 141563), False, 'from osgeo import gdal\n'), ((6158, 6183), 'osgeo.gdal.VSIGetLastErrorMsg', 'gdal.VSIGetLastErrorMsg', ([], {}), '()\n', (6181, 6183), False, 'from osgeo import gdal\n'), ((6356, 6381), 'osgeo.gdal.VSIGetLastErrorMsg', 'gdal.VSIGetLastErrorMsg', ([], {}), '()\n', (6379, 6381), False, 'from osgeo import gdal\n'), ((6652, 6677), 'osgeo.gdal.VSIGetLastErrorMsg', 'gdal.VSIGetLastErrorMsg', ([], {}), '()\n', (6675, 6677), False, 'from osgeo import gdal\n'), ((13039, 13062), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(4)', 'f'], {}), '(1, 4, f)\n', (13053, 13062), False, 'from osgeo import gdal\n'), ((19923, 19948), 'osgeo.gdal.VSIGetLastErrorMsg', 'gdal.VSIGetLastErrorMsg', ([], {}), '()\n', (19946, 19948), False, 'from osgeo import gdal\n'), 
((20576, 20601), 'osgeo.gdal.VSIGetLastErrorMsg', 'gdal.VSIGetLastErrorMsg', ([], {}), '()\n', (20599, 20601), False, 'from osgeo import gdal\n'), ((21232, 21257), 'osgeo.gdal.VSIGetLastErrorMsg', 'gdal.VSIGetLastErrorMsg', ([], {}), '()\n', (21255, 21257), False, 'from osgeo import gdal\n'), ((22000, 22025), 'osgeo.gdal.VSIGetLastErrorMsg', 'gdal.VSIGetLastErrorMsg', ([], {}), '()\n', (22023, 22025), False, 'from osgeo import gdal\n'), ((22738, 22763), 'osgeo.gdal.VSIGetLastErrorMsg', 'gdal.VSIGetLastErrorMsg', ([], {}), '()\n', (22761, 22763), False, 'from osgeo import gdal\n'), ((23424, 23449), 'osgeo.gdal.VSIGetLastErrorMsg', 'gdal.VSIGetLastErrorMsg', ([], {}), '()\n', (23447, 23449), False, 'from osgeo import gdal\n'), ((40236, 40282), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/s3_non_cached/test.txt"""'], {}), "('/vsis3/s3_non_cached/test.txt')\n", (40249, 40282), False, 'from osgeo import gdal\n'), ((40547, 40593), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/s3_non_cached/test.txt"""'], {}), "('/vsis3/s3_non_cached/test.txt')\n", (40560, 40593), False, 'from osgeo import gdal\n'), ((40696, 40713), 'pytest.fail', 'pytest.fail', (['data'], {}), '(data)\n', (40707, 40713), False, 'import pytest\n'), ((55632, 55649), 'osgeo.gdal.VSIFTellL', 'gdal.VSIFTellL', (['f'], {}), '(f)\n', (55646, 55649), False, 'from osgeo import gdal\n'), ((55848, 55865), 'osgeo.gdal.VSIFTellL', 'gdal.VSIFTellL', (['f'], {}), '(f)\n', (55862, 55865), False, 'from osgeo import gdal\n'), ((59895, 59959), 'sys.stderr.write', 'sys.stderr.write', (["('Did not get expected content: %s\\n' % content)"], {}), "('Did not get expected content: %s\\n' % content)\n", (59911, 59959), False, 'import sys\n'), ((82107, 82136), 'webserver.SequentialHandler', 'webserver.SequentialHandler', ([], {}), '()\n', (82134, 82136), False, 'import webserver\n'), ((91523, 91559), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['"""/vsis3/vsis3_8/test"""'], {}), "('/vsis3/vsis3_8/test')\n", (91536, 91559), False, 'from osgeo import gdal\n'), ((113806, 113900), 'osgeo.gdal.Sync', 'gdal.Sync', (['"""/vsimem/test"""', '"""/vsis3/test_bucket"""'], {'options': "['NUM_THREADS=1', 'CHUNK_SIZE=3']"}), "('/vsimem/test', '/vsis3/test_bucket', options=['NUM_THREADS=1',\n 'CHUNK_SIZE=3'])\n", (113815, 113900), False, 'from osgeo import gdal\n'), ((121181, 121210), 'osgeo.gdal.GetDriverByName', 'gdal.GetDriverByName', (['"""GTiff"""'], {}), "('GTiff')\n", (121201, 121210), False, 'from osgeo import gdal\n'), ((138495, 138517), 'osgeo.gdal.VSIStatL', 'gdal.VSIStatL', (['subpath'], {}), '(subpath)\n', (138508, 138517), False, 'from osgeo import gdal\n'), ((25515, 25538), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(3)', 'f'], {}), '(1, 3, f)\n', (25529, 25538), False, 'from osgeo import gdal\n'), ((39893, 39916), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(3)', 'f'], {}), '(1, 3, f)\n', (39907, 39916), False, 'from osgeo import gdal\n'), ((41049, 41072), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(4)', 'f'], {}), '(1, 4, f)\n', (41063, 41072), False, 'from osgeo import gdal\n'), ((42651, 42674), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(3)', 'f'], {}), '(1, 3, f)\n', (42665, 42674), False, 'from osgeo import gdal\n'), ((43043, 43066), 'osgeo.gdal.VSIFReadL', 'gdal.VSIFReadL', (['(1)', '(4)', 'f'], {}), '(1, 4, f)\n', (43057, 43066), False, 'from osgeo import gdal\n')] |
bugra-yilmaz/adventofcode2021 | day06/part1.py | 136cb1d4fba42af4eea934a73714c93710c8741e | import os.path
from collections import Counter
import pytest
INPUT_TXT = os.path.join(os.path.dirname(__file__), 'input.txt')
def compute(s: str) -> int:
lines = s.splitlines()
numbers = Counter(int(f) for f in lines[0].split(","))
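    # Count fish by internal timer (0-8) instead of listing each fish, so the 80-day simulation stays cheap.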
for d in range(80):
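        # Timer-0 fish each spawn a new fish at timer 8 and restart themselves at timer 6.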
numbers2 = Counter({8: numbers[0], 6: numbers[0]})
for k, v in numbers.items():
if k >= 1:
numbers2[k - 1] += v
numbers = numbers2
return sum(numbers.values())
INPUT_S = '''\
3,4,3,1,2
'''
EXPECTED = 5934
@pytest.mark.parametrize(
('input_s', 'expected'),
(
(INPUT_S, EXPECTED),
),
)
def test(input_s: str, expected: int) -> None:
assert compute(input_s) == expected
def main() -> int:
with open(INPUT_TXT, "r") as f:
print(compute(f.read()))
return 0
if __name__ == '__main__':
raise SystemExit(main())
| [((535, 607), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('input_s', 'expected')", '((INPUT_S, EXPECTED),)'], {}), "(('input_s', 'expected'), ((INPUT_S, EXPECTED),))\n", (558, 607), False, 'import pytest\n'), ((288, 331), 'collections.Counter', 'Counter', (['{(8): numbers[0], (6): numbers[0]}'], {}), '({(8): numbers[0], (6): numbers[0]})\n', (295, 331), False, 'from collections import Counter\n')] |
gustavomazevedo/tbackup-client | functional_tests.py | eb2fdf75eff7abf17c9bce12920de793ba760f61 | from selenium import webdriver
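# Minimal functional (smoke) test: drive a real Firefox instance against the local dev server.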
browser = webdriver.Firefox()
browser.get('http://localhost:8000')
assert 'Django' in browser.title | [((42, 61), 'selenium.webdriver.Firefox', 'webdriver.Firefox', ([], {}), '()\n', (59, 61), False, 'from selenium import webdriver\n')] |
clarkfitzg/sta141c | examples/first_char_last_column.py | 129704ba0952a4b80f9b093dcfa49f49f37b052d | #!/usr/bin/env python3
"""
For the last column, print only the first character.
Usage:
$ printf "100,200\n0,\n" | python3 first_char_last_column.py
Should print "100,2\n0,"
"""
import csv
from sys import stdin, stdout
def main():
reader = csv.reader(stdin)
writer = csv.writer(stdout)
for row in reader:
try:
row[-1] = row[-1][0]
except IndexError:
# Python: Better to ask forgiveness than permission
# Alternative: Look before you leap
pass
writer.writerow(row)
if __name__ == "__main__":
main()
| [((252, 269), 'csv.reader', 'csv.reader', (['stdin'], {}), '(stdin)\n', (262, 269), False, 'import csv\n'), ((283, 301), 'csv.writer', 'csv.writer', (['stdout'], {}), '(stdout)\n', (293, 301), False, 'import csv\n')] |
reloadware/stickybeak | env_ci.py | 8ac52a80849a3098fb6b2f47115970a734a73c14 | from pathlib import Path
root = Path(__file__).parent.absolute()
import envo
envo.add_source_roots([root])
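# Register the project root with envo before importing the shared parent environment below.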
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple
from envo import Env, Namespace, env_var, logger, run
from env_comm import StickybeakCommEnv as ParentEnv
p = Namespace("p")
class StickybeakCiEnv(ParentEnv):
class Meta(ParentEnv.Meta):
stage: str = "ci"
emoji: str = "⚙"
load_env_vars = True
class Environ(ParentEnv.Environ):
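        # PyPI credentials are read from environment variables (load_env_vars above) and used by publish().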
pypi_username: Optional[str] = env_var(raw=True)
pypi_password: Optional[str] = env_var(raw=True)
e: Environ
def init(self) -> None:
super().init()
@p.command
def bootstrap(self, test_apps=True) -> None:
super().bootstrap(test_apps)
@p.command
def test(self) -> None:
run("pytest --reruns 2 -v tests")
@p.command
def build(self) -> None:
run("poetry build")
@p.command
def publish(self) -> None:
run(f'poetry publish --username "{self.e.pypi_username}" --password "{self.e.pypi_password}"', verbose=False)
@p.command
def rstcheck(self) -> None:
pass
# run("rstcheck README.rst | tee ./workspace/rstcheck.txt")
@p.command
def flake(self) -> None:
pass
# run("flake8 . | tee ./workspace/flake8.txt")
@p.command
def check_black(self) -> None:
run("black --check .")
@p.command
def check_isort(self) -> None:
run("black --check .")
@p.command
def mypy(self) -> None:
pass
run("mypy .")
@p.command
def generate_version(self) -> None:
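        # Read the package version from pyproject.toml and mirror it into stickybeak/__version__.py.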
import toml
config = toml.load(str(self.meta.root / "pyproject.toml"))
version: str = config["tool"]["poetry"]["version"]
version_file = self.meta.root / "stickybeak/__version__.py"
Path(version_file).touch()
version_file.write_text(f'__version__ = "{version}"\n')
ThisEnv = StickybeakCiEnv
| [((80, 109), 'envo.add_source_roots', 'envo.add_source_roots', (['[root]'], {}), '([root])\n', (101, 109), False, 'import envo\n'), ((301, 315), 'envo.Namespace', 'Namespace', (['"""p"""'], {}), "('p')\n", (310, 315), False, 'from envo import Env, Namespace, env_var, logger, run\n'), ((542, 559), 'envo.env_var', 'env_var', ([], {'raw': '(True)'}), '(raw=True)\n', (549, 559), False, 'from envo import Env, Namespace, env_var, logger, run\n'), ((599, 616), 'envo.env_var', 'env_var', ([], {'raw': '(True)'}), '(raw=True)\n', (606, 616), False, 'from envo import Env, Namespace, env_var, logger, run\n'), ((839, 872), 'envo.run', 'run', (['"""pytest --reruns 2 -v tests"""'], {}), "('pytest --reruns 2 -v tests')\n", (842, 872), False, 'from envo import Env, Namespace, env_var, logger, run\n'), ((926, 945), 'envo.run', 'run', (['"""poetry build"""'], {}), "('poetry build')\n", (929, 945), False, 'from envo import Env, Namespace, env_var, logger, run\n'), ((1001, 1115), 'envo.run', 'run', (['f"""poetry publish --username "{self.e.pypi_username}" --password "{self.e.pypi_password}\\""""'], {'verbose': '(False)'}), '(f\'poetry publish --username "{self.e.pypi_username}" --password "{self.e.pypi_password}"\'\n , verbose=False)\n', (1004, 1115), False, 'from envo import Env, Namespace, env_var, logger, run\n'), ((1412, 1434), 'envo.run', 'run', (['"""black --check ."""'], {}), "('black --check .')\n", (1415, 1434), False, 'from envo import Env, Namespace, env_var, logger, run\n'), ((1494, 1516), 'envo.run', 'run', (['"""black --check ."""'], {}), "('black --check .')\n", (1497, 1516), False, 'from envo import Env, Namespace, env_var, logger, run\n'), ((1582, 1595), 'envo.run', 'run', (['"""mypy ."""'], {}), "('mypy .')\n", (1585, 1595), False, 'from envo import Env, Namespace, env_var, logger, run\n'), ((33, 47), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (37, 47), False, 'from pathlib import Path\n'), ((1876, 1894), 'pathlib.Path', 'Path', (['version_file'], {}), '(version_file)\n', (1880, 1894), False, 'from pathlib import Path\n')] |
iyedb/boost_asio_zeromq | zmq_srv.py | 63110c18540c8303ac29d574f25cba234a00a22d | from __future__ import print_function
import zmq
import time
ADDR='tcp://127.0.0.1:11155'
ctx = zmq.Context()
srv = ctx.socket(zmq.REP)
srv.bind(ADDR)
#srv.setsockopt(zmq.RCVTIMEO, 3000);
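# REP server loop: block on recv(), log the client's message, reply, then pause two seconds.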
while True:
try:
msg = srv.recv()
except Exception as e:
    print('zmq socket recv timed out:', e)
else:
print('client says: %s' % msg)
srv.send('hi from server')
time.sleep(2)
| [((98, 111), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (109, 111), False, 'import zmq\n'), ((380, 393), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (390, 393), False, 'import time\n')] |
mmaryada27/mypy | mypy/server/aststrip.py | 39103273d705fe45a55c4879779a0d5567f01876 | """Strip/reset AST in-place to match state after semantic analysis pass 1.
Fine-grained incremental mode reruns semantic analysis (passes 2 and 3)
and type checking for *existing* AST nodes (targets) when changes are
propagated using fine-grained dependencies. AST node attributes are
often changed during semantic analysis passes 2 and 3, and running
semantic analysis again on those nodes would produce incorrect
results, since these passes aren't idempotent. This pass resets AST
nodes to reflect the state after semantic analysis pass 1, so that we
can rerun semantic analysis.
(The above is in contrast to behavior with modules that have source code
changes, for which we reparse the entire module and reconstruct a fresh
AST. No stripping is required in this case. Both modes of operation should
have the same outcome.)
Notes:
* This is currently pretty fragile, as we must carefully undo whatever
changes can be made in semantic analysis passes 2 and 3, including changes
to symbol tables.
* We reuse existing AST nodes because it makes it relatively straightforward
to reprocess only a single target within a module efficiently. If there
was a way to parse a single target within a file, in time proportional to
the size of the target, we'd rather create fresh AST nodes than strip them.
Alas, no such facility exists and building it is non-trivial.
* Currently we don't actually reset all changes, but only those known to affect
non-idempotent semantic analysis behavior.
TODO: It would be more principled and less fragile to reset everything
changed in semantic analysis pass 2 and later.
* Reprocessing may recreate AST nodes (such as Var nodes, and TypeInfo nodes
created with assignment statements) that will get different identities from
the original AST. Thus running an AST merge is necessary after stripping,
even though some identities are preserved.
"""
import contextlib
from typing import Union, Iterator, Optional
from mypy.nodes import (
Node, FuncDef, NameExpr, MemberExpr, RefExpr, MypyFile, FuncItem, ClassDef, AssignmentStmt,
ImportFrom, Import, TypeInfo, SymbolTable, Var, CallExpr, Decorator, OverloadedFuncDef,
SuperExpr, UNBOUND_IMPORTED, GDEF, MDEF, IndexExpr
)
from mypy.traverser import TraverserVisitor
def strip_target(node: Union[MypyFile, FuncItem, OverloadedFuncDef]) -> None:
"""Reset a fine-grained incremental target to state after semantic analysis pass 1.
NOTE: Currently we opportunistically only reset changes that are known to otherwise
cause trouble.
"""
visitor = NodeStripVisitor()
if isinstance(node, MypyFile):
visitor.strip_file_top_level(node)
else:
node.accept(visitor)
class NodeStripVisitor(TraverserVisitor):
def __init__(self) -> None:
self.type = None # type: Optional[TypeInfo]
self.names = None # type: Optional[SymbolTable]
self.is_class_body = False
# By default, process function definitions. If False, don't -- this is used for
# processing module top levels.
self.recurse_into_functions = True
    def strip_file_top_level(self, file_node: MypyFile) -> None:
        """Strip a module top-level (don't recurse into functions)."""
self.names = file_node.names
self.recurse_into_functions = False
file_node.accept(self)
def visit_class_def(self, node: ClassDef) -> None:
"""Strip class body and type info, but don't strip methods."""
node.info.type_vars = []
node.info.bases = []
node.info.abstract_attributes = []
node.info.mro = []
node.info.add_type_vars()
node.info.tuple_type = None
node.info.typeddict_type = None
node.info._cache = set()
node.info._cache_proper = set()
node.base_type_exprs.extend(node.removed_base_type_exprs)
node.removed_base_type_exprs = []
with self.enter_class(node.info):
super().visit_class_def(node)
def visit_func_def(self, node: FuncDef) -> None:
if not self.recurse_into_functions:
return
node.expanded = []
node.type = node.unanalyzed_type
with self.enter_method(node.info) if node.info else nothing():
super().visit_func_def(node)
def visit_decorator(self, node: Decorator) -> None:
node.var.type = None
for expr in node.decorators:
expr.accept(self)
if self.recurse_into_functions:
node.func.accept(self)
def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> None:
if not self.recurse_into_functions:
return
if node.impl:
# Revert change made during semantic analysis pass 2.
assert node.items[-1] is not node.impl
node.items.append(node.impl)
super().visit_overloaded_func_def(node)
@contextlib.contextmanager
def enter_class(self, info: TypeInfo) -> Iterator[None]:
# TODO: Update and restore self.names
old_type = self.type
old_is_class_body = self.is_class_body
self.type = info
self.is_class_body = True
yield
self.type = old_type
self.is_class_body = old_is_class_body
@contextlib.contextmanager
def enter_method(self, info: TypeInfo) -> Iterator[None]:
# TODO: Update and restore self.names
old_type = self.type
old_is_class_body = self.is_class_body
self.type = info
self.is_class_body = False
yield
self.type = old_type
self.is_class_body = old_is_class_body
def visit_assignment_stmt(self, node: AssignmentStmt) -> None:
node.type = node.unanalyzed_type
if self.type and not self.is_class_body:
# TODO: Handle multiple assignment
if len(node.lvalues) == 1:
lvalue = node.lvalues[0]
if isinstance(lvalue, MemberExpr) and lvalue.is_new_def:
# Remove defined attribute from the class symbol table. If is_new_def is
# true for a MemberExpr, we know that it must be an assignment through
# self, since only those can define new attributes.
del self.type.names[lvalue.name]
super().visit_assignment_stmt(node)
def visit_import_from(self, node: ImportFrom) -> None:
if node.assignments:
node.assignments = []
else:
if self.names:
# Reset entries in the symbol table. This is necessary since
# otherwise the semantic analyzer will think that the import
# assigns to an existing name instead of defining a new one.
for name, as_name in node.names:
imported_name = as_name or name
symnode = self.names[imported_name]
symnode.kind = UNBOUND_IMPORTED
symnode.node = None
def visit_import(self, node: Import) -> None:
if node.assignments:
node.assignments = []
else:
if self.names:
# Reset entries in the symbol table. This is necessary since
# otherwise the semantic analyzer will think that the import
# assigns to an existing name instead of defining a new one.
for name, as_name in node.ids:
imported_name = as_name or name
initial = imported_name.split('.')[0]
symnode = self.names[initial]
symnode.kind = UNBOUND_IMPORTED
symnode.node = None
def visit_name_expr(self, node: NameExpr) -> None:
# Global assignments are processed in semantic analysis pass 1, and we
# only want to strip changes made in passes 2 or later.
if not (node.kind == GDEF and node.is_new_def):
            # Remove defined attributes so that they can be recreated during semantic analysis.
if node.kind == MDEF and node.is_new_def:
self.strip_class_attr(node.name)
self.strip_ref_expr(node)
def visit_member_expr(self, node: MemberExpr) -> None:
self.strip_ref_expr(node)
        # These need to be cleared for member expressions but not for other RefExprs since
        # these can change based on changes in a base class.
node.is_new_def = False
node.is_inferred_def = False
if self.is_duplicate_attribute_def(node):
# This is marked as an instance variable definition but a base class
# defines an attribute with the same name, and we can't have
# multiple definitions for an attribute. Defer to the base class
# definition.
self.strip_class_attr(node.name)
node.def_var = None
super().visit_member_expr(node)
def visit_index_expr(self, node: IndexExpr) -> None:
node.analyzed = None # was a type alias
super().visit_index_expr(node)
def strip_class_attr(self, name: str) -> None:
if self.type is not None:
del self.type.names[name]
def is_duplicate_attribute_def(self, node: MemberExpr) -> bool:
if not node.is_inferred_def:
return False
assert self.type is not None, "Internal error: Member defined outside class"
if node.name not in self.type.names:
return False
return any(info.get(node.name) is not None for info in self.type.mro[1:])
def strip_ref_expr(self, node: RefExpr) -> None:
node.kind = None
node.node = None
node.fullname = None
node.is_new_def = False
node.is_inferred_def = False
def visit_call_expr(self, node: CallExpr) -> None:
node.analyzed = None
super().visit_call_expr(node)
def visit_super_expr(self, node: SuperExpr) -> None:
node.info = None
super().visit_super_expr(node)
# TODO: handle more node types
def is_self_member_ref(memberexpr: MemberExpr) -> bool:
"""Does memberexpr refer to an attribute of self?"""
# TODO: Merge with is_self_member_ref in semanal.py.
if not isinstance(memberexpr.expr, NameExpr):
return False
node = memberexpr.expr.node
return isinstance(node, Var) and node.is_self
@contextlib.contextmanager
def nothing() -> Iterator[None]:
yield
| [] |
BreederBai/rt-thread | bsp/nrf5x/tools/sdk_dist.py | 53ed0314982556dfa9c5db75d4f3e02485d16ab5 | import os
import sys
import shutil
cwd_path = os.getcwd()
sys.path.append(os.path.join(os.path.dirname(cwd_path), 'rt-thread', 'tools'))
# BSP dist function
def dist_do_building(BSP_ROOT, dist_dir):
from mkdist import bsp_copy_files
import rtconfig
library_dir = os.path.join(dist_dir, 'libraries')
print("=> copy nrf52 bsp libraries")
library_path = os.path.join(os.path.dirname(BSP_ROOT), 'libraries')
bsp_copy_files(library_path, library_dir)
| [((46, 57), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (55, 57), False, 'import os\n'), ((278, 313), 'os.path.join', 'os.path.join', (['dist_dir', '"""libraries"""'], {}), "(dist_dir, 'libraries')\n", (290, 313), False, 'import os\n'), ((433, 474), 'mkdist.bsp_copy_files', 'bsp_copy_files', (['library_path', 'library_dir'], {}), '(library_path, library_dir)\n', (447, 474), False, 'from mkdist import bsp_copy_files\n'), ((87, 112), 'os.path.dirname', 'os.path.dirname', (['cwd_path'], {}), '(cwd_path)\n', (102, 112), False, 'import os\n'), ((388, 413), 'os.path.dirname', 'os.path.dirname', (['BSP_ROOT'], {}), '(BSP_ROOT)\n', (403, 413), False, 'import os\n')] |
kkauder/spack | lib/spack/spack/multimethod.py | 6ae8d5c380c1f42094b05d38be26b03650aafb39 | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module contains utilities for using multi-methods in
spack. You can think of multi-methods like overloaded methods --
they're methods with the same name, and we need to select a version
of the method based on some criteria. e.g., for overloaded
methods, you would select a version of the method to call based on
the types of its arguments.
In spack, multi-methods are used to ease the life of package
authors. They allow methods like install() (or other methods
called by install()) to declare multiple versions to be called when
the package is instantiated with different specs. e.g., if the
package is built with OpenMPI on x86_64, you might want to call a
different install method than if it was built for mpich2 on
BlueGene/Q. Likewise, you might want to do a different type of
install for different versions of the package.
Multi-methods provide a simple decorator-based syntax for this that
avoids overly complicated rat nests of if statements. Obviously,
depending on the scenario, regular old conditionals might be clearer,
so package authors should use their judgement.
"""
import functools
import inspect
from llnl.util.lang import caller_locals
import spack.architecture
import spack.error
from spack.spec import Spec
class MultiMethodMeta(type):
"""This allows us to track the class's dict during instantiation."""
#: saved dictionary of attrs on the class being constructed
_locals = None
@classmethod
def __prepare__(cls, name, bases, **kwargs):
"""Save the dictionary that will be used for the class namespace."""
MultiMethodMeta._locals = dict()
return MultiMethodMeta._locals
def __init__(cls, name, bases, attr_dict):
"""Clear out the cached locals dict once the class is built."""
MultiMethodMeta._locals = None
super(MultiMethodMeta, cls).__init__(name, bases, attr_dict)
class SpecMultiMethod(object):
"""This implements a multi-method for Spack specs. Packages are
instantiated with a particular spec, and you may want to
execute different versions of methods based on what the spec
looks like. For example, you might want to call a different
version of install() for one platform than you call on another.
The SpecMultiMethod class implements a callable object that
handles method dispatch. When it is called, it looks through
registered methods and their associated specs, and it tries
to find one that matches the package's spec. If it finds one
(and only one), it will call that method.
This is intended for use with decorators (see below). The
decorator (see docs below) creates SpecMultiMethods and
registers method versions with them.
To register a method, you can do something like this:
mm = SpecMultiMethod()
mm.register("^chaos_5_x86_64_ib", some_method)
The object registered needs to be a Spec or some string that
will parse to be a valid spec.
When the mm is actually called, it selects a version of the
method to call based on the sys_type of the object it is
called on.
See the docs for decorators below for more details.
"""
def __init__(self, default=None):
self.method_list = []
self.default = default
if default:
functools.update_wrapper(self, default)
def register(self, spec, method):
"""Register a version of a method for a particular spec."""
self.method_list.append((spec, method))
if not hasattr(self, '__name__'):
functools.update_wrapper(self, method)
else:
assert(self.__name__ == method.__name__)
def __get__(self, obj, objtype):
"""This makes __call__ support instance methods."""
# Method_list is a list of tuples (constraint, method)
# Here we are going to assume that we have at least one
# element in the list. The first registered function
# will be the one 'wrapped'.
wrapped_method = self.method_list[0][1]
# Call functools.wraps manually to get all the attributes
# we need to be disguised as the wrapped_method
func = functools.wraps(wrapped_method)(
functools.partial(self.__call__, obj)
)
return func
def _get_method_by_spec(self, spec):
"""Find the method of this SpecMultiMethod object that satisfies the
given spec, if one exists
"""
for condition, method in self.method_list:
if spec.satisfies(condition):
return method
return self.default or None
def __call__(self, package_self, *args, **kwargs):
"""Find the first method with a spec that matches the
package's spec. If none is found, call the default
or if there is none, then raise a NoSuchMethodError.
"""
spec_method = self._get_method_by_spec(package_self.spec)
if spec_method:
return spec_method(package_self, *args, **kwargs)
# Unwrap the MRO of `package_self by hand. Note that we can't
# use `super()` here, because using `super()` recursively
# requires us to know the class of `package_self`, as well as
# its superclasses for successive calls. We don't have that
# information within `SpecMultiMethod`, because it is not
# associated with the package class.
for cls in inspect.getmro(package_self.__class__)[1:]:
superself = cls.__dict__.get(self.__name__, None)
if isinstance(superself, SpecMultiMethod):
# Check parent multimethod for method for spec.
superself_method = superself._get_method_by_spec(
package_self.spec
)
if superself_method:
return superself_method(package_self, *args, **kwargs)
elif superself:
return superself(package_self, *args, **kwargs)
raise NoSuchMethodError(
type(package_self), self.__name__, package_self.spec,
[m[0] for m in self.method_list]
)
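# Illustrative sketch (not part of Spack): how SpecMultiMethod dispatch works.
# FakeSpec/FakePkg are hypothetical stand-ins for spack.spec.Spec and a package
# instance; only the .spec.satisfies() contract used by __call__ matters here.
def _demo_spec_dispatch():
    class FakeSpec:
        def __init__(self, tokens):
            self.tokens = tokens
        def satisfies(self, condition):
            # a real Spec does constraint solving; this stand-in just checks membership
            return condition in self.tokens
    class FakePkg:
        spec = FakeSpec({'^openmpi'})
    mm = SpecMultiMethod()
    mm.register('^openmpi', lambda self: 'configure for OpenMPI')
    mm.register('^mpich', lambda self: 'configure for MPICH')
    # Dispatch picks the first registered condition the spec satisfies.
    return mm(FakePkg())  # -> 'configure for OpenMPI'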
class when(object):
"""This annotation lets packages declare multiple versions of
methods like install() that depend on the package's spec.
For example:
.. code-block:: python
class SomePackage(Package):
...
def install(self, prefix):
# Do default install
@when('target=x86_64:')
def install(self, prefix):
# This will be executed instead of the default install if
# the package's target is in the x86_64 family.
@when('target=ppc64:')
def install(self, prefix):
# This will be executed if the package's target is in
# the ppc64 family
This allows each package to have a default version of install() AND
specialized versions for particular platforms. The version that is
    called depends on the architecture of the instantiated package.
Note that this works for methods other than install, as well. So,
if you only have part of the install that is platform specific, you
could do this:
.. code-block:: python
class SomePackage(Package):
...
# virtual dependence on MPI.
# could resolve to mpich, mpich2, OpenMPI
depends_on('mpi')
def setup(self):
# do nothing in the default case
pass
@when('^openmpi')
def setup(self):
# do something special when this is built with OpenMPI for
# its MPI implementations.
def install(self, prefix):
# Do common install stuff
self.setup()
# Do more common install stuff
Note that the default version of decorated methods must
*always* come first. Otherwise it will override all of the
platform-specific versions. There's not much we can do to get
around this because of the way decorators work.
"""
def __init__(self, condition):
if isinstance(condition, bool):
self.spec = Spec() if condition else None
else:
self.spec = Spec(condition)
def __call__(self, method):
# In Python 2, Get the first definition of the method in the
# calling scope by looking at the caller's locals. In Python 3,
# we handle this using MultiMethodMeta.__prepare__.
if MultiMethodMeta._locals is None:
MultiMethodMeta._locals = caller_locals()
# Create a multimethod with this name if there is not one already
original_method = MultiMethodMeta._locals.get(method.__name__)
if not type(original_method) == SpecMultiMethod:
original_method = SpecMultiMethod(original_method)
if self.spec is not None:
original_method.register(self.spec, method)
return original_method
class MultiMethodError(spack.error.SpackError):
"""Superclass for multimethod dispatch errors"""
def __init__(self, message):
super(MultiMethodError, self).__init__(message)
class NoSuchMethodError(spack.error.SpackError):
"""Raised when we can't find a version of a multi-method."""
def __init__(self, cls, method_name, spec, possible_specs):
super(NoSuchMethodError, self).__init__(
"Package %s does not support %s called with %s. Options are: %s"
% (cls.__name__, method_name, spec,
", ".join(str(s) for s in possible_specs)))
| [((3566, 3605), 'functools.update_wrapper', 'functools.update_wrapper', (['self', 'default'], {}), '(self, default)\n', (3590, 3605), False, 'import functools\n'), ((3816, 3854), 'functools.update_wrapper', 'functools.update_wrapper', (['self', 'method'], {}), '(self, method)\n', (3840, 3854), False, 'import functools\n'), ((4431, 4462), 'functools.wraps', 'functools.wraps', (['wrapped_method'], {}), '(wrapped_method)\n', (4446, 4462), False, 'import functools\n'), ((4476, 4513), 'functools.partial', 'functools.partial', (['self.__call__', 'obj'], {}), '(self.__call__, obj)\n', (4493, 4513), False, 'import functools\n'), ((5684, 5722), 'inspect.getmro', 'inspect.getmro', (['package_self.__class__'], {}), '(package_self.__class__)\n', (5698, 5722), False, 'import inspect\n'), ((8646, 8661), 'spack.spec.Spec', 'Spec', (['condition'], {}), '(condition)\n', (8650, 8661), False, 'from spack.spec import Spec\n'), ((8978, 8993), 'llnl.util.lang.caller_locals', 'caller_locals', ([], {}), '()\n', (8991, 8993), False, 'from llnl.util.lang import caller_locals\n'), ((8578, 8584), 'spack.spec.Spec', 'Spec', ([], {}), '()\n', (8582, 8584), False, 'from spack.spec import Spec\n')] |
meego-tablet-ux/meego-app-browser | third_party/protobuf/protobuf.gyp | 0f4ef17bd4b399c9c990a2f6ca939099495c2b9c | # Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'conditions': [
['OS!="win"', {
'variables': {
'config_h_dir':
'.', # crafted for gcc/linux.
},
}, { # else, OS=="win"
'variables': {
'config_h_dir':
'vsprojects', # crafted for msvc.
},
'target_defaults': {
'msvs_disabled_warnings': [
4018, # signed/unsigned mismatch in comparison
4244, # implicit conversion, possible loss of data
4355, # 'this' used in base member initializer list
],
'defines!': [
'WIN32_LEAN_AND_MEAN', # Protobuf defines this itself.
],
},
}]
],
'targets': [
# The "lite" lib is about 1/7th the size of the heavy lib,
# but it doesn't support some of the more exotic features of
# protobufs, like reflection. To generate C++ code that can link
# against the lite version of the library, add the option line:
#
# option optimize_for = LITE_RUNTIME;
#
# to your .proto file.
{
'target_name': 'protobuf_lite',
'type': '<(library)',
'toolsets': ['host', 'target'],
'sources': [
'src/google/protobuf/stubs/common.h',
'src/google/protobuf/stubs/once.h',
'src/google/protobuf/extension_set.h',
'src/google/protobuf/generated_message_util.h',
'src/google/protobuf/message_lite.h',
'src/google/protobuf/repeated_field.h',
'src/google/protobuf/unknown_field_set.cc',
'src/google/protobuf/unknown_field_set.h',
'src/google/protobuf/wire_format_lite.h',
'src/google/protobuf/wire_format_lite_inl.h',
'src/google/protobuf/io/coded_stream.h',
'src/google/protobuf/io/zero_copy_stream.h',
'src/google/protobuf/io/zero_copy_stream_impl_lite.h',
'src/google/protobuf/stubs/common.cc',
'src/google/protobuf/stubs/once.cc',
'src/google/protobuf/stubs/hash.h',
'src/google/protobuf/stubs/map-util.h',
'src/google/protobuf/stubs/stl_util-inl.h',
'src/google/protobuf/extension_set.cc',
'src/google/protobuf/generated_message_util.cc',
'src/google/protobuf/message_lite.cc',
'src/google/protobuf/repeated_field.cc',
'src/google/protobuf/wire_format_lite.cc',
'src/google/protobuf/io/coded_stream.cc',
'src/google/protobuf/io/coded_stream_inl.h',
'src/google/protobuf/io/zero_copy_stream.cc',
'src/google/protobuf/io/zero_copy_stream_impl_lite.cc',
'<(config_h_dir)/config.h',
],
'include_dirs': [
'<(config_h_dir)',
'src',
],
# This macro must be defined to suppress the use of dynamic_cast<>,
# which requires RTTI.
'defines': [
'GOOGLE_PROTOBUF_NO_RTTI',
],
'direct_dependent_settings': {
'include_dirs': [
'<(config_h_dir)',
'src',
],
'defines': [
'GOOGLE_PROTOBUF_NO_RTTI',
],
},
},
# This is the full, heavy protobuf lib that's needed for c++ .proto's
# that don't specify the LITE_RUNTIME option. The protocol
# compiler itself (protoc) falls into that category.
#
# DO NOT LINK AGAINST THIS TARGET IN CHROME CODE --agl
{
'target_name': 'protobuf_full_do_not_use',
'type': '<(library)',
'toolsets': ['host','target'],
'sources': [
'src/google/protobuf/descriptor.h',
'src/google/protobuf/descriptor.pb.h',
'src/google/protobuf/descriptor_database.h',
'src/google/protobuf/dynamic_message.h',
'src/google/protobuf/generated_message_reflection.h',
'src/google/protobuf/message.h',
'src/google/protobuf/reflection_ops.h',
'src/google/protobuf/service.h',
'src/google/protobuf/text_format.h',
'src/google/protobuf/unknown_field_set.h',
'src/google/protobuf/wire_format.h',
'src/google/protobuf/io/gzip_stream.h',
'src/google/protobuf/io/printer.h',
'src/google/protobuf/io/tokenizer.h',
'src/google/protobuf/io/zero_copy_stream_impl.h',
'src/google/protobuf/compiler/code_generator.h',
'src/google/protobuf/compiler/command_line_interface.h',
'src/google/protobuf/compiler/importer.h',
'src/google/protobuf/compiler/parser.h',
'src/google/protobuf/stubs/strutil.cc',
'src/google/protobuf/stubs/strutil.h',
'src/google/protobuf/stubs/substitute.cc',
'src/google/protobuf/stubs/substitute.h',
'src/google/protobuf/stubs/structurally_valid.cc',
'src/google/protobuf/descriptor.cc',
'src/google/protobuf/descriptor.pb.cc',
'src/google/protobuf/descriptor_database.cc',
'src/google/protobuf/dynamic_message.cc',
'src/google/protobuf/extension_set_heavy.cc',
'src/google/protobuf/generated_message_reflection.cc',
'src/google/protobuf/message.cc',
'src/google/protobuf/reflection_ops.cc',
'src/google/protobuf/service.cc',
'src/google/protobuf/text_format.cc',
'src/google/protobuf/unknown_field_set.cc',
'src/google/protobuf/wire_format.cc',
# This file pulls in zlib, but it's not actually used by protoc, so
# instead of compiling zlib for the host, let's just exclude this.
# 'src/src/google/protobuf/io/gzip_stream.cc',
'src/google/protobuf/io/printer.cc',
'src/google/protobuf/io/tokenizer.cc',
'src/google/protobuf/io/zero_copy_stream_impl.cc',
'src/google/protobuf/compiler/importer.cc',
'src/google/protobuf/compiler/parser.cc',
],
'dependencies': [
'protobuf_lite',
],
'export_dependent_settings': [
'protobuf_lite',
],
},
{
'target_name': 'protoc',
'type': 'executable',
'toolsets': ['host'],
'sources': [
'src/google/protobuf/compiler/code_generator.cc',
'src/google/protobuf/compiler/command_line_interface.cc',
'src/google/protobuf/compiler/plugin.cc',
'src/google/protobuf/compiler/plugin.pb.cc',
'src/google/protobuf/compiler/subprocess.cc',
'src/google/protobuf/compiler/subprocess.h',
'src/google/protobuf/compiler/zip_writer.cc',
'src/google/protobuf/compiler/zip_writer.h',
'src/google/protobuf/compiler/cpp/cpp_enum.cc',
'src/google/protobuf/compiler/cpp/cpp_enum.h',
'src/google/protobuf/compiler/cpp/cpp_enum_field.cc',
'src/google/protobuf/compiler/cpp/cpp_enum_field.h',
'src/google/protobuf/compiler/cpp/cpp_extension.cc',
'src/google/protobuf/compiler/cpp/cpp_extension.h',
'src/google/protobuf/compiler/cpp/cpp_field.cc',
'src/google/protobuf/compiler/cpp/cpp_field.h',
'src/google/protobuf/compiler/cpp/cpp_file.cc',
'src/google/protobuf/compiler/cpp/cpp_file.h',
'src/google/protobuf/compiler/cpp/cpp_generator.cc',
'src/google/protobuf/compiler/cpp/cpp_helpers.cc',
'src/google/protobuf/compiler/cpp/cpp_helpers.h',
'src/google/protobuf/compiler/cpp/cpp_message.cc',
'src/google/protobuf/compiler/cpp/cpp_message.h',
'src/google/protobuf/compiler/cpp/cpp_message_field.cc',
'src/google/protobuf/compiler/cpp/cpp_message_field.h',
'src/google/protobuf/compiler/cpp/cpp_primitive_field.cc',
'src/google/protobuf/compiler/cpp/cpp_primitive_field.h',
'src/google/protobuf/compiler/cpp/cpp_service.cc',
'src/google/protobuf/compiler/cpp/cpp_service.h',
'src/google/protobuf/compiler/cpp/cpp_string_field.cc',
'src/google/protobuf/compiler/cpp/cpp_string_field.h',
'src/google/protobuf/compiler/java/java_enum.cc',
'src/google/protobuf/compiler/java/java_enum.h',
'src/google/protobuf/compiler/java/java_enum_field.cc',
'src/google/protobuf/compiler/java/java_enum_field.h',
'src/google/protobuf/compiler/java/java_extension.cc',
'src/google/protobuf/compiler/java/java_extension.h',
'src/google/protobuf/compiler/java/java_field.cc',
'src/google/protobuf/compiler/java/java_field.h',
'src/google/protobuf/compiler/java/java_file.cc',
'src/google/protobuf/compiler/java/java_file.h',
'src/google/protobuf/compiler/java/java_generator.cc',
'src/google/protobuf/compiler/java/java_helpers.cc',
'src/google/protobuf/compiler/java/java_helpers.h',
'src/google/protobuf/compiler/java/java_message.cc',
'src/google/protobuf/compiler/java/java_message.h',
'src/google/protobuf/compiler/java/java_message_field.cc',
'src/google/protobuf/compiler/java/java_message_field.h',
'src/google/protobuf/compiler/java/java_primitive_field.cc',
'src/google/protobuf/compiler/java/java_primitive_field.h',
'src/google/protobuf/compiler/java/java_service.cc',
'src/google/protobuf/compiler/java/java_service.h',
'src/google/protobuf/compiler/java/java_string_field.cc',
'src/google/protobuf/compiler/java/java_string_field.h',
'src/google/protobuf/compiler/python/python_generator.cc',
'src/google/protobuf/compiler/main.cc',
],
'dependencies': [
'protobuf_full_do_not_use',
],
'include_dirs': [
'<(config_h_dir)',
'src/src',
],
},
{
# Generate the python module needed by all protoc-generated Python code.
'target_name': 'py_proto',
'type': 'none',
'copies': [
{
'destination': '<(PRODUCT_DIR)/pyproto/google/',
'files': [
# google/ module gets an empty __init__.py.
'__init__.py',
],
},
{
'destination': '<(PRODUCT_DIR)/pyproto/google/protobuf',
'files': [
'python/google/protobuf/__init__.py',
'python/google/protobuf/descriptor.py',
'python/google/protobuf/message.py',
'python/google/protobuf/reflection.py',
'python/google/protobuf/service.py',
'python/google/protobuf/service_reflection.py',
'python/google/protobuf/text_format.py',
# TODO(ncarter): protoc's python generator treats descriptor.proto
# specially, but it's not possible to trigger the special treatment
# unless you run protoc from ./src/src (the treatment is based
# on the path to the .proto file matching a constant exactly).
# I'm not sure how to convince gyp to execute a rule from a
# different directory. Until this is resolved, use a copy of
# descriptor_pb2.py that I manually generated.
'descriptor_pb2.py',
],
},
{
'destination': '<(PRODUCT_DIR)/pyproto/google/protobuf/internal',
'files': [
'python/google/protobuf/internal/__init__.py',
'python/google/protobuf/internal/api_implementation.py',
'python/google/protobuf/internal/containers.py',
'python/google/protobuf/internal/cpp_message.py',
'python/google/protobuf/internal/decoder.py',
'python/google/protobuf/internal/encoder.py',
'python/google/protobuf/internal/generator_test.py',
'python/google/protobuf/internal/message_listener.py',
'python/google/protobuf/internal/python_message.py',
'python/google/protobuf/internal/type_checkers.py',
'python/google/protobuf/internal/wire_format.py',
],
},
],
# # We can't generate a proper descriptor_pb2.py -- see earlier comment.
# 'rules': [
# {
# 'rule_name': 'genproto',
# 'extension': 'proto',
# 'inputs': [
# '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
# ],
# 'variables': {
# # The protoc compiler requires a proto_path argument with the
# # directory containing the .proto file.
# 'rule_input_relpath': 'src/google/protobuf',
# },
# 'outputs': [
# '<(PRODUCT_DIR)/pyproto/google/protobuf/<(RULE_INPUT_ROOT)_pb2.py',
# ],
# 'action': [
# '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
# '-I./src',
# '-I.',
# '--python_out=<(PRODUCT_DIR)/pyproto/google/protobuf',
# 'google/protobuf/descriptor.proto',
# ],
# 'message': 'Generating Python code from <(RULE_INPUT_PATH)',
# },
# ],
# 'dependencies': [
# 'protoc#host',
# ],
# 'sources': [
# 'src/google/protobuf/descriptor.proto',
# ],
},
],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
| [] |
Yash-s-Code-Camp/Python-Day-4 | main.py | 887c4e172905b2b0dea493a3c9c1f61e403556fc | # def mul(a):
# return lambda b:b*a
# singler = mul(1) # addition = lambda b:b*1
# doubler = mul(2) # addition = lambda b:b*2
# tripler = mul(3) # addition = lambda b:b*3
# print(doubler(7)) # 7*2 = 14
# print(tripler(7)) # 7*3 = 21
# print(singler(7)) # 7*1 = 7
class Student:
def __init__(self, fname):
self.fname = fname
def greet(self, fname):
return f"Hello, {fname}"
class BatchA(Student):
def __init__(self, lname):
self.lname = lname
#Student.__init__(self, "Nikunj")
super().__init__("Nikunj")
def printName(self):
return f"{self.fname} {self.lname}"
stud = BatchA("Thakor")
print(stud.printName())
# Color notes (rgb() is not defined anywhere in this file, so these calls are
# kept as comments to keep the script runnable):
# rgb(255, 255, 255) # White
# rgb(255, 0, 0) # Red
# rgb(0, 0, 0) # Black
# rgb(0, 255, 255) # Cyan
# rgb(255, 255, 0) # Yellow
#00ff00 //green
#1e90ff //dodgerblue
| [] |
MarcoRosso/paperstream | paperstream/create_diary.py | f8d5485ea337334b036393f9566b74394b5dd234 | """
Create diaries in A5 and A4 sizes based on PDF templates.
Julio Vega
"""
import datetime
import math
import sys
from io import BytesIO
from pathlib import Path
from PyPDF2 import PdfFileReader, PdfFileWriter
from reportlab.lib.pagesizes import A5, A4
from reportlab.lib.utils import ImageReader
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFError, TTFont
from reportlab.pdfgen import canvas
def resource_path(relative_path):
""" Get absolute path to resource, works for dev and for PyInstaller """
base_path = getattr(sys, '_MEIPASS', Path(__file__).resolve().parent)
return base_path / Path(relative_path)
CORNER_DIR = resource_path("input/1_diaries_to_create/resources")
LOGO_PATH = resource_path(CORNER_DIR / Path("logo.png"))
DEFAULT_FONT = resource_path(CORNER_DIR / Path('FreeSansLocal.ttf'))
CREATED_DIARIES_DIR = resource_path("output/created_diaries/")
#############################################################
#############################################################
#############################################################
##### Algorithm to convert A4 pages into an A5 booklet ######
#############################################################
#############################################################
#############################################################
## Adapted from the work by Luke Plant, https://bitbucket.org/spookylukey/booklet-maker/src
class Sheet(object):
'''A4 Sheets'''
def __init__(self):
self.front = PrintPage()
self.back = PrintPage()
class PrintPage(object):
'''A4 page with containers for A4 pages'''
def __init__(self):
self.left = PageContainer()
self.right = PageContainer()
class PageContainer(object):
'''A5 containers'''
def __init__(self):
self.page = None
def build_booklet(pages):
''' Build booklet '''
# Double sized page, with double-sided printing, fits 4 of the original.
sheet_count = int(math.ceil(len(pages) / 4.0))
booklet = [Sheet() for i in range(0, sheet_count)]
# Assign input pages to sheets
# This is the core algo. To understand it:
# * pick up 3 A4 sheets, landscape
# * number the sheets from 1 to 3, starting with bottom one
# * fold the stack in the middle to form an A5 booklet
# * work out what order you need to use the front left,
# front right, back left and back right sides.
def containers():
'''Yields parts of the booklet in the order they should be used.'''
for sheet in booklet:
yield sheet.back.right
yield sheet.front.left
for sheet in reversed(booklet):
yield sheet.front.right
yield sheet.back.left
for container, page in zip(containers(), pages):
container.page = page
return booklet
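# Illustrative helper (not in the original module): shows the imposition order
# build_booklet produces. The "pages" here are plain ints, since build_booklet
# only stores references to whatever page objects it is given.
def _demo_booklet_order():
    demo = build_booklet(list(range(1, 9)))  # 8 pages -> 2 sheets
    # Expected: sheet 1 back = (8 | 1), front = (2 | 7); sheet 2 back = (6 | 3), front = (4 | 5)
    for number, sheet in enumerate(demo, start=1):
        print("sheet", number,
              "back:", sheet.back.left.page, "|", sheet.back.right.page,
              "front:", sheet.front.left.page, "|", sheet.front.right.page)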
def add_double_page(writer, page_size, print_page):
''' Adds a double page '''
width, height = page_size
page = writer.insertBlankPage(width=width, height=height, index=writer.getNumPages())
# Merge the left page
l_page = print_page.left.page
if l_page is not None:
page.mergePage(l_page)
# Merge the right page with translation
r_page = print_page.right.page
if r_page is not None:
page.mergeTranslatedPage(r_page, width / 2, 0)
def convert_to_a5_booklet(input_file, blanks=0):
'''Converts a PDF into a double sided A5 file to print as an A4 (two A5 pages per A4 page)'''
# Create internal dir to save the a5 files
a5_booklets_dir = CREATED_DIARIES_DIR
Path.mkdir(a5_booklets_dir, parents=True, exist_ok=True)
# Create the a5 booklet's name
a5_booklet_name = Path(input_file).stem + "_as_a5_booklet"
a5_booklet = a5_booklets_dir / Path("{}.pdf".format(a5_booklet_name))
reader = PdfFileReader(open(input_file, "rb"))
pages = [reader.getPage(p) for p in range(0, reader.getNumPages())]
for index in range(0, blanks):
pages.insert(0, None)
sheets = build_booklet(pages)
writer = PdfFileWriter()
firs_page = reader.getPage(0)
input_width = firs_page.mediaBox.getWidth()
output_width = input_width * 2
input_height = firs_page.mediaBox.getHeight()
output_height = input_height
page_size = (output_width, output_height)
# We want to group fronts and backs together.
for sheet in sheets:
add_double_page(writer, page_size, sheet.back)
add_double_page(writer, page_size, sheet.front)
with open(a5_booklet, "wb") as a5_booklet_stream:
writer.write(a5_booklet_stream)
return a5_booklet
#############################################################
#############################################################
#############################################################
########## Create A4 paper diary ############
#############################################################
#############################################################
#############################################################
def create_diary_cover(participant_id, email, font):
'''Create cover of the A5 diary'''
packet = BytesIO()
cover_canvas = canvas.Canvas(packet, pagesize=A4)
width, height = A4
# Centering the logo or participant ID
if Path.exists(LOGO_PATH):
logo = ImageReader(LOGO_PATH)
cover_canvas.drawImage(logo, x=(width * (1/6.0)),
y=(height/4),
width=width * (4/6.0),
preserveAspectRatio=True,
mask='auto')
else:
cover_canvas.setFont(font, 50)
cover_canvas.drawCentredString(width/2, height/2, participant_id)
# Lost legend
if not (email is None or email == ""):
cover_canvas.setFont(font, 15)
cover_canvas.drawCentredString(width/2, 50,
"If you find this document, please email " + email)
cover_canvas.save()
packet.seek(0)
return PdfFileReader(packet).getPage(0)
def create_diary_page(pdf_template, font, top_left_text, page_number, top_right_text):
packet = BytesIO()
diary_canvas = canvas.Canvas(packet, pagesize=A5)
# Header
diary_canvas.setFont(font, 11)
#diary_canvas.drawRightString(378, 562, str(top_right_text))
diary_canvas.drawString(36.5, 562, top_left_text)
# Corners
corners = [(CORNER_DIR / Path("corner_ul.png"), 25, 553),
(CORNER_DIR / Path("corner_ur.png"), 365, 553),
(CORNER_DIR / Path("corner_bl.png"), 25, 15),
(CORNER_DIR / Path("corner_br.png"), 365, 15)]
for corner_path, x, y in corners:
if corner_path.exists():
corner = ImageReader(corner_path)
diary_canvas.drawImage(corner, x=x, y=y, mask='auto')
# Footer
#diary_canvas.setFont(font, 8)
#diary_canvas.drawString(36.5, 24, str(page_number))
diary_canvas.save()
# Merge template and additions (header, corners and footer)
packet.seek(0)
page_additions = PdfFileReader(packet).getPage(0)
new_page = PdfFileReader(open(pdf_template, "rb")).getPage(0)
new_page.mergePage(page_additions)
new_page.scaleTo(A4[0], A4[1])
return new_page
def create_a4_diary(pdf_template, pages, top_left_text, email=None, font='Arial'):
"""Creates an A4 document with [PAGES] from [STARTING_DATE]"""
starting_date = parse_date(top_left_text)
font = set_active_font(font)
# Create output folder/file
if not Path(pdf_template).exists():
raise ValueError("Template does not exist {}".format(pdf_template))
Path.mkdir(CREATED_DIARIES_DIR, parents=True, exist_ok=True)
a4_document_name = Path(pdf_template).stem
a4_document_path = CREATED_DIARIES_DIR / Path("{}_document.pdf".format(a4_document_name))
pdf_file = PdfFileWriter()
# Cover
pdf_file.addPage(create_diary_cover(a4_document_name, email, font))
pdf_file.addBlankPage()
# Pages
for page in range(1, pages+1):
if starting_date is not None:
top_left_text = starting_date.strftime('%A, %d %b %Y')
starting_date += datetime.timedelta(days=1)
new_page = create_diary_page(pdf_template, font, top_left_text,page, a4_document_name)
pdf_file.addPage(new_page)
# Backcover
pdf_file.addBlankPage()
# Save a4 document
with open(a4_document_path, "wb") as output_stream:
pdf_file.write(output_stream)
return a4_document_path
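# End-to-end sketch (hypothetical template path and e-mail; requires a real A5
# PDF template on disk): build a 7-day A4 diary starting on 01/01/2020, then
# impose it as a double-sided A5 booklet with convert_to_a5_booklet above.
def _demo_build_diary(template="input/1_diaries_to_create/template.pdf"):
    a4_path = create_a4_diary(template, pages=7, top_left_text="01/01/2020",
                              email="owner@example.com")
    return convert_to_a5_booklet(a4_path)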
def set_active_font(font):
"""Register the font to use in header and footer of the diary"""
try:
pdfmetrics.registerFont(TTFont(font, font + '.ttf'))
except TTFError:
font = 'FreeSansLocal'
pdfmetrics.registerFont(TTFont(font, DEFAULT_FONT))
return font
def parse_date(s):
try:
return datetime.datetime.strptime(s, "%d/%m/%Y")
except ValueError:
return None | [((3596, 3652), 'pathlib.Path.mkdir', 'Path.mkdir', (['a5_booklets_dir'], {'parents': '(True)', 'exist_ok': '(True)'}), '(a5_booklets_dir, parents=True, exist_ok=True)\n', (3606, 3652), False, 'from pathlib import Path\n'), ((4064, 4079), 'PyPDF2.PdfFileWriter', 'PdfFileWriter', ([], {}), '()\n', (4077, 4079), False, 'from PyPDF2 import PdfFileReader, PdfFileWriter\n'), ((5157, 5166), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (5164, 5166), False, 'from io import BytesIO\n'), ((5186, 5220), 'reportlab.pdfgen.canvas.Canvas', 'canvas.Canvas', (['packet'], {'pagesize': 'A4'}), '(packet, pagesize=A4)\n', (5199, 5220), False, 'from reportlab.pdfgen import canvas\n'), ((5295, 5317), 'pathlib.Path.exists', 'Path.exists', (['LOGO_PATH'], {}), '(LOGO_PATH)\n', (5306, 5317), False, 'from pathlib import Path\n'), ((6164, 6173), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (6171, 6173), False, 'from io import BytesIO\n'), ((6193, 6227), 'reportlab.pdfgen.canvas.Canvas', 'canvas.Canvas', (['packet'], {'pagesize': 'A5'}), '(packet, pagesize=A5)\n', (6206, 6227), False, 'from reportlab.pdfgen import canvas\n'), ((7665, 7725), 'pathlib.Path.mkdir', 'Path.mkdir', (['CREATED_DIARIES_DIR'], {'parents': '(True)', 'exist_ok': '(True)'}), '(CREATED_DIARIES_DIR, parents=True, exist_ok=True)\n', (7675, 7725), False, 'from pathlib import Path\n'), ((7883, 7898), 'PyPDF2.PdfFileWriter', 'PdfFileWriter', ([], {}), '()\n', (7896, 7898), False, 'from PyPDF2 import PdfFileReader, PdfFileWriter\n'), ((643, 662), 'pathlib.Path', 'Path', (['relative_path'], {}), '(relative_path)\n', (647, 662), False, 'from pathlib import Path\n'), ((769, 785), 'pathlib.Path', 'Path', (['"""logo.png"""'], {}), "('logo.png')\n", (773, 785), False, 'from pathlib import Path\n'), ((829, 854), 'pathlib.Path', 'Path', (['"""FreeSansLocal.ttf"""'], {}), "('FreeSansLocal.ttf')\n", (833, 854), False, 'from pathlib import Path\n'), ((5334, 5356), 'reportlab.lib.utils.ImageReader', 'ImageReader', (['LOGO_PATH'], {}), '(LOGO_PATH)\n', (5345, 5356), False, 'from reportlab.lib.utils import ImageReader\n'), ((7749, 7767), 'pathlib.Path', 'Path', (['pdf_template'], {}), '(pdf_template)\n', (7753, 7767), False, 'from pathlib import Path\n'), ((8890, 8931), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['s', '"""%d/%m/%Y"""'], {}), "(s, '%d/%m/%Y')\n", (8916, 8931), False, 'import datetime\n'), ((3711, 3727), 'pathlib.Path', 'Path', (['input_file'], {}), '(input_file)\n', (3715, 3727), False, 'from pathlib import Path\n'), ((6030, 6051), 'PyPDF2.PdfFileReader', 'PdfFileReader', (['packet'], {}), '(packet)\n', (6043, 6051), False, 'from PyPDF2 import PdfFileReader, PdfFileWriter\n'), ((6754, 6778), 'reportlab.lib.utils.ImageReader', 'ImageReader', (['corner_path'], {}), '(corner_path)\n', (6765, 6778), False, 'from reportlab.lib.utils import ImageReader\n'), ((7085, 7106), 'PyPDF2.PdfFileReader', 'PdfFileReader', (['packet'], {}), '(packet)\n', (7098, 7106), False, 'from PyPDF2 import PdfFileReader, PdfFileWriter\n'), ((8194, 8220), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (8212, 8220), False, 'import datetime\n'), ((8681, 8708), 'reportlab.pdfbase.ttfonts.TTFont', 'TTFont', (['font', "(font + '.ttf')"], {}), "(font, font + '.ttf')\n", (8687, 8708), False, 'from reportlab.pdfbase.ttfonts import TTFError, TTFont\n'), ((6440, 6461), 'pathlib.Path', 'Path', (['"""corner_ul.png"""'], {}), "('corner_ul.png')\n", (6444, 6461), False, 'from pathlib import Path\n'), ((6503, 6524), 'pathlib.Path', 
'Path', (['"""corner_ur.png"""'], {}), "('corner_ur.png')\n", (6507, 6524), False, 'from pathlib import Path\n'), ((6567, 6588), 'pathlib.Path', 'Path', (['"""corner_bl.png"""'], {}), "('corner_bl.png')\n", (6571, 6588), False, 'from pathlib import Path\n'), ((6629, 6650), 'pathlib.Path', 'Path', (['"""corner_br.png"""'], {}), "('corner_br.png')\n", (6633, 6650), False, 'from pathlib import Path\n'), ((7555, 7573), 'pathlib.Path', 'Path', (['pdf_template'], {}), '(pdf_template)\n', (7559, 7573), False, 'from pathlib import Path\n'), ((8802, 8828), 'reportlab.pdfbase.ttfonts.TTFont', 'TTFont', (['font', 'DEFAULT_FONT'], {}), '(font, DEFAULT_FONT)\n', (8808, 8828), False, 'from reportlab.pdfbase.ttfonts import TTFError, TTFont\n'), ((587, 601), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (591, 601), False, 'from pathlib import Path\n')] |
codeforamerica/w-drive-extractor | wextractor/extractors/csv_extractor.py | 1c62bfff6fc21c4cce4a4409b76355ec4e07daae | #!/usr/bin/env python
import urllib2
import httplib
from urlparse import urlparse
import csv
from wextractor.extractors.extractor import Extractor
class CsvExtractor(Extractor):
def __init__(self, target, header=None, dtypes=None, url=None):
'''
CsvExtractor initializes with an optional url flag that tells
the extractor whether or not the resource is local or remote so
that it can be loaded accordingly
'''
super(CsvExtractor, self).__init__(target, header, dtypes)
if url is None:
self.url = self.detect_url(target)
elif type(url) != bool:
raise TypeError('url kwarg must be of type bool')
else:
self.url = url
def detect_url(self, target):
# see: http://stackoverflow.com/questions/2924422/how-do-i-determine-if-a-web-page-exists-with-shell-scripting
# and http://stackoverflow.com/questions/1140661/python-get-http-response-code-from-a-url
# for additional information
good_codes = [httplib.OK, httplib.FOUND, httplib.MOVED_PERMANENTLY]
# check to see if we have a scheme in the url, and append one if not
parsed_target = urlparse(target)
if bool(parsed_target.scheme) is False:
target = 'http://' + target
host, path = urlparse(target)[1:3]
try:
conn = httplib.HTTPConnection(host)
conn.request("HEAD", path)
status = conn.getresponse().status
except StandardError:
status = None
return status in good_codes
def extract(self):
if self.url:
raw_data = urllib2.urlopen(self.target).read().decode('utf-8-sig').rstrip()
else:
with open(self.target, 'r') as f:
raw_data = f.read().decode('utf-8-sig').rstrip()
# standardize the file endings
raw_data = raw_data.replace('\r\n', '\n').replace('\r', '\n')
if self.header is None:
# use first line if self.header not defined
current_headers = raw_data.split('\n')[0].split(',')
raw_data = '\n'.join(raw_data.split('\n')[1:])
else:
current_headers = self.header
output = []
reader = csv.reader(raw_data.splitlines(), delimiter=',')
for row in reader:
output.append(
self.transform_row(current_headers, row)
)
return output
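# Usage sketch (hypothetical file path): extract rows from a local CSV while
# bypassing the HEAD-request URL probe with url=False. The row shape depends on
# transform_row() from the Extractor base class; note this module targets
# Python 2 (urllib2/httplib).
def _demo_local_extract(path='data/people.csv'):
    # header=None means the first CSV row is used as the header
    return CsvExtractor(path, url=False).extract()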
| [((1197, 1213), 'urlparse.urlparse', 'urlparse', (['target'], {}), '(target)\n', (1205, 1213), False, 'from urlparse import urlparse\n'), ((1323, 1339), 'urlparse.urlparse', 'urlparse', (['target'], {}), '(target)\n', (1331, 1339), False, 'from urlparse import urlparse\n'), ((1377, 1405), 'httplib.HTTPConnection', 'httplib.HTTPConnection', (['host'], {}), '(host)\n', (1399, 1405), False, 'import httplib\n'), ((1653, 1681), 'urllib2.urlopen', 'urllib2.urlopen', (['self.target'], {}), '(self.target)\n', (1668, 1681), False, 'import urllib2\n')] |
r-peng/pyscf | pyscf/geomopt/berny_solver.py | 9a14f9bcc63bc75f5939cb4d00eb47861d8d8989 | #!/usr/bin/env python
# Copyright 2014-2019 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Interface to geometry optimizer pyberny https://github.com/jhrmnn/pyberny
'''
from __future__ import absolute_import
import pkg_resources
try:
dist = pkg_resources.get_distribution('pyberny')
except pkg_resources.DistributionNotFound:
dist = None
if dist is None or [int(x) for x in dist.version.split('.')] < [0, 6, 2]:
msg = ('Geometry optimizer Pyberny not found or outdated. Install or update '
'with:\n\n\tpip install -U pyberny')
raise ImportError(msg)
import time
import numpy
import logging
from pyscf import lib
from pyscf.geomopt.addons import (as_pyscf_method, dump_mol_geometry,
symmetrize)
from pyscf import __config__
from pyscf.grad.rhf import GradientsBasics
from berny import Berny, geomlib, coords
# Overwrite pyberny's atomic unit
coords.angstrom = 1./lib.param.BOHR
INCLUDE_GHOST = getattr(__config__, 'geomopt_berny_solver_optimize_include_ghost', True)
ASSERT_CONV = getattr(__config__, 'geomopt_berny_solver_optimize_assert_convergence', True)
def to_berny_geom(mol, include_ghost=INCLUDE_GHOST):
atom_charges = mol.atom_charges()
if include_ghost:
# Symbol Ghost is not supported in current version of pyberny
#species = [mol.atom_symbol(i) if z != 0 else 'Ghost'
# for i,z in enumerate(atom_charges)]
species = [mol.atom_symbol(i) if z != 0 else 'H'
for i,z in enumerate(atom_charges)]
coords = mol.atom_coords() * lib.param.BOHR
else:
atmlst = numpy.where(atom_charges != 0)[0] # Exclude ghost atoms
species = [mol.atom_symbol(i) for i in atmlst]
coords = mol.atom_coords()[atmlst] * lib.param.BOHR
# geomlib.Geometry is available in the new version of pyberny solver. (issue #212)
if getattr(geomlib, 'Geometry', None):
return geomlib.Geometry(species, coords)
else:
return geomlib.Molecule(species, coords)
def _geom_to_atom(mol, geom, include_ghost):
coords = geom.coords
if include_ghost:
atom_coords = coords / lib.param.BOHR
else:
atmlst = numpy.where(mol.atom_charges() != 0)[0]
atom_coords = mol.atom_coords()
atom_coords[atmlst] = coords / lib.param.BOHR
return atom_coords
def to_berny_log(pyscf_log):
'''Adapter to allow pyberny to use pyscf.logger
'''
class PyscfHandler(logging.Handler):
def emit(self, record):
pyscf_log.info(record.getMessage())
log = logging.getLogger('{}.{}'.format(__name__, id(pyscf_log)))
log.addHandler(PyscfHandler())
log.setLevel('INFO')
return log
def kernel(method, assert_convergence=ASSERT_CONV,
include_ghost=INCLUDE_GHOST, callback=None, **kwargs):
'''Optimize geometry with pyberny for the given method.
To adjust the convergence threshold, parameters can be set in kwargs as
below:
.. code-block:: python
conv_params = { # They are default settings
'gradientmax': 0.45e-3, # Eh/[Bohr|rad]
'gradientrms': 0.15e-3, # Eh/[Bohr|rad]
'stepmax': 1.8e-3, # [Bohr|rad]
'steprms': 1.2e-3, # [Bohr|rad]
}
from pyscf.geomopt import berny_solver
opt = berny_solver.GeometryOptimizer(method)
opt.params = conv_params
opt.kernel()
'''
t0 = time.clock(), time.time()
mol = method.mol.copy()
if 'log' in kwargs:
log = lib.logger.new_logger(method, kwargs['log'])
elif 'verbose' in kwargs:
log = lib.logger.new_logger(method, kwargs['verbose'])
else:
log = lib.logger.new_logger(method)
if isinstance(method, lib.GradScanner):
g_scanner = method
elif isinstance(method, GradientsBasics):
g_scanner = method.as_scanner()
elif getattr(method, 'nuc_grad_method', None):
g_scanner = method.nuc_grad_method().as_scanner()
else:
raise NotImplementedError('Nuclear gradients of %s not available' % method)
if not include_ghost:
g_scanner.atmlst = numpy.where(method.mol.atom_charges() != 0)[0]
# When symmetry is enabled, the molecule may be shifted or rotated to make
# the z-axis be the main axis. The transformation can cause inconsistency
# between the optimization steps. The transformation is muted by setting
    # an explicit point group to the keyword mol.symmetry (see symmetry
# detection code in Mole.build function).
if mol.symmetry:
mol.symmetry = mol.topgroup
# temporary interface, taken from berny.py optimize function
berny_log = to_berny_log(log)
geom = to_berny_geom(mol, include_ghost)
optimizer = Berny(geom, logger=berny_log, **kwargs)
t1 = t0
e_last = 0
for cycle, geom in enumerate(optimizer):
if log.verbose >= lib.logger.NOTE:
log.note('\nGeometry optimization cycle %d', cycle+1)
dump_mol_geometry(mol, geom.coords, log)
if mol.symmetry:
geom.coords = symmetrize(mol, geom.coords)
mol.set_geom_(_geom_to_atom(mol, geom, include_ghost), unit='Bohr')
energy, gradients = g_scanner(mol)
log.note('cycle %d: E = %.12g dE = %g norm(grad) = %g', cycle+1,
energy, energy - e_last, numpy.linalg.norm(gradients))
e_last = energy
if callable(callback):
callback(locals())
if assert_convergence and not g_scanner.converged:
raise RuntimeError('Nuclear gradients of %s not converged' % method)
optimizer.send((energy, gradients))
        t1 = log.timer('geometry optimization cycle %d'%cycle, *t1)
    t0 = log.timer('geometry optimization', *t0)
return optimizer._converged, mol
def optimize(method, assert_convergence=ASSERT_CONV,
include_ghost=INCLUDE_GHOST, callback=None, **kwargs):
'''Optimize geometry with pyberny for the given method.
To adjust the convergence threshold, parameters can be set in kwargs as
below:
.. code-block:: python
conv_params = { # They are default settings
'gradientmax': 0.45e-3, # Eh/[Bohr|rad]
'gradientrms': 0.15e-3, # Eh/[Bohr|rad]
'stepmax': 1.8e-3, # [Bohr|rad]
'steprms': 1.2e-3, # [Bohr|rad]
}
from pyscf.geomopt import berny_solver
newmol = berny_solver.optimize(method, **conv_params)
'''
return kernel(method, assert_convergence, include_ghost, callback,
**kwargs)[1]
class GeometryOptimizer(lib.StreamObject):
'''Optimize the molecular geometry for the input method.
Note the method.mol will be changed after calling .kernel() method.
'''
def __init__(self, method):
self.method = method
self.callback = None
self.params = {}
self.converged = False
self.max_cycle = 100
@property
def mol(self):
return self.method.mol
@mol.setter
def mol(self, x):
self.method.mol = x
def kernel(self, params=None):
if params is not None:
self.params.update(params)
params = dict(self.params)
params['maxsteps'] = self.max_cycle
self.converged, self.mol = \
kernel(self.method, callback=self.callback, **params)
return self.mol
optimize = kernel
del(INCLUDE_GHOST, ASSERT_CONV)
if __name__ == '__main__':
from pyscf import gto
from pyscf import scf, dft, cc, mp
mol = gto.M(atom='''
C 1.1879 -0.3829 0.0000
C 0.0000 0.5526 0.0000
O -1.1867 -0.2472 0.0000
H -1.9237 0.3850 0.0000
H 2.0985 0.2306 0.0000
H 1.1184 -1.0093 0.8869
H 1.1184 -1.0093 -0.8869
H -0.0227 1.1812 0.8852
H -0.0227 1.1812 -0.8852
''',
basis='3-21g')
mf = scf.RHF(mol)
conv_params = {
'gradientmax': 6e-3, # Eh/Bohr
'gradientrms': 2e-3, # Eh/Bohr
'stepmax': 2e-2, # Bohr
'steprms': 1.5e-2, # Bohr
}
mol1 = optimize(mf, **conv_params)
print(mf.kernel() - -153.219208484874)
print(scf.RHF(mol1).kernel() - -153.222680852335)
mf = dft.RKS(mol)
mf.xc = 'pbe,'
mf.conv_tol = 1e-7
mol1 = optimize(mf)
mymp2 = mp.MP2(scf.RHF(mol))
mol1 = optimize(mymp2)
mycc = cc.CCSD(scf.RHF(mol))
mol1 = optimize(mycc)
| [((793, 834), 'pkg_resources.get_distribution', 'pkg_resources.get_distribution', (['"""pyberny"""'], {}), "('pyberny')\n", (823, 834), False, 'import pkg_resources\n'), ((5305, 5344), 'berny.Berny', 'Berny', (['geom'], {'logger': 'berny_log'}), '(geom, logger=berny_log, **kwargs)\n', (5310, 5344), False, 'from berny import Berny, geomlib, coords\n'), ((8113, 8454), 'pyscf.gto.M', 'gto.M', ([], {'atom': '"""\nC 1.1879 -0.3829 0.0000\nC 0.0000 0.5526 0.0000\nO -1.1867 -0.2472 0.0000\nH -1.9237 0.3850 0.0000\nH 2.0985 0.2306 0.0000\nH 1.1184 -1.0093 0.8869\nH 1.1184 -1.0093 -0.8869\nH -0.0227 1.1812 0.8852\nH -0.0227 1.1812 -0.8852\n """', 'basis': '"""3-21g"""'}), '(atom=\n """\nC 1.1879 -0.3829 0.0000\nC 0.0000 0.5526 0.0000\nO -1.1867 -0.2472 0.0000\nH -1.9237 0.3850 0.0000\nH 2.0985 0.2306 0.0000\nH 1.1184 -1.0093 0.8869\nH 1.1184 -1.0093 -0.8869\nH -0.0227 1.1812 0.8852\nH -0.0227 1.1812 -0.8852\n """\n , basis=\'3-21g\')\n', (8118, 8454), False, 'from pyscf import gto\n'), ((8471, 8483), 'pyscf.scf.RHF', 'scf.RHF', (['mol'], {}), '(mol)\n', (8478, 8483), False, 'from pyscf import scf, dft, cc, mp\n'), ((8810, 8822), 'pyscf.dft.RKS', 'dft.RKS', (['mol'], {}), '(mol)\n', (8817, 8822), False, 'from pyscf import scf, dft, cc, mp\n'), ((2483, 2516), 'berny.geomlib.Geometry', 'geomlib.Geometry', (['species', 'coords'], {}), '(species, coords)\n', (2499, 2516), False, 'from berny import Berny, geomlib, coords\n'), ((2542, 2575), 'berny.geomlib.Molecule', 'geomlib.Molecule', (['species', 'coords'], {}), '(species, coords)\n', (2558, 2575), False, 'from berny import Berny, geomlib, coords\n'), ((3994, 4006), 'time.clock', 'time.clock', ([], {}), '()\n', (4004, 4006), False, 'import time\n'), ((4008, 4019), 'time.time', 'time.time', ([], {}), '()\n', (4017, 4019), False, 'import time\n'), ((4086, 4130), 'pyscf.lib.logger.new_logger', 'lib.logger.new_logger', (['method', "kwargs['log']"], {}), "(method, kwargs['log'])\n", (4107, 4130), False, 'from pyscf import lib\n'), ((8909, 8921), 'pyscf.scf.RHF', 'scf.RHF', (['mol'], {}), '(mol)\n', (8916, 8921), False, 'from pyscf import scf, dft, cc, mp\n'), ((8970, 8982), 'pyscf.scf.RHF', 'scf.RHF', (['mol'], {}), '(mol)\n', (8977, 8982), False, 'from pyscf import scf, dft, cc, mp\n'), ((2165, 2195), 'numpy.where', 'numpy.where', (['(atom_charges != 0)'], {}), '(atom_charges != 0)\n', (2176, 2195), False, 'import numpy\n'), ((4175, 4223), 'pyscf.lib.logger.new_logger', 'lib.logger.new_logger', (['method', "kwargs['verbose']"], {}), "(method, kwargs['verbose'])\n", (4196, 4223), False, 'from pyscf import lib\n'), ((4248, 4277), 'pyscf.lib.logger.new_logger', 'lib.logger.new_logger', (['method'], {}), '(method)\n', (4269, 4277), False, 'from pyscf import lib\n'), ((5539, 5579), 'pyscf.geomopt.addons.dump_mol_geometry', 'dump_mol_geometry', (['mol', 'geom.coords', 'log'], {}), '(mol, geom.coords, log)\n', (5556, 5579), False, 'from pyscf.geomopt.addons import as_pyscf_method, dump_mol_geometry, symmetrize\n'), ((5632, 5660), 'pyscf.geomopt.addons.symmetrize', 'symmetrize', (['mol', 'geom.coords'], {}), '(mol, geom.coords)\n', (5642, 5660), False, 'from pyscf.geomopt.addons import as_pyscf_method, dump_mol_geometry, symmetrize\n'), ((5898, 5926), 'numpy.linalg.norm', 'numpy.linalg.norm', (['gradients'], {}), '(gradients)\n', (5915, 5926), False, 'import numpy\n'), ((8756, 8769), 'pyscf.scf.RHF', 'scf.RHF', (['mol1'], {}), '(mol1)\n', (8763, 8769), False, 'from pyscf import scf, dft, cc, mp\n')] |
WesleyPeng/uiXautomation | src/main/python/taf/foundation/api/ui/aut.py | 2d2c4d5a774ffda934d5615036a80c449bac930d | # Copyright (c) 2017-2018 {Flair Inc.} WESLEY PENG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from taf.foundation.utils import ConnectionCache
class AUT(object):
cache = None
current = None
def __init__(
self,
name=None,
identifier=None,
**kwargs
):
if not AUT.cache:
AUT.cache = ConnectionCache(identifier)
self.id = self.cache.register(
self._create_instance(name, **kwargs),
identifier
)
AUT.current = self
@staticmethod
def launch(app_location, **kwargs):
raise NotImplementedError(
'Launch application'
)
def activate(self):
if self.id != self.cache.current_key:
self.cache.current_key = self.id
AUT.current = self
def take_screenshot(self):
self.activate()
return self.get_screenshot_data()
def close(self):
self.cache.close(self.id)
if not self.cache.current:
AUT.cache = None
AUT.current = None
def get_screenshot_data(self):
raise NotImplementedError(
'Get screenshot data from AUT'
)
def _create_instance(self, name, **kwargs):
raise NotImplementedError(
'Create instance of AUT'
)
| [((867, 894), 'taf.foundation.utils.ConnectionCache', 'ConnectionCache', (['identifier'], {}), '(identifier)\n', (882, 894), False, 'from taf.foundation.utils import ConnectionCache\n')] |
dkushche/Crypto | algo/vigenere.py | 75919d6df2084aee1de76c9999ac4e361c4efd48 | import crypto_tools
from itertools import cycle
def vigenere_little_doc():
return "encrypt/decrypt using vigenere cypher"
def vigenere_full_doc():
return """
        Advanced Caesar cipher: the alphabet shift changes for each character, driven by the repeating key.
"""
def vigenere_str_to_list(string, vigenere_dict):
result = list()
for char in string:
try:
result.append(vigenere_dict.index(char))
except ValueError:
            err_msg = f"There is no {char} in alphabet"
raise ValueError(err_msg)
return result
def vigenere_processing(data, key, lang, encrypt):
vigenere_dict = crypto_tools.get_param_json_data("alphabets.json", lang)
num_data = vigenere_str_to_list(data, vigenere_dict)
num_key = vigenere_str_to_list(key, vigenere_dict)
dict_size = len(vigenere_dict)
num_key = cycle(num_key)
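    # shift each plaintext index by the repeating key index modulo the alphabet size; decryption reverses the shift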
if (encrypt == "encrypt"):
num_result = [(a + b) % dict_size for a, b in zip(num_data, num_key)]
else:
num_result = [
(a + dict_size - b) % dict_size for a, b in zip(num_data, num_key)
]
result_str = ""
for val in num_result:
result_str += vigenere_dict[val]
return result_str
@crypto_tools.file_manipulation()
def vigenere(data):
lang = crypto_tools.cterm('input', 'Data language: ', 'ans')
key = crypto_tools.cterm('input', 'Enter key(str): ', 'ans')
encrypt = crypto_tools.cterm('input',
'You want encrypt or decrypt: ', 'ans')
if encrypt != "encrypt" and encrypt != "decrypt":
raise ValueError("Incorrect action")
data = crypto_tools.utf_decoder(data)
return vigenere_processing(data, key, lang, encrypt)
vigenere.little_doc = vigenere_little_doc
vigenere.full_doc = vigenere_full_doc
| [((1180, 1212), 'crypto_tools.file_manipulation', 'crypto_tools.file_manipulation', ([], {}), '()\n', (1210, 1212), False, 'import crypto_tools\n'), ((603, 659), 'crypto_tools.get_param_json_data', 'crypto_tools.get_param_json_data', (['"""alphabets.json"""', 'lang'], {}), "('alphabets.json', lang)\n", (635, 659), False, 'import crypto_tools\n'), ((821, 835), 'itertools.cycle', 'cycle', (['num_key'], {}), '(num_key)\n', (826, 835), False, 'from itertools import cycle\n'), ((1244, 1297), 'crypto_tools.cterm', 'crypto_tools.cterm', (['"""input"""', '"""Data language: """', '"""ans"""'], {}), "('input', 'Data language: ', 'ans')\n", (1262, 1297), False, 'import crypto_tools\n'), ((1308, 1362), 'crypto_tools.cterm', 'crypto_tools.cterm', (['"""input"""', '"""Enter key(str): """', '"""ans"""'], {}), "('input', 'Enter key(str): ', 'ans')\n", (1326, 1362), False, 'import crypto_tools\n'), ((1377, 1444), 'crypto_tools.cterm', 'crypto_tools.cterm', (['"""input"""', '"""You want encrypt or decrypt: """', '"""ans"""'], {}), "('input', 'You want encrypt or decrypt: ', 'ans')\n", (1395, 1444), False, 'import crypto_tools\n'), ((1588, 1618), 'crypto_tools.utf_decoder', 'crypto_tools.utf_decoder', (['data'], {}), '(data)\n', (1612, 1618), False, 'import crypto_tools\n')] |
space-scl/emacs.d | anaconda-mode/0.1.13/jedi-0.15.1-py3.7.egg/jedi/evaluate/base_context.py | 6285c38714023b72a023fe24cbcb5e4fcdcdb949 | """
Contexts are the "values" that Python would return. However Contexts are at the
same time also the "contexts" that a user is currently sitting in.
A ContextSet is typically used to specify the return of a function or any other
static analysis operation. In jedi there are always multiple returns and not
just one.
"""
from functools import reduce
from operator import add
from parso.python.tree import ExprStmt, SyncCompFor
from jedi import debug
from jedi._compatibility import zip_longest, unicode
from jedi.parser_utils import clean_scope_docstring
from jedi.common import BaseContextSet, BaseContext
from jedi.evaluate.helpers import SimpleGetItemNotFound
from jedi.evaluate.utils import safe_property
from jedi.evaluate.cache import evaluator_as_method_param_cache
from jedi.cache import memoize_method
_sentinel = object()
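# sentinel used by get_safe_value() to distinguish "no default supplied" from an explicit default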
class HelperContextMixin(object):
def get_root_context(self):
context = self
while True:
if context.parent_context is None:
return context
context = context.parent_context
@classmethod
@evaluator_as_method_param_cache()
def create_cached(cls, *args, **kwargs):
return cls(*args, **kwargs)
def execute(self, arguments):
return self.evaluator.execute(self, arguments=arguments)
def execute_evaluated(self, *value_list):
from jedi.evaluate.arguments import ValuesArguments
arguments = ValuesArguments([ContextSet([value]) for value in value_list])
return self.evaluator.execute(self, arguments)
def execute_annotation(self):
return self.execute_evaluated()
def gather_annotation_classes(self):
return ContextSet([self])
def merge_types_of_iterate(self, contextualized_node=None, is_async=False):
return ContextSet.from_sets(
lazy_context.infer()
for lazy_context in self.iterate(contextualized_node, is_async)
)
def py__getattribute__(self, name_or_str, name_context=None, position=None,
search_global=False, is_goto=False,
analysis_errors=True):
"""
:param position: Position of the last statement -> tuple of line, column
"""
if name_context is None:
name_context = self
from jedi.evaluate import finder
f = finder.NameFinder(self.evaluator, self, name_context, name_or_str,
position, analysis_errors=analysis_errors)
filters = f.get_filters(search_global)
if is_goto:
return f.filter_name(filters)
return f.find(filters, attribute_lookup=not search_global)
def py__await__(self):
await_context_set = self.py__getattribute__(u"__await__")
if not await_context_set:
debug.warning('Tried to run __await__ on context %s', self)
return await_context_set.execute_evaluated()
def eval_node(self, node):
return self.evaluator.eval_element(self, node)
def create_context(self, node, node_is_context=False, node_is_object=False):
return self.evaluator.create_context(self, node, node_is_context, node_is_object)
def iterate(self, contextualized_node=None, is_async=False):
debug.dbg('iterate %s', self)
if is_async:
from jedi.evaluate.lazy_context import LazyKnownContexts
# TODO if no __aiter__ contexts are there, error should be:
# TypeError: 'async for' requires an object with __aiter__ method, got int
return iter([
LazyKnownContexts(
self.py__getattribute__('__aiter__').execute_evaluated()
.py__getattribute__('__anext__').execute_evaluated()
.py__getattribute__('__await__').execute_evaluated()
.py__stop_iteration_returns()
) # noqa
])
return self.py__iter__(contextualized_node)
def is_sub_class_of(self, class_context):
for cls in self.py__mro__():
if cls.is_same_class(class_context):
return True
return False
def is_same_class(self, class2):
# Class matching should prefer comparisons that are not this function.
if type(class2).is_same_class != HelperContextMixin.is_same_class:
return class2.is_same_class(self)
return self == class2
class Context(HelperContextMixin, BaseContext):
"""
Should be defined, otherwise the API returns empty types.
"""
predefined_names = {}
"""
To be defined by subclasses.
"""
tree_node = None
@property
def api_type(self):
# By default just lower name of the class. Can and should be
# overwritten.
return self.__class__.__name__.lower()
def py__getitem__(self, index_context_set, contextualized_node):
from jedi.evaluate import analysis
# TODO this context is probably not right.
analysis.add(
contextualized_node.context,
'type-error-not-subscriptable',
contextualized_node.node,
message="TypeError: '%s' object is not subscriptable" % self
)
return NO_CONTEXTS
def py__iter__(self, contextualized_node=None):
if contextualized_node is not None:
from jedi.evaluate import analysis
analysis.add(
contextualized_node.context,
'type-error-not-iterable',
contextualized_node.node,
message="TypeError: '%s' object is not iterable" % self)
return iter([])
def get_signatures(self):
return []
def is_class(self):
return False
def is_instance(self):
return False
def is_function(self):
return False
def is_module(self):
return False
def is_namespace(self):
return False
def is_compiled(self):
return False
def is_bound_method(self):
return False
def py__bool__(self):
"""
Since Wrapper is a super class for classes, functions and modules,
the return value will always be true.
"""
return True
def py__doc__(self):
try:
self.tree_node.get_doc_node
except AttributeError:
return ''
else:
return clean_scope_docstring(self.tree_node)
return None
def get_safe_value(self, default=_sentinel):
if default is _sentinel:
raise ValueError("There exists no safe value for context %s" % self)
return default
def py__call__(self, arguments):
debug.warning("no execution possible %s", self)
return NO_CONTEXTS
def py__stop_iteration_returns(self):
debug.warning("Not possible to return the stop iterations of %s", self)
return NO_CONTEXTS
def get_qualified_names(self):
# Returns Optional[Tuple[str, ...]]
return None
def is_stub(self):
# The root context knows if it's a stub or not.
return self.parent_context.is_stub()
def iterate_contexts(contexts, contextualized_node=None, is_async=False):
"""
    Calls `iterate` on all contexts, but ignores the ordering and just returns
    all contexts that the iterate functions yield.
"""
return ContextSet.from_sets(
lazy_context.infer()
for lazy_context in contexts.iterate(contextualized_node, is_async=is_async)
)
class _ContextWrapperBase(HelperContextMixin):
predefined_names = {}
@safe_property
def name(self):
from jedi.evaluate.names import ContextName
wrapped_name = self._wrapped_context.name
if wrapped_name.tree_name is not None:
return ContextName(self, wrapped_name.tree_name)
else:
from jedi.evaluate.compiled import CompiledContextName
return CompiledContextName(self, wrapped_name.string_name)
@classmethod
@evaluator_as_method_param_cache()
def create_cached(cls, evaluator, *args, **kwargs):
return cls(*args, **kwargs)
def __getattr__(self, name):
assert name != '_wrapped_context', 'Problem with _get_wrapped_context'
return getattr(self._wrapped_context, name)
class LazyContextWrapper(_ContextWrapperBase):
@safe_property
@memoize_method
def _wrapped_context(self):
with debug.increase_indent_cm('Resolve lazy context wrapper'):
return self._get_wrapped_context()
def __repr__(self):
return '<%s>' % (self.__class__.__name__)
def _get_wrapped_context(self):
raise NotImplementedError
class ContextWrapper(_ContextWrapperBase):
def __init__(self, wrapped_context):
self._wrapped_context = wrapped_context
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self._wrapped_context)
class TreeContext(Context):
def __init__(self, evaluator, parent_context, tree_node):
super(TreeContext, self).__init__(evaluator, parent_context)
self.predefined_names = {}
self.tree_node = tree_node
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self.tree_node)
class ContextualizedNode(object):
def __init__(self, context, node):
self.context = context
self.node = node
def get_root_context(self):
return self.context.get_root_context()
def infer(self):
return self.context.eval_node(self.node)
def __repr__(self):
return '<%s: %s in %s>' % (self.__class__.__name__, self.node, self.context)
class ContextualizedName(ContextualizedNode):
# TODO merge with TreeNameDefinition?!
@property
def name(self):
return self.node
def assignment_indexes(self):
"""
Returns an array of tuple(int, node) of the indexes that are used in
tuple assignments.
For example if the name is ``y`` in the following code::
x, (y, z) = 2, ''
would result in ``[(1, xyz_node), (0, yz_node)]``.
When searching for b in the case ``a, *b, c = [...]`` it will return::
[(slice(1, -1), abc_node)]
"""
indexes = []
is_star_expr = False
node = self.node.parent
compare = self.node
while node is not None:
if node.type in ('testlist', 'testlist_comp', 'testlist_star_expr', 'exprlist'):
for i, child in enumerate(node.children):
if child == compare:
index = int(i / 2)
if is_star_expr:
from_end = int((len(node.children) - i) / 2)
index = slice(index, -from_end)
indexes.insert(0, (index, node))
break
else:
raise LookupError("Couldn't find the assignment.")
is_star_expr = False
elif node.type == 'star_expr':
is_star_expr = True
elif isinstance(node, (ExprStmt, SyncCompFor)):
break
compare = node
node = node.parent
return indexes
def _getitem(context, index_contexts, contextualized_node):
from jedi.evaluate.context.iterable import Slice
# The actual getitem call.
simple_getitem = getattr(context, 'py__simple_getitem__', None)
result = NO_CONTEXTS
unused_contexts = set()
for index_context in index_contexts:
if simple_getitem is not None:
index = index_context
if isinstance(index_context, Slice):
index = index.obj
try:
method = index.get_safe_value
except AttributeError:
pass
else:
index = method(default=None)
if type(index) in (float, int, str, unicode, slice, bytes):
try:
result |= simple_getitem(index)
continue
except SimpleGetItemNotFound:
pass
unused_contexts.add(index_context)
# The index was somehow not good enough or simply a wrong type.
# Therefore we now iterate through all the contexts and just take
# all results.
if unused_contexts or not index_contexts:
result |= context.py__getitem__(
ContextSet(unused_contexts),
contextualized_node
)
debug.dbg('py__getitem__ result: %s', result)
return result
class ContextSet(BaseContextSet):
def py__class__(self):
return ContextSet(c.py__class__() for c in self._set)
def iterate(self, contextualized_node=None, is_async=False):
from jedi.evaluate.lazy_context import get_merged_lazy_context
type_iters = [c.iterate(contextualized_node, is_async=is_async) for c in self._set]
for lazy_contexts in zip_longest(*type_iters):
yield get_merged_lazy_context(
[l for l in lazy_contexts if l is not None]
)
def execute(self, arguments):
return ContextSet.from_sets(c.evaluator.execute(c, arguments) for c in self._set)
def execute_evaluated(self, *args, **kwargs):
return ContextSet.from_sets(c.execute_evaluated(*args, **kwargs) for c in self._set)
def py__getattribute__(self, *args, **kwargs):
if kwargs.get('is_goto'):
return reduce(add, [c.py__getattribute__(*args, **kwargs) for c in self._set], [])
return ContextSet.from_sets(c.py__getattribute__(*args, **kwargs) for c in self._set)
def get_item(self, *args, **kwargs):
return ContextSet.from_sets(_getitem(c, *args, **kwargs) for c in self._set)
def try_merge(self, function_name):
context_set = self.__class__([])
for c in self._set:
try:
method = getattr(c, function_name)
except AttributeError:
pass
else:
context_set |= method()
return context_set
def gather_annotation_classes(self):
return ContextSet.from_sets([c.gather_annotation_classes() for c in self._set])
def get_signatures(self):
return [sig for c in self._set for sig in c.get_signatures()]
NO_CONTEXTS = ContextSet([])
def iterator_to_context_set(func):
def wrapper(*args, **kwargs):
return ContextSet(func(*args, **kwargs))
return wrapper
| [((1093, 1126), 'jedi.evaluate.cache.evaluator_as_method_param_cache', 'evaluator_as_method_param_cache', ([], {}), '()\n', (1124, 1126), False, 'from jedi.evaluate.cache import evaluator_as_method_param_cache\n'), ((8011, 8044), 'jedi.evaluate.cache.evaluator_as_method_param_cache', 'evaluator_as_method_param_cache', ([], {}), '()\n', (8042, 8044), False, 'from jedi.evaluate.cache import evaluator_as_method_param_cache\n'), ((12510, 12555), 'jedi.debug.dbg', 'debug.dbg', (['"""py__getitem__ result: %s"""', 'result'], {}), "('py__getitem__ result: %s', result)\n", (12519, 12555), False, 'from jedi import debug\n'), ((2358, 2471), 'jedi.evaluate.finder.NameFinder', 'finder.NameFinder', (['self.evaluator', 'self', 'name_context', 'name_or_str', 'position'], {'analysis_errors': 'analysis_errors'}), '(self.evaluator, self, name_context, name_or_str, position,\n analysis_errors=analysis_errors)\n', (2375, 2471), False, 'from jedi.evaluate import finder\n'), ((3260, 3289), 'jedi.debug.dbg', 'debug.dbg', (['"""iterate %s"""', 'self'], {}), "('iterate %s', self)\n", (3269, 3289), False, 'from jedi import debug\n'), ((5002, 5172), 'jedi.evaluate.analysis.add', 'analysis.add', (['contextualized_node.context', '"""type-error-not-subscriptable"""', 'contextualized_node.node'], {'message': '("TypeError: \'%s\' object is not subscriptable" % self)'}), '(contextualized_node.context, \'type-error-not-subscriptable\',\n contextualized_node.node, message=\n "TypeError: \'%s\' object is not subscriptable" % self)\n', (5014, 5172), False, 'from jedi.evaluate import analysis\n'), ((6686, 6733), 'jedi.debug.warning', 'debug.warning', (['"""no execution possible %s"""', 'self'], {}), "('no execution possible %s', self)\n", (6699, 6733), False, 'from jedi import debug\n'), ((6812, 6883), 'jedi.debug.warning', 'debug.warning', (['"""Not possible to return the stop iterations of %s"""', 'self'], {}), "('Not possible to return the stop iterations of %s', self)\n", (6825, 6883), False, 'from jedi import debug\n'), ((12957, 12981), 'jedi._compatibility.zip_longest', 'zip_longest', (['*type_iters'], {}), '(*type_iters)\n', (12968, 12981), False, 'from jedi._compatibility import zip_longest, unicode\n'), ((2814, 2873), 'jedi.debug.warning', 'debug.warning', (['"""Tried to run __await__ on context %s"""', 'self'], {}), "('Tried to run __await__ on context %s', self)\n", (2827, 2873), False, 'from jedi import debug\n'), ((5405, 5565), 'jedi.evaluate.analysis.add', 'analysis.add', (['contextualized_node.context', '"""type-error-not-iterable"""', 'contextualized_node.node'], {'message': '("TypeError: \'%s\' object is not iterable" % self)'}), '(contextualized_node.context, \'type-error-not-iterable\',\n contextualized_node.node, message=\n "TypeError: \'%s\' object is not iterable" % self)\n', (5417, 5565), False, 'from jedi.evaluate import analysis\n'), ((6395, 6432), 'jedi.parser_utils.clean_scope_docstring', 'clean_scope_docstring', (['self.tree_node'], {}), '(self.tree_node)\n', (6416, 6432), False, 'from jedi.parser_utils import clean_scope_docstring\n'), ((7794, 7835), 'jedi.evaluate.names.ContextName', 'ContextName', (['self', 'wrapped_name.tree_name'], {}), '(self, wrapped_name.tree_name)\n', (7805, 7835), False, 'from jedi.evaluate.names import ContextName\n'), ((7936, 7987), 'jedi.evaluate.compiled.CompiledContextName', 'CompiledContextName', (['self', 'wrapped_name.string_name'], {}), '(self, wrapped_name.string_name)\n', (7955, 7987), False, 'from jedi.evaluate.compiled import CompiledContextName\n'), ((8435, 
8491), 'jedi.debug.increase_indent_cm', 'debug.increase_indent_cm', (['"""Resolve lazy context wrapper"""'], {}), "('Resolve lazy context wrapper')\n", (8459, 8491), False, 'from jedi import debug\n'), ((13001, 13069), 'jedi.evaluate.lazy_context.get_merged_lazy_context', 'get_merged_lazy_context', (['[l for l in lazy_contexts if l is not None]'], {}), '([l for l in lazy_contexts if l is not None])\n', (13024, 13069), False, 'from jedi.evaluate.lazy_context import get_merged_lazy_context\n')] |
yavook/kiwi-scp | kiwi_scp/commands/cmd_cmd.py | ca4263d913cfbdedc8b14334e3cad61c3b95f0a7 | from typing import Tuple
import click
from .cmd import KiwiCommandType, KiwiCommand
from .decorators import kiwi_command
from ..executable import COMPOSE_EXE
from ..instance import Instance
from ..project import Project
@click.argument(
"compose_args",
metavar="[ARG]...",
nargs=-1,
)
@click.argument(
"compose_cmd",
metavar="COMMAND",
)
@kiwi_command(
short_help="Run docker-compose command",
# ignore arguments looking like options
# just pass everything down to docker-compose
context_settings={"ignore_unknown_options": True},
)
class CmdCommand(KiwiCommand):
"""Run raw docker-compose command in a project"""
type = KiwiCommandType.PROJECT
enabled_only = True
@classmethod
def run_for_project(cls, instance: Instance, project: Project, compose_cmd: str = None,
compose_args: Tuple[str] = None) -> None:
COMPOSE_EXE.run([compose_cmd, *compose_args], **project.process_kwargs)
| [((225, 285), 'click.argument', 'click.argument', (['"""compose_args"""'], {'metavar': '"""[ARG]..."""', 'nargs': '(-1)'}), "('compose_args', metavar='[ARG]...', nargs=-1)\n", (239, 285), False, 'import click\n'), ((302, 350), 'click.argument', 'click.argument', (['"""compose_cmd"""'], {'metavar': '"""COMMAND"""'}), "('compose_cmd', metavar='COMMAND')\n", (316, 350), False, 'import click\n')] |
rvprasad/software-testing-course | homework/Testing with Examples (Network)/impl_fail-add_relation-does_not_fail_when_person1_is_non_existent.py | 3803851dcf9f7bbd0f0b89fca6c9c5e3a48f22e0 | class MyError(Exception):
pass
class PropertyContainer(object):
def __init__(self):
self.props = {}
def set_property(self, prop, value):
self.props[prop] = value
def get_property(self, prop):
return self.props.get(prop)
def has_property(self, prop):
return prop in self.props
class Node(PropertyContainer):
pass
class Edge(PropertyContainer):
def __init__(self, node1, node2):
super().__init__()
self.node1 = node1
self.node2 = node2
class Network(object):
NAME_PROP = "name" # NAME_PROP is an optional string property
FRIEND_PROP = "friend" # FRIEND_PROP is an optional boolean property
def __init__(self):
self.nodes = set()
self.edges = set()
def create_person(self):
node = Node()
self.nodes.add(node)
return node
# add prop to value; overwrite if prop exists
def add_person_property(self, person, prop, value):
# flag non-existent person
if person not in self.nodes:
raise RuntimeError("person does not exist")
if prop == Network.NAME_PROP:
# disallow non-string values for NAME_PROP property
if not isinstance(value, str):
raise TypeError(
"{0} is a string property".format(Network.NAME_PROP))
# disallow multiple people to have the same name
for p in self.nodes:
if p.get_property(Network.NAME_PROP) == value and \
p is not person:
raise ValueError("{0} name already taken".format(value))
person.set_property(prop, value)
def add_relation(self, person1, person2):
# flag non-existent persons
if person1 not in self.nodes:
# raise RuntimeError("person1 does not exist")
person1 = self.create_person()
if person2 not in self.nodes:
raise RuntimeError("person2 does not exist")
# flag existing edge
for e in self.edges:
if (e.node1 is person1 and e.node2 is person2) or \
(e.node1 is person2 and e.node2 is person1):
raise ValueError("relation exists")
self.edges.add(Edge(person1, person2))
def add_relation_property(self, person1, person2, prop, value):
# disallow non-boolean values for FRIEND_PROP property
if prop == Network.FRIEND_PROP and not isinstance(value, bool):
raise TypeError(
"{0} is a boolean property".format(Network.FRIEND_PROP))
for e in self.edges:
if (e.node1 is person1 and e.node2 is person2) or \
(e.node1 is person2 and e.node2 is person1):
e.set_property(prop, value)
return
# flag non-existent relation
raise RuntimeError("Non-existent relation")
# get a person with given name
def get_person(self, name):
# disallow non-string values for name
if not isinstance(name, str):
raise TypeError(
"{0} is a string argument".format(Network.NAME_PROP))
for n in self.nodes:
if n.get_property(Network.NAME_PROP) == name:
return n
# flag non-existent person
raise RuntimeError("No person named {0}".format(name))
# get friends of friends of a person with given name
def friends_of_friends(self, name):
# disallow non-string values for name
if not isinstance(name, str):
raise TypeError(
"{0} is a string argument".format(Network.NAME_PROP))
# flag non-existent person
person = self.get_person(name)
visited = set([person])
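        # expand two hops outward from the person, following only edges marked as friendships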
i = 0
while i < 2:
newly_visited = set()
for p in (x for x in visited):
for e in (x for x in self.edges if
x.get_property(Network.FRIEND_PROP) == True):
n1 = e.node1
n2 = e.node2
if n1 == p:
newly_visited.add(e.node2)
elif n2 == p:
newly_visited.add(e.node1)
visited = newly_visited
i += 1
return list(visited)
| [] |
BranKein/Flask-template | tools/__init__.py | 3d8f43b3c44163e855c727de2a0dfe37d3b788f9 | from . import ip
__all__ = ['ip']
| [] |
ferhatelmas/algo | leetCode/algorithms/easy/count_and_say.py | a7149c7a605708bc01a5cd30bf5455644cefd04d | from itertools import groupby
class Solution:
def countAndSay(self, n):
def gen(s):
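            # run-length encode s: for every run of identical digits emit "<run length><digit>"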
return "".join(str(len(list(g))) + k for k, g in groupby(s))
s, i = "1", 1
while i < n:
s = gen(s)
i += 1
return s
| [((159, 169), 'itertools.groupby', 'groupby', (['s'], {}), '(s)\n', (166, 169), False, 'from itertools import groupby\n')] |
pupuwudi/nlp_xiaojiang | conf/feature_config.py | 182ac4522b6012a52de6e1d0db7e6a47cb716e5b | # -*- coding: UTF-8 -*-
# !/usr/bin/python
# @time :2019/5/10 9:13
# @author :Mo
# @function :path of FeatureProject
import pathlib
import sys
import os
# base dir
projectdir = str(pathlib.Path(os.path.abspath(__file__)).parent.parent)
sys.path.append(projectdir)
# path of BERT model
model_dir = projectdir + '/Data/chinese_L-12_H-768_A-12'
config_name = model_dir + '/bert_config.json'
ckpt_name = model_dir + '/bert_model.ckpt'
vocab_file = model_dir + '/vocab.txt'
# GPU memory usage fraction
gpu_memory_fraction = 0.32
# by default, use the output of the second-to-last layer as the sentence vector
layer_indexes = [-2]
# maximum sequence length
max_seq_len = 32
| [((256, 283), 'sys.path.append', 'sys.path.append', (['projectdir'], {}), '(projectdir)\n', (271, 283), False, 'import sys\n'), ((213, 238), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (228, 238), False, 'import os\n')] |
taruma/hidrokit | tests/test_prep_read.py | a96c4ba2235d58d2bbc905be44d1b413ed19b3d2 | """Test for .prep.read module
"""
from hidrokit.prep import read
import numpy as np
import pandas as pd
A = pd.DataFrame(
data=[
[1, 3, 4, np.nan, 2, np.nan],
[np.nan, 2, 3, np.nan, 1, 4],
[2, np.nan, 1, 3, 4, np.nan]
],
columns=['A', 'B', 'C', 'D', 'E', 'F']
)
A_date = A.set_index(pd.date_range("20190617", "20190619"))
res_A_number = {'A': [1], 'B': [2], 'C': [], 'D': [0, 1], 'E': [], 'F': [0, 2]}
res_A_date = {'A': ['0618'], 'B': ['0619'], 'C': [],
'D': ['0617', '0618'], 'E': [], 'F': ['0617', '0619']}
def test_read_number():
test = read.missing_row(A, date_index=False)
assert test.items() == res_A_number.items()
def test_read_date():
test = read.missing_row(A_date, date_format="%m%d")
assert test.items() == res_A_date.items()
| [((110, 264), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': '[[1, 3, 4, np.nan, 2, np.nan], [np.nan, 2, 3, np.nan, 1, 4], [2, np.nan, 1,\n 3, 4, np.nan]]', 'columns': "['A', 'B', 'C', 'D', 'E', 'F']"}), "(data=[[1, 3, 4, np.nan, 2, np.nan], [np.nan, 2, 3, np.nan, 1, \n 4], [2, np.nan, 1, 3, 4, np.nan]], columns=['A', 'B', 'C', 'D', 'E', 'F'])\n", (122, 264), True, 'import pandas as pd\n'), ((322, 359), 'pandas.date_range', 'pd.date_range', (['"""20190617"""', '"""20190619"""'], {}), "('20190617', '20190619')\n", (335, 359), True, 'import pandas as pd\n'), ((601, 638), 'hidrokit.prep.read.missing_row', 'read.missing_row', (['A'], {'date_index': '(False)'}), '(A, date_index=False)\n', (617, 638), False, 'from hidrokit.prep import read\n'), ((722, 766), 'hidrokit.prep.read.missing_row', 'read.missing_row', (['A_date'], {'date_format': '"""%m%d"""'}), "(A_date, date_format='%m%d')\n", (738, 766), False, 'from hidrokit.prep import read\n')] |
Maxcutex/personal_ecommerce | app/blueprints/department_blueprint.py | be09fb20eae1b225523acde06f8e75effcc3676f | from flasgger import swag_from
from app.blueprints.base_blueprint import Blueprint, BaseBlueprint, request, Security, Auth
from app.controllers.department_controller import DepartmentController
url_prefix = '{}/departments'.format(BaseBlueprint.base_url_prefix)
department_blueprint = Blueprint('department', __name__, url_prefix=url_prefix)
department_controller = DepartmentController(request)
@department_blueprint.route('/', methods=['GET'])
@Auth.has_permission('view_department')
@swag_from('documentation/get_all_departments.yml')
def list_departments():
return department_controller.list_departments()
@department_blueprint.route('/<int:department_id>', methods=['GET'])
@Auth.has_permission('view_department')
@swag_from('documentation/get_single_department.yml')
def get_department(department_id):
return department_controller.get_department(department_id)
@department_blueprint.route('/', methods=['POST'])
@Auth.has_role('admin')
@Security.validator(['name|required:ifExists_Department_name', 'description|required'])
@swag_from('documentation/create_department.yml')
def create_department():
return department_controller.create_department()
@department_blueprint.route('/<int:department_id>', methods=['DELETE'])
@Auth.has_role('admin')
@swag_from('documentation/delete_department.yml')
def delete_department(department_id):
return department_controller.delete_department(department_id)
@department_blueprint.route('/<int:department_id>', methods=['PATCH'])
@Auth.has_role('admin')
@Security.validator(['name|optional', 'description|optional'])
@swag_from('documentation/update_department.yml')
def update_department(department_id):
return department_controller.update_department(department_id)
| [((287, 343), 'app.blueprints.base_blueprint.Blueprint', 'Blueprint', (['"""department"""', '__name__'], {'url_prefix': 'url_prefix'}), "('department', __name__, url_prefix=url_prefix)\n", (296, 343), False, 'from app.blueprints.base_blueprint import Blueprint, BaseBlueprint, request, Security, Auth\n'), ((368, 397), 'app.controllers.department_controller.DepartmentController', 'DepartmentController', (['request'], {}), '(request)\n', (388, 397), False, 'from app.controllers.department_controller import DepartmentController\n'), ((451, 489), 'app.blueprints.base_blueprint.Auth.has_permission', 'Auth.has_permission', (['"""view_department"""'], {}), "('view_department')\n", (470, 489), False, 'from app.blueprints.base_blueprint import Blueprint, BaseBlueprint, request, Security, Auth\n'), ((491, 541), 'flasgger.swag_from', 'swag_from', (['"""documentation/get_all_departments.yml"""'], {}), "('documentation/get_all_departments.yml')\n", (500, 541), False, 'from flasgger import swag_from\n'), ((686, 724), 'app.blueprints.base_blueprint.Auth.has_permission', 'Auth.has_permission', (['"""view_department"""'], {}), "('view_department')\n", (705, 724), False, 'from app.blueprints.base_blueprint import Blueprint, BaseBlueprint, request, Security, Auth\n'), ((726, 778), 'flasgger.swag_from', 'swag_from', (['"""documentation/get_single_department.yml"""'], {}), "('documentation/get_single_department.yml')\n", (735, 778), False, 'from flasgger import swag_from\n'), ((927, 949), 'app.blueprints.base_blueprint.Auth.has_role', 'Auth.has_role', (['"""admin"""'], {}), "('admin')\n", (940, 949), False, 'from app.blueprints.base_blueprint import Blueprint, BaseBlueprint, request, Security, Auth\n'), ((951, 1041), 'app.blueprints.base_blueprint.Security.validator', 'Security.validator', (["['name|required:ifExists_Department_name', 'description|required']"], {}), "(['name|required:ifExists_Department_name',\n 'description|required'])\n", (969, 1041), False, 'from app.blueprints.base_blueprint import Blueprint, BaseBlueprint, request, Security, Auth\n'), ((1039, 1087), 'flasgger.swag_from', 'swag_from', (['"""documentation/create_department.yml"""'], {}), "('documentation/create_department.yml')\n", (1048, 1087), False, 'from flasgger import swag_from\n'), ((1237, 1259), 'app.blueprints.base_blueprint.Auth.has_role', 'Auth.has_role', (['"""admin"""'], {}), "('admin')\n", (1250, 1259), False, 'from app.blueprints.base_blueprint import Blueprint, BaseBlueprint, request, Security, Auth\n'), ((1261, 1309), 'flasgger.swag_from', 'swag_from', (['"""documentation/delete_department.yml"""'], {}), "('documentation/delete_department.yml')\n", (1270, 1309), False, 'from flasgger import swag_from\n'), ((1484, 1506), 'app.blueprints.base_blueprint.Auth.has_role', 'Auth.has_role', (['"""admin"""'], {}), "('admin')\n", (1497, 1506), False, 'from app.blueprints.base_blueprint import Blueprint, BaseBlueprint, request, Security, Auth\n'), ((1508, 1569), 'app.blueprints.base_blueprint.Security.validator', 'Security.validator', (["['name|optional', 'description|optional']"], {}), "(['name|optional', 'description|optional'])\n", (1526, 1569), False, 'from app.blueprints.base_blueprint import Blueprint, BaseBlueprint, request, Security, Auth\n'), ((1571, 1619), 'flasgger.swag_from', 'swag_from', (['"""documentation/update_department.yml"""'], {}), "('documentation/update_department.yml')\n", (1580, 1619), False, 'from flasgger import swag_from\n')] |
ikecoglu/DL-SR | src/train_DFCAN.py | 5e4c794f1434cd4a9b2b1aecf3738065b11bede1 | import argparse
from keras import optimizers
import matplotlib.pyplot as plt
import numpy as np
import datetime
from keras.callbacks import TensorBoard
import glob
import os
import tensorflow as tf
from models import *
from utils.lr_controller import ReduceLROnPlateau
from utils.data_loader import data_loader, data_loader_multi_channel
from utils.utils import img_comp
from utils.loss import loss_mse_ssim
parser = argparse.ArgumentParser()
parser.add_argument("--gpu_id", type=int, default=1)
parser.add_argument("--gpu_memory_fraction", type=float, default=0.3)
parser.add_argument("--mixed_precision_training", type=int, default=1)
parser.add_argument("--data_dir", type=str, default="../dataset/train/F-actin")
parser.add_argument("--save_weights_dir", type=str, default="../trained_models")
parser.add_argument("--model_name", type=str, default="DFCAN")
parser.add_argument("--patch_height", type=int, default=128)
parser.add_argument("--patch_width", type=int, default=128)
parser.add_argument("--input_channels", type=int, default=9)
parser.add_argument("--scale_factor", type=int, default=2)
parser.add_argument("--norm_flag", type=int, default=1)
parser.add_argument("--iterations", type=int, default=1000000)
parser.add_argument("--sample_interval", type=int, default=1000)
parser.add_argument("--validate_interval", type=int, default=2000)
parser.add_argument("--validate_num", type=int, default=500)
parser.add_argument("--batch_size", type=int, default=4)
parser.add_argument("--start_lr", type=float, default=1e-4)
parser.add_argument("--lr_decay_factor", type=float, default=0.5)
parser.add_argument("--load_weights", type=int, default=0)
parser.add_argument("--optimizer_name", type=str, default="adam")
args = parser.parse_args()
gpu_id = str(args.gpu_id)
gpu_memory_fraction = args.gpu_memory_fraction
mixed_precision_training = str(args.mixed_precision_training)
data_dir = args.data_dir
save_weights_dir = args.save_weights_dir
validate_interval = args.validate_interval
batch_size = args.batch_size
start_lr = args.start_lr
lr_decay_factor = args.lr_decay_factor
patch_height = args.patch_height
patch_width = args.patch_width
input_channels = args.input_channels
scale_factor = args.scale_factor
norm_flag = args.norm_flag
validate_num = args.validate_num
iterations = args.iterations
load_weights = args.load_weights
optimizer_name = args.optimizer_name
model_name = args.model_name
sample_interval = args.sample_interval
os.environ["TF_ENABLE_AUTO_MIXED_PRECISION"] = mixed_precision_training
os.environ["CUDA_VISIBLE_DEVICES"] = gpu_id
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=gpu_memory_fraction)
tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))
data_name = data_dir.split('/')[-1]
if input_channels == 1:
save_weights_name = model_name + '-SISR_' + data_name
cur_data_loader = data_loader
train_images_path = data_dir + '/training_wf/'
validate_images_path = data_dir + '/validate_wf/'
else:
save_weights_name = model_name + '-SIM_' + data_name
cur_data_loader = data_loader_multi_channel
train_images_path = data_dir + '/training/'
validate_images_path = data_dir + '/validate/'
save_weights_path = save_weights_dir + '/' + save_weights_name + '/'
train_gt_path = data_dir + '/training_gt/'
validate_gt_path = data_dir + '/validate_gt/'
sample_path = save_weights_path + 'sampled_img/'
if not os.path.exists(save_weights_path):
os.mkdir(save_weights_path)
if not os.path.exists(sample_path):
os.mkdir(sample_path)
# --------------------------------------------------------------------------------
# select models and optimizer
# --------------------------------------------------------------------------------
modelFns = {'DFCAN': DFCAN16.DFCAN}
modelFN = modelFns[model_name]
optimizer_g = optimizers.adam(lr=start_lr, beta_1=0.9, beta_2=0.999)
# --------------------------------------------------------------------------------
# define combined model
# --------------------------------------------------------------------------------
g = modelFN((patch_height, patch_width, input_channels))
g.compile(loss=loss_mse_ssim, optimizer=optimizer_g)
lr_controller = ReduceLROnPlateau(model=g, factor=lr_decay_factor, patience=10, mode='min', min_delta=1e-4,
cooldown=0, min_lr=start_lr * 0.1, verbose=1)
# --------------------------------------------------------------------------------
# about Tensorboard
# --------------------------------------------------------------------------------
log_path = save_weights_path + 'graph'
if not os.path.exists(log_path):
os.mkdir(log_path)
callback = TensorBoard(log_path)
callback.set_model(g)
train_names = 'training_loss'
val_names = ['val_MSE', 'val_SSIM', 'val_PSNR', 'val_NRMSE']
def write_log(callback, names, logs, batch_no):
summary = tf.Summary()
summary_value = summary.value.add()
summary_value.simple_value = logs
summary_value.tag = names
callback.writer.add_summary(summary, batch_no)
callback.writer.flush()
# --------------------------------------------------------------------------------
# Sample and validate
# --------------------------------------------------------------------------------
def Validate(iter, sample=0):
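    # sample=1: plot a 3x3 grid comparing input / output / ground truth; sample=0: compute validation metrics, save checkpoints, and adjust the learning rate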
validate_path = glob.glob(validate_images_path + '*')
validate_path.sort()
if sample == 1:
r, c = 3, 3
mses, nrmses, psnrs, ssims = [], [], [], []
img_show, gt_show, output_show = [], [], []
validate_path = np.random.choice(validate_path, size=r)
for path in validate_path:
[img, gt] = cur_data_loader([path], validate_images_path, validate_gt_path, patch_height,
patch_width, 1, norm_flag=norm_flag, scale=scale_factor)
output = np.squeeze(g.predict(img))
mses, nrmses, psnrs, ssims = img_comp(gt, output, mses, nrmses, psnrs, ssims)
img_show.append(np.squeeze(np.mean(img, 3)))
gt_show.append(np.squeeze(gt))
output_show.append(output)
# show some examples
fig, axs = plt.subplots(r, c)
cnt = 0
for row in range(r):
axs[row, 1].set_title('MSE=%.4f, SSIM=%.4f, PSNR=%.4f' % (mses[row], ssims[row], psnrs[row]))
for col, image in enumerate([img_show, output_show, gt_show]):
axs[row, col].imshow(np.squeeze(image[row]))
axs[row, col].axis('off')
cnt += 1
fig.savefig(sample_path + '%d.png' % iter)
plt.close()
else:
if validate_num < validate_path.__len__():
validate_path = validate_path[0:validate_num]
mses, nrmses, psnrs, ssims = [], [], [], []
for path in validate_path:
[img, gt] = cur_data_loader([path], validate_images_path, validate_gt_path, patch_height,
patch_width, 1, norm_flag=norm_flag, scale=scale_factor)
output = np.squeeze(g.predict(img))
mses, nrmses, psnrs, ssims = img_comp(gt, output, mses, nrmses, psnrs, ssims)
# if best, save weights.best
g.save_weights(save_weights_path + 'weights.latest')
if min(validate_nrmse) > np.mean(nrmses):
g.save_weights(save_weights_path + 'weights.best')
validate_nrmse.append(np.mean(nrmses))
curlr = lr_controller.on_epoch_end(iter, np.mean(nrmses))
write_log(callback, val_names[0], np.mean(mses), iter)
write_log(callback, val_names[1], np.mean(ssims), iter)
write_log(callback, val_names[2], np.mean(psnrs), iter)
write_log(callback, val_names[3], np.mean(nrmses), iter)
write_log(callback, 'lr', curlr, iter)
# --------------------------------------------------------------------------------
# if exist, load weights
# --------------------------------------------------------------------------------
if load_weights:
if os.path.exists(save_weights_path + 'weights.best'):
        g.load_weights(save_weights_path + 'weights.best')
print('Loading weights successfully: ' + save_weights_path + 'weights.best')
elif os.path.exists(save_weights_path + 'weights.latest'):
        g.load_weights(save_weights_path + 'weights.latest')
print('Loading weights successfully: ' + save_weights_path + 'weights.latest')
# --------------------------------------------------------------------------------
# training
# --------------------------------------------------------------------------------
start_time = datetime.datetime.now()
loss_record = []
validate_nrmse = [np.Inf]
lr_controller.on_train_begin()
images_path = glob.glob(train_images_path + '/*')
for it in range(iterations):
# ------------------------------------
# train generator
# ------------------------------------
input_g, gt_g = cur_data_loader(images_path, train_images_path, train_gt_path, patch_height, patch_width,
batch_size, norm_flag=norm_flag, scale=scale_factor)
loss_generator = g.train_on_batch(input_g, gt_g)
loss_record.append(loss_generator)
elapsed_time = datetime.datetime.now() - start_time
print("%d epoch: time: %s, g_loss = %s" % (it + 1, elapsed_time, loss_generator))
if (it + 1) % sample_interval == 0:
images_path = glob.glob(train_images_path + '/*')
Validate(it + 1, sample=1)
if (it + 1) % validate_interval == 0:
Validate(it + 1, sample=0)
write_log(callback, train_names, np.mean(loss_record), it + 1)
loss_record = []
| [((418, 443), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (441, 443), False, 'import argparse\n'), ((2580, 2646), 'tensorflow.GPUOptions', 'tf.GPUOptions', ([], {'per_process_gpu_memory_fraction': 'gpu_memory_fraction'}), '(per_process_gpu_memory_fraction=gpu_memory_fraction)\n', (2593, 2646), True, 'import tensorflow as tf\n'), ((3822, 3876), 'keras.optimizers.adam', 'optimizers.adam', ([], {'lr': 'start_lr', 'beta_1': '(0.9)', 'beta_2': '(0.999)'}), '(lr=start_lr, beta_1=0.9, beta_2=0.999)\n', (3837, 3876), False, 'from keras import optimizers\n'), ((4223, 4366), 'utils.lr_controller.ReduceLROnPlateau', 'ReduceLROnPlateau', ([], {'model': 'g', 'factor': 'lr_decay_factor', 'patience': '(10)', 'mode': '"""min"""', 'min_delta': '(0.0001)', 'cooldown': '(0)', 'min_lr': '(start_lr * 0.1)', 'verbose': '(1)'}), "(model=g, factor=lr_decay_factor, patience=10, mode='min',\n min_delta=0.0001, cooldown=0, min_lr=start_lr * 0.1, verbose=1)\n", (4240, 4366), False, 'from utils.lr_controller import ReduceLROnPlateau\n'), ((4720, 4741), 'keras.callbacks.TensorBoard', 'TensorBoard', (['log_path'], {}), '(log_path)\n', (4731, 4741), False, 'from keras.callbacks import TensorBoard\n'), ((8712, 8735), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (8733, 8735), False, 'import datetime\n'), ((8824, 8859), 'glob.glob', 'glob.glob', (["(train_images_path + '/*')"], {}), "(train_images_path + '/*')\n", (8833, 8859), False, 'import glob\n'), ((3389, 3422), 'os.path.exists', 'os.path.exists', (['save_weights_path'], {}), '(save_weights_path)\n', (3403, 3422), False, 'import os\n'), ((3428, 3455), 'os.mkdir', 'os.mkdir', (['save_weights_path'], {}), '(save_weights_path)\n', (3436, 3455), False, 'import os\n'), ((3463, 3490), 'os.path.exists', 'os.path.exists', (['sample_path'], {}), '(sample_path)\n', (3477, 3490), False, 'import os\n'), ((3496, 3517), 'os.mkdir', 'os.mkdir', (['sample_path'], {}), '(sample_path)\n', (3504, 3517), False, 'import os\n'), ((4660, 4684), 'os.path.exists', 'os.path.exists', (['log_path'], {}), '(log_path)\n', (4674, 4684), False, 'import os\n'), ((4690, 4708), 'os.mkdir', 'os.mkdir', (['log_path'], {}), '(log_path)\n', (4698, 4708), False, 'import os\n'), ((4919, 4931), 'tensorflow.Summary', 'tf.Summary', ([], {}), '()\n', (4929, 4931), True, 'import tensorflow as tf\n'), ((5387, 5424), 'glob.glob', 'glob.glob', (["(validate_images_path + '*')"], {}), "(validate_images_path + '*')\n", (5396, 5424), False, 'import glob\n'), ((8078, 8128), 'os.path.exists', 'os.path.exists', (["(save_weights_path + 'weights.best')"], {}), "(save_weights_path + 'weights.best')\n", (8092, 8128), False, 'import os\n'), ((2665, 2704), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {'gpu_options': 'gpu_options'}), '(gpu_options=gpu_options)\n', (2679, 2704), True, 'import tensorflow as tf\n'), ((5618, 5657), 'numpy.random.choice', 'np.random.choice', (['validate_path'], {'size': 'r'}), '(validate_path, size=r)\n', (5634, 5657), True, 'import numpy as np\n'), ((6221, 6239), 'matplotlib.pyplot.subplots', 'plt.subplots', (['r', 'c'], {}), '(r, c)\n', (6233, 6239), True, 'import matplotlib.pyplot as plt\n'), ((6649, 6660), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (6658, 6660), True, 'import matplotlib.pyplot as plt\n'), ((8283, 8335), 'os.path.exists', 'os.path.exists', (["(save_weights_path + 'weights.latest')"], {}), "(save_weights_path + 'weights.latest')\n", (8297, 8335), False, 'import os\n'), ((9316, 9339), 'datetime.datetime.now', 
'datetime.datetime.now', ([], {}), '()\n', (9337, 9339), False, 'import datetime\n'), ((9502, 9537), 'glob.glob', 'glob.glob', (["(train_images_path + '/*')"], {}), "(train_images_path + '/*')\n", (9511, 9537), False, 'import glob\n'), ((5981, 6029), 'utils.utils.img_comp', 'img_comp', (['gt', 'output', 'mses', 'nrmses', 'psnrs', 'ssims'], {}), '(gt, output, mses, nrmses, psnrs, ssims)\n', (5989, 6029), False, 'from utils.utils import img_comp\n'), ((7155, 7203), 'utils.utils.img_comp', 'img_comp', (['gt', 'output', 'mses', 'nrmses', 'psnrs', 'ssims'], {}), '(gt, output, mses, nrmses, psnrs, ssims)\n', (7163, 7203), False, 'from utils.utils import img_comp\n'), ((7336, 7351), 'numpy.mean', 'np.mean', (['nrmses'], {}), '(nrmses)\n', (7343, 7351), True, 'import numpy as np\n'), ((7447, 7462), 'numpy.mean', 'np.mean', (['nrmses'], {}), '(nrmses)\n', (7454, 7462), True, 'import numpy as np\n'), ((7513, 7528), 'numpy.mean', 'np.mean', (['nrmses'], {}), '(nrmses)\n', (7520, 7528), True, 'import numpy as np\n'), ((7572, 7585), 'numpy.mean', 'np.mean', (['mses'], {}), '(mses)\n', (7579, 7585), True, 'import numpy as np\n'), ((7635, 7649), 'numpy.mean', 'np.mean', (['ssims'], {}), '(ssims)\n', (7642, 7649), True, 'import numpy as np\n'), ((7699, 7713), 'numpy.mean', 'np.mean', (['psnrs'], {}), '(psnrs)\n', (7706, 7713), True, 'import numpy as np\n'), ((7763, 7778), 'numpy.mean', 'np.mean', (['nrmses'], {}), '(nrmses)\n', (7770, 7778), True, 'import numpy as np\n'), ((9692, 9712), 'numpy.mean', 'np.mean', (['loss_record'], {}), '(loss_record)\n', (9699, 9712), True, 'import numpy as np\n'), ((6114, 6128), 'numpy.squeeze', 'np.squeeze', (['gt'], {}), '(gt)\n', (6124, 6128), True, 'import numpy as np\n'), ((6069, 6084), 'numpy.mean', 'np.mean', (['img', '(3)'], {}), '(img, 3)\n', (6076, 6084), True, 'import numpy as np\n'), ((6503, 6525), 'numpy.squeeze', 'np.squeeze', (['image[row]'], {}), '(image[row])\n', (6513, 6525), True, 'import numpy as np\n')] |
erlendve/catalyst | catalyst/exchange/live_graph_clock.py | 463575bc23c0abd1287f8ec81c4377baabf2b8b8 | import pandas as pd
from catalyst.constants import LOG_LEVEL
from catalyst.exchange.utils.stats_utils import prepare_stats
from catalyst.gens.sim_engine import (
BAR,
SESSION_START
)
from logbook import Logger
log = Logger('LiveGraphClock', level=LOG_LEVEL)
class LiveGraphClock(object):
"""Realtime clock for live trading.
This class is a drop-in replacement for
:class:`zipline.gens.sim_engine.MinuteSimulationClock`.
This mixes the clock with a live graph.
Notes
-----
This seemingly awkward approach allows us to run the program using a single
thread. This is important because Matplotlib does not play nice with
multi-threaded environments. Zipline probably does not either.
Matplotlib has a pause() method which is a wrapper around time.sleep()
used in the SimpleClock. The key difference is that users
can still interact with the chart during the pause cycles. This is
what enables us to keep a single thread. This is also why we are not using
    the 'animate' callback of Matplotlib. We need direct access to the
__iter__ method in order to yield events to Zipline.
The :param:`time_skew` parameter represents the time difference between
the exchange and the live trading machine's clock. It's not used currently.
"""
def __init__(self, sessions, context, callback=None,
time_skew=pd.Timedelta('0s')):
self.sessions = sessions
self.time_skew = time_skew
self._last_emit = None
self._before_trading_start_bar_yielded = True
self.context = context
self.callback = callback
def __iter__(self):
from matplotlib import pyplot as plt
yield pd.Timestamp.utcnow(), SESSION_START
while True:
current_time = pd.Timestamp.utcnow()
current_minute = current_time.floor('1T')
if self._last_emit is None or current_minute > self._last_emit:
log.debug('emitting minutely bar: {}'.format(current_minute))
self._last_emit = current_minute
yield current_minute, BAR
recorded_cols = list(self.context.recorded_vars.keys())
df, _ = prepare_stats(
self.context.frame_stats, recorded_cols=recorded_cols
)
self.callback(self.context, df)
else:
# I can't use the "animate" reactive approach here because
# I need to yield from the main loop.
# Workaround: https://stackoverflow.com/a/33050617/814633
plt.pause(1)
| [((225, 266), 'logbook.Logger', 'Logger', (['"""LiveGraphClock"""'], {'level': 'LOG_LEVEL'}), "('LiveGraphClock', level=LOG_LEVEL)\n", (231, 266), False, 'from logbook import Logger\n'), ((1400, 1418), 'pandas.Timedelta', 'pd.Timedelta', (['"""0s"""'], {}), "('0s')\n", (1412, 1418), True, 'import pandas as pd\n'), ((1808, 1829), 'pandas.Timestamp.utcnow', 'pd.Timestamp.utcnow', ([], {}), '()\n', (1827, 1829), True, 'import pandas as pd\n'), ((1723, 1744), 'pandas.Timestamp.utcnow', 'pd.Timestamp.utcnow', ([], {}), '()\n', (1742, 1744), True, 'import pandas as pd\n'), ((2228, 2296), 'catalyst.exchange.utils.stats_utils.prepare_stats', 'prepare_stats', (['self.context.frame_stats'], {'recorded_cols': 'recorded_cols'}), '(self.context.frame_stats, recorded_cols=recorded_cols)\n', (2241, 2296), False, 'from catalyst.exchange.utils.stats_utils import prepare_stats\n'), ((2622, 2634), 'matplotlib.pyplot.pause', 'plt.pause', (['(1)'], {}), '(1)\n', (2631, 2634), True, 'from matplotlib import pyplot as plt\n')] |
LeoLeiva/todo-challenge | invera/api/tests.py | f6f24f53758eb4e425c91516bcab7af8cad66814 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import inspect
from task.models import InveraTask
from api.utils import send_test_csv_report
from django.contrib.auth.models import User
from rest_framework.test import APIClient, APITestCase
from rest_framework.reverse import reverse
from rest_framework import status
TEST_RESULTS = []
RECIPIENTS = ['email@destino.com']
class TaskListTestCase(APITestCase):
def setUp(self) -> None:
self.user = User.objects.create_user(
username='test_user', password='adminpass')
self.other_user = User.objects.create_user(
username='other_user', password='adminpass')
self.task = InveraTask.objects.create(
userTask=self.user, title='My Initial Task')
self.client = APIClient()
@classmethod
def tearDownClass(cls):
User.objects.filter(username__in=['test_user', 'other_user']).delete()
def test_create_task_with_un_authenticate_user(self):
"""
        In this test case, we exercise the Task Create API using an unauthenticated user.
"""
response = self.client.post(
reverse('api-task'), {'title': 'My Task 1'}, format='json')
is_passed = response.status_code == status.HTTP_403_FORBIDDEN
TEST_RESULTS.append({
"result": "Passed" if is_passed else "Failed",
"test_name": inspect.currentframe().f_code.co_name,
"test_description": "El usuario no autenticado no puede agregar una tarea a la lista"
})
if is_passed:
print("Resultado: Aprobado")
else:
print("Resultado: Fallido")
print("Nombre del test: " + inspect.currentframe().f_code.co_name)
print("Descripcion: El usuario no autenticado no puede agregar una tarea a la lista")
print("-----------")
def test_put_task_with_un_authenticate_user(self):
"""
        In this test case, we exercise the Task PUT API using an unauthenticated user.
"""
response = self.client.put(
reverse('api-task'), {'title': 'My Task'}, format='json')
is_passed = response.status_code == status.HTTP_403_FORBIDDEN
TEST_RESULTS.append({
"result": "Passed" if is_passed else "Failed",
"test_name": inspect.currentframe().f_code.co_name,
"test_description": "El usuario no autenticado no puede modificar una tarea"
})
if is_passed:
print("Resultado: Aprobado")
else:
print("Resultado: Fallido")
print("Nombre del test: " + inspect.currentframe().f_code.co_name)
print("Descripcion: El usuario no autenticado no puede modificar una tarea")
print("-----------")
def test_put_task_with_authenticated_user(self):
self.client.login(username='test_user', password='adminpass')
response = self.client.put(reverse('api-task-detail', args=[str(self.task.idTask)]), {'title': 'My Task 2'}, format='json')
is_passed = response.status_code == status.HTTP_200_OK
TEST_RESULTS.append({
"result": "Passed" if is_passed else "Failed",
"test_name": inspect.currentframe().f_code.co_name,
"test_description": "Usuario autenticado puede modificar una tarea suya"
})
if is_passed:
print("Resultado: Aprobado")
else:
print("Resultado: Fallido")
print("Nombre del test: " + inspect.currentframe().f_code.co_name)
print("Descripcion: Usuario autenticado puede modificar una tarea suya")
print("-----------")
def test_get_other_user_task_detail(self):
"""
        In this test case, we exercise the Task GET API, trying to fetch a task's details while authenticated as a different user.
"""
self.client.login(username='other_user', password='adminpass')
response = self.client.get(reverse('api-task-detail', args=[str(self.task.idTask)]))
is_passed = response.status_code == status.HTTP_404_NOT_FOUND
# is_passed = response.status_code == status.HTTP_403_FORBIDDEN
TEST_RESULTS.append({
"result": "Passed" if is_passed else "Failed",
"test_name": inspect.currentframe().f_code.co_name,
"test_description": "Solo el propietario puede ver el detalle de la tarea"
})
if is_passed:
print("Resultado: Aprobado")
else:
print("Resultado: Fallido")
print("Nombre del test: " + inspect.currentframe().f_code.co_name)
print("Descripcion: Solo el propietario puede ver el detalle de la tarea")
print("-----------")
def test_create_task_with_authenticated_user(self):
self.client.login(username='test_user', password='adminpass')
response = self.client.post(reverse('api-task'), {'title': 'My Task'}, format='json')
is_passed = response.status_code == status.HTTP_201_CREATED
TEST_RESULTS.append({
"result": "Passed" if is_passed else "Failed",
"test_name": inspect.currentframe().f_code.co_name,
"test_description": "Usuario autenticado agrega tarea a la lista"
})
if is_passed:
print("Resultado: Aprobado")
else:
print("Resultado: Fallido")
print("Nombre del test: " + inspect.currentframe().f_code.co_name)
print("Descripcion: Usuario autenticado agrega tarea a la lista")
print("-----------")
def test_get_task_detail(self):
self.client.login(username='test_user', password='adminpass')
response = self.client.get(reverse('api-task-detail', args=[str(self.task.idTask)]))
is_passed = response.status_code == status.HTTP_200_OK
TEST_RESULTS.append({
"result": "Passed" if is_passed else "Failed",
"test_name": inspect.currentframe().f_code.co_name,
"test_description": "Usuario autenticado puede ver detalles de la tarea correctamente"
})
if is_passed:
print("Resultado: Aprobado")
else:
print("Resultado: Fallido")
print("Nombre del test: " + inspect.currentframe().f_code.co_name)
print("Descripcion: Usuario autenticado puede ver detalles de la tarea correctamente")
print("-----------")
class CSVReportTest(APITestCase):
def test_send_csv(self):
send_test_csv_report(
test_results=TEST_RESULTS,
recipients=RECIPIENTS
)
| [((478, 546), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""test_user"""', 'password': '"""adminpass"""'}), "(username='test_user', password='adminpass')\n", (502, 546), False, 'from django.contrib.auth.models import User\n'), ((586, 655), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""other_user"""', 'password': '"""adminpass"""'}), "(username='other_user', password='adminpass')\n", (610, 655), False, 'from django.contrib.auth.models import User\n'), ((689, 759), 'task.models.InveraTask.objects.create', 'InveraTask.objects.create', ([], {'userTask': 'self.user', 'title': '"""My Initial Task"""'}), "(userTask=self.user, title='My Initial Task')\n", (714, 759), False, 'from task.models import InveraTask\n'), ((796, 807), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (805, 807), False, 'from rest_framework.test import APIClient, APITestCase\n'), ((6544, 6614), 'api.utils.send_test_csv_report', 'send_test_csv_report', ([], {'test_results': 'TEST_RESULTS', 'recipients': 'RECIPIENTS'}), '(test_results=TEST_RESULTS, recipients=RECIPIENTS)\n', (6564, 6614), False, 'from api.utils import send_test_csv_report\n'), ((1172, 1191), 'rest_framework.reverse.reverse', 'reverse', (['"""api-task"""'], {}), "('api-task')\n", (1179, 1191), False, 'from rest_framework.reverse import reverse\n'), ((2114, 2133), 'rest_framework.reverse.reverse', 'reverse', (['"""api-task"""'], {}), "('api-task')\n", (2121, 2133), False, 'from rest_framework.reverse import reverse\n'), ((4962, 4981), 'rest_framework.reverse.reverse', 'reverse', (['"""api-task"""'], {}), "('api-task')\n", (4969, 4981), False, 'from rest_framework.reverse import reverse\n'), ((862, 923), 'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', ([], {'username__in': "['test_user', 'other_user']"}), "(username__in=['test_user', 'other_user'])\n", (881, 923), False, 'from django.contrib.auth.models import User\n'), ((1418, 1440), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (1438, 1440), False, 'import inspect\n'), ((1720, 1742), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (1740, 1742), False, 'import inspect\n'), ((2358, 2380), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (2378, 2380), False, 'import inspect\n'), ((2651, 2673), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (2671, 2673), False, 'import inspect\n'), ((3239, 3261), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (3259, 3261), False, 'import inspect\n'), ((3527, 3549), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (3547, 3549), False, 'import inspect\n'), ((4341, 4363), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (4361, 4363), False, 'import inspect\n'), ((4631, 4653), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (4651, 4653), False, 'import inspect\n'), ((5203, 5225), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (5223, 5225), False, 'import inspect\n'), ((5484, 5506), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (5504, 5506), False, 'import inspect\n'), ((6006, 6028), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (6026, 6028), False, 'import inspect\n'), ((6308, 6330), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (6328, 6330), False, 'import inspect\n')] |
LoopTilingBenchmark/benchmark | experiments/seidel-2d/tmp_files/6745.py | 52a3d2e70216552a498fd91de02a2fa9cb62122c | from chill import *
source('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/polybench/polybench-code/stencils/seidel-2d/kernel.c')
destination('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/experiments/seidel-2d/tmp_files/6745.c')
procedure('kernel_seidel_2d')
loop(0)
known(' n > 2 ')
tile(0,2,16,2)
tile(0,4,16,4)
| [] |
dmrz/baymax | baymax/api.py | 60cca5ae2e7cb42e093747f91b809e34e6782fcd | import json
import aiohttp
async def request(url, payload=None, params=None, headers=None):
headers = {'content-type': 'application/json', **(headers or {})}
data = payload and json.dumps(payload)
async with aiohttp.ClientSession() as client:
async with client.post(
url, data=data, params=params, headers=headers) as resp:
# TODO: Check response status
json_response = await resp.json()
return json_response
async def get_updates(base_url, timeout, offset):
params = {
'timeout': timeout,
'offset': offset
}
return await request(f'{base_url}/getUpdates', params=params)
async def send_message(base_url, chat_id, text, reply_markup=None):
payload = {
'chat_id': chat_id,
'text': text
}
if reply_markup is not None:
payload['reply_markup'] = reply_markup
return await request(f'{base_url}/sendMessage', payload)
async def answer_callback_query(
base_url, callback_query_id, text, show_alert,
url=None, cache_time=None):
payload = {
'callback_query_id': callback_query_id,
'text': text,
'show_alert': show_alert
}
if url is not None:
payload['url'] = url
if cache_time is not None:
payload['cache_time'] = cache_time
return await request(f'{base_url}/answerCallbackQuery', payload)
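# --- Hedged usage sketch (an addition, not part of the original module) ---
# Shows how the helpers above might compose into a minimal long-polling echo
# loop. The base_url value is an assumption; for Telegram it would normally be
# f'https://api.telegram.org/bot{TOKEN}' with a real bot token.
async def echo_forever(base_url, poll_timeout=30):
    offset = 0
    while True:
        updates = await get_updates(base_url, poll_timeout, offset)
        for update in updates.get('result', []):
            # Advance the offset so the same update is not delivered twice.
            offset = update['update_id'] + 1
            message = update.get('message')
            if message and 'text' in message:
                await send_message(base_url, message['chat']['id'], message['text'])
# To try it out (the token below is hypothetical):
#   import asyncio
#   asyncio.run(echo_forever('https://api.telegram.org/bot<TOKEN>'))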
| [((188, 207), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (198, 207), False, 'import json\n'), ((223, 246), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (244, 246), False, 'import aiohttp\n')] |
pszulczewski/nautobot-plugin-device-onboarding | nautobot_device_onboarding/tests/test_netdev_keeper.py | 9ddec52d7bcc751c4616bd7c1180ed2a1d31ff2c | """Unit tests for nautobot_device_onboarding.netdev_keeper module and its classes.
(c) 2020-2021 Network To Code
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from socket import gaierror
from unittest import mock
from django.test import TestCase
from nautobot.dcim.models import Site, DeviceRole, Platform
from nautobot_device_onboarding.exceptions import OnboardException
from nautobot_device_onboarding.helpers import onboarding_task_fqdn_to_ip
from nautobot_device_onboarding.models import OnboardingTask
class NetdevKeeperTestCase(TestCase):
"""Test the NetdevKeeper Class."""
def setUp(self):
"""Create a superuser and token for API calls."""
self.site1 = Site.objects.create(name="USWEST", slug="uswest")
self.device_role1 = DeviceRole.objects.create(name="Firewall", slug="firewall")
self.platform1 = Platform.objects.create(name="JunOS", slug="junos", napalm_driver="junos")
# self.platform2 = Platform.objects.create(name="Cisco NX-OS", slug="cisco-nx-os")
self.onboarding_task4 = OnboardingTask.objects.create(
ip_address="ntc123.local", site=self.site1, role=self.device_role1, platform=self.platform1
)
self.onboarding_task5 = OnboardingTask.objects.create(
ip_address="bad.local", site=self.site1, role=self.device_role1, platform=self.platform1
)
self.onboarding_task7 = OnboardingTask.objects.create(
ip_address="192.0.2.1/32", site=self.site1, role=self.device_role1, platform=self.platform1
)
@mock.patch("nautobot_device_onboarding.helpers.socket.gethostbyname")
def test_check_ip(self, mock_get_hostbyname):
"""Check DNS to IP address."""
# Look up response value
mock_get_hostbyname.return_value = "192.0.2.1"
# FQDN -> IP
onboarding_task_fqdn_to_ip(ot=self.onboarding_task4)
# Run the check to change the IP address
self.assertEqual(self.onboarding_task4.ip_address, "192.0.2.1")
@mock.patch("nautobot_device_onboarding.helpers.socket.gethostbyname")
def test_failed_check_ip(self, mock_get_hostbyname):
"""Check DNS to IP address failing."""
# Look up a failed response
mock_get_hostbyname.side_effect = gaierror(8)
# Check for bad.local raising an exception
with self.assertRaises(OnboardException) as exc_info:
onboarding_task_fqdn_to_ip(ot=self.onboarding_task5)
self.assertEqual(exc_info.exception.message, "ERROR failed to complete DNS lookup: bad.local")
self.assertEqual(exc_info.exception.reason, "fail-dns")
# Check for exception with prefix address entered
with self.assertRaises(OnboardException) as exc_info:
onboarding_task_fqdn_to_ip(ot=self.onboarding_task7)
self.assertEqual(exc_info.exception.reason, "fail-prefix")
self.assertEqual(exc_info.exception.message, "ERROR appears a prefix was entered: 192.0.2.1/32")
| [((2037, 2106), 'unittest.mock.patch', 'mock.patch', (['"""nautobot_device_onboarding.helpers.socket.gethostbyname"""'], {}), "('nautobot_device_onboarding.helpers.socket.gethostbyname')\n", (2047, 2106), False, 'from unittest import mock\n'), ((2495, 2564), 'unittest.mock.patch', 'mock.patch', (['"""nautobot_device_onboarding.helpers.socket.gethostbyname"""'], {}), "('nautobot_device_onboarding.helpers.socket.gethostbyname')\n", (2505, 2564), False, 'from unittest import mock\n'), ((1170, 1219), 'nautobot.dcim.models.Site.objects.create', 'Site.objects.create', ([], {'name': '"""USWEST"""', 'slug': '"""uswest"""'}), "(name='USWEST', slug='uswest')\n", (1189, 1219), False, 'from nautobot.dcim.models import Site, DeviceRole, Platform\n'), ((1248, 1307), 'nautobot.dcim.models.DeviceRole.objects.create', 'DeviceRole.objects.create', ([], {'name': '"""Firewall"""', 'slug': '"""firewall"""'}), "(name='Firewall', slug='firewall')\n", (1273, 1307), False, 'from nautobot.dcim.models import Site, DeviceRole, Platform\n'), ((1334, 1408), 'nautobot.dcim.models.Platform.objects.create', 'Platform.objects.create', ([], {'name': '"""JunOS"""', 'slug': '"""junos"""', 'napalm_driver': '"""junos"""'}), "(name='JunOS', slug='junos', napalm_driver='junos')\n", (1357, 1408), False, 'from nautobot.dcim.models import Site, DeviceRole, Platform\n'), ((1533, 1659), 'nautobot_device_onboarding.models.OnboardingTask.objects.create', 'OnboardingTask.objects.create', ([], {'ip_address': '"""ntc123.local"""', 'site': 'self.site1', 'role': 'self.device_role1', 'platform': 'self.platform1'}), "(ip_address='ntc123.local', site=self.site1,\n role=self.device_role1, platform=self.platform1)\n", (1562, 1659), False, 'from nautobot_device_onboarding.models import OnboardingTask\n'), ((1711, 1835), 'nautobot_device_onboarding.models.OnboardingTask.objects.create', 'OnboardingTask.objects.create', ([], {'ip_address': '"""bad.local"""', 'site': 'self.site1', 'role': 'self.device_role1', 'platform': 'self.platform1'}), "(ip_address='bad.local', site=self.site1, role\n =self.device_role1, platform=self.platform1)\n", (1740, 1835), False, 'from nautobot_device_onboarding.models import OnboardingTask\n'), ((1886, 2012), 'nautobot_device_onboarding.models.OnboardingTask.objects.create', 'OnboardingTask.objects.create', ([], {'ip_address': '"""192.0.2.1/32"""', 'site': 'self.site1', 'role': 'self.device_role1', 'platform': 'self.platform1'}), "(ip_address='192.0.2.1/32', site=self.site1,\n role=self.device_role1, platform=self.platform1)\n", (1915, 2012), False, 'from nautobot_device_onboarding.models import OnboardingTask\n'), ((2314, 2366), 'nautobot_device_onboarding.helpers.onboarding_task_fqdn_to_ip', 'onboarding_task_fqdn_to_ip', ([], {'ot': 'self.onboarding_task4'}), '(ot=self.onboarding_task4)\n', (2340, 2366), False, 'from nautobot_device_onboarding.helpers import onboarding_task_fqdn_to_ip\n'), ((2747, 2758), 'socket.gaierror', 'gaierror', (['(8)'], {}), '(8)\n', (2755, 2758), False, 'from socket import gaierror\n'), ((2885, 2937), 'nautobot_device_onboarding.helpers.onboarding_task_fqdn_to_ip', 'onboarding_task_fqdn_to_ip', ([], {'ot': 'self.onboarding_task5'}), '(ot=self.onboarding_task5)\n', (2911, 2937), False, 'from nautobot_device_onboarding.helpers import onboarding_task_fqdn_to_ip\n'), ((3246, 3298), 'nautobot_device_onboarding.helpers.onboarding_task_fqdn_to_ip', 'onboarding_task_fqdn_to_ip', ([], {'ot': 'self.onboarding_task7'}), '(ot=self.onboarding_task7)\n', (3272, 3298), False, 'from 
nautobot_device_onboarding.helpers import onboarding_task_fqdn_to_ip\n')] |
Phaeton-lang/baselines | superneurons/tools/img_val/main.py | 472c248047fbb55b5fa0e620758047b7f0a1d041 | # Created by ay27 at 17/4/9
import os
import matplotlib.pyplot as plt
import struct
import numpy as np
def trans(row):
return list(map(lambda x: np.uint8(x), row))
def read_image(filename):
    # Binary layout: four little-endian uint64 values (n, c, h, w) followed by
    # n images stored as raw uint8 channel planes (R, G, B), each h*w bytes.
with open(filename, mode='rb') as file:
n = file.read(8)
n = struct.unpack("<Q", n)[0]
c = file.read(8)
c = struct.unpack("<Q", c)[0]
h = file.read(8)
h = struct.unpack("<Q", h)[0]
w = file.read(8)
w = struct.unpack("<Q", w)[0]
print(n, c, h, w)
for ii in range(n):
r = trans(file.read(h*w))
g = trans(file.read(h*w))
b = trans(file.read(h*w))
if ii == 100:
break
print(file.tell() == os.fstat(file.fileno()).st_size)
img = np.array([r,g,b]).transpose(1,0).reshape(h,w,c)
print(img.shape)
plt.imshow(img)
plt.show()
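# --- Hedged helper sketch (an addition, not part of the original tool) ---
# Writes a tiny blob in the layout read_image() expects: four little-endian
# uint64 values (n, c, h, w) followed by n*c*h*w raw uint8 bytes (channel
# planes per image). Handy for exercising the reader without the full dataset;
# the default sizes are arbitrary.
def write_image_stub(filename, n=1, c=3, h=4, w=4):
    with open(filename, 'wb') as file:
        file.write(struct.pack("<QQQQ", n, c, h, w))
        pixels = np.random.randint(0, 256, size=n * c * h * w, dtype=np.uint8)
        file.write(pixels.tobytes())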
def read_label(path, ground_truth=None):
    # Same (n, c, h, w) header as the image blob; each label is a 4-byte
    # little-endian signed integer.
with open(path, 'rb') as file:
n = file.read(8)
n = struct.unpack("<Q", n)[0]
c = file.read(8)
c = struct.unpack("<Q", c)[0]
h = file.read(8)
h = struct.unpack("<Q", h)[0]
w = file.read(8)
w = struct.unpack("<Q", w)[0]
print(n, c, h, w)
label = []
sets = set()
while not (file.tell() == os.fstat(file.fileno()).st_size):
ch = file.read(4)
num = struct.unpack("<l", ch)[0]
label.append(num)
sets.add(num)
# print(file.tell() == os.fstat(file.fileno()).st_size)
print(label)
print(len(label))
# print(label[900],label[901], label[902], label[903], label[904])
return label
# if ground_truth:
# g = []
# with open(ground_truth) as file:
# for line in file:
# g.append(int(line.split(' ')[1]))
# np.testing.assert_array_equal(g, label)
if __name__ == '__main__':
# read_image('../../data/ilsvrc2012/img.bin')
# read_label('../../data/ilsvrc2012/label.bin', '../../data/ilsvrc2012/val.txt')
# read_image('../../build/cifar100_train_image.bin')
# read_label('../../build/cifar100_train_label.bin')
read_image('../../build/val_data_8.bin')
for i in range(10):
read_label('../../build/val_label_%d.bin' % i)
# labels = []
# for i in range(10):
# labels.append(read_label('../../build/val_label_%d.bin' % i))
#
# ground = []
# with open('../../build/shuffled_list') as file:
# ground.append() | [((872, 887), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {}), '(img)\n', (882, 887), True, 'import matplotlib.pyplot as plt\n'), ((896, 906), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (904, 906), True, 'import matplotlib.pyplot as plt\n'), ((280, 302), 'struct.unpack', 'struct.unpack', (['"""<Q"""', 'n'], {}), "('<Q', n)\n", (293, 302), False, 'import struct\n'), ((343, 365), 'struct.unpack', 'struct.unpack', (['"""<Q"""', 'c'], {}), "('<Q', c)\n", (356, 365), False, 'import struct\n'), ((406, 428), 'struct.unpack', 'struct.unpack', (['"""<Q"""', 'h'], {}), "('<Q', h)\n", (419, 428), False, 'import struct\n'), ((469, 491), 'struct.unpack', 'struct.unpack', (['"""<Q"""', 'w'], {}), "('<Q', w)\n", (482, 491), False, 'import struct\n'), ((1022, 1044), 'struct.unpack', 'struct.unpack', (['"""<Q"""', 'n'], {}), "('<Q', n)\n", (1035, 1044), False, 'import struct\n'), ((1085, 1107), 'struct.unpack', 'struct.unpack', (['"""<Q"""', 'c'], {}), "('<Q', c)\n", (1098, 1107), False, 'import struct\n'), ((1148, 1170), 'struct.unpack', 'struct.unpack', (['"""<Q"""', 'h'], {}), "('<Q', h)\n", (1161, 1170), False, 'import struct\n'), ((1211, 1233), 'struct.unpack', 'struct.unpack', (['"""<Q"""', 'w'], {}), "('<Q', w)\n", (1224, 1233), False, 'import struct\n'), ((152, 163), 'numpy.uint8', 'np.uint8', (['x'], {}), '(x)\n', (160, 163), True, 'import numpy as np\n'), ((1421, 1444), 'struct.unpack', 'struct.unpack', (['"""<l"""', 'ch'], {}), "('<l', ch)\n", (1434, 1444), False, 'import struct\n'), ((791, 810), 'numpy.array', 'np.array', (['[r, g, b]'], {}), '([r, g, b])\n', (799, 810), True, 'import numpy as np\n')] |
exenGT/pymatgen | pymatgen/analysis/tests/test_piezo.py | a8ffb820ab8fc3f60251099e38c8888f45eae618 | # Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
Test for the piezo tensor class
"""
__author__ = "Shyam Dwaraknath"
__version__ = "0.1"
__maintainer__ = "Shyam Dwaraknath"
__email__ = "shyamd@lbl.gov"
__status__ = "Development"
__date__ = "4/1/16"
import os
import unittest
import numpy as np
from pymatgen.analysis.piezo import PiezoTensor
from pymatgen.util.testing import PymatgenTest
class PiezoTest(PymatgenTest):
def setUp(self):
self.piezo_struc = self.get_structure("BaNiO3")
self.voigt_matrix = np.array(
[
[0.0, 0.0, 0.0, 0.0, 0.03839, 0.0],
[0.0, 0.0, 0.0, 0.03839, 0.0, 0.0],
[6.89822, 6.89822, 27.46280, 0.0, 0.0, 0.0],
]
)
self.vasp_matrix = np.array(
[
[0.0, 0.0, 0.0, 0.0, 0.0, 0.03839],
                [0.0, 0.0, 0.0, 0.0, 0.03839, 0.0],
[6.89822, 6.89822, 27.46280, 0.0, 0.0, 0.0],
]
)
self.full_tensor_array = [
[[0.0, 0.0, 0.03839], [0.0, 0.0, 0.0], [0.03839, 0.0, 0.0]],
[[0.0, 0.0, 0.0], [0.0, 0.0, 0.03839], [0.0, 0.03839, 0.0]],
[[6.89822, 0.0, 0.0], [0.0, 6.89822, 0.0], [0.0, 0.0, 27.4628]],
]
def test_new(self):
pt = PiezoTensor(self.full_tensor_array)
self.assertArrayAlmostEqual(pt, self.full_tensor_array)
bad_dim_array = np.zeros((3, 3))
self.assertRaises(ValueError, PiezoTensor, bad_dim_array)
def test_from_voigt(self):
bad_voigt = np.zeros((3, 7))
pt = PiezoTensor.from_voigt(self.voigt_matrix)
self.assertArrayEqual(pt, self.full_tensor_array)
self.assertRaises(ValueError, PiezoTensor.from_voigt, bad_voigt)
self.assertArrayEqual(self.voigt_matrix, pt.voigt)
def test_from_vasp_voigt(self):
bad_voigt = np.zeros((3, 7))
pt = PiezoTensor.from_vasp_voigt(self.vasp_matrix)
self.assertArrayEqual(pt, self.full_tensor_array)
self.assertRaises(ValueError, PiezoTensor.from_voigt, bad_voigt)
self.assertArrayEqual(self.voigt_matrix, pt.voigt)
if __name__ == "__main__":
unittest.main()
| [((2222, 2237), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2235, 2237), False, 'import unittest\n'), ((581, 711), 'numpy.array', 'np.array', (['[[0.0, 0.0, 0.0, 0.0, 0.03839, 0.0], [0.0, 0.0, 0.0, 0.03839, 0.0, 0.0], [\n 6.89822, 6.89822, 27.4628, 0.0, 0.0, 0.0]]'], {}), '([[0.0, 0.0, 0.0, 0.0, 0.03839, 0.0], [0.0, 0.0, 0.0, 0.03839, 0.0,\n 0.0], [6.89822, 6.89822, 27.4628, 0.0, 0.0, 0.0]])\n', (589, 711), True, 'import numpy as np\n'), ((821, 956), 'numpy.array', 'np.array', (['[[0.0, 0.0, 0.0, 0.0, 0.0, 0.03839], [0.0, 0.0, 0.0, 0.0, 0.03839, 0.0, 0.0\n ], [6.89822, 6.89822, 27.4628, 0.0, 0.0, 0.0]]'], {}), '([[0.0, 0.0, 0.0, 0.0, 0.0, 0.03839], [0.0, 0.0, 0.0, 0.0, 0.03839,\n 0.0, 0.0], [6.89822, 6.89822, 27.4628, 0.0, 0.0, 0.0]])\n', (829, 956), True, 'import numpy as np\n'), ((1345, 1380), 'pymatgen.analysis.piezo.PiezoTensor', 'PiezoTensor', (['self.full_tensor_array'], {}), '(self.full_tensor_array)\n', (1356, 1380), False, 'from pymatgen.analysis.piezo import PiezoTensor\n'), ((1469, 1485), 'numpy.zeros', 'np.zeros', (['(3, 3)'], {}), '((3, 3))\n', (1477, 1485), True, 'import numpy as np\n'), ((1604, 1620), 'numpy.zeros', 'np.zeros', (['(3, 7)'], {}), '((3, 7))\n', (1612, 1620), True, 'import numpy as np\n'), ((1634, 1675), 'pymatgen.analysis.piezo.PiezoTensor.from_voigt', 'PiezoTensor.from_voigt', (['self.voigt_matrix'], {}), '(self.voigt_matrix)\n', (1656, 1675), False, 'from pymatgen.analysis.piezo import PiezoTensor\n'), ((1923, 1939), 'numpy.zeros', 'np.zeros', (['(3, 7)'], {}), '((3, 7))\n', (1931, 1939), True, 'import numpy as np\n'), ((1953, 1998), 'pymatgen.analysis.piezo.PiezoTensor.from_vasp_voigt', 'PiezoTensor.from_vasp_voigt', (['self.vasp_matrix'], {}), '(self.vasp_matrix)\n', (1980, 1998), False, 'from pymatgen.analysis.piezo import PiezoTensor\n')] |
larsbutler/nova | nova/virt/driver.py | fb190f30a911658d8b0c4deaf43cbb8c9e35b672 | # Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Driver base-classes:
(Beginning of) the contract that compute drivers must follow, and shared
types that support that contract
"""
import sys
from oslo_log import log as logging
from oslo_utils import importutils
import nova.conf
from nova.i18n import _, _LE, _LI
from nova import utils
from nova.virt import event as virtevent
CONF = nova.conf.CONF
LOG = logging.getLogger(__name__)
def driver_dict_from_config(named_driver_config, *args, **kwargs):
driver_registry = dict()
for driver_str in named_driver_config:
driver_type, _sep, driver = driver_str.partition('=')
driver_class = importutils.import_class(driver)
driver_registry[driver_type] = driver_class(*args, **kwargs)
return driver_registry
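# Illustrative note (an assumption added for clarity, not part of the original
# module): each named_driver_config entry is expected to look like
# "<type>=<importable class path>", e.g.
#     ['iscsi=nova.virt.libvirt.volume.iscsi.LibvirtISCSIVolumeDriver']
# which would yield {'iscsi': LibvirtISCSIVolumeDriver(*args, **kwargs)}.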
def get_block_device_info(instance, block_device_mapping):
"""Converts block device mappings for an instance to driver format.
Virt drivers expect block device mapping to be presented in the format
of a dict containing the following keys:
- root_device_name: device name of the root disk
- ephemerals: a (potentially empty) list of DriverEphemeralBlockDevice
instances
- swap: An instance of DriverSwapBlockDevice or None
- block_device_mapping: a (potentially empty) list of
                                DriverVolumeBlockDevice or any of its more
specialized subclasses.
"""
from nova.virt import block_device as virt_block_device
block_device_info = {
'root_device_name': instance.root_device_name,
'ephemerals': virt_block_device.convert_ephemerals(
block_device_mapping),
'block_device_mapping':
virt_block_device.convert_all_volumes(*block_device_mapping)
}
swap_list = virt_block_device.convert_swap(block_device_mapping)
block_device_info['swap'] = virt_block_device.get_swap(swap_list)
return block_device_info
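# Illustrative shape of the dict assembled above (values are assumptions shown
# only for clarity):
#     {'root_device_name': '/dev/vda',
#      'ephemerals': [...],            # DriverEphemeralBlockDevice objects
#      'swap': None,                   # or a DriverSwapBlockDevice
#      'block_device_mapping': [...]}  # DriverVolumeBlockDevice objects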
def block_device_info_get_root(block_device_info):
block_device_info = block_device_info or {}
return block_device_info.get('root_device_name')
def block_device_info_get_swap(block_device_info):
block_device_info = block_device_info or {}
return block_device_info.get('swap') or {'device_name': None,
'swap_size': 0}
def swap_is_usable(swap):
return swap and swap['device_name'] and swap['swap_size'] > 0
def block_device_info_get_ephemerals(block_device_info):
block_device_info = block_device_info or {}
ephemerals = block_device_info.get('ephemerals') or []
return ephemerals
def block_device_info_get_mapping(block_device_info):
block_device_info = block_device_info or {}
block_device_mapping = block_device_info.get('block_device_mapping') or []
return block_device_mapping
class ComputeDriver(object):
"""Base class for compute drivers.
The interface to this class talks in terms of 'instances' (Amazon EC2 and
internal Nova terminology), by which we mean 'running virtual machine'
(XenAPI terminology) or domain (Xen or libvirt terminology).
An instance has an ID, which is the identifier chosen by Nova to represent
the instance further up the stack. This is unfortunately also called a
'name' elsewhere. As far as this layer is concerned, 'instance ID' and
'instance name' are synonyms.
Note that the instance ID or name is not human-readable or
customer-controlled -- it's an internal ID chosen by Nova. At the
nova.virt layer, instances do not have human-readable names at all -- such
things are only known higher up the stack.
Most virtualization platforms will also have their own identity schemes,
to uniquely identify a VM or domain. These IDs must stay internal to the
platform-specific layer, and never escape the connection interface. The
platform-specific layer is responsible for keeping track of which instance
ID maps to which platform-specific ID, and vice versa.
Some methods here take an instance of nova.compute.service.Instance. This
is the data structure used by nova.compute to store details regarding an
instance, and pass them into this layer. This layer is responsible for
translating that generic data structure into terms that are specific to the
virtualization platform.
"""
capabilities = {
"has_imagecache": False,
"supports_recreate": False,
"supports_migrate_to_same_host": False,
"supports_attach_interface": False,
"supports_device_tagging": False,
}
def __init__(self, virtapi):
self.virtapi = virtapi
self._compute_event_callback = None
def init_host(self, host):
"""Initialize anything that is necessary for the driver to function,
including catching up with currently running VM's on the given host.
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def cleanup_host(self, host):
"""Clean up anything that is necessary for the driver gracefully stop,
including ending remote sessions. This is optional.
"""
pass
def get_info(self, instance):
"""Get the current status of an instance, by name (not ID!)
:param instance: nova.objects.instance.Instance object
        Returns an InstanceInfo object
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def get_num_instances(self):
"""Return the total number of virtual machines.
Return the number of virtual machines that the hypervisor knows
about.
.. note::
This implementation works for all drivers, but it is
not particularly efficient. Maintainers of the virt drivers are
encouraged to override this method with something more
efficient.
"""
return len(self.list_instances())
def instance_exists(self, instance):
"""Checks existence of an instance on the host.
:param instance: The instance to lookup
Returns True if an instance with the supplied ID exists on
the host, False otherwise.
.. note::
This implementation works for all drivers, but it is
not particularly efficient. Maintainers of the virt drivers are
encouraged to override this method with something more
efficient.
"""
try:
return instance.uuid in self.list_instance_uuids()
except NotImplementedError:
return instance.name in self.list_instances()
def estimate_instance_overhead(self, instance_info):
"""Estimate the virtualization overhead required to build an instance
of the given flavor.
Defaults to zero, drivers should override if per-instance overhead
calculations are desired.
:param instance_info: Instance/flavor to calculate overhead for.
:returns: Dict of estimated overhead values.
"""
return {'memory_mb': 0,
'disk_gb': 0}
def list_instances(self):
"""Return the names of all the instances known to the virtualization
layer, as a list.
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def list_instance_uuids(self):
"""Return the UUIDS of all the instances known to the virtualization
layer, as a list.
"""
raise NotImplementedError()
def rebuild(self, context, instance, image_meta, injected_files,
admin_password, bdms, detach_block_devices,
attach_block_devices, network_info=None,
recreate=False, block_device_info=None,
preserve_ephemeral=False):
"""Destroy and re-make this instance.
A 'rebuild' effectively purges all existing data from the system and
remakes the VM with given 'metadata' and 'personalities'.
This base class method shuts down the VM, detaches all block devices,
then spins up the new VM afterwards. It may be overridden by
hypervisors that need to - e.g. for optimisations, or when the 'VM'
is actually proxied and needs to be held across the shutdown + spin
up steps.
:param context: security context
:param instance: nova.objects.instance.Instance
This function should use the data there to guide
the creation of the new instance.
:param nova.objects.ImageMeta image_meta:
The metadata of the image of the instance.
:param injected_files: User files to inject into instance.
:param admin_password: Administrator password to set in instance.
:param bdms: block-device-mappings to use for rebuild
:param detach_block_devices: function to detach block devices. See
nova.compute.manager.ComputeManager:_rebuild_default_impl for
usage.
:param attach_block_devices: function to attach block devices. See
nova.compute.manager.ComputeManager:_rebuild_default_impl for
usage.
:param network_info: instance network information
:param recreate: True if the instance is being recreated on a new
hypervisor - all the cleanup of old state is skipped.
:param block_device_info: Information about block devices to be
attached to the instance.
:param preserve_ephemeral: True if the default ephemeral storage
partition must be preserved on rebuild
"""
raise NotImplementedError()
def spawn(self, context, instance, image_meta, injected_files,
admin_password, network_info=None, block_device_info=None):
"""Create a new instance/VM/domain on the virtualization platform.
Once this successfully completes, the instance should be
running (power_state.RUNNING).
If this fails, any partial instance should be completely
cleaned up, and the virtualization platform should be in the state
that it was before this call began.
:param context: security context
:param instance: nova.objects.instance.Instance
This function should use the data there to guide
the creation of the new instance.
:param nova.objects.ImageMeta image_meta:
The metadata of the image of the instance.
:param injected_files: User files to inject into instance.
:param admin_password: Administrator password to set in instance.
:param network_info: instance network information
:param block_device_info: Information about block devices to be
attached to the instance.
"""
raise NotImplementedError()
def destroy(self, context, instance, network_info, block_device_info=None,
destroy_disks=True, migrate_data=None):
"""Destroy the specified instance from the Hypervisor.
If the instance is not found (for example if networking failed), this
function should still succeed. It's probably a good idea to log a
warning in that case.
:param context: security context
:param instance: Instance object as returned by DB layer.
:param network_info: instance network information
:param block_device_info: Information about block devices that should
be detached from the instance.
:param destroy_disks: Indicates if disks should be destroyed
:param migrate_data: implementation specific params
"""
raise NotImplementedError()
def cleanup(self, context, instance, network_info, block_device_info=None,
destroy_disks=True, migrate_data=None, destroy_vifs=True):
"""Cleanup the instance resources .
Instance should have been destroyed from the Hypervisor before calling
this method.
:param context: security context
:param instance: Instance object as returned by DB layer.
:param network_info: instance network information
:param block_device_info: Information about block devices that should
be detached from the instance.
:param destroy_disks: Indicates if disks should be destroyed
:param migrate_data: implementation specific params
"""
raise NotImplementedError()
def reboot(self, context, instance, network_info, reboot_type,
block_device_info=None, bad_volumes_callback=None):
"""Reboot the specified instance.
After this is called successfully, the instance's state
goes back to power_state.RUNNING. The virtualization
platform should ensure that the reboot action has completed
successfully even in cases in which the underlying domain/vm
is paused or halted/stopped.
:param instance: nova.objects.instance.Instance
:param network_info: instance network information
:param reboot_type: Either a HARD or SOFT reboot
:param block_device_info: Info pertaining to attached volumes
:param bad_volumes_callback: Function to handle any bad volumes
encountered
"""
raise NotImplementedError()
def get_console_pool_info(self, console_type):
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def get_console_output(self, context, instance):
"""Get console output for an instance
:param context: security context
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def get_vnc_console(self, context, instance):
"""Get connection info for a vnc console.
:param context: security context
:param instance: nova.objects.instance.Instance
        :returns: an instance of console.type.ConsoleVNC
"""
raise NotImplementedError()
def get_spice_console(self, context, instance):
"""Get connection info for a spice console.
:param context: security context
:param instance: nova.objects.instance.Instance
        :returns: an instance of console.type.ConsoleSpice
"""
raise NotImplementedError()
def get_rdp_console(self, context, instance):
"""Get connection info for a rdp console.
:param context: security context
:param instance: nova.objects.instance.Instance
        :returns: an instance of console.type.ConsoleRDP
"""
raise NotImplementedError()
def get_serial_console(self, context, instance):
"""Get connection info for a serial console.
:param context: security context
:param instance: nova.objects.instance.Instance
        :returns: an instance of console.type.ConsoleSerial
"""
raise NotImplementedError()
def get_mks_console(self, context, instance):
"""Get connection info for a MKS console.
:param context: security context
:param instance: nova.objects.instance.Instance
        :returns: an instance of console.type.ConsoleMKS
"""
raise NotImplementedError()
def get_diagnostics(self, instance):
"""Return diagnostics data about the given instance.
:param nova.objects.instance.Instance instance:
The instance to which the diagnostic data should be returned.
:return: Has a big overlap to the return value of the newer interface
:func:`get_instance_diagnostics`
:rtype: dict
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def get_instance_diagnostics(self, instance):
"""Return diagnostics data about the given instance.
:param nova.objects.instance.Instance instance:
The instance to which the diagnostic data should be returned.
:return: Has a big overlap to the return value of the older interface
:func:`get_diagnostics`
:rtype: nova.virt.diagnostics.Diagnostics
"""
raise NotImplementedError()
def get_all_bw_counters(self, instances):
"""Return bandwidth usage counters for each interface on each
running VM.
:param instances: nova.objects.instance.InstanceList
"""
raise NotImplementedError()
def get_all_volume_usage(self, context, compute_host_bdms):
"""Return usage info for volumes attached to vms on
           a given host.
"""
raise NotImplementedError()
def get_host_ip_addr(self):
"""Retrieves the IP address of the dom0
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def attach_volume(self, context, connection_info, instance, mountpoint,
disk_bus=None, device_type=None, encryption=None):
"""Attach the disk to the instance at mountpoint using info."""
raise NotImplementedError()
def detach_volume(self, connection_info, instance, mountpoint,
encryption=None):
"""Detach the disk attached to the instance."""
raise NotImplementedError()
def swap_volume(self, old_connection_info, new_connection_info,
instance, mountpoint, resize_to):
"""Replace the volume attached to the given `instance`.
:param dict old_connection_info:
The volume for this connection gets detached from the given
`instance`.
:param dict new_connection_info:
The volume for this connection gets attached to the given
'instance'.
:param nova.objects.instance.Instance instance:
The instance whose volume gets replaced by another one.
:param str mountpoint:
The mountpoint in the instance where the volume for
`old_connection_info` is attached to.
:param int resize_to:
If the new volume is larger than the old volume, it gets resized
to the given size (in Gigabyte) of `resize_to`.
:return: None
"""
raise NotImplementedError()
def attach_interface(self, instance, image_meta, vif):
"""Use hotplug to add a network interface to a running instance.
The counter action to this is :func:`detach_interface`.
:param nova.objects.instance.Instance instance:
The instance which will get an additional network interface.
:param nova.objects.ImageMeta image_meta:
The metadata of the image of the instance.
:param nova.network.model.NetworkInfo vif:
The object which has the information about the interface to attach.
:raise nova.exception.NovaException: If the attach fails.
:return: None
"""
raise NotImplementedError()
def detach_interface(self, instance, vif):
"""Use hotunplug to remove a network interface from a running instance.
The counter action to this is :func:`attach_interface`.
:param nova.objects.instance.Instance instance:
The instance which gets a network interface removed.
:param nova.network.model.NetworkInfo vif:
The object which has the information about the interface to detach.
:raise nova.exception.NovaException: If the detach fails.
:return: None
"""
raise NotImplementedError()
def migrate_disk_and_power_off(self, context, instance, dest,
flavor, network_info,
block_device_info=None,
timeout=0, retry_interval=0):
"""Transfers the disk of a running instance in multiple phases, turning
off the instance before the end.
:param nova.objects.instance.Instance instance:
The instance whose disk should be migrated.
:param str dest:
The IP address of the destination host.
:param nova.objects.flavor.Flavor flavor:
The flavor of the instance whose disk get migrated.
:param nova.network.model.NetworkInfo network_info:
The network information of the given `instance`.
:param dict block_device_info:
Information about the block devices.
:param int timeout:
The time in seconds to wait for the guest OS to shutdown.
:param int retry_interval:
How often to signal guest while waiting for it to shutdown.
:return: A list of disk information dicts in JSON format.
:rtype: str
"""
raise NotImplementedError()
def snapshot(self, context, instance, image_id, update_task_state):
"""Snapshots the specified instance.
:param context: security context
:param instance: nova.objects.instance.Instance
:param image_id: Reference to a pre-created image that will
hold the snapshot.
"""
raise NotImplementedError()
def post_interrupted_snapshot_cleanup(self, context, instance):
"""Cleans up any resources left after an interrupted snapshot.
:param context: security context
:param instance: nova.objects.instance.Instance
"""
pass
def finish_migration(self, context, migration, instance, disk_info,
network_info, image_meta, resize_instance,
block_device_info=None, power_on=True):
"""Completes a resize/migration.
:param context: the context for the migration/resize
:param migration: the migrate/resize information
:param instance: nova.objects.instance.Instance being migrated/resized
:param disk_info: the newly transferred disk information
:param network_info: instance network information
:param nova.objects.ImageMeta image_meta:
The metadata of the image of the instance.
:param resize_instance: True if the instance is being resized,
False otherwise
:param block_device_info: instance volume block device info
:param power_on: True if the instance should be powered on, False
otherwise
"""
raise NotImplementedError()
def confirm_migration(self, migration, instance, network_info):
"""Confirms a resize/migration, destroying the source VM.
:param instance: nova.objects.instance.Instance
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def finish_revert_migration(self, context, instance, network_info,
block_device_info=None, power_on=True):
"""Finish reverting a resize/migration.
:param context: the context for the finish_revert_migration
:param instance: nova.objects.instance.Instance being migrated/resized
:param network_info: instance network information
:param block_device_info: instance volume block device info
:param power_on: True if the instance should be powered on, False
otherwise
"""
raise NotImplementedError()
def pause(self, instance):
"""Pause the given instance.
A paused instance doesn't use CPU cycles of the host anymore. The
state of the VM could be stored in the memory or storage space of the
host, depending on the underlying hypervisor technology.
A "stronger" version of `pause` is :func:'suspend'.
The counter action for `pause` is :func:`unpause`.
:param nova.objects.instance.Instance instance:
The instance which should be paused.
:return: None
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def unpause(self, instance):
"""Unpause the given paused instance.
The paused instance gets unpaused and will use CPU cycles of the
host again. The counter action for 'unpause' is :func:`pause`.
Depending on the underlying hypervisor technology, the guest has the
same state as before the 'pause'.
:param nova.objects.instance.Instance instance:
The instance which should be unpaused.
:return: None
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def suspend(self, context, instance):
"""Suspend the specified instance.
A suspended instance doesn't use CPU cycles or memory of the host
anymore. The state of the instance could be persisted on the host
and allocate storage space this way. A "softer" way of `suspend`
is :func:`pause`. The counter action for `suspend` is :func:`resume`.
:param nova.context.RequestContext context:
The context for the suspend.
:param nova.objects.instance.Instance instance:
The instance to suspend.
:return: None
"""
raise NotImplementedError()
def resume(self, context, instance, network_info, block_device_info=None):
"""resume the specified suspended instance.
The suspended instance gets resumed and will use CPU cycles and memory
of the host again. The counter action for 'resume' is :func:`suspend`.
Depending on the underlying hypervisor technology, the guest has the
same state as before the 'suspend'.
:param nova.context.RequestContext context:
The context for the resume.
:param nova.objects.instance.Instance instance:
The suspended instance to resume.
:param nova.network.model.NetworkInfo network_info:
Necessary network information for the resume.
:param dict block_device_info:
Instance volume block device info.
:return: None
"""
raise NotImplementedError()
def resume_state_on_host_boot(self, context, instance, network_info,
block_device_info=None):
"""resume guest state when a host is booted.
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def rescue(self, context, instance, network_info, image_meta,
rescue_password):
"""Rescue the specified instance.
:param nova.context.RequestContext context:
The context for the rescue.
:param nova.objects.instance.Instance instance:
The instance being rescued.
:param nova.network.model.NetworkInfo network_info:
Necessary network information for the resume.
:param nova.objects.ImageMeta image_meta:
The metadata of the image of the instance.
:param rescue_password: new root password to set for rescue.
"""
raise NotImplementedError()
def set_bootable(self, instance, is_bootable):
"""Set the ability to power on/off an instance.
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def unrescue(self, instance, network_info):
"""Unrescue the specified instance.
:param instance: nova.objects.instance.Instance
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def power_off(self, instance, timeout=0, retry_interval=0):
"""Power off the specified instance.
:param instance: nova.objects.instance.Instance
:param timeout: time to wait for GuestOS to shutdown
:param retry_interval: How often to signal guest while
waiting for it to shutdown
"""
raise NotImplementedError()
def power_on(self, context, instance, network_info,
block_device_info=None):
"""Power on the specified instance.
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def trigger_crash_dump(self, instance):
"""Trigger crash dump mechanism on the given instance.
Stalling instances can be triggered to dump the crash data. How the
guest OS reacts in details, depends on the configuration of it.
:param nova.objects.instance.Instance instance:
The instance where the crash dump should be triggered.
:return: None
"""
raise NotImplementedError()
def soft_delete(self, instance):
"""Soft delete the specified instance.
A soft-deleted instance doesn't allocate any resources anymore, but is
still available as a database entry. The counter action :func:`restore`
uses the database entry to create a new instance based on that.
:param nova.objects.instance.Instance instance:
The instance to soft-delete.
:return: None
"""
raise NotImplementedError()
def restore(self, instance):
"""Restore the specified soft-deleted instance.
The restored instance will be automatically booted. The counter action
for `restore` is :func:`soft_delete`.
:param nova.objects.instance.Instance instance:
The soft-deleted instance which should be restored from the
soft-deleted data.
:return: None
"""
raise NotImplementedError()
def get_available_resource(self, nodename):
"""Retrieve resource information.
This method is called when nova-compute launches, and
as part of a periodic task that records the results in the DB.
:param nodename:
node which the caller want to get resources from
a driver that manages only one node can safely ignore this
:returns: Dictionary describing resources
"""
raise NotImplementedError()
def pre_live_migration(self, context, instance, block_device_info,
network_info, disk_info, migrate_data=None):
"""Prepare an instance for live migration
:param context: security context
:param instance: nova.objects.instance.Instance object
:param block_device_info: instance block device information
:param network_info: instance network information
:param disk_info: instance disk information
:param migrate_data: a LiveMigrateData object
"""
raise NotImplementedError()
def live_migration(self, context, instance, dest,
post_method, recover_method, block_migration=False,
migrate_data=None):
"""Live migration of an instance to another host.
:param context: security context
:param instance:
nova.db.sqlalchemy.models.Instance object
instance object that is migrated.
:param dest: destination host
:param post_method:
post operation method.
expected nova.compute.manager._post_live_migration.
:param recover_method:
recovery method when any exception occurs.
expected nova.compute.manager._rollback_live_migration.
:param block_migration: if true, migrate VM disk.
:param migrate_data: a LiveMigrateData object
"""
raise NotImplementedError()
def live_migration_force_complete(self, instance):
"""Force live migration to complete
:param instance: Instance being live migrated
"""
raise NotImplementedError()
def live_migration_abort(self, instance):
"""Abort an in-progress live migration.
:param instance: instance that is live migrating
"""
raise NotImplementedError()
def rollback_live_migration_at_destination(self, context, instance,
network_info,
block_device_info,
destroy_disks=True,
migrate_data=None):
"""Clean up destination node after a failed live migration.
:param context: security context
:param instance: instance object that was being migrated
:param network_info: instance network information
:param block_device_info: instance block device information
:param destroy_disks:
if true, destroy disks at destination during cleanup
:param migrate_data: a LiveMigrateData object
"""
raise NotImplementedError()
def post_live_migration(self, context, instance, block_device_info,
migrate_data=None):
"""Post operation of live migration at source host.
:param context: security context
        :param instance: instance object that was migrated
        :param block_device_info: instance block device information
:param migrate_data: a LiveMigrateData object
"""
pass
def post_live_migration_at_source(self, context, instance, network_info):
"""Unplug VIFs from networks at source.
:param context: security context
:param instance: instance object reference
:param network_info: instance network information
"""
raise NotImplementedError(_("Hypervisor driver does not support "
"post_live_migration_at_source method"))
def post_live_migration_at_destination(self, context, instance,
network_info,
block_migration=False,
block_device_info=None):
"""Post operation of live migration at destination host.
:param context: security context
:param instance: instance object that is migrated
:param network_info: instance network information
:param block_migration: if true, post operation of block_migration.
"""
raise NotImplementedError()
def check_instance_shared_storage_local(self, context, instance):
"""Check if instance files located on shared storage.
This runs check on the destination host, and then calls
back to the source host to check the results.
:param context: security context
:param instance: nova.objects.instance.Instance object
"""
raise NotImplementedError()
def check_instance_shared_storage_remote(self, context, data):
"""Check if instance files located on shared storage.
:param context: security context
:param data: result of check_instance_shared_storage_local
"""
raise NotImplementedError()
def check_instance_shared_storage_cleanup(self, context, data):
"""Do cleanup on host after check_instance_shared_storage calls
:param context: security context
:param data: result of check_instance_shared_storage_local
"""
pass
def check_can_live_migrate_destination(self, context, instance,
src_compute_info, dst_compute_info,
block_migration=False,
disk_over_commit=False):
"""Check if it is possible to execute live migration.
This runs checks on the destination host, and then calls
back to the source host to check the results.
:param context: security context
:param instance: nova.db.sqlalchemy.models.Instance
:param src_compute_info: Info about the sending machine
:param dst_compute_info: Info about the receiving machine
:param block_migration: if true, prepare for block migration
:param disk_over_commit: if true, allow disk over commit
:returns: a LiveMigrateData object (hypervisor-dependent)
"""
raise NotImplementedError()
def cleanup_live_migration_destination_check(self, context,
dest_check_data):
"""Do required cleanup on dest host after check_can_live_migrate calls
:param context: security context
:param dest_check_data: result of check_can_live_migrate_destination
"""
raise NotImplementedError()
def check_can_live_migrate_source(self, context, instance,
dest_check_data, block_device_info=None):
"""Check if it is possible to execute live migration.
This checks if the live migration can succeed, based on the
results from check_can_live_migrate_destination.
:param context: security context
:param instance: nova.db.sqlalchemy.models.Instance
:param dest_check_data: result of check_can_live_migrate_destination
:param block_device_info: result of _get_instance_block_device_info
:returns: a LiveMigrateData object
"""
raise NotImplementedError()
def get_instance_disk_info(self, instance,
block_device_info=None):
"""Retrieve information about actual disk sizes of an instance.
:param instance: nova.objects.Instance
:param block_device_info:
Optional; Can be used to filter out devices which are
actually volumes.
:return:
json strings with below format::
"[{'path':'disk',
'type':'raw',
'virt_disk_size':'10737418240',
'backing_file':'backing_file',
'disk_size':'83886080'
'over_committed_disk_size':'10737418240'},
...]"
"""
raise NotImplementedError()
def refresh_security_group_rules(self, security_group_id):
"""This method is called after a change to security groups.
All security groups and their associated rules live in the datastore,
and calling this method should apply the updated rules to instances
running the specified security group.
An error should be raised if the operation cannot complete.
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def refresh_instance_security_rules(self, instance):
"""Refresh security group rules
Gets called when an instance gets added to or removed from
the security group the instance is a member of or if the
group gains or loses a rule.
"""
raise NotImplementedError()
def reset_network(self, instance):
"""reset networking for specified instance."""
# TODO(Vek): Need to pass context in for access to auth_token
pass
def ensure_filtering_rules_for_instance(self, instance, network_info):
"""Setting up filtering rules and waiting for its completion.
To migrate an instance, filtering rules to hypervisors
and firewalls are inevitable on destination host.
( Waiting only for filtering rules to hypervisor,
since filtering rules to firewall rules can be set faster).
Concretely, the below method must be called.
- setup_basic_filtering (for nova-basic, etc.)
- prepare_instance_filter(for nova-instance-instance-xxx, etc.)
to_xml may have to be called since it defines PROJNET, PROJMASK.
but libvirt migrates those value through migrateToURI(),
so , no need to be called.
Don't use thread for this method since migration should
not be started when setting-up filtering rules operations
are not completed.
:param instance: nova.objects.instance.Instance object
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def filter_defer_apply_on(self):
"""Defer application of IPTables rules."""
pass
def filter_defer_apply_off(self):
"""Turn off deferral of IPTables rules and apply the rules now."""
pass
def unfilter_instance(self, instance, network_info):
"""Stop filtering instance."""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def set_admin_password(self, instance, new_pass):
"""Set the root password on the specified instance.
:param instance: nova.objects.instance.Instance
:param new_pass: the new password
"""
raise NotImplementedError()
def inject_file(self, instance, b64_path, b64_contents):
"""Writes a file on the specified instance.
The first parameter is an instance of nova.compute.service.Instance,
and so the instance is being specified as instance.name. The second
parameter is the base64-encoded path to which the file is to be
written on the instance; the third is the contents of the file, also
base64-encoded.
NOTE(russellb) This method is deprecated and will be removed once it
can be removed from nova.compute.manager.
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def change_instance_metadata(self, context, instance, diff):
"""Applies a diff to the instance metadata.
This is an optional driver method which is used to publish
changes to the instance's metadata to the hypervisor. If the
hypervisor has no means of publishing the instance metadata to
the instance, then this method should not be implemented.
:param context: security context
:param instance: nova.objects.instance.Instance
"""
pass
def inject_network_info(self, instance, nw_info):
"""inject network info for specified instance."""
# TODO(Vek): Need to pass context in for access to auth_token
pass
def poll_rebooting_instances(self, timeout, instances):
"""Perform a reboot on all given 'instances'.
        Reboots the given `instances` which have been in the rebooting state
        longer than `timeout` seconds.
:param int timeout:
The timeout (in seconds) for considering rebooting instances
to be stuck.
:param list instances:
A list of nova.objects.instance.Instance objects that have been
in rebooting state longer than the configured timeout.
:return: None
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def host_power_action(self, action):
"""Reboots, shuts down or powers up the host.
:param str action:
The action the host should perform. The valid actions are:
""startup", "shutdown" and "reboot".
:return: The result of the power action
:rtype: : str
"""
raise NotImplementedError()
def host_maintenance_mode(self, host, mode):
"""Start/Stop host maintenance window.
On start, it triggers the migration of all instances to other hosts.
Consider the combination with :func:`set_host_enabled`.
:param str host:
The name of the host whose maintenance mode should be changed.
:param bool mode:
If `True`, go into maintenance mode. If `False`, leave the
maintenance mode.
:return: "on_maintenance" if switched to maintenance mode or
"off_maintenance" if maintenance mode got left.
:rtype: str
"""
raise NotImplementedError()
def set_host_enabled(self, enabled):
"""Sets the ability of this host to accept new instances.
:param bool enabled:
If this is `True`, the host will accept new instances. If it is
`False`, the host won't accept new instances.
:return: If the host can accept further instances, return "enabled",
if further instances shouldn't be scheduled to this host,
return "disabled".
:rtype: str
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def get_host_uptime(self):
"""Returns the result of calling the Linux command `uptime` on this
host.
:return: A text which contains the uptime of this host since the
last boot.
:rtype: str
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def plug_vifs(self, instance, network_info):
"""Plug virtual interfaces (VIFs) into the given `instance` at
instance boot time.
The counter action is :func:`unplug_vifs`.
:param nova.objects.instance.Instance instance:
The instance which gets VIFs plugged.
:param nova.network.model.NetworkInfo network_info:
The object which contains information about the VIFs to plug.
:return: None
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def unplug_vifs(self, instance, network_info):
# NOTE(markus_z): 2015-08-18
# The compute manager doesn't use this interface, which seems odd
# since the manager should be the controlling thing here.
"""Unplug virtual interfaces (VIFs) from networks.
The counter action is :func:`plug_vifs`.
:param nova.objects.instance.Instance instance:
The instance which gets VIFs unplugged.
:param nova.network.model.NetworkInfo network_info:
The object which contains information about the VIFs to unplug.
:return: None
"""
raise NotImplementedError()
def get_host_cpu_stats(self):
"""Get the currently known host CPU stats.
:returns: a dict containing the CPU stat info, eg:
| {'kernel': kern,
| 'idle': idle,
| 'user': user,
| 'iowait': wait,
| 'frequency': freq},
where kern and user indicate the cumulative CPU time
(nanoseconds) spent by kernel and user processes
respectively, idle indicates the cumulative idle CPU time
(nanoseconds), wait indicates the cumulative I/O wait CPU
time (nanoseconds), since the host is booting up; freq
indicates the current CPU frequency (MHz). All values are
long integers.
"""
raise NotImplementedError()
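    # Illustrative sketch (not part of the Nova API; values are hypothetical):
    # a concrete driver's get_host_cpu_stats() could return, for example,
    #     {'kernel': 5646360000000, 'idle': 1592705190000000,
    #      'user': 26728850000000, 'iowait': 6121490000000, 'frequency': 2600}
    # with the cumulative times in nanoseconds and the frequency in MHz.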
def block_stats(self, instance, disk_id):
"""Return performance counters associated with the given disk_id on the
given instance. These are returned as [rd_req, rd_bytes, wr_req,
wr_bytes, errs], where rd indicates read, wr indicates write, req is
the total number of I/O requests made, bytes is the total number of
bytes transferred, and errs is the number of requests held up due to a
full pipeline.
All counters are long integers.
This method is optional. On some platforms (e.g. XenAPI) performance
statistics can be retrieved directly in aggregate form, without Nova
having to do the aggregation. On those platforms, this method is
unused.
Note that this function takes an instance ID.
"""
raise NotImplementedError()
def deallocate_networks_on_reschedule(self, instance):
"""Does the driver want networks deallocated on reschedule?"""
return False
def macs_for_instance(self, instance):
"""What MAC addresses must this instance have?
Some hypervisors (such as bare metal) cannot do freeform virtualization
of MAC addresses. This method allows drivers to return a set of MAC
addresses that the instance is to have. allocate_for_instance will take
this into consideration when provisioning networking for the instance.
Mapping of MAC addresses to actual networks (or permitting them to be
freeform) is up to the network implementation layer. For instance,
with openflow switches, fixed MAC addresses can still be virtualized
onto any L2 domain, with arbitrary VLANs etc, but regular switches
require pre-configured MAC->network mappings that will match the
actual configuration.
Most hypervisors can use the default implementation which returns None.
Hypervisors with MAC limits should return a set of MAC addresses, which
will be supplied to the allocate_for_instance call by the compute
manager, and it is up to that call to ensure that all assigned network
details are compatible with the set of MAC addresses.
This is called during spawn_instance by the compute manager.
:return: None, or a set of MAC ids (e.g. set(['12:34:56:78:90:ab'])).
None means 'no constraints', a set means 'these and only these
MAC addresses'.
"""
return None
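    # Illustrative sketch (hypothetical subclass, not part of Nova itself): a
    # driver limited to pre-provisioned NICs could override this as
    #     def macs_for_instance(self, instance):
    #         return set(['12:34:56:78:90:ab'])
    # so allocate_for_instance only assigns network details with those MACs.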
def dhcp_options_for_instance(self, instance):
"""Get DHCP options for this instance.
Some hypervisors (such as bare metal) require that instances boot from
the network, and manage their own TFTP service. This requires passing
the appropriate options out to the DHCP service. Most hypervisors can
use the default implementation which returns None.
This is called during spawn_instance by the compute manager.
Note that the format of the return value is specific to the Neutron
client API.
:return: None, or a set of DHCP options, eg:
| [{'opt_name': 'bootfile-name',
| 'opt_value': '/tftpboot/path/to/config'},
| {'opt_name': 'server-ip-address',
| 'opt_value': '1.2.3.4'},
| {'opt_name': 'tftp-server',
| 'opt_value': '1.2.3.4'}
| ]
"""
return None
def manage_image_cache(self, context, all_instances):
"""Manage the driver's local image cache.
Some drivers chose to cache images for instances on disk. This method
is an opportunity to do management of that cache which isn't directly
related to other calls into the driver. The prime example is to clean
the cache and remove images which are no longer of interest.
:param all_instances: nova.objects.instance.InstanceList
"""
pass
def add_to_aggregate(self, context, aggregate, host, **kwargs):
"""Add a compute host to an aggregate.
The counter action to this is :func:`remove_from_aggregate`
:param nova.context.RequestContext context:
The security context.
:param nova.objects.aggregate.Aggregate aggregate:
The aggregate which should add the given `host`
:param str host:
The name of the host to add to the given `aggregate`.
:param dict kwargs:
A free-form thingy...
:return: None
"""
# NOTE(jogo) Currently only used for XenAPI-Pool
raise NotImplementedError()
def remove_from_aggregate(self, context, aggregate, host, **kwargs):
"""Remove a compute host from an aggregate.
The counter action to this is :func:`add_to_aggregate`
:param nova.context.RequestContext context:
The security context.
:param nova.objects.aggregate.Aggregate aggregate:
The aggregate which should remove the given `host`
:param str host:
The name of the host to remove from the given `aggregate`.
:param dict kwargs:
A free-form thingy...
:return: None
"""
raise NotImplementedError()
def undo_aggregate_operation(self, context, op, aggregate,
host, set_error=True):
"""Undo for Resource Pools."""
raise NotImplementedError()
def get_volume_connector(self, instance):
"""Get connector information for the instance for attaching to volumes.
Connector information is a dictionary representing the ip of the
machine that will be making the connection, the name of the iscsi
initiator and the hostname of the machine as follows::
{
'ip': ip,
'initiator': initiator,
'host': hostname
}
"""
raise NotImplementedError()
def get_available_nodes(self, refresh=False):
"""Returns nodenames of all nodes managed by the compute service.
This method is for multi compute-nodes support. If a driver supports
multi compute-nodes, this method returns a list of nodenames managed
by the service. Otherwise, this method should return
[hypervisor_hostname].
"""
raise NotImplementedError()
def node_is_available(self, nodename):
"""Return whether this compute service manages a particular node."""
if nodename in self.get_available_nodes():
return True
# Refresh and check again.
return nodename in self.get_available_nodes(refresh=True)
def get_per_instance_usage(self):
"""Get information about instance resource usage.
:returns: dict of nova uuid => dict of usage info
"""
return {}
def instance_on_disk(self, instance):
"""Checks access of instance files on the host.
:param instance: nova.objects.instance.Instance to lookup
        Returns True if the files of the instance with the supplied ID are
        accessible on the host, False otherwise.
.. note::
Used in rebuild for HA implementation and required for validation
of access to instance shared disk files
"""
return False
def register_event_listener(self, callback):
"""Register a callback to receive events.
Register a callback to receive asynchronous event
notifications from hypervisors. The callback will
be invoked with a single parameter, which will be
an instance of the nova.virt.event.Event class.
"""
self._compute_event_callback = callback
def emit_event(self, event):
"""Dispatches an event to the compute manager.
Invokes the event callback registered by the
compute manager to dispatch the event. This
must only be invoked from a green thread.
"""
if not self._compute_event_callback:
LOG.debug("Discarding event %s", str(event))
return
if not isinstance(event, virtevent.Event):
raise ValueError(
_("Event must be an instance of nova.virt.event.Event"))
try:
LOG.debug("Emitting event %s", str(event))
self._compute_event_callback(event)
except Exception as ex:
LOG.error(_LE("Exception dispatching event %(event)s: %(ex)s"),
{'event': event, 'ex': ex})
def delete_instance_files(self, instance):
"""Delete any lingering instance files for an instance.
:param instance: nova.objects.instance.Instance
:returns: True if the instance was deleted from disk, False otherwise.
"""
return True
@property
def need_legacy_block_device_info(self):
"""Tell the caller if the driver requires legacy block device info.
Tell the caller whether we expect the legacy format of block
device info to be passed in to methods that expect it.
"""
return True
def volume_snapshot_create(self, context, instance, volume_id,
create_info):
"""Snapshots volumes attached to a specified instance.
The counter action to this is :func:`volume_snapshot_delete`
:param nova.context.RequestContext context:
The security context.
:param nova.objects.instance.Instance instance:
The instance that has the volume attached
:param uuid volume_id:
Volume to be snapshotted
:param create_info: The data needed for nova to be able to attach
to the volume. This is the same data format returned by
Cinder's initialize_connection() API call. In the case of
doing a snapshot, it is the image file Cinder expects to be
used as the active disk after the snapshot operation has
completed. There may be other data included as well that is
needed for creating the snapshot.
"""
raise NotImplementedError()
def volume_snapshot_delete(self, context, instance, volume_id,
snapshot_id, delete_info):
"""Deletes a snapshot of a volume attached to a specified instance.
The counter action to this is :func:`volume_snapshot_create`
:param nova.context.RequestContext context:
The security context.
:param nova.objects.instance.Instance instance:
The instance that has the volume attached.
:param uuid volume_id:
Attached volume associated with the snapshot
:param uuid snapshot_id:
The snapshot to delete.
:param dict delete_info:
Volume backend technology specific data needed to be able to
complete the snapshot. For example, in the case of qcow2 backed
snapshots, this would include the file being merged, and the file
being merged into (if appropriate).
:return: None
"""
raise NotImplementedError()
def default_root_device_name(self, instance, image_meta, root_bdm):
"""Provide a default root device name for the driver.
:param nova.objects.instance.Instance instance:
The instance to get the root device for.
:param nova.objects.ImageMeta image_meta:
The metadata of the image of the instance.
:param nova.objects.BlockDeviceMapping root_bdm:
The description of the root device.
"""
raise NotImplementedError()
def default_device_names_for_instance(self, instance, root_device_name,
*block_device_lists):
"""Default the missing device names in the block device mapping."""
raise NotImplementedError()
def get_device_name_for_instance(self, instance,
bdms, block_device_obj):
"""Get the next device name based on the block device mapping.
:param instance: nova.objects.instance.Instance that volume is
requesting a device name
:param bdms: a nova.objects.BlockDeviceMappingList for the instance
:param block_device_obj: A nova.objects.BlockDeviceMapping instance
with all info about the requested block
device. device_name does not need to be set,
and should be decided by the driver
implementation if not set.
:returns: The chosen device name.
"""
raise NotImplementedError()
def is_supported_fs_format(self, fs_type):
"""Check whether the file format is supported by this driver
        :param fs_type: the file system type to be checked,
               the valid values are defined in the disk API module.
"""
# NOTE(jichenjc): Return False here so that every hypervisor
# need to define their supported file system
# type and implement this function at their
# virt layer.
return False
def quiesce(self, context, instance, image_meta):
"""Quiesce the specified instance to prepare for snapshots.
If the specified instance doesn't support quiescing,
        InstanceQuiesceNotSupported is raised. When quiescing fails due to
        other errors (e.g. agent timeout), NovaException is raised.
:param context: request context
:param instance: nova.objects.instance.Instance to be quiesced
:param nova.objects.ImageMeta image_meta:
The metadata of the image of the instance.
"""
raise NotImplementedError()
def unquiesce(self, context, instance, image_meta):
"""Unquiesce the specified instance after snapshots.
If the specified instance doesn't support quiescing,
        InstanceQuiesceNotSupported is raised. When unquiescing fails due to
        other errors (e.g. agent timeout), NovaException is raised.
:param context: request context
:param instance: nova.objects.instance.Instance to be unquiesced
:param nova.objects.ImageMeta image_meta:
The metadata of the image of the instance.
"""
raise NotImplementedError()
def network_binding_host_id(self, context, instance):
"""Get host ID to associate with network ports.
:param context: request context
:param instance: nova.objects.instance.Instance that the network
ports will be associated with
:returns: a string representing the host ID
"""
return instance.get('host')
def load_compute_driver(virtapi, compute_driver=None):
"""Load a compute driver module.
Load the compute driver module specified by the compute_driver
configuration option or, if supplied, the driver name supplied as an
argument.
    Compute driver constructors take a VirtAPI object as their first argument,
    and this must be supplied.
:param virtapi: a VirtAPI instance
:param compute_driver: a compute driver name to override the config opt
:returns: a ComputeDriver instance
"""
if not compute_driver:
compute_driver = CONF.compute_driver
if not compute_driver:
LOG.error(_LE("Compute driver option required, but not specified"))
sys.exit(1)
LOG.info(_LI("Loading compute driver '%s'"), compute_driver)
try:
driver = importutils.import_object(
'nova.virt.%s' % compute_driver,
virtapi)
return utils.check_isinstance(driver, ComputeDriver)
except ImportError:
LOG.exception(_LE("Unable to load the virtualization driver"))
sys.exit(1)
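# Illustrative usage (hypothetical call site): the compute manager typically
# resolves its driver at service startup roughly as
#     driver = load_compute_driver(virtapi, compute_driver='libvirt.LibvirtDriver')
# where 'libvirt.LibvirtDriver' stands in for whatever CONF.compute_driver names.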
def is_xenapi():
return CONF.compute_driver == 'xenapi.XenAPIDriver'
| [((1009, 1036), 'oslo_log.log.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1026, 1036), True, 'from oslo_log import log as logging\n'), ((2416, 2468), 'nova.virt.block_device.convert_swap', 'virt_block_device.convert_swap', (['block_device_mapping'], {}), '(block_device_mapping)\n', (2446, 2468), True, 'from nova.virt import block_device as virt_block_device\n'), ((2501, 2538), 'nova.virt.block_device.get_swap', 'virt_block_device.get_swap', (['swap_list'], {}), '(swap_list)\n', (2527, 2538), True, 'from nova.virt import block_device as virt_block_device\n'), ((1264, 1296), 'oslo_utils.importutils.import_class', 'importutils.import_class', (['driver'], {}), '(driver)\n', (1288, 1296), False, 'from oslo_utils import importutils\n'), ((2216, 2274), 'nova.virt.block_device.convert_ephemerals', 'virt_block_device.convert_ephemerals', (['block_device_mapping'], {}), '(block_device_mapping)\n', (2252, 2274), True, 'from nova.virt import block_device as virt_block_device\n'), ((2333, 2393), 'nova.virt.block_device.convert_all_volumes', 'virt_block_device.convert_all_volumes', (['*block_device_mapping'], {}), '(*block_device_mapping)\n', (2370, 2393), True, 'from nova.virt import block_device as virt_block_device\n'), ((63339, 63350), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (63347, 63350), False, 'import sys\n'), ((63365, 63399), 'nova.i18n._LI', '_LI', (['"""Loading compute driver \'%s\'"""'], {}), '("Loading compute driver \'%s\'")\n', (63368, 63399), False, 'from nova.i18n import _, _LE, _LI\n'), ((63443, 63510), 'oslo_utils.importutils.import_object', 'importutils.import_object', (["('nova.virt.%s' % compute_driver)", 'virtapi'], {}), "('nova.virt.%s' % compute_driver, virtapi)\n", (63468, 63510), False, 'from oslo_utils import importutils\n'), ((63551, 63596), 'nova.utils.check_isinstance', 'utils.check_isinstance', (['driver', 'ComputeDriver'], {}), '(driver, ComputeDriver)\n', (63573, 63596), False, 'from nova import utils\n'), ((34393, 34469), 'nova.i18n._', '_', (['"""Hypervisor driver does not support post_live_migration_at_source method"""'], {}), "('Hypervisor driver does not support post_live_migration_at_source method')\n", (34394, 34469), False, 'from nova.i18n import _, _LE, _LI\n'), ((63273, 63329), 'nova.i18n._LE', '_LE', (['"""Compute driver option required, but not specified"""'], {}), "('Compute driver option required, but not specified')\n", (63276, 63329), False, 'from nova.i18n import _, _LE, _LI\n'), ((63700, 63711), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (63708, 63711), False, 'import sys\n'), ((55973, 56028), 'nova.i18n._', '_', (['"""Event must be an instance of nova.virt.event.Event"""'], {}), "('Event must be an instance of nova.virt.event.Event')\n", (55974, 56028), False, 'from nova.i18n import _, _LE, _LI\n'), ((63643, 63690), 'nova.i18n._LE', '_LE', (['"""Unable to load the virtualization driver"""'], {}), "('Unable to load the virtualization driver')\n", (63646, 63690), False, 'from nova.i18n import _, _LE, _LI\n'), ((56201, 56253), 'nova.i18n._LE', '_LE', (['"""Exception dispatching event %(event)s: %(ex)s"""'], {}), "('Exception dispatching event %(event)s: %(ex)s')\n", (56204, 56253), False, 'from nova.i18n import _, _LE, _LI\n')] |
P1ayerOne/src | otp/chat/ChatInputNormal.py | 3a4343e29f844fe95da7d51aaee7fb680d02bf72 | from direct.showbase import DirectObject
from otp.otpbase import OTPGlobals
import sys
from direct.gui.DirectGui import *
from pandac.PandaModules import *
from otp.otpbase import OTPLocalizer
class ChatInputNormal(DirectObject.DirectObject):
ExecNamespace = None
def __init__(self, chatMgr):
self.chatMgr = chatMgr
self.normalPos = Vec3(-1.083, 0, 0.804)
self.whisperPos = Vec3(0.0, 0, 0.71)
self.whisperAvatarName = None
self.whisperAvatarId = None
self.toPlayer = 0
wantHistory = 0
if __dev__:
wantHistory = 1
self.wantHistory = base.config.GetBool('want-chat-history', wantHistory)
self.history = ['']
self.historySize = base.config.GetInt('chat-history-size', 10)
self.historyIndex = 0
return
def typeCallback(self, extraArgs):
messenger.send('enterNormalChat')
def delete(self):
self.ignore('arrow_up-up')
self.ignore('arrow_down-up')
self.chatFrame.destroy()
del self.chatFrame
del self.chatButton
del self.cancelButton
del self.chatEntry
del self.whisperLabel
del self.chatMgr
def activateByData(self, whisperAvatarId = None, toPlayer = 0):
self.toPlayer = toPlayer
self.whisperAvatarId = whisperAvatarId
self.whisperAvatarName = base.talkAssistant.findName(self.whisperAvatarId, self.toPlayer)
if self.whisperAvatarId:
self.chatFrame.setPos(self.whisperPos)
self.whisperLabel['text'] = OTPLocalizer.ChatInputWhisperLabel % self.whisperAvatarName
self.whisperLabel.show()
else:
self.chatFrame.setPos(self.normalPos)
self.whisperLabel.hide()
self.chatEntry['focus'] = 1
self.chatFrame.show()
if self.wantHistory:
self.accept('arrow_up-up', self.getPrevHistory)
self.accept('arrow_down-up', self.getNextHistory)
def deactivate(self):
self.chatEntry.set('')
self.chatEntry['focus'] = 0
self.chatFrame.hide()
self.whisperLabel.hide()
base.win.closeIme()
self.ignore('arrow_up-up')
self.ignore('arrow_down-up')
def checkForOverRide(self):
return False
def sendChat(self, text):
if self.checkForOverRide():
self.chatEntry.enterText('')
return
self.deactivate()
self.chatMgr.fsm.request('mainMenu')
if text:
if self.toPlayer:
if self.whisperAvatarId:
self.whisperAvatarName = None
self.whisperAvatarId = None
self.toPlayer = 0
elif self.whisperAvatarId:
self.chatMgr.sendWhisperString(text, self.whisperAvatarId)
self.whisperAvatarName = None
self.whisperAvatarId = None
else:
if self.chatMgr.execChat:
if text[0] == '>':
text = self.__execMessage(text[1:])
base.localAvatar.setChatAbsolute(text, CFSpeech | CFTimeout)
return
base.talkAssistant.sendOpenTalk(text)
if self.wantHistory:
self.addToHistory(text)
return
def chatOverflow(self, overflowText):
self.sendChat(self.chatEntry.get())
def __execMessage(self, message):
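        # Developer chat hook: text entered after a leading '>' is evaluated (or
        # exec'd) inside ChatInputNormal.ExecNamespace, and the resulting value or
        # exception text is echoed back as the avatar's chat output.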
if not ChatInputNormal.ExecNamespace:
ChatInputNormal.ExecNamespace = {}
exec('from pandac.PandaModules import *', globals(), self.ExecNamespace)
self.importExecNamespace()
try:
if not isClient():
print('EXECWARNING ChatInputNormal eval: %s' % message)
printStack()
return str(eval(message, globals(), ChatInputNormal.ExecNamespace))
except SyntaxError:
try:
if not isClient():
print('EXECWARNING ChatInputNormal exec: %s' % message)
printStack()
exec(message, globals(), ChatInputNormal.ExecNamespace)
return 'ok'
except:
exception = sys.exc_info()[0]
extraInfo = sys.exc_info()[1]
if extraInfo:
return str(extraInfo)
else:
return str(exception)
except:
exception = sys.exc_info()[0]
extraInfo = sys.exc_info()[1]
if extraInfo:
return str(extraInfo)
else:
return str(exception)
def cancelButtonPressed(self):
self.chatEntry.set('')
self.chatMgr.fsm.request('mainMenu')
def chatButtonPressed(self):
self.sendChat(self.chatEntry.get())
def importExecNamespace(self):
pass
def addToHistory(self, text):
self.history = [text] + self.history[:self.historySize - 1]
self.historyIndex = 0
def getPrevHistory(self):
self.chatEntry.set(self.history[self.historyIndex])
self.historyIndex += 1
self.historyIndex %= len(self.history)
def getNextHistory(self):
self.chatEntry.set(self.history[self.historyIndex])
self.historyIndex -= 1
self.historyIndex %= len(self.history)
def setPos(self, posX, posY = None, posZ = None):
if posX and posY and posZ:
self.chatFrame.setPos(posX, posY, posZ)
else:
self.chatFrame.setPos(posX)
| [((4495, 4509), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (4507, 4509), False, 'import sys\n'), ((4537, 4551), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (4549, 4551), False, 'import sys\n'), ((4254, 4268), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (4266, 4268), False, 'import sys\n'), ((4300, 4314), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (4312, 4314), False, 'import sys\n')] |
stevaras2/bert | train_text_summarizer.py | 1efaa300eb91dea85c40de5e1586e8d2c94b89bb | import argparse
import json
import numpy as np
import pandas as pd
import os
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report,f1_score
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras import backend as K
from keras.utils.vis_utils import plot_model
from sklearn.externals import joblib
import time
def f1(y_true, y_pred):
def recall(y_true, y_pred):
"""Recall metric.
Only computes a batch-wise average of recall.
Computes the recall, a metric for multi-label classification of
how many relevant items are selected.
"""
true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
possible_positives = K.sum(K.round(K.clip(y_true, 0, 1)))
recall = true_positives / (possible_positives + K.epsilon())
return recall
def precision(y_true, y_pred):
"""Precision metric.
Only computes a batch-wise average of precision.
Computes the precision, a metric for multi-label classification of
how many selected items are relevant.
"""
true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
predicted_positives = K.sum(K.round(K.clip(y_pred, 0, 1)))
precision = true_positives / (predicted_positives + K.epsilon())
return precision
precision = precision(y_true, y_pred)
recall = recall(y_true, y_pred)
return 2*((precision*recall)/(precision+recall+K.epsilon()))
def get_embeddings(sentences_list,layer_json):
'''
    :param sentences_list: the path of the sentences.txt
    :param layer_json: the path of the json file that contains the embeddings of the sentences
    :return: Dictionary with each sentence of the sentences_list as key and its embedding as value
'''
    sentences = dict()  # dict with key the index of each line of the sentences_list.txt and as value the sentence
    embeddings = dict()  # dict with key the index of each sentence and as value its embedding
    sentence_emb = dict()  # key: sentence, value: its embedding
with open(sentences_list,'r') as file:
for index,line in enumerate(file):
sentences[index] = line.strip()
with open(layer_json, 'r',encoding='utf-8') as f:
for line in f:
embeddings[json.loads(line)['linex_index']] = np.asarray(json.loads(line)['features'])
for key,value in sentences.items():
sentence_emb[value] = embeddings[key]
return sentence_emb
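# Note: each line of layer_json is expected to be a standalone JSON object with a
# 'linex_index' field (the sentence's line number in sentences_list) and a
# 'features' field holding that sentence's embedding vector, matching the keys
# read in get_embeddings() above.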
def train_classifier(sentences_list,layer_json,dataset_csv,filename):
'''
    :param sentences_list: the path of the sentences.txt
    :param layer_json: the path of the json file that contains the embeddings of the sentences
    :param dataset_csv: the path of the dataset
    :param filename: The path of the pickle file in which the model will be stored
:return:
'''
dataset = pd.read_csv(dataset_csv)
bert_dict = get_embeddings(sentences_list,layer_json)
length = list()
sentence_emb = list()
previous_emb = list()
next_list = list()
section_list = list()
label = list()
errors = 0
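    # Each dataset row appears to hold, by position: sentence, previous sentence,
    # next sentence, section text, sentence length, and label.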
for row in dataset.iterrows():
sentence = row[1][0].strip()
previous = row[1][1].strip()
nexts = row[1][2].strip()
section = row[1][3].strip()
if sentence in bert_dict:
sentence_emb.append(bert_dict[sentence])
else:
sentence_emb.append(np.zeros(768))
print(sentence)
errors += 1
if previous in bert_dict:
previous_emb.append(bert_dict[previous])
else:
previous_emb.append(np.zeros(768))
if nexts in bert_dict:
next_list.append(bert_dict[nexts])
else:
next_list.append(np.zeros(768))
if section in bert_dict:
section_list.append(bert_dict[section])
else:
section_list.append(np.zeros(768))
length.append(row[1][4])
label.append(row[1][5])
sentence_emb = np.asarray(sentence_emb)
print(sentence_emb.shape)
next_emb = np.asarray(next_list)
print(next_emb.shape)
previous_emb = np.asarray(previous_emb)
print(previous_emb.shape)
section_emb = np.asarray(section_list)
print(sentence_emb.shape)
length = np.asarray(length)
print(length.shape)
label = np.asarray(label)
print(errors)
features = np.concatenate([sentence_emb, previous_emb, next_emb,section_emb], axis=1)
features = np.column_stack([features, length]) # np.append(features,length,axis=1)
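    # Resulting feature vector per sentence: the four 768-dim BERT embeddings
    # (sentence, previous, next, section) concatenated with the scalar length,
    # i.e. 4 * 768 + 1 = 3073 features.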
print(features.shape)
X_train, X_val, y_train, y_val = train_test_split(features, label, test_size=0.33, random_state=42)
log = LogisticRegression(random_state=0, solver='newton-cg', max_iter=1000, C=0.1)
log.fit(X_train, y_train)
#save the model
_ = joblib.dump(log, filename, compress=9)
predictions = log.predict(X_val)
print("###########################################")
print("Results using embeddings from the",layer_json,"file")
print(classification_report(y_val, predictions))
print("F1 score using Logistic Regression:",f1_score(y_val, predictions))
print("###########################################")
#train a DNN
f1_results = list()
for i in range(3):
model = Sequential()
model.add(Dense(64, activation='relu', trainable=True))
model.add(Dense(128, activation='relu', trainable=True))
model.add(Dropout(0.30))
model.add(Dense(64, activation='relu', trainable=True))
model.add(Dropout(0.25))
model.add(Dense(64, activation='relu', trainable=True))
model.add(Dropout(0.35))
model.add(Dense(1, activation='sigmoid'))
# compile network
model.compile(loss='binary_crossentropy', optimizer='sgd', metrics=[f1])
# fit network
model.fit(X_train, y_train, epochs=100, batch_size=64)
loss, f_1 = model.evaluate(X_val, y_val, verbose=1)
print('\nTest F1: %f' % (f_1 * 100))
f1_results.append(f_1)
model = None
print("###########################################")
print("Results using embeddings from the", layer_json, "file")
# evaluate
print(np.mean(f1_results))
print("###########################################")
def parameter_tuning_LR(sentences_list,layer_json,dataset_csv):
'''
    :param sentences_list: the path of the sentences.txt
:param layer_json: the path of the json file that contains the embeddings of the sentences
:param dataset_csv: the path of the dataset
:return:
'''
dataset = pd.read_csv(dataset_csv)
bert_dict = get_embeddings(sentences_list,layer_json)
length = list()
sentence_emb = list()
previous_emb = list()
next_list = list()
section_list = list()
label = list()
errors = 0
for row in dataset.iterrows():
sentence = row[1][0].strip()
previous = row[1][1].strip()
nexts = row[1][2].strip()
section = row[1][3].strip()
if sentence in bert_dict:
sentence_emb.append(bert_dict[sentence])
else:
sentence_emb.append(np.zeros(768))
print(sentence)
errors += 1
if previous in bert_dict:
previous_emb.append(bert_dict[previous])
else:
previous_emb.append(np.zeros(768))
if nexts in bert_dict:
next_list.append(bert_dict[nexts])
else:
next_list.append(np.zeros(768))
if section in bert_dict:
section_list.append(bert_dict[section])
else:
section_list.append(np.zeros(768))
length.append(row[1][4])
label.append(row[1][5])
sentence_emb = np.asarray(sentence_emb)
print(sentence_emb.shape)
next_emb = np.asarray(next_list)
print(next_emb.shape)
previous_emb = np.asarray(previous_emb)
print(previous_emb.shape)
section_emb = np.asarray(section_list)
print(sentence_emb.shape)
length = np.asarray(length)
print(length.shape)
label = np.asarray(label)
print(errors)
features = np.concatenate([sentence_emb, previous_emb, next_emb,section_emb], axis=1)
features = np.column_stack([features, length])
print(features.shape)
X_train, X_val, y_train, y_val = train_test_split(features, label, test_size=0.33, random_state=42)
C = [0.1,1,2,5,10]
solver = ['newton-cg','saga','sag']
best_params = dict()
best_score = 0.0
for c in C:
for s in solver:
start = time.time()
log = LogisticRegression(random_state=0, solver=s, max_iter=1000, C=c)
log.fit(X_train, y_train)
predictions = log.predict(X_val)
print("###########################################")
print("LR with C =",c,'and solver = ',s)
print("Results using embeddings from the", layer_json, "file")
print(classification_report(y_val, predictions))
f1 = f1_score(y_val, predictions)
if f1 > best_score:
best_score = f1
best_params['c'] = c
best_params['solver'] = s
print("F1 score using Logistic Regression:",f1)
print("###########################################")
end = time.time()
running_time = end - start
print("Running time:"+str(running_time))
def visualize_DNN(file_to_save):
'''
    Save the DNN architecture to a png file. Better to use the Visulize_DNN.ipynd notebook instead.
    :param file_to_save: the png file to which the architecture of the DNN will be saved.
:return: None
'''
model = Sequential()
model.add(Dense(64, activation='relu', trainable=True))
model.add(Dense(128, activation='relu', trainable=True))
model.add(Dropout(0.30))
model.add(Dense(64, activation='relu', trainable=True))
model.add(Dropout(0.25))
model.add(Dense(64, activation='relu', trainable=True))
model.add(Dropout(0.35))
model.add(Dense(1, activation='sigmoid'))
plot_model(model, to_file=file_to_save, show_shapes=True)
def save_model(sentences_list,layer_json,dataset_csv,pkl):
dataset = pd.read_csv(dataset_csv)
bert_dict = get_embeddings(sentences_list, layer_json)
length = list()
sentence_emb = list()
previous_emb = list()
next_list = list()
section_list = list()
label = list()
errors = 0
for row in dataset.iterrows():
sentence = row[1][0].strip()
previous = row[1][1].strip()
nexts = row[1][2].strip()
section = row[1][3].strip()
if sentence in bert_dict:
sentence_emb.append(bert_dict[sentence])
else:
sentence_emb.append(np.zeros(768))
print(sentence)
errors += 1
if previous in bert_dict:
previous_emb.append(bert_dict[previous])
else:
previous_emb.append(np.zeros(768))
if nexts in bert_dict:
next_list.append(bert_dict[nexts])
else:
next_list.append(np.zeros(768))
if section in bert_dict:
section_list.append(bert_dict[section])
else:
section_list.append(np.zeros(768))
length.append(row[1][4])
label.append(row[1][5])
sentence_emb = np.asarray(sentence_emb)
print(sentence_emb.shape)
next_emb = np.asarray(next_list)
print(next_emb.shape)
previous_emb = np.asarray(previous_emb)
print(previous_emb.shape)
section_emb = np.asarray(section_list)
print(sentence_emb.shape)
length = np.asarray(length)
print(length.shape)
label = np.asarray(label)
print(errors)
features = np.concatenate([sentence_emb, previous_emb, next_emb, section_emb], axis=1)
features = np.column_stack([features, length])
print(features.shape)
log = LogisticRegression(random_state=0, solver='saga', max_iter=1000, C=1)
log.fit(features, label)
_ = joblib.dump(log, pkl, compress=9)
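    # The persisted classifier can later be restored with joblib.load(pkl) for inference.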
if __name__ == '__main__':
#save_model('sentences_list.txt','Fudan_output_layer_-1.json','train_sentences1.csv','summarizer1.pkl')
ap = argparse.ArgumentParser()
ap.add_argument("-s", "--sentences", required=True, help="sentences list")
ap.add_argument("-o", "--output", required=True, help="output")
ap.add_argument("-ts", "--train set", required=True, help="path to train set")
ap.add_argument("-sp", "--summarizer path", required=True, help="path to save summarizer")
args = vars(ap.parse_args())
layer = train_classifier(args['sentences'], args['output'], args['train set'],args['summarizer path'])
#layer_1 = train_classifier('sentences_list.txt', 'new_output_layer_-1.json', 'train_sentences1.csv','fine_tune_BERT_sentence_classification1.pkl')
#layer_2 = train_classifier('sentences_list.txt','new_output_layer_-2.json','train_sentences1.csv','fine_tune_BERT_sentence_classification2.pkl')
#layer_3 = train_classifier('sentences_list.txt','new_output_layer_-3.json','train_sentences1.csv','fine_tune_BERT_sentence_classification3.pkl')
#layer_4 = train_classifier('sentences_list.txt','new_output_layer_-4.json','train_sentences1.csv','fine_tune_BERT_sentence_classification4.pkl')
#tuning = parameter_tuning_LR('sentences_list.txt','new_output_layer_-1.json','train_sentences1.csv')
#layer_1 = train_classifier('sentences_list.txt','output_layer_-1.json','train_sentences1.csv','fine_tune_BERT_sentence_classification.pkl')
#layer_2 = train_classifier('sentences_list.txt','output_layer_-2.json','train_sentences1.csv','fine_tune_BERT_sentence_classification.pkl')
#layer_3 = train_classifier('sentences_list.txt','output_layer_-3.json','train_sentences1.csv','fine_tune_BERT_sentence_classification.pkl')
#layer_4 = train_classifier('sentences_list.txt','output_layer_-4.json','train_sentences1.csv','fine_tune_BERT_sentence_classification.pkl')
| [((2973, 2997), 'pandas.read_csv', 'pd.read_csv', (['dataset_csv'], {}), '(dataset_csv)\n', (2984, 2997), True, 'import pandas as pd\n'), ((4112, 4136), 'numpy.asarray', 'np.asarray', (['sentence_emb'], {}), '(sentence_emb)\n', (4122, 4136), True, 'import numpy as np\n'), ((4182, 4203), 'numpy.asarray', 'np.asarray', (['next_list'], {}), '(next_list)\n', (4192, 4203), True, 'import numpy as np\n'), ((4249, 4273), 'numpy.asarray', 'np.asarray', (['previous_emb'], {}), '(previous_emb)\n', (4259, 4273), True, 'import numpy as np\n'), ((4322, 4346), 'numpy.asarray', 'np.asarray', (['section_list'], {}), '(section_list)\n', (4332, 4346), True, 'import numpy as np\n'), ((4390, 4408), 'numpy.asarray', 'np.asarray', (['length'], {}), '(length)\n', (4400, 4408), True, 'import numpy as np\n'), ((4445, 4462), 'numpy.asarray', 'np.asarray', (['label'], {}), '(label)\n', (4455, 4462), True, 'import numpy as np\n'), ((4496, 4571), 'numpy.concatenate', 'np.concatenate', (['[sentence_emb, previous_emb, next_emb, section_emb]'], {'axis': '(1)'}), '([sentence_emb, previous_emb, next_emb, section_emb], axis=1)\n', (4510, 4571), True, 'import numpy as np\n'), ((4586, 4621), 'numpy.column_stack', 'np.column_stack', (['[features, length]'], {}), '([features, length])\n', (4601, 4621), True, 'import numpy as np\n'), ((4723, 4789), 'sklearn.model_selection.train_test_split', 'train_test_split', (['features', 'label'], {'test_size': '(0.33)', 'random_state': '(42)'}), '(features, label, test_size=0.33, random_state=42)\n', (4739, 4789), False, 'from sklearn.model_selection import train_test_split\n'), ((4801, 4877), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {'random_state': '(0)', 'solver': '"""newton-cg"""', 'max_iter': '(1000)', 'C': '(0.1)'}), "(random_state=0, solver='newton-cg', max_iter=1000, C=0.1)\n", (4819, 4877), False, 'from sklearn.linear_model import LogisticRegression\n'), ((4937, 4975), 'sklearn.externals.joblib.dump', 'joblib.dump', (['log', 'filename'], {'compress': '(9)'}), '(log, filename, compress=9)\n', (4948, 4975), False, 'from sklearn.externals import joblib\n'), ((6715, 6739), 'pandas.read_csv', 'pd.read_csv', (['dataset_csv'], {}), '(dataset_csv)\n', (6726, 6739), True, 'import pandas as pd\n'), ((7854, 7878), 'numpy.asarray', 'np.asarray', (['sentence_emb'], {}), '(sentence_emb)\n', (7864, 7878), True, 'import numpy as np\n'), ((7924, 7945), 'numpy.asarray', 'np.asarray', (['next_list'], {}), '(next_list)\n', (7934, 7945), True, 'import numpy as np\n'), ((7991, 8015), 'numpy.asarray', 'np.asarray', (['previous_emb'], {}), '(previous_emb)\n', (8001, 8015), True, 'import numpy as np\n'), ((8064, 8088), 'numpy.asarray', 'np.asarray', (['section_list'], {}), '(section_list)\n', (8074, 8088), True, 'import numpy as np\n'), ((8132, 8150), 'numpy.asarray', 'np.asarray', (['length'], {}), '(length)\n', (8142, 8150), True, 'import numpy as np\n'), ((8187, 8204), 'numpy.asarray', 'np.asarray', (['label'], {}), '(label)\n', (8197, 8204), True, 'import numpy as np\n'), ((8238, 8313), 'numpy.concatenate', 'np.concatenate', (['[sentence_emb, previous_emb, next_emb, section_emb]'], {'axis': '(1)'}), '([sentence_emb, previous_emb, next_emb, section_emb], axis=1)\n', (8252, 8313), True, 'import numpy as np\n'), ((8328, 8363), 'numpy.column_stack', 'np.column_stack', (['[features, length]'], {}), '([features, length])\n', (8343, 8363), True, 'import numpy as np\n'), ((8428, 8494), 'sklearn.model_selection.train_test_split', 'train_test_split', (['features', 'label'], 
{'test_size': '(0.33)', 'random_state': '(42)'}), '(features, label, test_size=0.33, random_state=42)\n', (8444, 8494), False, 'from sklearn.model_selection import train_test_split\n'), ((9783, 9795), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (9793, 9795), False, 'from keras.models import Sequential\n'), ((10175, 10232), 'keras.utils.vis_utils.plot_model', 'plot_model', (['model'], {'to_file': 'file_to_save', 'show_shapes': '(True)'}), '(model, to_file=file_to_save, show_shapes=True)\n', (10185, 10232), False, 'from keras.utils.vis_utils import plot_model\n'), ((10309, 10333), 'pandas.read_csv', 'pd.read_csv', (['dataset_csv'], {}), '(dataset_csv)\n', (10320, 10333), True, 'import pandas as pd\n'), ((11448, 11472), 'numpy.asarray', 'np.asarray', (['sentence_emb'], {}), '(sentence_emb)\n', (11458, 11472), True, 'import numpy as np\n'), ((11518, 11539), 'numpy.asarray', 'np.asarray', (['next_list'], {}), '(next_list)\n', (11528, 11539), True, 'import numpy as np\n'), ((11585, 11609), 'numpy.asarray', 'np.asarray', (['previous_emb'], {}), '(previous_emb)\n', (11595, 11609), True, 'import numpy as np\n'), ((11658, 11682), 'numpy.asarray', 'np.asarray', (['section_list'], {}), '(section_list)\n', (11668, 11682), True, 'import numpy as np\n'), ((11726, 11744), 'numpy.asarray', 'np.asarray', (['length'], {}), '(length)\n', (11736, 11744), True, 'import numpy as np\n'), ((11781, 11798), 'numpy.asarray', 'np.asarray', (['label'], {}), '(label)\n', (11791, 11798), True, 'import numpy as np\n'), ((11832, 11907), 'numpy.concatenate', 'np.concatenate', (['[sentence_emb, previous_emb, next_emb, section_emb]'], {'axis': '(1)'}), '([sentence_emb, previous_emb, next_emb, section_emb], axis=1)\n', (11846, 11907), True, 'import numpy as np\n'), ((11923, 11958), 'numpy.column_stack', 'np.column_stack', (['[features, length]'], {}), '([features, length])\n', (11938, 11958), True, 'import numpy as np\n'), ((11996, 12065), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {'random_state': '(0)', 'solver': '"""saga"""', 'max_iter': '(1000)', 'C': '(1)'}), "(random_state=0, solver='saga', max_iter=1000, C=1)\n", (12014, 12065), False, 'from sklearn.linear_model import LogisticRegression\n'), ((12104, 12137), 'sklearn.externals.joblib.dump', 'joblib.dump', (['log', 'pkl'], {'compress': '(9)'}), '(log, pkl, compress=9)\n', (12115, 12137), False, 'from sklearn.externals import joblib\n'), ((12286, 12311), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (12309, 12311), False, 'import argparse\n'), ((5146, 5187), 'sklearn.metrics.classification_report', 'classification_report', (['y_val', 'predictions'], {}), '(y_val, predictions)\n', (5167, 5187), False, 'from sklearn.metrics import classification_report, f1_score\n'), ((5237, 5265), 'sklearn.metrics.f1_score', 'f1_score', (['y_val', 'predictions'], {}), '(y_val, predictions)\n', (5245, 5265), False, 'from sklearn.metrics import classification_report, f1_score\n'), ((5406, 5418), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (5416, 5418), False, 'from keras.models import Sequential\n'), ((6327, 6346), 'numpy.mean', 'np.mean', (['f1_results'], {}), '(f1_results)\n', (6334, 6346), True, 'import numpy as np\n'), ((9810, 9854), 'keras.layers.Dense', 'Dense', (['(64)'], {'activation': '"""relu"""', 'trainable': '(True)'}), "(64, activation='relu', trainable=True)\n", (9815, 9854), False, 'from keras.layers import Dense, Dropout\n'), ((9870, 9915), 'keras.layers.Dense', 'Dense', (['(128)'], 
{'activation': '"""relu"""', 'trainable': '(True)'}), "(128, activation='relu', trainable=True)\n", (9875, 9915), False, 'from keras.layers import Dense, Dropout\n'), ((9931, 9943), 'keras.layers.Dropout', 'Dropout', (['(0.3)'], {}), '(0.3)\n', (9938, 9943), False, 'from keras.layers import Dense, Dropout\n'), ((9960, 10004), 'keras.layers.Dense', 'Dense', (['(64)'], {'activation': '"""relu"""', 'trainable': '(True)'}), "(64, activation='relu', trainable=True)\n", (9965, 10004), False, 'from keras.layers import Dense, Dropout\n'), ((10020, 10033), 'keras.layers.Dropout', 'Dropout', (['(0.25)'], {}), '(0.25)\n', (10027, 10033), False, 'from keras.layers import Dense, Dropout\n'), ((10049, 10093), 'keras.layers.Dense', 'Dense', (['(64)'], {'activation': '"""relu"""', 'trainable': '(True)'}), "(64, activation='relu', trainable=True)\n", (10054, 10093), False, 'from keras.layers import Dense, Dropout\n'), ((10109, 10122), 'keras.layers.Dropout', 'Dropout', (['(0.35)'], {}), '(0.35)\n', (10116, 10122), False, 'from keras.layers import Dense, Dropout\n'), ((10138, 10168), 'keras.layers.Dense', 'Dense', (['(1)'], {'activation': '"""sigmoid"""'}), "(1, activation='sigmoid')\n", (10143, 10168), False, 'from keras.layers import Dense, Dropout\n'), ((5437, 5481), 'keras.layers.Dense', 'Dense', (['(64)'], {'activation': '"""relu"""', 'trainable': '(True)'}), "(64, activation='relu', trainable=True)\n", (5442, 5481), False, 'from keras.layers import Dense, Dropout\n'), ((5501, 5546), 'keras.layers.Dense', 'Dense', (['(128)'], {'activation': '"""relu"""', 'trainable': '(True)'}), "(128, activation='relu', trainable=True)\n", (5506, 5546), False, 'from keras.layers import Dense, Dropout\n'), ((5566, 5578), 'keras.layers.Dropout', 'Dropout', (['(0.3)'], {}), '(0.3)\n', (5573, 5578), False, 'from keras.layers import Dense, Dropout\n'), ((5599, 5643), 'keras.layers.Dense', 'Dense', (['(64)'], {'activation': '"""relu"""', 'trainable': '(True)'}), "(64, activation='relu', trainable=True)\n", (5604, 5643), False, 'from keras.layers import Dense, Dropout\n'), ((5663, 5676), 'keras.layers.Dropout', 'Dropout', (['(0.25)'], {}), '(0.25)\n', (5670, 5676), False, 'from keras.layers import Dense, Dropout\n'), ((5696, 5740), 'keras.layers.Dense', 'Dense', (['(64)'], {'activation': '"""relu"""', 'trainable': '(True)'}), "(64, activation='relu', trainable=True)\n", (5701, 5740), False, 'from keras.layers import Dense, Dropout\n'), ((5760, 5773), 'keras.layers.Dropout', 'Dropout', (['(0.35)'], {}), '(0.35)\n', (5767, 5773), False, 'from keras.layers import Dense, Dropout\n'), ((5793, 5823), 'keras.layers.Dense', 'Dense', (['(1)'], {'activation': '"""sigmoid"""'}), "(1, activation='sigmoid')\n", (5798, 5823), False, 'from keras.layers import Dense, Dropout\n'), ((8666, 8677), 'time.time', 'time.time', ([], {}), '()\n', (8675, 8677), False, 'import time\n'), ((8696, 8760), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {'random_state': '(0)', 'solver': 's', 'max_iter': '(1000)', 'C': 'c'}), '(random_state=0, solver=s, max_iter=1000, C=c)\n', (8714, 8760), False, 'from sklearn.linear_model import LogisticRegression\n'), ((9116, 9144), 'sklearn.metrics.f1_score', 'f1_score', (['y_val', 'predictions'], {}), '(y_val, predictions)\n', (9124, 9144), False, 'from sklearn.metrics import classification_report, f1_score\n'), ((9431, 9442), 'time.time', 'time.time', ([], {}), '()\n', (9440, 9442), False, 'import time\n'), ((751, 780), 'keras.backend.clip', 'K.clip', (['(y_true * y_pred)', '(0)', '(1)'], {}), 
'(y_true * y_pred, 0, 1)\n', (757, 780), True, 'from keras import backend as K\n'), ((826, 846), 'keras.backend.clip', 'K.clip', (['y_true', '(0)', '(1)'], {}), '(y_true, 0, 1)\n', (832, 846), True, 'from keras import backend as K\n'), ((905, 916), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (914, 916), True, 'from keras import backend as K\n'), ((1236, 1265), 'keras.backend.clip', 'K.clip', (['(y_true * y_pred)', '(0)', '(1)'], {}), '(y_true * y_pred, 0, 1)\n', (1242, 1265), True, 'from keras import backend as K\n'), ((1312, 1332), 'keras.backend.clip', 'K.clip', (['y_pred', '(0)', '(1)'], {}), '(y_pred, 0, 1)\n', (1318, 1332), True, 'from keras import backend as K\n'), ((1395, 1406), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (1404, 1406), True, 'from keras import backend as K\n'), ((1562, 1573), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (1571, 1573), True, 'from keras import backend as K\n'), ((3526, 3539), 'numpy.zeros', 'np.zeros', (['(768)'], {}), '(768)\n', (3534, 3539), True, 'import numpy as np\n'), ((3727, 3740), 'numpy.zeros', 'np.zeros', (['(768)'], {}), '(768)\n', (3735, 3740), True, 'import numpy as np\n'), ((3864, 3877), 'numpy.zeros', 'np.zeros', (['(768)'], {}), '(768)\n', (3872, 3877), True, 'import numpy as np\n'), ((4011, 4024), 'numpy.zeros', 'np.zeros', (['(768)'], {}), '(768)\n', (4019, 4024), True, 'import numpy as np\n'), ((7268, 7281), 'numpy.zeros', 'np.zeros', (['(768)'], {}), '(768)\n', (7276, 7281), True, 'import numpy as np\n'), ((7469, 7482), 'numpy.zeros', 'np.zeros', (['(768)'], {}), '(768)\n', (7477, 7482), True, 'import numpy as np\n'), ((7606, 7619), 'numpy.zeros', 'np.zeros', (['(768)'], {}), '(768)\n', (7614, 7619), True, 'import numpy as np\n'), ((7753, 7766), 'numpy.zeros', 'np.zeros', (['(768)'], {}), '(768)\n', (7761, 7766), True, 'import numpy as np\n'), ((9056, 9097), 'sklearn.metrics.classification_report', 'classification_report', (['y_val', 'predictions'], {}), '(y_val, predictions)\n', (9077, 9097), False, 'from sklearn.metrics import classification_report, f1_score\n'), ((10862, 10875), 'numpy.zeros', 'np.zeros', (['(768)'], {}), '(768)\n', (10870, 10875), True, 'import numpy as np\n'), ((11063, 11076), 'numpy.zeros', 'np.zeros', (['(768)'], {}), '(768)\n', (11071, 11076), True, 'import numpy as np\n'), ((11200, 11213), 'numpy.zeros', 'np.zeros', (['(768)'], {}), '(768)\n', (11208, 11213), True, 'import numpy as np\n'), ((11347, 11360), 'numpy.zeros', 'np.zeros', (['(768)'], {}), '(768)\n', (11355, 11360), True, 'import numpy as np\n'), ((2434, 2450), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (2444, 2450), False, 'import json\n'), ((2388, 2404), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (2398, 2404), False, 'import json\n')] |
zmanji/incubator-aurora | src/test/python/apache/aurora/executor/test_status_manager.py | 9f594f1de6bbf46c74863dd3fc4d2708b7a974f2 | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import time
from unittest import TestCase
import mock
from mesos.interface.mesos_pb2 import TaskState
from apache.aurora.executor.common.status_checker import StatusChecker
from apache.aurora.executor.status_manager import StatusManager
class FakeStatusChecker(StatusChecker):
def __init__(self):
self.call_count = 0
@property
def status(self):
if self.call_count == 2:
return TaskState.Value('TASK_KILLED')
self.call_count += 1
return None
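# Note: FakeStatusChecker only reports a terminal TASK_KILLED status on the third
# poll, so StatusManager is expected to sleep exactly twice before invoking the
# callback (asserted in test_run below).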
class TestStatusManager(TestCase):
def setUp(self):
self.callback_called = False
def test_run(self):
checker = FakeStatusChecker()
def callback(result):
assert result == TaskState.Value('TASK_KILLED')
self.callback_called = True
mock_time = mock.Mock(spec=time)
status_manager = StatusManager(checker, callback, mock_time)
status_manager.run()
assert mock_time.sleep.call_count == 2
assert self.callback_called is True
| [((1299, 1319), 'mock.Mock', 'mock.Mock', ([], {'spec': 'time'}), '(spec=time)\n', (1308, 1319), False, 'import mock\n'), ((1341, 1384), 'apache.aurora.executor.status_manager.StatusManager', 'StatusManager', (['checker', 'callback', 'mock_time'], {}), '(checker, callback, mock_time)\n', (1354, 1384), False, 'from apache.aurora.executor.status_manager import StatusManager\n'), ((951, 981), 'mesos.interface.mesos_pb2.TaskState.Value', 'TaskState.Value', (['"""TASK_KILLED"""'], {}), "('TASK_KILLED')\n", (966, 981), False, 'from mesos.interface.mesos_pb2 import TaskState\n'), ((1218, 1248), 'mesos.interface.mesos_pb2.TaskState.Value', 'TaskState.Value', (['"""TASK_KILLED"""'], {}), "('TASK_KILLED')\n", (1233, 1248), False, 'from mesos.interface.mesos_pb2 import TaskState\n')] |
wangqf1997/Human-injury-based-safety-decision-of-automated-vehicles | Supplemental/A5. Collision estimation module/Con_est.py | b104fdeb3d85e867f6b04c5ae7b5a197e705aeba | '''
-------------------------------------------------------------------------------------------------
This code accompanies the paper titled "Human injury-based safety decision of automated vehicles"
Author: Qingfan Wang, Qing Zhou, Miao Lin, Bingbing Nie
Corresponding author: Bingbing Nie (nbb@tsinghua.edu.cn)
-------------------------------------------------------------------------------------------------
'''
import torch
import numpy as np
from torch import nn
from torch.nn.utils import weight_norm
__author__ = "Qingfan Wang"
def Collision_cond(veh_striking_list, V1_v, V2_v, delta_angle, veh_param):
''' Estimate the collision condition. '''
(veh_l, veh_w, veh_cgf, veh_cgs, veh_k, veh_m) = veh_param
delta_angle_2 = np.arccos(np.abs(np.cos(delta_angle)))
if -1e-6 < delta_angle_2 < 1e-6:
delta_angle_2 = 1e-6
delta_v1_list = []
delta_v2_list = []
    # Estimate the collision condition (delta-v) according to the principal impact direction.
for veh_striking in veh_striking_list:
if veh_striking[0] == 1:
veh_ca = np.arctan(veh_cgf[0] / veh_cgs[0])
veh_a2 = np.abs(veh_cgs[1] - veh_striking[3])
veh_RDS = np.abs(V1_v * np.cos(delta_angle) - V2_v)
veh_a1 = np.abs(np.sqrt(veh_cgf[0] ** 2 + veh_cgs[0] ** 2) * np.cos(veh_ca + delta_angle_2))
if (veh_striking[1]+1) in [16, 1, 2, 3, 17, 20, 21] and (veh_striking[2]+1) in [16, 1, 2, 3, 17, 20, 21]:
veh_e = 2 / veh_RDS
else:
veh_e = 0.5 / veh_RDS
elif veh_striking[0] == 2:
veh_ca = np.arctan(veh_cgf[0] / veh_cgs[0])
veh_a2 = np.abs(veh_cgf[1] - veh_striking[3])
veh_a1 = np.abs(np.sqrt(veh_cgf[0] ** 2 + veh_cgs[0] ** 2) * np.cos(delta_angle_2 - veh_ca + np.pi / 2))
veh_RDS = V1_v * np.sin(delta_angle_2)
veh_e = 1.5 / veh_RDS
elif veh_striking[0] == 3:
veh_ca = np.arctan(veh_cgf[1] / veh_cgs[1])
veh_a1 = np.abs(veh_cgs[0] - veh_striking[3])
veh_RDS = np.abs(V2_v * np.cos(delta_angle) - V1_v)
veh_a2 = np.abs(np.sqrt(veh_cgf[1] ** 2 + veh_cgs[1] ** 2) * np.cos(veh_ca + delta_angle_2))
if (veh_striking[1]+1) in [16, 1, 2, 3, 17, 20, 21] and (veh_striking[2]+1) in [16, 1, 2, 3, 17, 20, 21]:
veh_e = 2 / veh_RDS
else:
veh_e = 0.5 / veh_RDS
elif veh_striking[0] == 4:
veh_ca = np.arctan(veh_cgf[1] / veh_cgs[1])
veh_a1 = np.abs(veh_cgf[0] - veh_striking[3])
veh_a2 = np.abs(np.sqrt(veh_cgf[1] ** 2 + veh_cgs[1] ** 2) * np.cos(delta_angle_2 - veh_ca + np.pi / 2))
veh_RDS = V2_v * np.sin(delta_angle_2)
veh_e = 1.5 / veh_RDS
# Obtain delta-v based on the plane 2-DOF rigid-body collision model with momentum conservation.
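        # In the expressions below, veh_y_i = k_i^2 / (a_i^2 + k_i^2) is the
        # effective-mass reduction factor from the impact moment arm a_i, so
        # delta_v1 = (1 + e) * m2 * y1 * y2 * RDS / (m1 * y1 + m2 * y2), and
        # symmetrically for delta_v2, with e a restitution-like term and RDS the
        # relative closing speed along the principal impact direction.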
veh_y1 = veh_k[0] ** 2 / (veh_a1 ** 2 + veh_k[0] ** 2)
veh_y2 = veh_k[1] ** 2 / (veh_a2 ** 2 + veh_k[1] ** 2)
delta_v1 = (1 + veh_e) * veh_m[1] * veh_y1 * veh_y2 * veh_RDS / (veh_m[0] * veh_y1 + veh_m[1] * veh_y2)
delta_v2 = (1 + veh_e) * veh_m[0] * veh_y1 * veh_y2 * veh_RDS / (veh_m[0] * veh_y1 + veh_m[1] * veh_y2)
delta_v1_list.append(delta_v1)
delta_v2_list.append(delta_v2)
delta_v1_ = max(delta_v1_list)
delta_v2_ = max(delta_v2_list)
index = delta_v1_list.index(max(delta_v1_list))
return delta_v1_, delta_v2_, index | [((764, 783), 'numpy.cos', 'np.cos', (['delta_angle'], {}), '(delta_angle)\n', (770, 783), True, 'import numpy as np\n'), ((1091, 1125), 'numpy.arctan', 'np.arctan', (['(veh_cgf[0] / veh_cgs[0])'], {}), '(veh_cgf[0] / veh_cgs[0])\n', (1100, 1125), True, 'import numpy as np\n'), ((1147, 1183), 'numpy.abs', 'np.abs', (['(veh_cgs[1] - veh_striking[3])'], {}), '(veh_cgs[1] - veh_striking[3])\n', (1153, 1183), True, 'import numpy as np\n'), ((1620, 1654), 'numpy.arctan', 'np.arctan', (['(veh_cgf[0] / veh_cgs[0])'], {}), '(veh_cgf[0] / veh_cgs[0])\n', (1629, 1654), True, 'import numpy as np\n'), ((1676, 1712), 'numpy.abs', 'np.abs', (['(veh_cgf[1] - veh_striking[3])'], {}), '(veh_cgf[1] - veh_striking[3])\n', (1682, 1712), True, 'import numpy as np\n'), ((1276, 1318), 'numpy.sqrt', 'np.sqrt', (['(veh_cgf[0] ** 2 + veh_cgs[0] ** 2)'], {}), '(veh_cgf[0] ** 2 + veh_cgs[0] ** 2)\n', (1283, 1318), True, 'import numpy as np\n'), ((1321, 1351), 'numpy.cos', 'np.cos', (['(veh_ca + delta_angle_2)'], {}), '(veh_ca + delta_angle_2)\n', (1327, 1351), True, 'import numpy as np\n'), ((1859, 1880), 'numpy.sin', 'np.sin', (['delta_angle_2'], {}), '(delta_angle_2)\n', (1865, 1880), True, 'import numpy as np\n'), ((1972, 2006), 'numpy.arctan', 'np.arctan', (['(veh_cgf[1] / veh_cgs[1])'], {}), '(veh_cgf[1] / veh_cgs[1])\n', (1981, 2006), True, 'import numpy as np\n'), ((2028, 2064), 'numpy.abs', 'np.abs', (['(veh_cgs[0] - veh_striking[3])'], {}), '(veh_cgs[0] - veh_striking[3])\n', (2034, 2064), True, 'import numpy as np\n'), ((1220, 1239), 'numpy.cos', 'np.cos', (['delta_angle'], {}), '(delta_angle)\n', (1226, 1239), True, 'import numpy as np\n'), ((1741, 1783), 'numpy.sqrt', 'np.sqrt', (['(veh_cgf[0] ** 2 + veh_cgs[0] ** 2)'], {}), '(veh_cgf[0] ** 2 + veh_cgs[0] ** 2)\n', (1748, 1783), True, 'import numpy as np\n'), ((1786, 1828), 'numpy.cos', 'np.cos', (['(delta_angle_2 - veh_ca + np.pi / 2)'], {}), '(delta_angle_2 - veh_ca + np.pi / 2)\n', (1792, 1828), True, 'import numpy as np\n'), ((2501, 2535), 'numpy.arctan', 'np.arctan', (['(veh_cgf[1] / veh_cgs[1])'], {}), '(veh_cgf[1] / veh_cgs[1])\n', (2510, 2535), True, 'import numpy as np\n'), ((2557, 2593), 'numpy.abs', 'np.abs', (['(veh_cgf[0] - veh_striking[3])'], {}), '(veh_cgf[0] - veh_striking[3])\n', (2563, 2593), True, 'import numpy as np\n'), ((2157, 2199), 'numpy.sqrt', 'np.sqrt', (['(veh_cgf[1] ** 2 + veh_cgs[1] ** 2)'], {}), '(veh_cgf[1] ** 2 + veh_cgs[1] ** 2)\n', (2164, 2199), True, 'import numpy as np\n'), ((2202, 2232), 'numpy.cos', 'np.cos', (['(veh_ca + delta_angle_2)'], {}), '(veh_ca + delta_angle_2)\n', (2208, 2232), True, 'import numpy as np\n'), ((2740, 2761), 'numpy.sin', 'np.sin', (['delta_angle_2'], {}), '(delta_angle_2)\n', (2746, 2761), True, 'import numpy as np\n'), ((2101, 2120), 'numpy.cos', 'np.cos', (['delta_angle'], {}), '(delta_angle)\n', (2107, 2120), True, 'import numpy as np\n'), ((2622, 2664), 'numpy.sqrt', 'np.sqrt', (['(veh_cgf[1] ** 2 + veh_cgs[1] ** 2)'], {}), '(veh_cgf[1] ** 2 + veh_cgs[1] ** 2)\n', (2629, 2664), True, 'import numpy as np\n'), ((2667, 2709), 'numpy.cos', 'np.cos', (['(delta_angle_2 - veh_ca + np.pi / 2)'], {}), '(delta_angle_2 - veh_ca + np.pi / 2)\n', (2673, 2709), True, 'import numpy as np\n')] |
TontonTremblay/pixel-nerf | train/train.py | 349b5f3f173cd76def05b6de8aa52c69a4f0c7fa | # Training to a set of multiple objects (e.g. ShapeNet or DTU)
# tensorboard logs available in logs/<expname>
import sys
import os
sys.path.insert(
0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "src"))
)
import warnings
import trainlib
from model import make_model, loss
from render import NeRFRenderer
from data import get_split_dataset
import util
import numpy as np
import torch.nn.functional as F
import torch
from dotmap import DotMap
def extra_args(parser):
parser.add_argument(
"--batch_size", "-B", type=int, default=4, help="Object batch size ('SB')"
)
parser.add_argument(
"--nviews",
"-V",
type=str,
default="1",
help="Number of source views (multiview); put multiple (space delim) to pick randomly per batch ('NV')",
)
parser.add_argument(
"--freeze_enc",
action="store_true",
default=None,
help="Freeze encoder weights and only train MLP",
)
parser.add_argument(
"--no_bbox_step",
type=int,
default=100000,
help="Step to stop using bbox sampling",
)
parser.add_argument(
"--fixed_test",
action="store_true",
default=None,
help="Freeze encoder weights and only train MLP",
)
return parser
args, conf = util.args.parse_args(extra_args, training=True, default_ray_batch_size=128)
device = util.get_cuda(args.gpu_id[0])
dset, val_dset, _ = get_split_dataset(args.dataset_format, args.datadir)
print(
"dset z_near {}, z_far {}, lindisp {}".format(dset.z_near, dset.z_far, dset.lindisp)
)
net = make_model(conf["model"]).to(device=device)
net.stop_encoder_grad = args.freeze_enc
if args.freeze_enc:
print("Encoder frozen")
net.encoder.eval()
renderer = NeRFRenderer.from_conf(conf["renderer"], lindisp=dset.lindisp,).to(
device=device
)
# Parallelize
render_par = renderer.bind_parallel(net, args.gpu_id).eval()
nviews = list(map(int, args.nviews.split()))
class PixelNeRFTrainer(trainlib.Trainer):
def __init__(self):
super().__init__(net, dset, val_dset, args, conf["train"], device=device)
self.renderer_state_path = "%s/%s/_renderer" % (
self.args.checkpoints_path,
self.args.name,
)
self.lambda_coarse = conf.get_float("loss.lambda_coarse")
self.lambda_fine = conf.get_float("loss.lambda_fine", 1.0)
print(
"lambda coarse {} and fine {}".format(self.lambda_coarse, self.lambda_fine)
)
self.rgb_coarse_crit = loss.get_rgb_loss(conf["loss.rgb"], True)
fine_loss_conf = conf["loss.rgb"]
if "rgb_fine" in conf["loss"]:
print("using fine loss")
fine_loss_conf = conf["loss.rgb_fine"]
self.rgb_fine_crit = loss.get_rgb_loss(fine_loss_conf, False)
if args.resume:
if os.path.exists(self.renderer_state_path):
renderer.load_state_dict(
torch.load(self.renderer_state_path, map_location=device)
)
self.z_near = dset.z_near
self.z_far = dset.z_far
self.use_bbox = args.no_bbox_step > 0
def post_batch(self, epoch, batch):
renderer.sched_step(args.batch_size)
def extra_save_state(self):
torch.save(renderer.state_dict(), self.renderer_state_path)
def calc_losses(self, data, is_train=True, global_step=0):
if "images" not in data:
return {}
all_images = data["images"].to(device=device) # (SB, NV, 3, H, W)
SB, NV, _, H, W = all_images.shape
all_poses = data["poses"].to(device=device) # (SB, NV, 4, 4)
all_bboxes = data.get("bbox") # (SB, NV, 4) cmin rmin cmax rmax
all_focals = data["focal"] # (SB)
all_c = data.get("c") # (SB)
if self.use_bbox and global_step >= args.no_bbox_step:
self.use_bbox = False
print(">>> Stopped using bbox sampling @ iter", global_step)
if not is_train or not self.use_bbox:
all_bboxes = None
all_rgb_gt = []
all_rays = []
curr_nviews = nviews[torch.randint(0, len(nviews), ()).item()]
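        # image_ord will hold, for each object in the batch, the indices of the
        # source view(s) to encode: filled for the whole batch here in the
        # single-view case, and per object inside the loop below otherwise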
if curr_nviews == 1:
image_ord = torch.randint(0, NV, (SB, 1))
else:
image_ord = torch.empty((SB, curr_nviews), dtype=torch.long)
for obj_idx in range(SB):
if all_bboxes is not None:
bboxes = all_bboxes[obj_idx]
images = all_images[obj_idx] # (NV, 3, H, W)
poses = all_poses[obj_idx] # (NV, 4, 4)
focal = all_focals[obj_idx]
c = None
if "c" in data:
c = data["c"][obj_idx]
if curr_nviews > 1:
# Somewhat inefficient, don't know better way
image_ord[obj_idx] = torch.from_numpy(
np.random.choice(NV, curr_nviews, replace=False)
)
images_0to1 = images * 0.5 + 0.5
cam_rays = util.gen_rays(
poses, W, H, focal, self.z_near, self.z_far, c=c
) # (NV, H, W, 8)
rgb_gt_all = images_0to1
rgb_gt_all = (
rgb_gt_all.permute(0, 2, 3, 1).contiguous().reshape(-1, 3)
) # (NV, H, W, 3)
if all_bboxes is not None:
pix = util.bbox_sample(bboxes, args.ray_batch_size)
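                # flatten the sampled (view, row, col) coordinates into indices
                # over this object's NV * H * W rays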
pix_inds = pix[..., 0] * H * W + pix[..., 1] * W + pix[..., 2]
else:
pix_inds = torch.randint(0, NV * H * W, (args.ray_batch_size,))
rgb_gt = rgb_gt_all[pix_inds] # (ray_batch_size, 3)
rays = cam_rays.view(-1, cam_rays.shape[-1])[pix_inds].to(
device=device
) # (ray_batch_size, 8)
all_rgb_gt.append(rgb_gt)
all_rays.append(rays)
all_rgb_gt = torch.stack(all_rgb_gt) # (SB, ray_batch_size, 3)
all_rays = torch.stack(all_rays) # (SB, ray_batch_size, 8)
image_ord = image_ord.to(device)
src_images = util.batched_index_select_nd(
all_images, image_ord
) # (SB, NS, 3, H, W)
src_poses = util.batched_index_select_nd(all_poses, image_ord) # (SB, NS, 4, 4)
all_bboxes = all_poses = all_images = None
net.encode(
src_images,
src_poses,
all_focals.to(device=device),
c=all_c.to(device=device) if all_c is not None else None,
)
render_dict = DotMap(render_par(all_rays, want_weights=True,))
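        # render_dict carries the coarse pass outputs and, when hierarchical
        # sampling is enabled, the fine pass outputs as well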
coarse = render_dict.coarse
fine = render_dict.fine
using_fine = len(fine) > 0
loss_dict = {}
rgb_loss = self.rgb_coarse_crit(coarse.rgb, all_rgb_gt)
        if rgb_loss.isnan().any().item():
            raise RuntimeError("NaN encountered in coarse RGB loss")
loss_dict["rc"] = rgb_loss.item() * self.lambda_coarse
if using_fine:
fine_loss = self.rgb_fine_crit(fine.rgb, all_rgb_gt)
rgb_loss = rgb_loss * self.lambda_coarse + fine_loss * self.lambda_fine
loss_dict["rf"] = fine_loss.item() * self.lambda_fine
loss = rgb_loss
if is_train:
loss.backward()
loss_dict["t"] = loss.item()
return loss_dict
def train_step(self, data, global_step):
return self.calc_losses(data, is_train=True, global_step=global_step)
def eval_step(self, data, global_step):
renderer.eval()
losses = self.calc_losses(data, is_train=False, global_step=global_step)
renderer.train()
return losses
def vis_step(self, data, global_step, idx=None):
if "images" not in data:
return {}
if idx is None:
batch_idx = np.random.randint(0, data["images"].shape[0])
else:
print(idx)
batch_idx = idx
images = data["images"][batch_idx].to(device=device) # (NV, 3, H, W)
poses = data["poses"][batch_idx].to(device=device) # (NV, 4, 4)
focal = data["focal"][batch_idx : batch_idx + 1] # (1)
c = data.get("c")
if c is not None:
c = c[batch_idx : batch_idx + 1] # (1)
NV, _, H, W = images.shape
cam_rays = util.gen_rays(
poses, W, H, focal, self.z_near, self.z_far, c=c
) # (NV, H, W, 8)
images_0to1 = images * 0.5 + 0.5 # (NV, 3, H, W)
curr_nviews = nviews[torch.randint(0, len(nviews), (1,)).item()]
views_src = np.sort(np.random.choice(NV, curr_nviews, replace=False))
view_dest = np.random.randint(0, NV - curr_nviews)
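        # bump the destination index past every chosen source view so the
        # target view is never one of the encoded source views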
for vs in range(curr_nviews):
view_dest += view_dest >= views_src[vs]
views_src = torch.from_numpy(views_src)
# set renderer net to eval mode
renderer.eval()
source_views = (
images_0to1[views_src]
.permute(0, 2, 3, 1)
.cpu()
.numpy()
.reshape(-1, H, W, 3)
)
gt = images_0to1[view_dest].permute(1, 2, 0).cpu().numpy().reshape(H, W, 3)
with torch.no_grad():
test_rays = cam_rays[view_dest] # (H, W, 8)
test_images = images[views_src] # (NS, 3, H, W)
net.encode(
test_images.unsqueeze(0),
poses[views_src].unsqueeze(0),
focal.to(device=device),
c=c.to(device=device) if c is not None else None,
)
test_rays = test_rays.reshape(1, H * W, -1)
render_dict = DotMap(render_par(test_rays, want_weights=True))
coarse = render_dict.coarse
fine = render_dict.fine
using_fine = len(fine) > 0
alpha_coarse_np = coarse.weights[0].sum(dim=-1).cpu().numpy().reshape(H, W)
rgb_coarse_np = coarse.rgb[0].cpu().numpy().reshape(H, W, 3)
depth_coarse_np = coarse.depth[0].cpu().numpy().reshape(H, W)
if using_fine:
alpha_fine_np = fine.weights[0].sum(dim=1).cpu().numpy().reshape(H, W)
depth_fine_np = fine.depth[0].cpu().numpy().reshape(H, W)
rgb_fine_np = fine.rgb[0].cpu().numpy().reshape(H, W, 3)
print("c rgb min {} max {}".format(rgb_coarse_np.min(), rgb_coarse_np.max()))
print(
"c alpha min {}, max {}".format(
alpha_coarse_np.min(), alpha_coarse_np.max()
)
)
alpha_coarse_cmap = util.cmap(alpha_coarse_np) / 255
depth_coarse_cmap = util.cmap(depth_coarse_np) / 255
vis_list = [
*source_views,
gt,
depth_coarse_cmap,
rgb_coarse_np,
alpha_coarse_cmap,
]
vis_coarse = np.hstack(vis_list)
vis = vis_coarse
if using_fine:
print("f rgb min {} max {}".format(rgb_fine_np.min(), rgb_fine_np.max()))
print(
"f alpha min {}, max {}".format(
alpha_fine_np.min(), alpha_fine_np.max()
)
)
depth_fine_cmap = util.cmap(depth_fine_np) / 255
alpha_fine_cmap = util.cmap(alpha_fine_np) / 255
vis_list = [
*source_views,
gt,
depth_fine_cmap,
rgb_fine_np,
alpha_fine_cmap,
]
vis_fine = np.hstack(vis_list)
vis = np.vstack((vis_coarse, vis_fine))
rgb_psnr = rgb_fine_np
else:
rgb_psnr = rgb_coarse_np
psnr = util.psnr(rgb_psnr, gt)
vals = {"psnr": psnr}
print("psnr", psnr)
# set the renderer network back to train mode
renderer.train()
return vis, vals
trainer = PixelNeRFTrainer()
trainer.start()
| [((1333, 1408), 'util.args.parse_args', 'util.args.parse_args', (['extra_args'], {'training': '(True)', 'default_ray_batch_size': '(128)'}), '(extra_args, training=True, default_ray_batch_size=128)\n', (1353, 1408), False, 'import util\n'), ((1418, 1447), 'util.get_cuda', 'util.get_cuda', (['args.gpu_id[0]'], {}), '(args.gpu_id[0])\n', (1431, 1447), False, 'import util\n'), ((1469, 1521), 'data.get_split_dataset', 'get_split_dataset', (['args.dataset_format', 'args.datadir'], {}), '(args.dataset_format, args.datadir)\n', (1486, 1521), False, 'from data import get_split_dataset\n'), ((1627, 1652), 'model.make_model', 'make_model', (["conf['model']"], {}), "(conf['model'])\n", (1637, 1652), False, 'from model import make_model, loss\n'), ((1794, 1856), 'render.NeRFRenderer.from_conf', 'NeRFRenderer.from_conf', (["conf['renderer']"], {'lindisp': 'dset.lindisp'}), "(conf['renderer'], lindisp=dset.lindisp)\n", (1816, 1856), False, 'from render import NeRFRenderer\n'), ((2565, 2606), 'model.loss.get_rgb_loss', 'loss.get_rgb_loss', (["conf['loss.rgb']", '(True)'], {}), "(conf['loss.rgb'], True)\n", (2582, 2606), False, 'from model import make_model, loss\n'), ((2805, 2845), 'model.loss.get_rgb_loss', 'loss.get_rgb_loss', (['fine_loss_conf', '(False)'], {}), '(fine_loss_conf, False)\n', (2822, 2845), False, 'from model import make_model, loss\n'), ((5894, 5917), 'torch.stack', 'torch.stack', (['all_rgb_gt'], {}), '(all_rgb_gt)\n', (5905, 5917), False, 'import torch\n'), ((5964, 5985), 'torch.stack', 'torch.stack', (['all_rays'], {}), '(all_rays)\n', (5975, 5985), False, 'import torch\n'), ((6076, 6127), 'util.batched_index_select_nd', 'util.batched_index_select_nd', (['all_images', 'image_ord'], {}), '(all_images, image_ord)\n', (6104, 6127), False, 'import util\n'), ((6191, 6241), 'util.batched_index_select_nd', 'util.batched_index_select_nd', (['all_poses', 'image_ord'], {}), '(all_poses, image_ord)\n', (6219, 6241), False, 'import util\n'), ((7232, 7243), 'model.loss.item', 'loss.item', ([], {}), '()\n', (7241, 7243), False, 'from model import make_model, loss\n'), ((8232, 8295), 'util.gen_rays', 'util.gen_rays', (['poses', 'W', 'H', 'focal', 'self.z_near', 'self.z_far'], {'c': 'c'}), '(poses, W, H, focal, self.z_near, self.z_far, c=c)\n', (8245, 8295), False, 'import util\n'), ((8565, 8603), 'numpy.random.randint', 'np.random.randint', (['(0)', '(NV - curr_nviews)'], {}), '(0, NV - curr_nviews)\n', (8582, 8603), True, 'import numpy as np\n'), ((8714, 8741), 'torch.from_numpy', 'torch.from_numpy', (['views_src'], {}), '(views_src)\n', (8730, 8741), False, 'import torch\n'), ((10735, 10754), 'numpy.hstack', 'np.hstack', (['vis_list'], {}), '(vis_list)\n', (10744, 10754), True, 'import numpy as np\n'), ((11556, 11579), 'util.psnr', 'util.psnr', (['rgb_psnr', 'gt'], {}), '(rgb_psnr, gt)\n', (11565, 11579), False, 'import util\n'), ((186, 211), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (201, 211), False, 'import os\n'), ((2886, 2926), 'os.path.exists', 'os.path.exists', (['self.renderer_state_path'], {}), '(self.renderer_state_path)\n', (2900, 2926), False, 'import os\n'), ((4250, 4279), 'torch.randint', 'torch.randint', (['(0)', 'NV', '(SB, 1)'], {}), '(0, NV, (SB, 1))\n', (4263, 4279), False, 'import torch\n'), ((4318, 4366), 'torch.empty', 'torch.empty', (['(SB, curr_nviews)'], {'dtype': 'torch.long'}), '((SB, curr_nviews), dtype=torch.long)\n', (4329, 4366), False, 'import torch\n'), ((5029, 5092), 'util.gen_rays', 'util.gen_rays', (['poses', 'W', 'H', 'focal', 
'self.z_near', 'self.z_far'], {'c': 'c'}), '(poses, W, H, focal, self.z_near, self.z_far, c=c)\n', (5042, 5092), False, 'import util\n'), ((7191, 7206), 'model.loss.backward', 'loss.backward', ([], {}), '()\n', (7204, 7206), False, 'from model import make_model, loss\n'), ((7748, 7793), 'numpy.random.randint', 'np.random.randint', (['(0)', "data['images'].shape[0]"], {}), "(0, data['images'].shape[0])\n", (7765, 7793), True, 'import numpy as np\n'), ((8495, 8543), 'numpy.random.choice', 'np.random.choice', (['NV', 'curr_nviews'], {'replace': '(False)'}), '(NV, curr_nviews, replace=False)\n', (8511, 8543), True, 'import numpy as np\n'), ((9082, 9097), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (9095, 9097), False, 'import torch\n'), ((10456, 10482), 'util.cmap', 'util.cmap', (['alpha_coarse_np'], {}), '(alpha_coarse_np)\n', (10465, 10482), False, 'import util\n'), ((10517, 10543), 'util.cmap', 'util.cmap', (['depth_coarse_np'], {}), '(depth_coarse_np)\n', (10526, 10543), False, 'import util\n'), ((11382, 11401), 'numpy.hstack', 'np.hstack', (['vis_list'], {}), '(vis_list)\n', (11391, 11401), True, 'import numpy as np\n'), ((11420, 11453), 'numpy.vstack', 'np.vstack', (['(vis_coarse, vis_fine)'], {}), '((vis_coarse, vis_fine))\n', (11429, 11453), True, 'import numpy as np\n'), ((5372, 5417), 'util.bbox_sample', 'util.bbox_sample', (['bboxes', 'args.ray_batch_size'], {}), '(bboxes, args.ray_batch_size)\n', (5388, 5417), False, 'import util\n'), ((5542, 5594), 'torch.randint', 'torch.randint', (['(0)', '(NV * H * W)', '(args.ray_batch_size,)'], {}), '(0, NV * H * W, (args.ray_batch_size,))\n', (5555, 5594), False, 'import torch\n'), ((11081, 11105), 'util.cmap', 'util.cmap', (['depth_fine_np'], {}), '(depth_fine_np)\n', (11090, 11105), False, 'import util\n'), ((11142, 11166), 'util.cmap', 'util.cmap', (['alpha_fine_np'], {}), '(alpha_fine_np)\n', (11151, 11166), False, 'import util\n'), ((2990, 3047), 'torch.load', 'torch.load', (['self.renderer_state_path'], {'map_location': 'device'}), '(self.renderer_state_path, map_location=device)\n', (3000, 3047), False, 'import torch\n'), ((4893, 4941), 'numpy.random.choice', 'np.random.choice', (['NV', 'curr_nviews'], {'replace': '(False)'}), '(NV, curr_nviews, replace=False)\n', (4909, 4941), True, 'import numpy as np\n')] |
kne42/napari | napari/_qt/dialogs/qt_plugin_dialog.py | d61d0be0ef8ea622dd3d6acd270c0529816c11ec | import os
import sys
from pathlib import Path
from typing import Sequence
from napari_plugin_engine.dist import standard_metadata
from napari_plugin_engine.exceptions import PluginError
from qtpy.QtCore import QEvent, QProcess, QProcessEnvironment, QSize, Qt, Slot
from qtpy.QtGui import QFont, QMovie
from qtpy.QtWidgets import (
QCheckBox,
QDialog,
QFrame,
QHBoxLayout,
QLabel,
QLineEdit,
QListWidget,
QListWidgetItem,
QPushButton,
QSizePolicy,
QSplitter,
QTextEdit,
QVBoxLayout,
QWidget,
)
import napari.resources
from ...plugins import plugin_manager
from ...plugins.pypi import (
ProjectInfo,
iter_napari_plugin_info,
normalized_name,
)
from ...utils._appdirs import user_plugin_dir, user_site_packages
from ...utils.misc import parse_version, running_as_bundled_app
from ...utils.translations import trans
from ..qthreading import create_worker
from ..widgets.qt_eliding_label import ElidingLabel
from ..widgets.qt_plugin_sorter import QtPluginSorter
from .qt_plugin_report import QtPluginErrReporter
# TODO: add error icon and handle pip install errors
# TODO: add queue to handle clicks when already processing
class Installer:
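    """Run ``python -m pip`` in a ``QProcess`` to install or uninstall napari plugins."""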
def __init__(self, output_widget: QTextEdit = None):
from ...plugins import plugin_manager
# create install process
self._output_widget = None
self.process = QProcess()
self.process.setProgram(sys.executable)
self.process.setProcessChannelMode(QProcess.MergedChannels)
self.process.readyReadStandardOutput.connect(self._on_stdout_ready)
# setup process path
env = QProcessEnvironment()
combined_paths = os.pathsep.join(
[user_site_packages(), env.systemEnvironment().value("PYTHONPATH")]
)
env.insert("PYTHONPATH", combined_paths)
# use path of parent process
env.insert(
"PATH", QProcessEnvironment.systemEnvironment().value("PATH")
)
self.process.setProcessEnvironment(env)
self.process.finished.connect(lambda: plugin_manager.discover())
self.process.finished.connect(lambda: plugin_manager.prune())
self.set_output_widget(output_widget)
def set_output_widget(self, output_widget: QTextEdit):
if output_widget:
self._output_widget = output_widget
self.process.setParent(output_widget)
def _on_stdout_ready(self):
if self._output_widget:
text = self.process.readAllStandardOutput().data().decode()
self._output_widget.append(text)
def install(self, pkg_list: Sequence[str]):
cmd = ['-m', 'pip', 'install', '--upgrade']
if running_as_bundled_app() and sys.platform.startswith('linux'):
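            # when running as a bundled Linux app, install into the per-user
            # plugin directory rather than the bundled application environment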
cmd += [
'--no-warn-script-location',
'--prefix',
user_plugin_dir(),
]
self.process.setArguments(cmd + list(pkg_list))
if self._output_widget:
self._output_widget.clear()
self.process.start()
def uninstall(self, pkg_list: Sequence[str]):
args = ['-m', 'pip', 'uninstall', '-y']
self.process.setArguments(args + list(pkg_list))
if self._output_widget:
self._output_widget.clear()
self.process.start()
for pkg in pkg_list:
plugin_manager.unregister(pkg)
class PluginListItem(QFrame):
def __init__(
self,
package_name: str,
version: str = '',
url: str = '',
summary: str = '',
author: str = '',
license: str = "UNKNOWN",
*,
plugin_name: str = None,
parent: QWidget = None,
enabled: bool = True,
):
super().__init__(parent)
self.setup_ui(enabled)
if plugin_name:
self.plugin_name.setText(plugin_name)
self.package_name.setText(f"{package_name} {version}")
self.summary.setText(summary)
self.package_author.setText(author)
self.action_button.setText(trans._("uninstall"))
self.action_button.setObjectName("remove_button")
self.enabled_checkbox.setChecked(enabled)
if PluginError.get(plugin_name=plugin_name):
def _show_error():
rep = QtPluginErrReporter(
parent=self._get_dialog(), initial_plugin=plugin_name
)
rep.setWindowFlags(Qt.Sheet)
close = QPushButton(trans._("close"), rep)
rep.layout.addWidget(close)
rep.plugin_combo.hide()
close.clicked.connect(rep.close)
rep.open()
self.error_indicator.clicked.connect(_show_error)
self.error_indicator.show()
self.summary.setIndent(18)
else:
self.summary.setIndent(38)
else:
self.plugin_name.setText(package_name)
self.package_name.setText(version)
self.summary.setText(summary)
self.package_author.setText(author)
self.action_button.setText(trans._("install"))
self.enabled_checkbox.hide()
def _get_dialog(self) -> QDialog:
p = self.parent()
while not isinstance(p, QDialog) and p.parent():
p = p.parent()
return p
def setup_ui(self, enabled=True):
self.v_lay = QVBoxLayout(self)
self.v_lay.setContentsMargins(-1, 6, -1, 6)
self.v_lay.setSpacing(0)
self.row1 = QHBoxLayout()
self.row1.setSpacing(6)
self.enabled_checkbox = QCheckBox(self)
self.enabled_checkbox.setChecked(enabled)
self.enabled_checkbox.stateChanged.connect(self._on_enabled_checkbox)
self.enabled_checkbox.setToolTip(trans._("enable/disable"))
sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(
self.enabled_checkbox.sizePolicy().hasHeightForWidth()
)
self.enabled_checkbox.setSizePolicy(sizePolicy)
self.enabled_checkbox.setMinimumSize(QSize(20, 0))
self.enabled_checkbox.setText("")
self.row1.addWidget(self.enabled_checkbox)
self.plugin_name = QLabel(self)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(
self.plugin_name.sizePolicy().hasHeightForWidth()
)
self.plugin_name.setSizePolicy(sizePolicy)
font15 = QFont()
font15.setPointSize(15)
self.plugin_name.setFont(font15)
self.row1.addWidget(self.plugin_name)
self.package_name = QLabel(self)
self.package_name.setAlignment(
Qt.AlignRight | Qt.AlignTrailing | Qt.AlignVCenter
)
self.row1.addWidget(self.package_name)
self.action_button = QPushButton(self)
sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(
self.action_button.sizePolicy().hasHeightForWidth()
)
self.action_button.setSizePolicy(sizePolicy)
self.row1.addWidget(self.action_button)
self.v_lay.addLayout(self.row1)
self.row2 = QHBoxLayout()
self.error_indicator = QPushButton()
self.error_indicator.setObjectName("warning_icon")
self.error_indicator.setCursor(Qt.PointingHandCursor)
self.error_indicator.hide()
self.row2.addWidget(self.error_indicator)
self.row2.setContentsMargins(-1, 4, 0, -1)
self.summary = ElidingLabel(parent=self)
sizePolicy = QSizePolicy(
QSizePolicy.MinimumExpanding, QSizePolicy.Preferred
)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(
self.summary.sizePolicy().hasHeightForWidth()
)
self.summary.setSizePolicy(sizePolicy)
self.summary.setObjectName("small_text")
self.row2.addWidget(self.summary)
self.package_author = QLabel(self)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(
self.package_author.sizePolicy().hasHeightForWidth()
)
self.package_author.setSizePolicy(sizePolicy)
self.package_author.setObjectName("small_text")
self.row2.addWidget(self.package_author)
self.v_lay.addLayout(self.row2)
def _on_enabled_checkbox(self, state: int):
"""Called with `state` when checkbox is clicked."""
plugin_manager.set_blocked(self.plugin_name.text(), not state)
class QPluginList(QListWidget):
def __init__(self, parent: QWidget, installer: Installer):
super().__init__(parent)
self.installer = installer
self.setSortingEnabled(True)
@Slot(ProjectInfo)
def addItem(
self, project_info: ProjectInfo, plugin_name=None, enabled=True
):
# don't add duplicates
if (
self.findItems(project_info.name, Qt.MatchFixedString)
and not plugin_name
):
return
# including summary here for sake of filtering below.
searchable_text = project_info.name + " " + project_info.summary
item = QListWidgetItem(searchable_text, parent=self)
item.version = project_info.version
super().addItem(item)
widg = PluginListItem(
*project_info,
parent=self,
plugin_name=plugin_name,
enabled=enabled,
)
method = getattr(
self.installer, 'uninstall' if plugin_name else 'install'
)
widg.action_button.clicked.connect(lambda: method([project_info.name]))
item.setSizeHint(widg.sizeHint())
self.setItemWidget(item, widg)
@Slot(ProjectInfo)
def tag_outdated(self, project_info: ProjectInfo):
for item in self.findItems(project_info.name, Qt.MatchFixedString):
current = item.version
latest = project_info.version
if parse_version(current) >= parse_version(latest):
continue
if hasattr(item, 'outdated'):
# already tagged it
continue
item.outdated = True
widg = self.itemWidget(item)
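            # add an inline "update (v...)" button next to the outdated entry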
update_btn = QPushButton(
trans._("update (v{latest})", latest=latest), widg
)
update_btn.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
update_btn.clicked.connect(
lambda: self.installer.install([item.text()])
)
widg.row1.insertWidget(3, update_btn)
def filter(self, text: str):
"""Filter items to those containing `text`."""
shown = self.findItems(text, Qt.MatchContains)
for i in range(self.count()):
item = self.item(i)
item.setHidden(item not in shown)
class QtPluginDialog(QDialog):
def __init__(self, parent=None):
super().__init__(parent)
self.installer = Installer()
self.setup_ui()
self.installer.set_output_widget(self.stdout_text)
self.installer.process.started.connect(self._on_installer_start)
self.installer.process.finished.connect(self._on_installer_done)
self.refresh()
def _on_installer_start(self):
self.show_status_btn.setChecked(True)
self.working_indicator.show()
self.process_error_indicator.hide()
def _on_installer_done(self, exit_code, exit_status):
self.working_indicator.hide()
if exit_code:
self.process_error_indicator.show()
else:
self.show_status_btn.setChecked(False)
self.refresh()
self.plugin_sorter.refresh()
def refresh(self):
self.installed_list.clear()
self.available_list.clear()
# fetch installed
from ...plugins import plugin_manager
plugin_manager.discover() # since they might not be loaded yet
already_installed = set()
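        # record installed distributions so the PyPI listing fetched below can
        # be cross-checked against them when tagging outdated plugins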
for plugin_name, mod_name, distname in plugin_manager.iter_available():
# not showing these in the plugin dialog
if plugin_name in ('napari_plugin_engine',):
continue
if distname:
already_installed.add(distname)
meta = standard_metadata(distname)
else:
meta = {}
self.installed_list.addItem(
ProjectInfo(
normalized_name(distname or ''),
meta.get('version', ''),
meta.get('url', ''),
meta.get('summary', ''),
meta.get('author', ''),
meta.get('license', ''),
),
plugin_name=plugin_name,
enabled=plugin_name in plugin_manager.plugins,
)
# self.v_splitter.setSizes([70 * self.installed_list.count(), 10, 10])
# fetch available plugins
self.worker = create_worker(iter_napari_plugin_info)
def _handle_yield(project_info):
if project_info.name in already_installed:
self.installed_list.tag_outdated(project_info)
else:
self.available_list.addItem(project_info)
self.worker.yielded.connect(_handle_yield)
self.worker.finished.connect(self.working_indicator.hide)
self.worker.finished.connect(self._update_count_in_label)
self.worker.start()
def setup_ui(self):
self.resize(1080, 640)
vlay_1 = QVBoxLayout(self)
self.h_splitter = QSplitter(self)
vlay_1.addWidget(self.h_splitter)
self.h_splitter.setOrientation(Qt.Horizontal)
self.v_splitter = QSplitter(self.h_splitter)
self.v_splitter.setOrientation(Qt.Vertical)
self.v_splitter.setMinimumWidth(500)
self.plugin_sorter = QtPluginSorter(parent=self.h_splitter)
self.plugin_sorter.layout().setContentsMargins(2, 0, 0, 0)
self.plugin_sorter.hide()
installed = QWidget(self.v_splitter)
lay = QVBoxLayout(installed)
lay.setContentsMargins(0, 2, 0, 2)
self.installed_label = QLabel(trans._("Installed Plugins"))
self.installed_filter = QLineEdit()
self.installed_filter.setPlaceholderText("search...")
self.installed_filter.setMaximumWidth(350)
self.installed_filter.setClearButtonEnabled(True)
mid_layout = QHBoxLayout()
mid_layout.addWidget(self.installed_label)
mid_layout.addWidget(self.installed_filter)
mid_layout.addStretch()
lay.addLayout(mid_layout)
self.installed_list = QPluginList(installed, self.installer)
self.installed_filter.textChanged.connect(self.installed_list.filter)
lay.addWidget(self.installed_list)
uninstalled = QWidget(self.v_splitter)
lay = QVBoxLayout(uninstalled)
lay.setContentsMargins(0, 2, 0, 2)
self.avail_label = QLabel(trans._("Available Plugins"))
self.avail_filter = QLineEdit()
self.avail_filter.setPlaceholderText("search...")
self.avail_filter.setMaximumWidth(350)
self.avail_filter.setClearButtonEnabled(True)
mid_layout = QHBoxLayout()
mid_layout.addWidget(self.avail_label)
mid_layout.addWidget(self.avail_filter)
mid_layout.addStretch()
lay.addLayout(mid_layout)
self.available_list = QPluginList(uninstalled, self.installer)
self.avail_filter.textChanged.connect(self.available_list.filter)
lay.addWidget(self.available_list)
self.stdout_text = QTextEdit(self.v_splitter)
self.stdout_text.setReadOnly(True)
self.stdout_text.setObjectName("pip_install_status")
self.stdout_text.hide()
buttonBox = QHBoxLayout()
self.working_indicator = QLabel(trans._("loading ..."), self)
sp = self.working_indicator.sizePolicy()
sp.setRetainSizeWhenHidden(True)
self.working_indicator.setSizePolicy(sp)
self.process_error_indicator = QLabel(self)
self.process_error_indicator.setObjectName("error_label")
self.process_error_indicator.hide()
load_gif = str(Path(napari.resources.__file__).parent / "loading.gif")
mov = QMovie(load_gif)
mov.setScaledSize(QSize(18, 18))
self.working_indicator.setMovie(mov)
mov.start()
self.direct_entry_edit = QLineEdit(self)
self.direct_entry_edit.installEventFilter(self)
self.direct_entry_edit.setPlaceholderText(
trans._('install by name/url, or drop file...')
)
self.direct_entry_btn = QPushButton(trans._("Install"), self)
self.direct_entry_btn.clicked.connect(self._install_packages)
self.show_status_btn = QPushButton(trans._("Show Status"), self)
self.show_status_btn.setFixedWidth(100)
self.show_sorter_btn = QPushButton(trans._("<< Show Sorter"), self)
self.close_btn = QPushButton(trans._("Close"), self)
self.close_btn.clicked.connect(self.accept)
buttonBox.addWidget(self.show_status_btn)
buttonBox.addWidget(self.working_indicator)
buttonBox.addWidget(self.direct_entry_edit)
buttonBox.addWidget(self.direct_entry_btn)
buttonBox.addWidget(self.process_error_indicator)
buttonBox.addSpacing(60)
buttonBox.addWidget(self.show_sorter_btn)
buttonBox.addWidget(self.close_btn)
buttonBox.setContentsMargins(0, 0, 4, 0)
vlay_1.addLayout(buttonBox)
self.show_status_btn.setCheckable(True)
self.show_status_btn.setChecked(False)
self.show_status_btn.toggled.connect(self._toggle_status)
self.show_sorter_btn.setCheckable(True)
self.show_sorter_btn.setChecked(False)
self.show_sorter_btn.toggled.connect(self._toggle_sorter)
self.v_splitter.setStretchFactor(1, 2)
self.h_splitter.setStretchFactor(0, 2)
self.avail_filter.setFocus()
def _update_count_in_label(self):
count = self.available_list.count()
self.avail_label.setText(
trans._("Available Plugins ({count})", count=count)
)
def eventFilter(self, watched, event):
if event.type() == QEvent.DragEnter:
# we need to accept this event explicitly to be able
# to receive QDropEvents!
event.accept()
if event.type() == QEvent.Drop:
md = event.mimeData()
if md.hasUrls():
files = [url.toLocalFile() for url in md.urls()]
self.direct_entry_edit.setText(files[0])
return True
return super().eventFilter(watched, event)
def _toggle_sorter(self, show):
if show:
self.show_sorter_btn.setText(trans._(">> Hide Sorter"))
self.plugin_sorter.show()
else:
self.show_sorter_btn.setText(trans._("<< Show Sorter"))
self.plugin_sorter.hide()
def _toggle_status(self, show):
if show:
self.show_status_btn.setText(trans._("Hide Status"))
self.stdout_text.show()
else:
self.show_status_btn.setText(trans._("Show Status"))
self.stdout_text.hide()
def _install_packages(self, packages: Sequence[str] = ()):
if not packages:
_packages = self.direct_entry_edit.text()
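            # an existing local path is installed as a single package; anything
            # else is treated as whitespace-separated package names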
if os.path.exists(_packages):
packages = [_packages]
else:
packages = _packages.split()
self.direct_entry_edit.clear()
if packages:
self.installer.install(packages)
if __name__ == "__main__":
from qtpy.QtWidgets import QApplication
app = QApplication([])
w = QtPluginDialog()
w.show()
app.exec_()
| [((9268, 9285), 'qtpy.QtCore.Slot', 'Slot', (['ProjectInfo'], {}), '(ProjectInfo)\n', (9272, 9285), False, 'from qtpy.QtCore import QEvent, QProcess, QProcessEnvironment, QSize, Qt, Slot\n'), ((10260, 10277), 'qtpy.QtCore.Slot', 'Slot', (['ProjectInfo'], {}), '(ProjectInfo)\n', (10264, 10277), False, 'from qtpy.QtCore import QEvent, QProcess, QProcessEnvironment, QSize, Qt, Slot\n'), ((20280, 20296), 'qtpy.QtWidgets.QApplication', 'QApplication', (['[]'], {}), '([])\n', (20292, 20296), False, 'from qtpy.QtWidgets import QApplication\n'), ((1405, 1415), 'qtpy.QtCore.QProcess', 'QProcess', ([], {}), '()\n', (1413, 1415), False, 'from qtpy.QtCore import QEvent, QProcess, QProcessEnvironment, QSize, Qt, Slot\n'), ((1651, 1672), 'qtpy.QtCore.QProcessEnvironment', 'QProcessEnvironment', ([], {}), '()\n', (1670, 1672), False, 'from qtpy.QtCore import QEvent, QProcess, QProcessEnvironment, QSize, Qt, Slot\n'), ((5487, 5504), 'qtpy.QtWidgets.QVBoxLayout', 'QVBoxLayout', (['self'], {}), '(self)\n', (5498, 5504), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((5610, 5623), 'qtpy.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (5621, 5623), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((5688, 5703), 'qtpy.QtWidgets.QCheckBox', 'QCheckBox', (['self'], {}), '(self)\n', (5697, 5703), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((5921, 5970), 'qtpy.QtWidgets.QSizePolicy', 'QSizePolicy', (['QSizePolicy.Fixed', 'QSizePolicy.Fixed'], {}), '(QSizePolicy.Fixed, QSizePolicy.Fixed)\n', (5932, 5970), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((6405, 6417), 'qtpy.QtWidgets.QLabel', 'QLabel', (['self'], {}), '(self)\n', (6411, 6417), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((6439, 6494), 'qtpy.QtWidgets.QSizePolicy', 'QSizePolicy', (['QSizePolicy.Preferred', 'QSizePolicy.Minimum'], {}), '(QSizePolicy.Preferred, QSizePolicy.Minimum)\n', (6450, 6494), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((6757, 6764), 'qtpy.QtGui.QFont', 'QFont', ([], {}), '()\n', (6762, 6764), False, 'from qtpy.QtGui import QFont, QMovie\n'), ((6912, 6924), 'qtpy.QtWidgets.QLabel', 'QLabel', (['self'], {}), '(self)\n', (6918, 6924), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((7114, 7131), 'qtpy.QtWidgets.QPushButton', 'QPushButton', (['self'], {}), '(self)\n', (7125, 7131), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, 
QWidget\n'), ((7153, 7202), 'qtpy.QtWidgets.QSizePolicy', 'QSizePolicy', (['QSizePolicy.Fixed', 'QSizePolicy.Fixed'], {}), '(QSizePolicy.Fixed, QSizePolicy.Fixed)\n', (7164, 7202), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((7560, 7573), 'qtpy.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (7571, 7573), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((7605, 7618), 'qtpy.QtWidgets.QPushButton', 'QPushButton', ([], {}), '()\n', (7616, 7618), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((7947, 8011), 'qtpy.QtWidgets.QSizePolicy', 'QSizePolicy', (['QSizePolicy.MinimumExpanding', 'QSizePolicy.Preferred'], {}), '(QSizePolicy.MinimumExpanding, QSizePolicy.Preferred)\n', (7958, 8011), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((8392, 8404), 'qtpy.QtWidgets.QLabel', 'QLabel', (['self'], {}), '(self)\n', (8398, 8404), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((8426, 8483), 'qtpy.QtWidgets.QSizePolicy', 'QSizePolicy', (['QSizePolicy.Preferred', 'QSizePolicy.Preferred'], {}), '(QSizePolicy.Preferred, QSizePolicy.Preferred)\n', (8437, 8483), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((9706, 9751), 'qtpy.QtWidgets.QListWidgetItem', 'QListWidgetItem', (['searchable_text'], {'parent': 'self'}), '(searchable_text, parent=self)\n', (9721, 9751), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((14057, 14074), 'qtpy.QtWidgets.QVBoxLayout', 'QVBoxLayout', (['self'], {}), '(self)\n', (14068, 14074), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((14101, 14116), 'qtpy.QtWidgets.QSplitter', 'QSplitter', (['self'], {}), '(self)\n', (14110, 14116), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((14239, 14265), 'qtpy.QtWidgets.QSplitter', 'QSplitter', (['self.h_splitter'], {}), '(self.h_splitter)\n', (14248, 14265), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((14553, 14577), 'qtpy.QtWidgets.QWidget', 'QWidget', (['self.v_splitter'], {}), '(self.v_splitter)\n', (14560, 14577), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, 
QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((14592, 14614), 'qtpy.QtWidgets.QVBoxLayout', 'QVBoxLayout', (['installed'], {}), '(installed)\n', (14603, 14614), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((14758, 14769), 'qtpy.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (14767, 14769), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((14962, 14975), 'qtpy.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (14973, 14975), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((15359, 15383), 'qtpy.QtWidgets.QWidget', 'QWidget', (['self.v_splitter'], {}), '(self.v_splitter)\n', (15366, 15383), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((15398, 15422), 'qtpy.QtWidgets.QVBoxLayout', 'QVBoxLayout', (['uninstalled'], {}), '(uninstalled)\n', (15409, 15422), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((15558, 15569), 'qtpy.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (15567, 15569), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((15750, 15763), 'qtpy.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (15761, 15763), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((16141, 16167), 'qtpy.QtWidgets.QTextEdit', 'QTextEdit', (['self.v_splitter'], {}), '(self.v_splitter)\n', (16150, 16167), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((16325, 16338), 'qtpy.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (16336, 16338), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((16587, 16599), 'qtpy.QtWidgets.QLabel', 'QLabel', (['self'], {}), '(self)\n', (16593, 16599), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, QVBoxLayout, QWidget\n'), ((16803, 16819), 'qtpy.QtGui.QMovie', 'QMovie', (['load_gif'], {}), '(load_gif)\n', (16809, 16819), False, 'from qtpy.QtGui import QFont, QMovie\n'), ((16960, 16975), 'qtpy.QtWidgets.QLineEdit', 'QLineEdit', (['self'], {}), '(self)\n', (16969, 16975), False, 'from qtpy.QtWidgets import QCheckBox, QDialog, QFrame, QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QSizePolicy, QSplitter, QTextEdit, 
QVBoxLayout, QWidget\n'), ((2739, 2771), 'sys.platform.startswith', 'sys.platform.startswith', (['"""linux"""'], {}), "('linux')\n", (2762, 2771), False, 'import sys\n'), ((4231, 4271), 'napari_plugin_engine.exceptions.PluginError.get', 'PluginError.get', ([], {'plugin_name': 'plugin_name'}), '(plugin_name=plugin_name)\n', (4246, 4271), False, 'from napari_plugin_engine.exceptions import PluginError\n'), ((6271, 6283), 'qtpy.QtCore.QSize', 'QSize', (['(20)', '(0)'], {}), '(20, 0)\n', (6276, 6283), False, 'from qtpy.QtCore import QEvent, QProcess, QProcessEnvironment, QSize, Qt, Slot\n'), ((16846, 16859), 'qtpy.QtCore.QSize', 'QSize', (['(18)', '(18)'], {}), '(18, 18)\n', (16851, 16859), False, 'from qtpy.QtCore import QEvent, QProcess, QProcessEnvironment, QSize, Qt, Slot\n'), ((19958, 19983), 'os.path.exists', 'os.path.exists', (['_packages'], {}), '(_packages)\n', (19972, 19983), False, 'import os\n'), ((12809, 12836), 'napari_plugin_engine.dist.standard_metadata', 'standard_metadata', (['distname'], {}), '(distname)\n', (12826, 12836), False, 'from napari_plugin_engine.dist import standard_metadata\n'), ((1931, 1970), 'qtpy.QtCore.QProcessEnvironment.systemEnvironment', 'QProcessEnvironment.systemEnvironment', ([], {}), '()\n', (1968, 1970), False, 'from qtpy.QtCore import QEvent, QProcess, QProcessEnvironment, QSize, Qt, Slot\n'), ((16733, 16764), 'pathlib.Path', 'Path', (['napari.resources.__file__'], {}), '(napari.resources.__file__)\n', (16737, 16764), False, 'from pathlib import Path\n')] |
WizzyBots/hata | hata/discord/webhook/utils.py | f6991afc0bebf7dad932888a536f4d010f8663c7 | __all__ = ('create_partial_webhook_from_id', )
from scarletio import export
from ..core import USERS
from .preinstanced import WebhookType
from .webhook import Webhook
@export
def create_partial_webhook_from_id(webhook_id, token, *, type_=WebhookType.bot, channel_id=0):
"""
Creates a partial webhook from the given parameters. If the webhook with the given `webhook_id` already exists,
then returns that instead.
Parameters
----------
webhook_id : `int`
The identifier number of the webhook.
token : `str`
The token of the webhook.
type_ : ``WebhookType`` = `WebhookType.bot`, Optional (Keyword only)
The webhook's type. Defaults to `WebhookType.bot`.
channel_id : `int` = `0`, Optional (Keyword only)
The webhook's channel's identifier. Defaults to `0`.
Returns
-------
webhook : ``Webhook``
"""
try:
webhook = USERS[webhook_id]
except KeyError:
webhook = Webhook._create_empty(webhook_id)
webhook.channel_id = channel_id
webhook.type = type_
USERS[webhook_id] = webhook
webhook.token = token
return webhook
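# Illustrative usage sketch; the identifier and token below are placeholder
# values, not real ones:
#
#     webhook = create_partial_webhook_from_id(123456789012345678, 'token')
#     webhook = create_partial_webhook_from_id(
#         123456789012345678, 'token', channel_id=123456789012345679,
#     )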
| [] |
henriqueumeda/-Python-study | MIT/600.1x - Introduction to Computer Science and Programming Using Python/Unit 4/Problem Set 4/get_word_score.py | 28e93a377afa4732037a29eb74d4bc7c9e24b62f | SCRABBLE_LETTER_VALUES = {
'a': 1, 'b': 3, 'c': 3, 'd': 2, 'e': 1, 'f': 4, 'g': 2, 'h': 4, 'i': 1, 'j': 8, 'k': 5, 'l': 1, 'm': 3, 'n': 1,
'o': 1, 'p': 3, 'q': 10, 'r': 1, 's': 1, 't': 1, 'u': 1, 'v': 4, 'w': 4, 'x': 8, 'y': 4, 'z': 10
}
def getWordScore(word, n):
"""
Returns the score for a word. Assumes the word is a valid word.
The score for a word is the sum of the points for letters in the
word, multiplied by the length of the word, PLUS 50 points if all n
letters are used on the first turn.
Letters are scored as in Scrabble; A is worth 1, B is worth 3, C is
worth 3, D is worth 2, E is worth 1, and so on (see SCRABBLE_LETTER_VALUES)
word: string (lowercase letters)
n: integer (HAND_SIZE; i.e., hand size required for additional points)
returns: int >= 0
"""
total_points = 0
for letter in word:
total_points += SCRABBLE_LETTER_VALUES[letter]
total_points *= len(word)
if len(word) == n:
total_points += 50
return total_points
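# Worked example for the call below: 'waybill' scores
# 4 + 1 + 4 + 3 + 1 + 1 + 1 = 15 letter points, multiplied by len('waybill') = 7
# to give 105, plus the 50-point bonus for using all n = 7 letters -> 155.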
print(getWordScore('waybill', 7))
| [] |
rizwansaeed/lusid-sdk-python-preview | sdk/lusid/models/lusid_instrument.py | 52d092d6d4099b8526f0318f3fe1ddc0b943da6a | # coding: utf-8
"""
LUSID API
# Introduction This page documents the [LUSID APIs](https://www.lusid.com/api/swagger), which allows authorised clients to query and update their data within the LUSID platform. SDKs to interact with the LUSID APIs are available in the following languages : * [C#](https://github.com/finbourne/lusid-sdk-csharp) * [Java](https://github.com/finbourne/lusid-sdk-java) * [JavaScript](https://github.com/finbourne/lusid-sdk-js) * [Python](https://github.com/finbourne/lusid-sdk-python) # Data Model The LUSID API has a relatively lightweight but extremely powerful data model. One of the goals of LUSID was not to enforce on clients a single rigid data model but rather to provide a flexible foundation onto which clients can map their own data models. The core entities in LUSID provide a minimal structure and set of relationships, and the data model can be extended using Properties. The LUSID data model is exposed through the LUSID APIs. The APIs provide access to both business objects and the meta data used to configure the systems behaviours. The key business entities are: - * **Portfolios** A portfolio is a container for transactions and holdings (a **Transaction Portfolio**) or constituents (a **Reference Portfolio**). * **Derived Portfolios**. Derived Portfolios allow Portfolios to be created based on other Portfolios, by overriding or adding specific items. * **Holdings** A Holding is a quantity of an Instrument or a balance of cash within a Portfolio. Holdings can only be adjusted via Transactions. * **Transactions** A Transaction is an economic event that occurs in a Portfolio, causing its holdings to change. * **Corporate Actions** A corporate action is a market event which occurs to an Instrument and thus applies to all portfolios which holding the instrument. Examples are stock splits or mergers. * **Constituents** A constituent is a record in a Reference Portfolio containing an Instrument and an associated weight. * **Instruments** An instrument represents a currency, tradable instrument or OTC contract that is attached to a transaction and a holding. * **Properties** All major entities allow additional user defined properties to be associated with them. For example, a Portfolio manager may be associated with a portfolio. Meta data includes: - * **Transaction Types** Transactions are booked with a specific transaction type. The types are client defined and are used to map the Transaction to a series of movements which update the portfolio holdings. * **Properties Types** Types of user defined properties used within the system. ## Scope All data in LUSID is segregated at the client level. Entities in LUSID are identifiable by a unique code. Every entity lives within a logical data partition known as a Scope. Scope is an identity namespace allowing two entities with the same unique code to co-exist within individual address spaces. For example, prices for equities from different vendors may be uploaded into different scopes such as `client/vendor1` and `client/vendor2`. A portfolio may then be valued using either of the price sources by referencing the appropriate scope. LUSID Clients cannot access scopes of other clients. ## Instruments LUSID has its own built-in instrument master which you can use to master your own instrument universe. Every instrument must be created with one or more unique market identifiers, such as [FIGI](https://openfigi.com/). For any non-listed instruments (eg OTCs), you can upload an instrument against a custom ID of your choosing. 
In addition, LUSID will allocate each instrument a unique 'LUSID instrument identifier'. The LUSID instrument identifier is what is used when uploading transactions, holdings, prices, etc. The API exposes an `instrument/lookup` endpoint which can be used to lookup these LUSID identifiers using their market identifiers. Cash can be referenced using the ISO currency code prefixed with \"`CCY_`\" e.g. `CCY_GBP` ## Instrument Data Instrument data can be uploaded to the system using the [Instrument Properties](#tag/InstrumentProperties) endpoint. | Field|Type|Description | | ---|---|--- | | Key|propertykey|The key of the property. This takes the format {domain}/{scope}/{code} e.g. 'Instrument/system/Name' or 'Transaction/strategy/quantsignal'. | | Value|string|The value of the property. | | EffectiveFrom|datetimeoffset|The effective datetime from which the property is valid. | | EffectiveUntil|datetimeoffset|The effective datetime until which the property is valid. If not supplied this will be valid indefinitely, potentially overwriting values with EffectiveFrom's in the future. | ## Transaction Portfolios Portfolios are the top-level entity containers within LUSID, containing transactions, corporate actions and holdings. The transactions build up the portfolio holdings on which valuations, analytics profit & loss and risk can be calculated. Properties can be associated with Portfolios to add in additional data. Portfolio properties can be changed over time, for example to allow a Portfolio Manager to be linked with a Portfolio. Additionally, portfolios can be securitised and held by other portfolios, allowing LUSID to perform \"drill-through\" into underlying fund holdings ### Derived Portfolios LUSID also allows for a portfolio to be composed of another portfolio via derived portfolios. A derived portfolio can contain its own transactions and also inherits any transactions from its parent portfolio. Any changes made to the parent portfolio are automatically reflected in derived portfolio. Derived portfolios in conjunction with scopes are a powerful construct. For example, to do pre-trade what-if analysis, a derived portfolio could be created a new namespace linked to the underlying live (parent) portfolio. Analysis can then be undertaken on the derived portfolio without affecting the live portfolio. ### Transactions A transaction represents an economic activity against a Portfolio. Transactions are processed according to a configuration. This will tell the LUSID engine how to interpret the transaction and correctly update the holdings. LUSID comes with a set of transaction types you can use out of the box, or you can configure your own set(s) of transactions. For more details see the [LUSID Getting Started Guide for transaction configuration.](https://support.lusid.com/configuring-transaction-types) | Field|Type|Description | | ---|---|--- | | TransactionId|string|The unique identifier for the transaction. | | Type|string|The type of the transaction e.g. 'Buy', 'Sell'. The transaction type should have been pre-configured via the System Configuration API endpoint. If it hasn't been pre-configured the transaction will still be updated or inserted however you will be unable to generate the resultant holdings for the portfolio that contains this transaction as LUSID does not know how to process it. | | InstrumentIdentifiers|map|A set of instrument identifiers to use to resolve the transaction to a unique instrument. | | TransactionDate|dateorcutlabel|The date of the transaction. 
| | SettlementDate|dateorcutlabel|The settlement date of the transaction. | | Units|decimal|The number of units transacted in the associated instrument. | | TransactionPrice|transactionprice|The price for each unit of the transacted instrument in the transaction currency. | | TotalConsideration|currencyandamount|The total value of the transaction in the settlement currency. | | ExchangeRate|decimal|The exchange rate between the transaction and settlement currency. For example if the transaction currency is in USD and the settlement currency is in GBP this this the USD/GBP rate. | | TransactionCurrency|currency|The transaction currency. | | Properties|map|Set of unique transaction properties and associated values to store with the transaction. Each property must be from the 'Transaction' domain. | | CounterpartyId|string|The identifier for the counterparty of the transaction. | | Source|string|The source of the transaction. This is used to look up the appropriate transaction group set in the transaction type configuration. | From these fields, the following values can be calculated * **Transaction value in Transaction currency**: TotalConsideration / ExchangeRate * **Transaction value in Portfolio currency**: Transaction value in Transaction currency * TradeToPortfolioRate #### Example Transactions ##### A Common Purchase Example Three example transactions are shown in the table below. They represent a purchase of USD denominated IBM shares within a Sterling denominated portfolio. * The first two transactions are for separate buy and fx trades * Buying 500 IBM shares for $71,480.00 * A spot foreign exchange conversion to fund the IBM purchase. (Buy $71,480.00 for £54,846.60) * The third transaction is an alternate version of the above trades. Buying 500 IBM shares and settling directly in Sterling. | Column | Buy Trade | Fx Trade | Buy Trade with foreign Settlement | | ----- | ----- | ----- | ----- | | TransactionId | FBN00001 | FBN00002 | FBN00003 | | Type | Buy | FxBuy | Buy | | InstrumentIdentifiers | { \"figi\", \"BBG000BLNNH6\" } | { \"CCY\", \"CCY_USD\" } | { \"figi\", \"BBG000BLNNH6\" } | | TransactionDate | 2018-08-02 | 2018-08-02 | 2018-08-02 | | SettlementDate | 2018-08-06 | 2018-08-06 | 2018-08-06 | | Units | 500 | 71480 | 500 | | TransactionPrice | 142.96 | 1 | 142.96 | | TradeCurrency | USD | USD | USD | | ExchangeRate | 1 | 0.7673 | 0.7673 | | TotalConsideration.Amount | 71480.00 | 54846.60 | 54846.60 | | TotalConsideration.Currency | USD | GBP | GBP | | Trade/default/TradeToPortfolioRate* | 0.7673 | 0.7673 | 0.7673 | [* This is a property field] ##### A Forward FX Example LUSID has a flexible transaction modelling system, meaning there are a number of different ways of modelling forward fx trades. The default LUSID transaction types are FwdFxBuy and FwdFxSell. Using these transaction types, LUSID will generate two holdings for each Forward FX trade, one for each currency in the trade. 
An example Forward Fx trade to sell GBP for USD in a JPY-denominated portfolio is shown below: | Column | Forward 'Sell' Trade | Notes | | ----- | ----- | ---- | | TransactionId | FBN00004 | | | Type | FwdFxSell | | | InstrumentIdentifiers | { \"Instrument/default/Currency\", \"GBP\" } | | | TransactionDate | 2018-08-02 | | | SettlementDate | 2019-02-06 | Six month forward | | Units | 10000.00 | Units of GBP | | TransactionPrice | 1 | | | TradeCurrency | GBP | Currency being sold | | ExchangeRate | 1.3142 | Agreed rate between GBP and USD | | TotalConsideration.Amount | 13142.00 | Amount in the settlement currency, USD | | TotalConsideration.Currency | USD | Settlement currency | | Trade/default/TradeToPortfolioRate | 142.88 | Rate between trade currency, GBP and portfolio base currency, JPY | Please note that exactly the same economic behaviour could be modelled using the FwdFxBuy Transaction Type with the amounts and rates reversed. ### Holdings A holding represents a position in an instrument or cash on a given date. | Field|Type|Description | | ---|---|--- | | InstrumentUid|string|The unqiue Lusid Instrument Id (LUID) of the instrument that the holding is in. | | SubHoldingKeys|map|The sub-holding properties which identify the holding. Each property will be from the 'Transaction' domain. These are configured when a transaction portfolio is created. | | Properties|map|The properties which have been requested to be decorated onto the holding. These will be from the 'Instrument' or 'Holding' domain. | | HoldingType|string|The type of the holding e.g. Position, Balance, CashCommitment, Receivable, ForwardFX etc. | | Units|decimal|The total number of units of the holding. | | SettledUnits|decimal|The total number of settled units of the holding. | | Cost|currencyandamount|The total cost of the holding in the transaction currency. | | CostPortfolioCcy|currencyandamount|The total cost of the holding in the portfolio currency. | | Transaction|transaction|The transaction associated with an unsettled holding. | ## Corporate Actions Corporate actions are represented within LUSID in terms of a set of instrument-specific 'transitions'. These transitions are used to specify the participants of the corporate action, and the effect that the corporate action will have on holdings in those participants. ### Corporate Action | Field|Type|Description | | ---|---|--- | | CorporateActionCode|code|The unique identifier of this corporate action | | Description|string| | | AnnouncementDate|datetimeoffset|The announcement date of the corporate action | | ExDate|datetimeoffset|The ex date of the corporate action | | RecordDate|datetimeoffset|The record date of the corporate action | | PaymentDate|datetimeoffset|The payment date of the corporate action | | Transitions|corporateactiontransition[]|The transitions that result from this corporate action | ### Transition | Field|Type|Description | | ---|---|--- | | InputTransition|corporateactiontransitioncomponent|Indicating the basis of the corporate action - which security and how many units | | OutputTransitions|corporateactiontransitioncomponent[]|What will be generated relative to the input transition | ### Example Corporate Action Transitions #### A Dividend Action Transition In this example, for each share of IBM, 0.20 units (or 20 pence) of GBP are generated. 
| Column | Input Transition | Output Transition | | ----- | ----- | ----- | | Instrument Identifiers | { \"figi\" : \"BBG000BLNNH6\" } | { \"ccy\" : \"CCY_GBP\" } | | Units Factor | 1 | 0.20 | | Cost Factor | 1 | 0 | #### A Split Action Transition In this example, for each share of IBM, we end up with 2 units (2 shares) of IBM, with total value unchanged. | Column | Input Transition | Output Transition | | ----- | ----- | ----- | | Instrument Identifiers | { \"figi\" : \"BBG000BLNNH6\" } | { \"figi\" : \"BBG000BLNNH6\" } | | Units Factor | 1 | 2 | | Cost Factor | 1 | 1 | #### A Spinoff Action Transition In this example, for each share of IBM, we end up with 1 unit (1 share) of IBM and 3 units (3 shares) of Celestica, with 85% of the value remaining on the IBM share, and 5% in each Celestica share (15% total). | Column | Input Transition | Output Transition 1 | Output Transition 2 | | ----- | ----- | ----- | ----- | | Instrument Identifiers | { \"figi\" : \"BBG000BLNNH6\" } | { \"figi\" : \"BBG000BLNNH6\" } | { \"figi\" : \"BBG000HBGRF3\" } | | Units Factor | 1 | 1 | 3 | | Cost Factor | 1 | 0.85 | 0.15 | ## Reference Portfolios Reference portfolios are portfolios that contain constituents with weights. They are designed to represent entities such as indices and benchmarks. ### Constituents | Field|Type|Description | | ---|---|--- | | InstrumentIdentifiers|map|Unique instrument identifiers | | InstrumentUid|string|LUSID's internal unique instrument identifier, resolved from the instrument identifiers | | Currency|decimal| | | Weight|decimal| | | FloatingWeight|decimal| | ## Portfolio Groups Portfolio groups allow the construction of a hierarchy from portfolios and groups. Portfolio operations on the group are executed on an aggregated set of portfolios in the hierarchy. For example: * Global Portfolios _(group)_ * APAC _(group)_ * Hong Kong _(portfolio)_ * Japan _(portfolio)_ * Europe _(group)_ * France _(portfolio)_ * Germany _(portfolio)_ * UK _(portfolio)_ In this example **Global Portfolios** is a group that consists of an aggregate of **Hong Kong**, **Japan**, **France**, **Germany** and **UK** portfolios. ## Properties Properties are key-value pairs that can be applied to any entity within a domain (where a domain is `trade`, `portfolio`, `security` etc). Properties must be defined before use with a `PropertyDefinition` and can then subsequently be added to entities. ## Schema A detailed description of the entities used by the API and parameters for endpoints which take a JSON document can be retrieved via the `schema` endpoint. ## Meta data The following headers are returned on all responses from LUSID | Name | Purpose | | --- | --- | | lusid-meta-duration | Duration of the request | | lusid-meta-success | Whether or not LUSID considered the request to be successful | | lusid-meta-requestId | The unique identifier for the request | | lusid-schema-url | Url of the schema for the data being returned | | lusid-property-schema-url | Url of the schema for any properties | # Error Codes | Code|Name|Description | | ---|---|--- | | <a name=\"-10\">-10</a>|Server Configuration Error| | | <a name=\"-1\">-1</a>|Unknown error|An unexpected error was encountered on our side. 
| | <a name=\"102\">102</a>|Version Not Found| | | <a name=\"103\">103</a>|Api Rate Limit Violation| | | <a name=\"104\">104</a>|Instrument Not Found| | | <a name=\"105\">105</a>|Property Not Found| | | <a name=\"106\">106</a>|Portfolio Recursion Depth| | | <a name=\"108\">108</a>|Group Not Found| | | <a name=\"109\">109</a>|Portfolio Not Found| | | <a name=\"110\">110</a>|Property Schema Not Found| | | <a name=\"111\">111</a>|Portfolio Ancestry Not Found| | | <a name=\"112\">112</a>|Portfolio With Id Already Exists| | | <a name=\"113\">113</a>|Orphaned Portfolio| | | <a name=\"119\">119</a>|Missing Base Claims| | | <a name=\"121\">121</a>|Property Not Defined| | | <a name=\"122\">122</a>|Cannot Delete System Property| | | <a name=\"123\">123</a>|Cannot Modify Immutable Property Field| | | <a name=\"124\">124</a>|Property Already Exists| | | <a name=\"125\">125</a>|Invalid Property Life Time| | | <a name=\"126\">126</a>|Property Constraint Style Excludes Properties| | | <a name=\"127\">127</a>|Cannot Modify Default Data Type| | | <a name=\"128\">128</a>|Group Already Exists| | | <a name=\"129\">129</a>|No Such Data Type| | | <a name=\"130\">130</a>|Undefined Value For Data Type| | | <a name=\"131\">131</a>|Unsupported Value Type Defined On Data Type| | | <a name=\"132\">132</a>|Validation Error| | | <a name=\"133\">133</a>|Loop Detected In Group Hierarchy| | | <a name=\"134\">134</a>|Undefined Acceptable Values| | | <a name=\"135\">135</a>|Sub Group Already Exists| | | <a name=\"138\">138</a>|Price Source Not Found| | | <a name=\"139\">139</a>|Analytic Store Not Found| | | <a name=\"141\">141</a>|Analytic Store Already Exists| | | <a name=\"143\">143</a>|Client Instrument Already Exists| | | <a name=\"144\">144</a>|Duplicate In Parameter Set| | | <a name=\"147\">147</a>|Results Not Found| | | <a name=\"148\">148</a>|Order Field Not In Result Set| | | <a name=\"149\">149</a>|Operation Failed| | | <a name=\"150\">150</a>|Elastic Search Error| | | <a name=\"151\">151</a>|Invalid Parameter Value| | | <a name=\"153\">153</a>|Command Processing Failure| | | <a name=\"154\">154</a>|Entity State Construction Failure| | | <a name=\"155\">155</a>|Entity Timeline Does Not Exist| | | <a name=\"156\">156</a>|Concurrency Conflict Failure| | | <a name=\"157\">157</a>|Invalid Request| | | <a name=\"158\">158</a>|Event Publish Unknown| | | <a name=\"159\">159</a>|Event Query Failure| | | <a name=\"160\">160</a>|Blob Did Not Exist| | | <a name=\"162\">162</a>|Sub System Request Failure| | | <a name=\"163\">163</a>|Sub System Configuration Failure| | | <a name=\"165\">165</a>|Failed To Delete| | | <a name=\"166\">166</a>|Upsert Client Instrument Failure| | | <a name=\"167\">167</a>|Illegal As At Interval| | | <a name=\"168\">168</a>|Illegal Bitemporal Query| | | <a name=\"169\">169</a>|Invalid Alternate Id| | | <a name=\"170\">170</a>|Cannot Add Source Portfolio Property Explicitly| | | <a name=\"171\">171</a>|Entity Already Exists In Group| | | <a name=\"173\">173</a>|Entity With Id Already Exists| | | <a name=\"174\">174</a>|Derived Portfolio Details Do Not Exist| | | <a name=\"176\">176</a>|Portfolio With Name Already Exists| | | <a name=\"177\">177</a>|Invalid Transactions| | | <a name=\"178\">178</a>|Reference Portfolio Not Found| | | <a name=\"179\">179</a>|Duplicate Id| | | <a name=\"180\">180</a>|Command Retrieval Failure| | | <a name=\"181\">181</a>|Data Filter Application Failure| | | <a name=\"182\">182</a>|Search Failed| | | <a name=\"183\">183</a>|Movements Engine Configuration Key Failure| | | 
<a name=\"184\">184</a>|Fx Rate Source Not Found| | | <a name=\"185\">185</a>|Accrual Source Not Found| | | <a name=\"186\">186</a>|Access Denied| | | <a name=\"187\">187</a>|Invalid Identity Token| | | <a name=\"188\">188</a>|Invalid Request Headers| | | <a name=\"189\">189</a>|Price Not Found| | | <a name=\"190\">190</a>|Invalid Sub Holding Keys Provided| | | <a name=\"191\">191</a>|Duplicate Sub Holding Keys Provided| | | <a name=\"192\">192</a>|Cut Definition Not Found| | | <a name=\"193\">193</a>|Cut Definition Invalid| | | <a name=\"194\">194</a>|Time Variant Property Deletion Date Unspecified| | | <a name=\"195\">195</a>|Perpetual Property Deletion Date Specified| | | <a name=\"196\">196</a>|Time Variant Property Upsert Date Unspecified| | | <a name=\"197\">197</a>|Perpetual Property Upsert Date Specified| | | <a name=\"200\">200</a>|Invalid Unit For Data Type| | | <a name=\"201\">201</a>|Invalid Type For Data Type| | | <a name=\"202\">202</a>|Invalid Value For Data Type| | | <a name=\"203\">203</a>|Unit Not Defined For Data Type| | | <a name=\"204\">204</a>|Units Not Supported On Data Type| | | <a name=\"205\">205</a>|Cannot Specify Units On Data Type| | | <a name=\"206\">206</a>|Unit Schema Inconsistent With Data Type| | | <a name=\"207\">207</a>|Unit Definition Not Specified| | | <a name=\"208\">208</a>|Duplicate Unit Definitions Specified| | | <a name=\"209\">209</a>|Invalid Units Definition| | | <a name=\"210\">210</a>|Invalid Instrument Identifier Unit| | | <a name=\"211\">211</a>|Holdings Adjustment Does Not Exist| | | <a name=\"212\">212</a>|Could Not Build Excel Url| | | <a name=\"213\">213</a>|Could Not Get Excel Version| | | <a name=\"214\">214</a>|Instrument By Code Not Found| | | <a name=\"215\">215</a>|Entity Schema Does Not Exist| | | <a name=\"216\">216</a>|Feature Not Supported On Portfolio Type| | | <a name=\"217\">217</a>|Quote Not Found| | | <a name=\"218\">218</a>|Invalid Quote Identifier| | | <a name=\"219\">219</a>|Invalid Metric For Data Type| | | <a name=\"220\">220</a>|Invalid Instrument Definition| | | <a name=\"221\">221</a>|Instrument Upsert Failure| | | <a name=\"222\">222</a>|Reference Portfolio Request Not Supported| | | <a name=\"223\">223</a>|Transaction Portfolio Request Not Supported| | | <a name=\"224\">224</a>|Invalid Property Value Assignment| | | <a name=\"230\">230</a>|Transaction Type Not Found| | | <a name=\"231\">231</a>|Transaction Type Duplication| | | <a name=\"232\">232</a>|Portfolio Does Not Exist At Given Date| | | <a name=\"233\">233</a>|Query Parser Failure| | | <a name=\"234\">234</a>|Duplicate Constituent| | | <a name=\"235\">235</a>|Unresolved Instrument Constituent| | | <a name=\"236\">236</a>|Unresolved Instrument In Transition| | | <a name=\"237\">237</a>|Missing Side Definitions| | | <a name=\"299\">299</a>|Invalid Recipe| | | <a name=\"300\">300</a>|Missing Recipe| | | <a name=\"301\">301</a>|Dependencies| | | <a name=\"304\">304</a>|Portfolio Preprocess Failure| | | <a name=\"310\">310</a>|Valuation Engine Failure| | | <a name=\"311\">311</a>|Task Factory Failure| | | <a name=\"312\">312</a>|Task Evaluation Failure| | | <a name=\"313\">313</a>|Task Generation Failure| | | <a name=\"314\">314</a>|Engine Configuration Failure| | | <a name=\"315\">315</a>|Model Specification Failure| | | <a name=\"320\">320</a>|Market Data Key Failure| | | <a name=\"321\">321</a>|Market Resolver Failure| | | <a name=\"322\">322</a>|Market Data Failure| | | <a name=\"330\">330</a>|Curve Failure| | | <a name=\"331\">331</a>|Volatility Surface 
Failure| | | <a name=\"332\">332</a>|Volatility Cube Failure| | | <a name=\"350\">350</a>|Instrument Failure| | | <a name=\"351\">351</a>|Cash Flows Failure| | | <a name=\"352\">352</a>|Reference Data Failure| | | <a name=\"360\">360</a>|Aggregation Failure| | | <a name=\"361\">361</a>|Aggregation Measure Failure| | | <a name=\"370\">370</a>|Result Retrieval Failure| | | <a name=\"371\">371</a>|Result Processing Failure| | | <a name=\"372\">372</a>|Vendor Result Processing Failure| | | <a name=\"373\">373</a>|Vendor Result Mapping Failure| | | <a name=\"374\">374</a>|Vendor Library Unauthorised| | | <a name=\"375\">375</a>|Vendor Connectivity Error| | | <a name=\"376\">376</a>|Vendor Interface Error| | | <a name=\"377\">377</a>|Vendor Pricing Failure| | | <a name=\"378\">378</a>|Vendor Translation Failure| | | <a name=\"379\">379</a>|Vendor Key Mapping Failure| | | <a name=\"380\">380</a>|Vendor Reflection Failure| | | <a name=\"390\">390</a>|Attempt To Upsert Duplicate Quotes| | | <a name=\"391\">391</a>|Corporate Action Source Does Not Exist| | | <a name=\"392\">392</a>|Corporate Action Source Already Exists| | | <a name=\"393\">393</a>|Instrument Identifier Already In Use| | | <a name=\"394\">394</a>|Properties Not Found| | | <a name=\"395\">395</a>|Batch Operation Aborted| | | <a name=\"400\">400</a>|Invalid Iso4217 Currency Code| | | <a name=\"401\">401</a>|Cannot Assign Instrument Identifier To Currency| | | <a name=\"402\">402</a>|Cannot Assign Currency Identifier To Non Currency| | | <a name=\"403\">403</a>|Currency Instrument Cannot Be Deleted| | | <a name=\"404\">404</a>|Currency Instrument Cannot Have Economic Definition| | | <a name=\"405\">405</a>|Currency Instrument Cannot Have Lookthrough Portfolio| | | <a name=\"406\">406</a>|Cannot Create Currency Instrument With Multiple Identifiers| | | <a name=\"407\">407</a>|Specified Currency Is Undefined| | | <a name=\"410\">410</a>|Index Does Not Exist| | | <a name=\"411\">411</a>|Sort Field Does Not Exist| | | <a name=\"413\">413</a>|Negative Pagination Parameters| | | <a name=\"414\">414</a>|Invalid Search Syntax| | | <a name=\"415\">415</a>|Filter Execution Timeout| | | <a name=\"420\">420</a>|Side Definition Inconsistent| | | <a name=\"450\">450</a>|Invalid Quote Access Metadata Rule| | | <a name=\"451\">451</a>|Access Metadata Not Found| | | <a name=\"452\">452</a>|Invalid Access Metadata Identifier| | | <a name=\"460\">460</a>|Standard Resource Not Found| | | <a name=\"461\">461</a>|Standard Resource Conflict| | | <a name=\"462\">462</a>|Calendar Not Found| | | <a name=\"463\">463</a>|Date In A Calendar Not Found| | | <a name=\"464\">464</a>|Invalid Date Source Data| | | <a name=\"465\">465</a>|Invalid Timezone| | | <a name=\"601\">601</a>|Person Identifier Already In Use| | | <a name=\"602\">602</a>|Person Not Found| | | <a name=\"603\">603</a>|Cannot Set Identifier| | | <a name=\"617\">617</a>|Invalid Recipe Specification In Request| | | <a name=\"618\">618</a>|Inline Recipe Deserialisation Failure| | | <a name=\"619\">619</a>|Identifier Types Not Set For Entity| | | <a name=\"620\">620</a>|Cannot Delete All Client Defined Identifiers| | | <a name=\"650\">650</a>|The Order requested was not found.| | | <a name=\"654\">654</a>|The Allocation requested was not found.| | | <a name=\"655\">655</a>|Cannot build the fx forward target with the given holdings.| | | <a name=\"656\">656</a>|Group does not contain expected entities.| | | <a name=\"667\">667</a>|Relation definition already exists| | | <a name=\"673\">673</a>|Missing 
entitlements for entities in Group| | | <a name=\"674\">674</a>|Next Best Action not found| | | <a name=\"676\">676</a>|Relation definition not defined| | | <a name=\"677\">677</a>|Invalid entity identifier for relation| | | <a name=\"681\">681</a>|Sorting by specified field not supported|One or more of the provided fields to order by were either invalid or not supported. | | <a name=\"682\">682</a>|Too many fields to sort by|The number of fields to sort the data by exceeds the number allowed by the endpoint | | <a name=\"684\">684</a>|Sequence Not Found| | | <a name=\"685\">685</a>|Sequence Already Exists| | | <a name=\"686\">686</a>|Non-cycling sequence has been exhausted| | | <a name=\"687\">687</a>|Legal Entity Identifier Already In Use| | | <a name=\"688\">688</a>|Legal Entity Not Found| | | <a name=\"689\">689</a>|The supplied pagination token is invalid| | | <a name=\"690\">690</a>|Property Type Is Not Supported| | | <a name=\"691\">691</a>|Multiple Tax-lots For Currency Type Is Not Supported| | # noqa: E501
The version of the OpenAPI document: 0.11.2275
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class LusidInstrument(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
required_map (dict): The key is attribute name
and the value is whether it is 'required' or 'optional'.
"""
openapi_types = {
'instrument_type': 'str'
}
attribute_map = {
'instrument_type': 'instrumentType'
}
required_map = {
'instrument_type': 'required'
}
discriminator_value_class_map = {
'EquityOption': 'EquityOption',
'InstrumentLeg': 'InstrumentLeg',
'InterestRateSwaption': 'InterestRateSwaption',
'FxForward': 'FxForward',
'InterestRateSwap': 'InterestRateSwap',
'ExoticInstrument': 'ExoticInstrument',
'FxOption': 'FxOption',
'Bond': 'Bond',
'TermDeposit': 'TermDeposit',
'CreditDefaultSwap': 'CreditDefaultSwap',
'Future': 'Future'
}
def __init__(self, instrument_type=None): # noqa: E501
"""
LusidInstrument - a model defined in OpenAPI
:param instrument_type: The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashflowLeg, Unknown, TermDeposit (required)
:type instrument_type: str
""" # noqa: E501
self._instrument_type = None
self.discriminator = 'instrument_type'
self.instrument_type = instrument_type
@property
def instrument_type(self):
"""Gets the instrument_type of this LusidInstrument. # noqa: E501
The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashflowLeg, Unknown, TermDeposit # noqa: E501
:return: The instrument_type of this LusidInstrument. # noqa: E501
:rtype: str
"""
return self._instrument_type
@instrument_type.setter
def instrument_type(self, instrument_type):
"""Sets the instrument_type of this LusidInstrument.
The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashflowLeg, Unknown, TermDeposit # noqa: E501
:param instrument_type: The instrument_type of this LusidInstrument. # noqa: E501
:type: str
"""
if instrument_type is None:
raise ValueError("Invalid value for `instrument_type`, must not be `None`") # noqa: E501
allowed_values = ["QuotedSecurity", "InterestRateSwap", "FxForward", "Future", "ExoticInstrument", "FxOption", "CreditDefaultSwap", "InterestRateSwaption", "Bond", "EquityOption", "FixedLeg", "FloatingLeg", "BespokeCashflowLeg", "Unknown", "TermDeposit"] # noqa: E501
if instrument_type not in allowed_values:
raise ValueError(
"Invalid value for `instrument_type` ({0}), must be one of {1}" # noqa: E501
.format(instrument_type, allowed_values)
)
self._instrument_type = instrument_type
def get_real_child_model(self, data):
"""Returns the real base class specified by the discriminator"""
discriminator_key = self.attribute_map[self.discriminator]
discriminator_value = data[discriminator_key]
return self.discriminator_value_class_map.get(discriminator_value)
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, LusidInstrument):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [((32924, 32957), 'six.iteritems', 'six.iteritems', (['self.openapi_types'], {}), '(self.openapi_types)\n', (32937, 32957), False, 'import six\n')] |
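A minimal usage sketch for the generated LusidInstrument model above, relying only on the methods defined in the class; the import path is an assumption about the packaged SDK layout.

from lusid.models.lusid_instrument import LusidInstrument  # assumed module path

instrument = LusidInstrument(instrument_type="Bond")     # "Bond" is one of the allowed discriminator values
print(instrument.to_dict())                              # {'instrument_type': 'Bond'}

# resolve the concrete child model name from raw API data via the discriminator field
raw = {"instrumentType": "FxForward"}
print(instrument.get_real_child_model(raw))              # 'FxForward'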
luizgui05/autolatex. | autolatex-master/exemplos_codigo/certificados/certificados.py | 366eb3d88b7e60c119737f958e35cce99e8775e9 | import os
import sys
import sqlite3
con = None
filename = 'certificado'
# Open the database to read the names.
try:
con = sqlite3.connect('math.db')
cur = con.cursor()
cur.execute('select * from math')
data = cur.fetchall()
except sqlite3.Error as e:
    print("Error %s:" % e.args[0])
sys.exit(1)
finally:
if con:
con.close()
# Generate one certificate for each name.
for row in data:
f = open(filename+'.tex','r+')
old = f.readlines()
if old[0][1:4] == 'def':
offset = 1
else:
offset = 0
f.seek(0)
f.write('\\def\\name {'+row[0]+'}\n')
f.writelines(old[offset:])
f.close()
    # Compile the LaTeX file
try:
os.system('pdflatex '+filename+'.tex')
os.system('mv '+filename+'.pdf '+filename+'_'+row[0].replace(' ','_')+'.pdf')
#os.system('xdg-open '+filename+'.pdf &')
except OSError:
print('LaTeX not installed.')
| [] |
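A small, separate sketch of the same certificate-generation idea as certificados.py above: writing a per-name copy of the template avoids rewriting certificado.tex in place, where a shorter replacement header can leave stale bytes behind. The template filename and the availability of pdflatex on PATH are assumptions.

import os

def build_certificate(name, template='certificado.tex'):
    out_tex = 'certificado_' + name.replace(' ', '_') + '.tex'
    with open(template) as src, open(out_tex, 'w') as dst:
        dst.write('\\def\\name {' + name + '}\n')
        for line in src:
            if not line.startswith('\\def\\name'):
                dst.write(line)           # copy the template body unchanged
    os.system('pdflatex ' + out_tex)          # assumes pdflatex is installed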
snajder-r/nanoepiseg | nanoepiseg/main_list_chunks.py | 2fe36a82e5b899330da5db6559eb45fe12cad37c | from pathlib import Path
from meth5.meth5 import MetH5File
def main(m5file:Path, chunk_size:int, quiet:bool):
with MetH5File(m5file, "r", chunk_size=chunk_size) as f:
for chrom in f.get_chromosomes():
print(f"{chrom}: {f[chrom].get_number_of_chunks()}")
| [((121, 166), 'meth5.meth5.MetH5File', 'MetH5File', (['m5file', '"""r"""'], {'chunk_size': 'chunk_size'}), "(m5file, 'r', chunk_size=chunk_size)\n", (130, 166), False, 'from meth5.meth5 import MetH5File\n')] |
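A possible invocation sketch for main() above; the file path and chunk size are placeholders, and the meth5 package must be installed for the import chain to resolve.

from pathlib import Path
from nanoepiseg.main_list_chunks import main   # module path taken from the repo_path column

# prints one "<chromosome>: <number of chunks>" line per chromosome in the file
main(Path('sample_methylation.m5'), chunk_size=50000, quiet=False)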
tmilliman/sir_to_netcdf | qscatv2/make_seasonal_images.py | d4641cdc5a9e92a55c0edb2dc6cd8c0e2da6f1fa | #!/usr/bin/env python
# script to make seasonal means and stddev images of 4-day sig0
# values.
import os
import sys
import glob
import numpy as np
import sirpy2 as sp2
import argparse
from osgeo import gdal
DATADIR = "./"
NODATA_VALUE = -9999.0
Q2M = {
"JAS": list(range(7, 10)),
"OND": list(range(10, 13)),
"JFM": list(range(1, 4)),
"AMJ": list(range(4, 7)),
}
# this allows GDAL to throw Python Exceptions
gdal.UseExceptions()
def db2pr(dbvalue):
pr = 10 ** (dbvalue / 10.0)
return pr
if __name__ == "__main__":
# set up arguments
parser = argparse.ArgumentParser(
"script to make quarterly " + "means and stdevs of qscat dB values"
)
parser.add_argument(
"-v",
"--verbose",
help="increase output verbosity",
action="store_true",
default=False,
)
parser.add_argument(
"-q",
"--quarter",
nargs="?",
choices=("JAS", "OND", "JFM", "AMJ"),
default="JAS",
const="JAS",
help="Quarter for aggregation. Default=JAS",
)
parser.add_argument("region", help="BYU region string (e.g. SAm, NAm, Ama, etc.)")
parser.add_argument(
"year", type=int, help="Year e.g. 1999 (qscat data start in 1999)"
)
args = parser.parse_args()
verbose = args.verbose
year = args.year
quarter = args.quarter
# region list (LAEA regions only)
valid_region_list = [
"Grn",
"Ala",
"CAm",
"NAm",
"SAm",
"NAf",
"SAf",
"Sib",
"Eur",
"SAs",
"ChJ",
"Ind",
"Aus",
"Ber",
]
region = args.region
try:
region_index = valid_region_list.index(region)
except Exception:
sys.stderr.write("Region not valid.\n")
sys.stderr.write("Valid regions are:\n")
sys.stderr.write("{}\n".format(valid_region_list))
sys.exit(1)
if verbose:
print("region: {}".format(region))
print("year: {}".format(year))
print("quarter: {}".format(quarter))
# set data dir
indir = os.path.join(DATADIR, "geotiffs", region, str(year))
outdir = indir
if year == 1999:
year2 = 99
else:
year2 = "{:02d}".format(year - 2000)
monthlist = Q2M[quarter]
# make a list of files for this year
filepatt = "quev-a-{}{}-*.tif".format(region, year2)
globpatt = os.path.join(indir, filepatt)
if verbose:
print("glob pattern: {}".format(globpatt))
filelist = glob.glob(globpatt)
qlist = []
for filepath in filelist:
fn = os.path.basename(filepath)
if verbose:
print(fn)
fn_dt = sp2.fn2dt(fn, date_flag="center")
iyear = fn_dt.year
imonth = fn_dt.month
iday = fn_dt.day
if imonth in monthlist:
qlist.append(fn)
if verbose:
print("{}: {}-{}-{}".format(fn, iyear, imonth, iday))
print("{}-{}: {}".format(year, quarter, qlist))
if len(qlist) == 0:
warnmsg = "No images found for this quarter.\n"
sys.stdout.write(warnmsg)
sys.exit(0)
# loop over images for this quarter
db_quarter = []
for i, image in enumerate(qlist):
a_imgpath = os.path.join(indir, image)
try:
a_ds = gdal.Open(a_imgpath)
except Exception:
print("Unable to open {}".format(a_imgpath))
sys.exit(1)
try:
srcband = a_ds.GetRasterBand(1)
except Exception:
print("Band ({}) not found".format(1))
sys.exit(1)
a_data = srcband.ReadAsArray()
a_mask = a_data == NODATA_VALUE
# if this is the first image get projection and geotransform
if i == 0:
prj = a_ds.GetProjection()
gt = a_ds.GetGeoTransform()
ny, nx = a_data.shape
db_data = a_data
db_masked = np.ma.MaskedArray(db_data, a_mask)
# add image to db_quarter list
db_quarter.append(db_masked)
# close datasets
a_ds = None
# stack list into array and find mean and std
dbarray = np.ma.stack(db_quarter, axis=2)
dbmean = np.ma.mean(dbarray, axis=2)
dbstd = np.ma.std(dbarray, axis=2)
print(dbmean.shape)
# finally, save as a geotiff
output_format = "GTiff"
driver = gdal.GetDriverByName(output_format)
dst_filename = "{}-quev-mean-db-{}-{}.tif"
dst_filename = dst_filename.format(region, year, quarter)
dst_dir = os.path.join(DATADIR, "geotiffs", region, str(year))
dst_path = os.path.join(dst_dir, dst_filename)
if verbose:
print("Output file for sig0 means: {}".format(dst_path))
dst_ds = driver.Create(dst_path, nx, ny, 1, gdal.GDT_Float32)
dst_data = np.ma.filled(dbmean, fill_value=NODATA_VALUE)
dst_ds.GetRasterBand(1).WriteArray(dst_data)
dst_ds.GetRasterBand(1).SetNoDataValue(NODATA_VALUE)
print("gt: {}".format(gt))
dst_ds.SetGeoTransform(gt)
dst_ds.SetProjection(prj)
dst_ds = None
dbmean_min = dbmean.min()
dbmean_max = dbmean.max()
dbmean_median = np.ma.median(dbmean)
print("Quarterly ({}) Mean Stats".format(quarter))
print(" Min: {}".format(dbmean_min))
print(" Max: {}".format(dbmean_max))
print(" Median: {}".format(dbmean_median))
# repeat for standard deviation
output_format = "GTiff"
driver = gdal.GetDriverByName(output_format)
dst_filename = "{}-quev-std-db-{}-{}.tif".format(region, year, quarter)
dst_dir = os.path.join(DATADIR, "geotiffs", region, str(year))
dst_path = os.path.join(dst_dir, dst_filename)
if verbose:
print("Output file: {}".format(dst_path))
dst_ds = driver.Create(dst_path, nx, ny, 1, gdal.GDT_Float32)
dst_data = np.ma.filled(dbstd, fill_value=NODATA_VALUE)
dst_ds.GetRasterBand(1).WriteArray(dst_data)
dst_ds.GetRasterBand(1).SetNoDataValue(NODATA_VALUE)
print("gt: {}".format(gt))
dst_ds.SetGeoTransform(gt)
dst_ds.SetProjection(prj)
dst_ds = None
dbstd_min = dbstd.min()
dbstd_max = dbstd.max()
dbstd_median = np.ma.median(dbstd)
print("Quarterly ({}) Stdev Stats".format(quarter))
print(" Min: {}".format(dbstd_min))
print(" Max: {}".format(dbstd_max))
print(" Median: {}".format(dbstd_median))
| [((430, 450), 'osgeo.gdal.UseExceptions', 'gdal.UseExceptions', ([], {}), '()\n', (448, 450), False, 'from osgeo import gdal\n'), ((585, 681), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (["('script to make quarterly ' + 'means and stdevs of qscat dB values')"], {}), "('script to make quarterly ' +\n 'means and stdevs of qscat dB values')\n", (608, 681), False, 'import argparse\n'), ((2438, 2467), 'os.path.join', 'os.path.join', (['indir', 'filepatt'], {}), '(indir, filepatt)\n', (2450, 2467), False, 'import os\n'), ((2550, 2569), 'glob.glob', 'glob.glob', (['globpatt'], {}), '(globpatt)\n', (2559, 2569), False, 'import glob\n'), ((4188, 4219), 'numpy.ma.stack', 'np.ma.stack', (['db_quarter'], {'axis': '(2)'}), '(db_quarter, axis=2)\n', (4199, 4219), True, 'import numpy as np\n'), ((4233, 4260), 'numpy.ma.mean', 'np.ma.mean', (['dbarray'], {'axis': '(2)'}), '(dbarray, axis=2)\n', (4243, 4260), True, 'import numpy as np\n'), ((4273, 4299), 'numpy.ma.std', 'np.ma.std', (['dbarray'], {'axis': '(2)'}), '(dbarray, axis=2)\n', (4282, 4299), True, 'import numpy as np\n'), ((4399, 4434), 'osgeo.gdal.GetDriverByName', 'gdal.GetDriverByName', (['output_format'], {}), '(output_format)\n', (4419, 4434), False, 'from osgeo import gdal\n'), ((4626, 4661), 'os.path.join', 'os.path.join', (['dst_dir', 'dst_filename'], {}), '(dst_dir, dst_filename)\n', (4638, 4661), False, 'import os\n'), ((4825, 4870), 'numpy.ma.filled', 'np.ma.filled', (['dbmean'], {'fill_value': 'NODATA_VALUE'}), '(dbmean, fill_value=NODATA_VALUE)\n', (4837, 4870), True, 'import numpy as np\n'), ((5168, 5188), 'numpy.ma.median', 'np.ma.median', (['dbmean'], {}), '(dbmean)\n', (5180, 5188), True, 'import numpy as np\n'), ((5455, 5490), 'osgeo.gdal.GetDriverByName', 'gdal.GetDriverByName', (['output_format'], {}), '(output_format)\n', (5475, 5490), False, 'from osgeo import gdal\n'), ((5649, 5684), 'os.path.join', 'os.path.join', (['dst_dir', 'dst_filename'], {}), '(dst_dir, dst_filename)\n', (5661, 5684), False, 'import os\n'), ((5833, 5877), 'numpy.ma.filled', 'np.ma.filled', (['dbstd'], {'fill_value': 'NODATA_VALUE'}), '(dbstd, fill_value=NODATA_VALUE)\n', (5845, 5877), True, 'import numpy as np\n'), ((6170, 6189), 'numpy.ma.median', 'np.ma.median', (['dbstd'], {}), '(dbstd)\n', (6182, 6189), True, 'import numpy as np\n'), ((2629, 2655), 'os.path.basename', 'os.path.basename', (['filepath'], {}), '(filepath)\n', (2645, 2655), False, 'import os\n'), ((2714, 2747), 'sirpy2.fn2dt', 'sp2.fn2dt', (['fn'], {'date_flag': '"""center"""'}), "(fn, date_flag='center')\n", (2723, 2747), True, 'import sirpy2 as sp2\n'), ((3126, 3151), 'sys.stdout.write', 'sys.stdout.write', (['warnmsg'], {}), '(warnmsg)\n', (3142, 3151), False, 'import sys\n'), ((3160, 3171), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3168, 3171), False, 'import sys\n'), ((3291, 3317), 'os.path.join', 'os.path.join', (['indir', 'image'], {}), '(indir, image)\n', (3303, 3317), False, 'import os\n'), ((3965, 3999), 'numpy.ma.MaskedArray', 'np.ma.MaskedArray', (['db_data', 'a_mask'], {}), '(db_data, a_mask)\n', (3982, 3999), True, 'import numpy as np\n'), ((1783, 1822), 'sys.stderr.write', 'sys.stderr.write', (['"""Region not valid.\n"""'], {}), "('Region not valid.\\n')\n", (1799, 1822), False, 'import sys\n'), ((1831, 1871), 'sys.stderr.write', 'sys.stderr.write', (['"""Valid regions are:\n"""'], {}), "('Valid regions are:\\n')\n", (1847, 1871), False, 'import sys\n'), ((1939, 1950), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1947, 1950), False, 'import 
sys\n'), ((3350, 3370), 'osgeo.gdal.Open', 'gdal.Open', (['a_imgpath'], {}), '(a_imgpath)\n', (3359, 3370), False, 'from osgeo import gdal\n'), ((3466, 3477), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3474, 3477), False, 'import sys\n'), ((3625, 3636), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3633, 3636), False, 'import sys\n')] |
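The db2pr() helper defined near the top of the script above is the standard decibel-to-power-ratio relation pr = 10**(db/10); a short self-contained check of it and its inverse:

import math

def db2pr(dbvalue):
    return 10 ** (dbvalue / 10.0)

def pr2db(pr):
    return 10.0 * math.log10(pr)

assert abs(db2pr(3.0) - 1.9953) < 1e-3            # +3 dB is roughly a doubling of power
assert abs(pr2db(db2pr(-7.5)) - (-7.5)) < 1e-9     # round trip recovers the dB value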
noahbarros/Python-Exercises | ex062.py | fafda898473bc984280e201ed11d8ad76cc8624a | primeiro = int(input('Digite o priemiro termo da PA: '))
razão = int(input('Digite a razão da PA: '))
termo = primeiro
cont = 1
total = 0
mais = 10
while mais != 0:
total += mais
while cont <= total:
print(f'{termo} ', end='')
termo += razão
cont += 1
print('Pausa')
mais = int(input('Quantos termos você quer usar a mais? '))
print(f'a progressão foi finalizada com {total} termos mostrados')
| [] |
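For reference, with primeiro=1 and razão=2 the first block printed by the loop above is 1 3 5 7 9 11 13 15 17 19 followed by 'Pausa'. A non-interactive sketch of the same term generation, useful for checking the arithmetic without typing input:

def pa_terms(primeiro, razao, quantos):
    # first `quantos` terms of the arithmetic progression
    return [primeiro + razao * i for i in range(quantos)]

print(pa_terms(1, 2, 10))   # [1, 3, 5, 7, 9, 11, 13, 15, 17, 19]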
Brokdar/ArxmlDataExtractor | arxml_data_extractor/handler/object_handler.py | 2853112cbd4d001418b11ccb99f1db268347dfab | from lxml.etree import Element, QName
from typing import Union, List, Any
from tqdm import tqdm
import logging
from arxml_data_extractor.handler import value_handler
from arxml_data_extractor.handler.path_handler import PathHandler
from arxml_data_extractor.asr.asr_parser import AsrParser
from arxml_data_extractor.query.data_query import DataQuery
from arxml_data_extractor.query.data_object import DataObject
from arxml_data_extractor.query.data_value import DataValue
class ObjectHandler():
def __init__(self, parser: AsrParser):
self.logger = logging.getLogger()
self.path_handler = PathHandler(parser)
def handle(self, data_object: DataObject, node: Element = None) -> Union[list, dict]:
is_not_root = True
if node is None:
is_not_root = False
node = self.path_handler.parser.root
if is_not_root:
self.logger.info(f'ObjectHandler - handle DataObject(\'{data_object.name}\')')
else:
self.logger.info(f'ObjectHandler - [root] handle DataObject(\'{data_object.name}\')')
values = []
elements = self.path_handler.elements_by_path(data_object.path, node)
for element in tqdm(
elements,
desc=f'Handle DataObject(\'{data_object.name}\')',
disable=is_not_root,
bar_format="{desc:<70}{percentage:3.0f}% |{bar:70}| {n_fmt:>4}/{total_fmt}"):
if element is not None:
self.logger.info(
f'ObjectHandler - element found: \'{QName(element).localname}\' at line {element.sourceline - 1}'
)
values.append(self.__handle_values(data_object.values, element))
if not values:
self.logger.warning(
f'ObjectHandler - no values found for DataObject(\'{data_object.name}\')')
else:
self.logger.info(
f'ObjectHandler - values found for DataObject(\'{data_object.name}\'): {len(values)}'
)
return values[0] if len(values) == 1 else values
def __handle_values(self, values: List[Union[DataValue, DataObject]], node: Element) -> dict:
results = {}
for value in values:
if isinstance(value, DataObject):
results[value.name] = self.handle(value, node)
elif isinstance(value, DataValue):
results[value.name] = self.__handle_value(value.query, node)
if results[value.name] is None:
self.logger.info(
f'ObjectHandler - no value found for DataValue(\'{value.name}\')')
else:
self.logger.info(
f'ObjectHandler - value found: DataValue(\'{value.name}\') = \'{results[value.name]}\''
)
else:
error = f'ObjectHandler - invalid value type ({type(value)}). Value must be of type DataObject or DataValue'
self.logger.error(error)
raise TypeError(error)
return results
def __handle_value(self, query: DataQuery, node: Element) -> Any:
if isinstance(query.path, DataQuery.XPath):
if query.path.is_reference:
element = self.path_handler.element_by_inline_ref(query.path, node)
else:
element = self.path_handler.element_by_xpath(query.path.xpath, node)
else: # DataQuery.Reference isn't allowed on DataValue
return None
if element is None:
return None
return value_handler.handle(query, element)
| [((580, 599), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (597, 599), False, 'import logging\n'), ((629, 648), 'arxml_data_extractor.handler.path_handler.PathHandler', 'PathHandler', (['parser'], {}), '(parser)\n', (640, 648), False, 'from arxml_data_extractor.handler.path_handler import PathHandler\n'), ((1238, 1409), 'tqdm.tqdm', 'tqdm', (['elements'], {'desc': 'f"""Handle DataObject(\'{data_object.name}\')"""', 'disable': 'is_not_root', 'bar_format': '"""{desc:<70}{percentage:3.0f}% |{bar:70}| {n_fmt:>4}/{total_fmt}"""'}), '(elements, desc=f"Handle DataObject(\'{data_object.name}\')", disable=\n is_not_root, bar_format=\n \'{desc:<70}{percentage:3.0f}% |{bar:70}| {n_fmt:>4}/{total_fmt}\')\n', (1242, 1409), False, 'from tqdm import tqdm\n'), ((3682, 3718), 'arxml_data_extractor.handler.value_handler.handle', 'value_handler.handle', (['query', 'element'], {}), '(query, element)\n', (3702, 3718), False, 'from arxml_data_extractor.handler import value_handler\n'), ((1601, 1615), 'lxml.etree.QName', 'QName', (['element'], {}), '(element)\n', (1606, 1615), False, 'from lxml.etree import Element, QName\n')] |
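The handler above logs element names via lxml's QName(...).localname and positions via element.sourceline; a self-contained illustration of those two lxml features on a toy, AUTOSAR-like snippet (the namespace URI is only an example):

from lxml import etree

xml = b'<AUTOSAR xmlns="http://autosar.org/schema/r4.0">\n  <AR-PACKAGES/>\n</AUTOSAR>'
root = etree.fromstring(xml)
child = root[0]
print(etree.QName(child).localname)   # AR-PACKAGES (namespace stripped)
print(child.sourceline)                 # 2 (1-based line in the parsed input)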
Chromico/bk-base | src/api/datamanage/pro/lifecycle/data_trace/data_set_create.py | be822d9bbee544a958bed4831348185a75604791 | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-BASE 蓝鲸基础平台 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-BASE 蓝鲸基础平台 is licensed under the MIT License.
License for BK-BASE 蓝鲸基础平台:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from copy import deepcopy
from datamanage.pro import exceptions as dm_pro_errors
from datamanage.utils.api import MetaApi
from datamanage.pro.utils.time import utc_to_local, str_to_datetime
from datamanage.pro.lifecycle.models_dict import (
DATASET_CREATE_MAPPINGS,
DATASET_CREATE_EVENT_INFO_DICT,
DataTraceShowType,
ComplexSearchBackendType,
DataTraceFinishStatus,
)
def get_dataset_create_info(dataset_id, dataset_type):
"""获取数据足迹中和数据创建相关信息
:param dataset_id: 数据id
:param dataset_type: 数据类型
:return: 数据创建相关信息
:rtype: list
"""
    # 1) fetch the creation info for this dataset from dgraph
data_set_create_info_statement = """
{
get_dataset_create_info(func: eq(%s, "%s")){created_by created_at}
}
""" % (
DATASET_CREATE_MAPPINGS[dataset_type]['data_set_pk'],
dataset_id,
)
query_result = MetaApi.complex_search(
{"backend_type": ComplexSearchBackendType.DGRAPH.value, "statement": data_set_create_info_statement}, raw=True
)
create_info_ret = query_result['data']['data']['get_dataset_create_info']
if not (isinstance(create_info_ret, list) and create_info_ret):
raise dm_pro_errors.GetDataSetCreateInfoError(message_kv={'dataset_id': dataset_id})
    # 2) build the formatted creation event entry
create_trace_dict = deepcopy(DATASET_CREATE_EVENT_INFO_DICT)
create_trace_dict.update(
{
"sub_type": dataset_type,
"sub_type_alias": DATASET_CREATE_MAPPINGS[dataset_type]['data_set_create_alias'],
"description": DATASET_CREATE_MAPPINGS[dataset_type]['data_set_create_alias'],
"created_at": utc_to_local(create_info_ret[0]['created_at']),
"created_by": create_info_ret[0]['created_by'],
"show_type": DataTraceShowType.DISPLAY.value,
"datetime": str_to_datetime(utc_to_local(create_info_ret[0]['created_at'])),
"status": DataTraceFinishStatus.STATUS,
"status_alias": DataTraceFinishStatus.STATUS_ALIAS,
}
)
return [create_trace_dict]
| [((2222, 2361), 'datamanage.utils.api.MetaApi.complex_search', 'MetaApi.complex_search', (["{'backend_type': ComplexSearchBackendType.DGRAPH.value, 'statement':\n data_set_create_info_statement}"], {'raw': '(True)'}), "({'backend_type': ComplexSearchBackendType.DGRAPH.\n value, 'statement': data_set_create_info_statement}, raw=True)\n", (2244, 2361), False, 'from datamanage.utils.api import MetaApi\n'), ((2653, 2693), 'copy.deepcopy', 'deepcopy', (['DATASET_CREATE_EVENT_INFO_DICT'], {}), '(DATASET_CREATE_EVENT_INFO_DICT)\n', (2661, 2693), False, 'from copy import deepcopy\n'), ((2531, 2609), 'datamanage.pro.exceptions.GetDataSetCreateInfoError', 'dm_pro_errors.GetDataSetCreateInfoError', ([], {'message_kv': "{'dataset_id': dataset_id}"}), "(message_kv={'dataset_id': dataset_id})\n", (2570, 2609), True, 'from datamanage.pro import exceptions as dm_pro_errors\n'), ((2983, 3029), 'datamanage.pro.utils.time.utc_to_local', 'utc_to_local', (["create_info_ret[0]['created_at']"], {}), "(create_info_ret[0]['created_at'])\n", (2995, 3029), False, 'from datamanage.pro.utils.time import utc_to_local, str_to_datetime\n'), ((3189, 3235), 'datamanage.pro.utils.time.utc_to_local', 'utc_to_local', (["create_info_ret[0]['created_at']"], {}), "(create_info_ret[0]['created_at'])\n", (3201, 3235), False, 'from datamanage.pro.utils.time import utc_to_local, str_to_datetime\n')] |
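A sketch of the minimal response shape that the parsing in get_dataset_create_info() above relies on; every value is a placeholder and only the nesting mirrors the lookups performed in the code:

fake_query_result = {
    'data': {
        'data': {
            'get_dataset_create_info': [
                {'created_by': 'admin', 'created_at': '2021-01-01T00:00:00'}
            ]
        }
    }
}
create_info_ret = fake_query_result['data']['data']['get_dataset_create_info']
assert isinstance(create_info_ret, list) and create_info_ret   # otherwise GetDataSetCreateInfoError is raised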
shivam13verma/judge-embeddings | vectors2.py | 9b861319a1240529d25c15799952e32dde2e894e | import locale
import glob
import os
import os.path
import requests
import tarfile
import sys
import re
import gensim
from gensim.models.doc2vec import TaggedDocument
from collections import namedtuple
from gensim.models import Doc2Vec
import gensim.models.doc2vec
from collections import OrderedDict
import multiprocessing
from gensim.test.test_doc2vec import ConcatenatedDoc2Vec
import pickle
reload(sys)
sys.setdefaultencoding("utf-8")
#dirname = '/scratch/ap4608/judge_data'
#locale.setlocale(locale.LC_ALL, 'C')
#
#
## Convert text to lower-case and strip punctuation/symbols from words
#def normalize_text(text):
# norm_text = text.lower()
#
# # Replace breaks with spaces
# norm_text = norm_text.replace('<br />', ' ')
#
# # Pad punctuation with spaces on both sides
# for char in ['.', '"', ',', '(', ')', '!', '?', ';', ':']:
# norm_text = norm_text.replace(char, ' ' + char + ' ')
#
# return norm_text
#
#
## Concat and normalize test/train data
#folders = os.listdir(dirname)
#alldata = ''
#
#for fol in folders:
# temp = ''
# output = fol.replace('/', '-') + '.txt'
#
# # Is there a better pattern to use?
# txt_files = glob.glob('/'.join([dirname, fol, '*.txt']))
#
# for txt in txt_files:
# with open(txt, 'r') as t:
# control_chars = [chr(0x85)]
# t_clean = t.read()
#
# t_clean = t_clean.replace('\n', ' ')
# t_clean = re.sub(r'[^\x00-\x7F]+',' ', t_clean)
#
# for c in control_chars:
# t_clean = t_clean.replace(c, ' ')
#
# temp += t_clean
#
# temp += "\n"
#
# temp_norm = normalize_text(temp)
#
# if len(temp_norm) == 1:
# continue
#
# with open('/'.join([dirname, output]), 'w') as n:
# n.write(temp_norm)
#
# alldata += temp_norm
#
#with open('/'.join([dirname, 'alldata-id.txt']), 'w') as f:
# for idx, line in enumerate(alldata.splitlines()):
# num_line = "_*{0} {1}\n".format(idx, line)
# f.write(num_line)
#
#SentimentDocument = namedtuple('SentimentDocument', 'words tags split sentiment')
#
#alldocs = [] # will hold all docs in original order
#with open(os.path.join(dirname, 'alldata-id.txt')) as alldata:
# for line_no, line in enumerate(alldata):
# tokens = gensim.utils.to_unicode(line).split()
# words = tokens[1:]
# tags = [line_no] # `tags = [tokens[0]]` would also work at extra memory cost
# split = ['train','test','extra','extra'][line_no//25000] # 25k train, 25k test, 25k extra
# sentiment = [1.0, 0.0, 1.0, 0.0, None, None, None, None][line_no//12500] # [12.5K pos, 12.5K neg]*2 then unknown
# alldocs.append(SentimentDocument(words, tags, split, sentiment))
#
#train_docs = [doc for doc in alldocs if doc.split == 'train']
#test_docs = [doc for doc in alldocs if doc.split == 'test']
#doc_list = alldocs[:] # for reshuffling per pass
#
#cores = multiprocessing.cpu_count()
#assert gensim.models.doc2vec.FAST_VERSION > -1, "this will be painfully slow otherwise"
#
#simple_models = [
# # PV-DM w/concatenation - window=5 (both sides) approximates paper's 10-word total window size
# Doc2Vec(dm=1, dm_concat=1, size=100, window=5, negative=5, hs=0, min_count=2, workers=cores),
# # PV-DBOW
# Doc2Vec(dm=0, size=100, negative=5, hs=0, min_count=2, workers=cores),
# # PV-DM w/average
# Doc2Vec(dm=1, dm_mean=1, size=100, window=10, negative=5, hs=0, min_count=2, workers=cores),
#]
#
## speed setup by sharing results of 1st model's vocabulary scan
#simple_models[0].build_vocab(alldocs) # PV-DM/concat requires one special NULL word so it serves as template
#for model in simple_models[1:]:
# model.reset_from(simple_models[0])
#
#models_by_name = OrderedDict((str(model), model) for model in simple_models)
#
#models_by_name['dbow+dmm'] = ConcatenatedDoc2Vec([simple_models[1], simple_models[2]])
#models_by_name['dbow+dmc'] = ConcatenatedDoc2Vec([simple_models[1], simple_models[0]])
#
## Create a document vector list and save it
#doc_vec_list = [x.docvecs for x in simple_models]
docvecs = pickle.load(open('docvecs.p', 'rb'))
print len(docvecs)
print len(docvecs[0])
print docvecs[0]
for i,x in enumerate(docvecs):
with open('docvecs_'+str(i)+'.txt','w') as f:
for vec in x:
            # vec is a document vector (an array of floats); join it so write() receives a string
            f.write(" ".join(str(val) for val in vec))
f.write("\n")
# pickle.dump(models_by_name, open('model.p', 'wb'))
| [] |
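An equivalent, more compact dump of the pickled vectors, assuming each entry of docvecs is an iterable of numeric vectors and numpy is available:

import pickle
import numpy as np

with open('docvecs.p', 'rb') as fh:
    docvecs = pickle.load(fh)

for i, vectors in enumerate(docvecs):
    # one row per document vector, whitespace-separated components
    np.savetxt('docvecs_%d.txt' % i, np.asarray(list(vectors)))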
crest-cassia/caravan | caravan_search_engine/test/test_task.py | 0a8e606e31d2d36a9379bdc00fafe55cf9144da6 | import unittest
from caravan.task import Task
from caravan.tables import Tables
class TestRun(unittest.TestCase):
def setUp(self):
self.t = Tables.get()
self.t.clear()
def test_task(self):
t = Task(1234, "echo hello world")
self.assertEqual(t.id(), 1234)
self.assertEqual(t.is_finished(), False)
self.assertEqual(t.command(), "echo hello world")
t._store_result([1.0, 2.0, 3.0], 0, 3, 111, 222)
self.assertTrue(t.is_finished())
self.assertEqual(t.rc(), 0)
self.assertEqual(t.rank(), 3)
self.assertEqual(t.start_at(), 111)
self.assertEqual(t.finish_at(), 222)
def test_create(self):
for i in range(10):
t = Task.create("echo %d" % i)
self.assertEqual(t.id(), i)
self.assertEqual(t.is_finished(), False)
self.assertEqual(len(Task.all()), 10)
def test_all(self):
tasks = [Task.create("echo %d" % i) for i in range(10)]
self.assertEqual(Task.all(), tasks)
def test_find(self):
tasks = [Task.create("echo %d" % i) for i in range(10)]
self.assertEqual(Task.find(5).id(), 5)
self.assertEqual(Task.find(5), tasks[5])
if __name__ == '__main__':
unittest.main()
| [((1256, 1271), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1269, 1271), False, 'import unittest\n'), ((154, 166), 'caravan.tables.Tables.get', 'Tables.get', ([], {}), '()\n', (164, 166), False, 'from caravan.tables import Tables\n'), ((228, 258), 'caravan.task.Task', 'Task', (['(1234)', '"""echo hello world"""'], {}), "(1234, 'echo hello world')\n", (232, 258), False, 'from caravan.task import Task\n'), ((738, 764), 'caravan.task.Task.create', 'Task.create', (["('echo %d' % i)"], {}), "('echo %d' % i)\n", (749, 764), False, 'from caravan.task import Task\n'), ((946, 972), 'caravan.task.Task.create', 'Task.create', (["('echo %d' % i)"], {}), "('echo %d' % i)\n", (957, 972), False, 'from caravan.task import Task\n'), ((1018, 1028), 'caravan.task.Task.all', 'Task.all', ([], {}), '()\n', (1026, 1028), False, 'from caravan.task import Task\n'), ((1080, 1106), 'caravan.task.Task.create', 'Task.create', (["('echo %d' % i)"], {}), "('echo %d' % i)\n", (1091, 1106), False, 'from caravan.task import Task\n'), ((1199, 1211), 'caravan.task.Task.find', 'Task.find', (['(5)'], {}), '(5)\n', (1208, 1211), False, 'from caravan.task import Task\n'), ((887, 897), 'caravan.task.Task.all', 'Task.all', ([], {}), '()\n', (895, 897), False, 'from caravan.task import Task\n'), ((1152, 1164), 'caravan.task.Task.find', 'Task.find', (['(5)'], {}), '(5)\n', (1161, 1164), False, 'from caravan.task import Task\n')] |
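The test module above ends with unittest.main(), so it can be run directly as a script; a programmatic run is also possible, with the package path below inferred from the repo_path column and assumed importable:

import unittest
from caravan_search_engine.test import test_task   # assumed package layout

suite = unittest.defaultTestLoader.loadTestsFromModule(test_task)
unittest.TextTestRunner(verbosity=2).run(suite)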
tashidexiaoL/splashnew | splash/render_options.py | 2bbb886bae8fa88c30a4460f41ca940c4b010287 | # -*- coding: utf-8 -*-
import os
import json
from splash import defaults
from splash.utils import to_bytes, path_join_secure
from splash.errors import BadOption
class RenderOptions(object):
"""
Options that control how to render a response.
"""
_REQUIRED = object()
def __init__(self, data, max_timeout):
self.data = data
self.max_timeout = max_timeout
@classmethod
def raise_error(cls, argument, description, type='bad_argument', **kwargs):
params = {
'type': type,
'argument': argument,
'description': description
}
params.update(kwargs)
raise BadOption(params)
@classmethod
def fromrequest(cls, request, max_timeout):
"""
Initialize options from a Twisted Request.
"""
# 1. GET / POST data
data = {key.decode('utf-8'): values[0].decode('utf-8') for key, values
in request.args.items()}
if request.method == b'POST':
content_type = request.getHeader(b'content-type')
if content_type:
request.content.seek(0)
# 2. application/json POST data
if b'application/json' in content_type:
try:
content = request.content.read().decode('utf-8')
data.update(json.loads(content))
except ValueError as e:
raise BadOption({
'type': 'invalid_json',
'description': "Can't decode JSON",
'message': str(e),
})
# 3. js_source from application/javascript POST requests
if b'application/javascript' in content_type:
data['js_source'] = request.content.read().decode('utf-8')
request.content.seek(0)
data['uid'] = id(request)
return cls(data, max_timeout)
def get_expired_args(self, cache):
"""
Return a list of argument names from load_args which can't be loaded
"""
return cache.get_missing(self.get_load_args().items())
def save_args_to_cache(self, cache):
"""
Process save_args and put all values to cache.
Return a list of (name, key) pairs.
"""
save_args = self.get_save_args()
save_values = [self.data.get(name) for name in save_args]
keys = cache.add_many(save_values)
return list(zip(save_args, keys))
def load_cached_args(self, cache):
load_args = self.get_load_args()
for name, key in (load_args or {}).items():
self.data[name] = cache[key]
def get(self, name, default=_REQUIRED, type=str, range=None):
value = self.data.get(name)
if value is not None:
if type is not None:
try:
value = type(value)
except ValueError:
msg = "Argument %r has a wrong type" % (name,)
self.raise_error(name, msg, required_type=type.__name__)
if range is not None and not (range[0] <= value <= range[1]):
self.raise_error(name, 'Argument is out of the allowed range',
min=range[0], max=range[1], value=value)
return value
elif default is self._REQUIRED:
self.raise_error(name, 'Required argument is missing: %s' % name,
type='argument_required')
else:
return default
def _get_bool(self, name, default=_REQUIRED):
return self.get(name, default, type=int, range=(0, 1))
def _get_url(self, name, default=_REQUIRED):
url = self.get(name, default, type=None)
if isinstance(url, bytes):
url = url.decode('utf8')
return url
def get_uid(self):
return self.get('uid')
def get_url(self):
return self._get_url("url")
def get_baseurl(self):
return self._get_url("baseurl", default=None)
def get_wait(self):
return self.get("wait", defaults.WAIT_TIME, type=float,
range=(0, self.get_timeout()))
def get_timeout(self):
default = min(self.max_timeout, defaults.TIMEOUT)
return self.get("timeout", default, type=float,
range=(0, self.max_timeout))
def get_resource_timeout(self):
return self.get("resource_timeout", defaults.RESOURCE_TIMEOUT,
type=float, range=(0, 1e6))
def get_response_body(self):
return self._get_bool("response_body", defaults.RESPONSE_BODY_ENABLED)
def get_request_body(self):
return self._get_bool("request_body", defaults.REQUEST_BODY_ENABLED)
def get_images(self):
return self._get_bool("images", defaults.AUTOLOAD_IMAGES)
def get_proxy(self):
return self.get("proxy", default=None)
def get_js_source(self):
return self.get("js_source", default=None)
def get_width(self):
return self.get("width", None, type=int, range=(1, defaults.MAX_WIDTH))
def get_height(self):
return self.get("height", None, type=int,
range=(1, defaults.MAX_HEIGTH))
def get_scale_method(self):
scale_method = self.get("scale_method", defaults.IMAGE_SCALE_METHOD)
allowed_scale_methods = ['raster', 'vector']
if scale_method not in allowed_scale_methods:
self.raise_error(
argument='scale_method',
description="Invalid 'scale_method': %s" % scale_method,
allowed=allowed_scale_methods,
received=scale_method,
)
return scale_method
def get_quality(self):
return self.get("quality", defaults.JPEG_QUALITY, type=int, range=(0, 100))
def get_http_method(self):
method = self.get("http_method", "GET")
if method.upper() not in ["POST", "GET"]:
self.raise_error("http_method", "Unsupported HTTP method {}".format(method))
return method
def get_body(self):
body = self.get("body", None, to_bytes)
method = self.get("http_method", "GET").upper()
if method == 'GET' and body:
self.raise_error("body", "GET request should not have a body")
return body
def get_render_all(self, wait=None):
result = self._get_bool("render_all", False)
if result == 1 and wait == 0:
self.raise_error("render_all",
"Pass non-zero 'wait' to render full webpage")
return result
def get_lua_source(self):
return self.get("lua_source")
def get_js_profile(self, js_profiles_path):
js_profile = self.get("js", default=None)
if not js_profile:
return js_profile
if js_profiles_path is None:
self.raise_error('js',
'Javascript profiles are not enabled on server')
try:
profile_dir = path_join_secure(js_profiles_path, js_profile)
except ValueError as e:
# security check fails
print(e)
self.raise_error('js', 'Javascript profile does not exist')
if not os.path.isdir(profile_dir):
self.raise_error('js', 'Javascript profile does not exist')
return profile_dir
def get_headers(self):
headers = self.get("headers", default=None, type=None)
if headers is None:
return headers
if not isinstance(headers, (list, tuple, dict)):
self.raise_error(
argument='headers',
description="'headers' must be either a JSON array of "
"(name, value) pairs or a JSON object"
)
if isinstance(headers, (list, tuple)):
for el in headers:
string_only = all(isinstance(e, str) for e in el)
if not (isinstance(el, (list, tuple)) and len(el) == 2 and string_only):
self.raise_error(
argument='headers',
description="'headers' must be either a JSON array of "
"(name, value) pairs or a JSON object"
)
return headers
def get_save_args(self):
save_args = self.get("save_args", default=None, type=None)
if save_args is None:
return []
if isinstance(save_args, str):
# comma-separated string
save_args = save_args.split(',')
if not isinstance(save_args, list):
self.raise_error(
argument="save_args",
description="'save_args' should be either a comma-separated "
"string or a JSON array with argument names",
)
# JSON array
if not all(isinstance(a, str) for a in save_args):
self.raise_error(
argument="save_args",
description="'save_args' should be a list of strings",
)
return save_args
def get_load_args(self):
load_args = self.get("load_args", default=None, type=None)
if load_args is None:
return {}
if isinstance(load_args, str):
try:
load_args = dict(
kv.split("=", 1) for kv in load_args.split(';')
)
except ValueError:
self.raise_error(
argument="load_args",
description="'load_args' string value is not a "
"semicolon-separated list of name=hash pairs"
)
if not isinstance(load_args, dict):
self.raise_error(
argument="load_args",
description="'load_args' should be either a JSON object with "
"argument hashes or a semicolon-separated list "
"of name=hash pairs"
)
return load_args
def get_viewport(self, wait=None):
viewport = self.get("viewport", defaults.VIEWPORT_SIZE)
if viewport == 'full':
if wait == 0:
self.raise_error("viewport",
"Pass non-zero 'wait' to render full webpage")
else:
try:
validate_size_str(viewport)
except ValueError as e:
self.raise_error("viewport", str(e))
return viewport
def get_filters(self, pool=None, adblock_rules=None):
filter_names = self.get('filters', '')
filter_names = [f for f in filter_names.split(',') if f]
if pool is None and adblock_rules is None: # skip validation
return filter_names
if not filter_names:
return filter_names
if pool is not None:
adblock_rules = pool.network_manager_factory.adblock_rules
if adblock_rules is None:
self.raise_error(
"filters",
"Invalid filter names: %s" % (filter_names,)
)
if adblock_rules is not None:
unknown_filters = adblock_rules.get_unknown_filters(filter_names)
if unknown_filters:
self.raise_error(
"filters",
"Invalid filter names: %s" % (unknown_filters,)
)
return filter_names
def get_allowed_domains(self):
allowed_domains = self.get("allowed_domains", default=None)
if allowed_domains is not None:
return allowed_domains.split(',')
def get_allowed_content_types(self):
content_types = self.get("allowed_content_types", default=['*'])
if isinstance(content_types, str):
content_types = list(filter(None, content_types.split(',')))
return content_types
def get_forbidden_content_types(self):
content_types = self.get("forbidden_content_types", default=[])
if isinstance(content_types, str):
content_types = list(filter(None, content_types.split(',')))
return content_types
def get_html5_media(self):
return self._get_bool("html5_media", defaults.HTML5_MEDIA_ENABLED)
def get_engine(self, browser_engines_enabled=None):
engine = self.get("engine", default="webkit", type=str)
if engine not in {"webkit", "chromium"}:
self.raise_error("engine", "Unknown render engine {}".format(engine))
if browser_engines_enabled is not None:
if engine not in browser_engines_enabled:
self.raise_error("engine", "Disabled render engine {}".format(engine))
return engine
def get_http2(self):
engine = self.get_engine()
        if engine == "webkit":
default = defaults.WEBKIT_HTTP2_ENABLED
else:
assert engine == 'chromium'
default = defaults.CHROMIUM_HTTP2_ENABLED
return self._get_bool("http2", default)
def get_common_params(self, js_profiles_path):
wait = self.get_wait()
return {
'url': self.get_url(),
'baseurl': self.get_baseurl(),
'wait': wait,
'resource_timeout': self.get_resource_timeout(),
'viewport': self.get_viewport(wait),
'render_all': self.get_render_all(wait),
'images': self.get_images(),
'headers': self.get_headers(),
'proxy': self.get_proxy(),
'js_profile': self.get_js_profile(js_profiles_path),
'js_source': self.get_js_source(),
'http_method': self.get_http_method(),
'body': self.get_body(),
'html5_media': self.get_html5_media(),
'http2': self.get_http2(),
# 'lua': self.get_lua(),
}
def get_image_params(self):
return {
'width': self.get_width(),
'height': self.get_height(),
'scale_method': self.get_scale_method()
}
def get_png_params(self):
return self.get_image_params()
def get_jpeg_params(self):
params = {'quality': self.get_quality()}
params.update(self.get_image_params())
return params
def get_include_params(self):
return dict(
html=self._get_bool("html", defaults.DO_HTML),
iframes=self._get_bool("iframes", defaults.DO_IFRAMES),
png=self._get_bool("png", defaults.DO_PNG),
jpeg=self._get_bool("jpeg", defaults.DO_JPEG),
script=self._get_bool("script", defaults.SHOW_SCRIPT),
console=self._get_bool("console", defaults.SHOW_CONSOLE),
history=self._get_bool("history", defaults.SHOW_HISTORY),
har=self._get_bool("har", defaults.SHOW_HAR),
)
def validate_size_str(size_str):
"""
Validate size string in WxH format.
Can be used to validate both viewport and window size strings. Does not
special-case ``'full'`` viewport. Raises ``ValueError`` if anything goes
wrong.
:param size_str: string to validate
"""
max_width = defaults.VIEWPORT_MAX_WIDTH
max_heigth = defaults.VIEWPORT_MAX_HEIGTH
max_area = defaults.VIEWPORT_MAX_AREA
try:
w, h = map(int, size_str.split('x'))
except ValueError:
raise ValueError("Invalid viewport format: %s" % size_str)
else:
if not ((0 < w <= max_width) and (0 < h <= max_heigth) and
(w * h < max_area)):
raise ValueError("Viewport (%dx%d, area=%d) is out of range (%dx%d, area=%d)" %
(w, h, w * h, max_width, max_heigth, max_area))
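

# Hedged usage sketch (added for illustration, not part of the original module):
# exercises validate_size_str() with one well-formed viewport string and one
# malformed one.  It only assumes that the defaults.VIEWPORT_MAX_* limits allow
# a 1024x768 viewport.
if __name__ == "__main__":
    validate_size_str("1024x768")      # passes silently
    try:
        validate_size_str("full")      # not in WxH form, so ValueError is raised
    except ValueError as exc:
        print(exc)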
| [((665, 682), 'splash.errors.BadOption', 'BadOption', (['params'], {}), '(params)\n', (674, 682), False, 'from splash.errors import BadOption\n'), ((7117, 7163), 'splash.utils.path_join_secure', 'path_join_secure', (['js_profiles_path', 'js_profile'], {}), '(js_profiles_path, js_profile)\n', (7133, 7163), False, 'from splash.utils import to_bytes, path_join_secure\n'), ((7340, 7366), 'os.path.isdir', 'os.path.isdir', (['profile_dir'], {}), '(profile_dir)\n', (7353, 7366), False, 'import os\n'), ((1382, 1401), 'json.loads', 'json.loads', (['content'], {}), '(content)\n', (1392, 1401), False, 'import json\n')] |
sangumee/Opentutorials-Webn-Python | syntax/func.py | 9f813f8f342ea99ffee6e31f363f175fa023c489 | # code....
a = 1
b = 2
c = 3
s = a+b+c
r = s/3
print(r)
# code....
'''
def average():
a=1
b=2
c=3
s=a+b+c
r=s/3
print(r)
average()
'''
'''
#input
#parameter
#argument
def average(a,b,c):
s=a+b+c
r=s/3
print(r)
average(10,20,30)
'''
def average(a, b, c):
s = a+b+c
r = s/3
return r
print(average(10, 20, 30))
| [] |
ramezrawas/galaxy-1 | tools/evolution/codingSnps_filter.py | c03748dd49c060a68d07bce56eae33e0ba154414 | #!/usr/bin/env python
# runs after the job (and after the default post-filter)
from galaxy.tools.parameters import DataToolParameter
# Older py compatibility
try:
set()
except:
from sets import Set as set
def validate_input( trans, error_map, param_values, page_param_map ):
dbkeys = set()
data_param_names = set()
data_params = 0
for name, param in page_param_map.items():
if isinstance( param, DataToolParameter ):
# for each dataset parameter
if param_values.get(name, None) is not None:
dbkeys.add( param_values[name].dbkey )
data_params += 1
# check meta data
try:
param = param_values[name]
int( param.metadata.startCol )
int( param.metadata.endCol )
int( param.metadata.chromCol )
if param.metadata.strandCol is not None:
int( param.metadata.strandCol )
except:
error_msg = ("The attributes of this dataset are not properly set. "
"Click the pencil icon in the history item to set the chrom, start, end and strand columns.")
error_map[name] = error_msg
data_param_names.add( name )
if len( dbkeys ) > 1:
for name in data_param_names:
error_map[name] = "All datasets must belong to same genomic build, " \
"this dataset is linked to build '%s'" % param_values[name].dbkey
if data_params != len(data_param_names):
for name in data_param_names:
error_map[name] = "A dataset of the appropriate type is required"
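
# Hedged sketch (illustrative only): the genomic-build consistency rule enforced
# above, reduced to plain data so it reads without Galaxy objects.  'hg19' and
# 'mm9' are example build names; 'input1'/'input2' are hypothetical parameter names.
def _dbkey_check_example():
    dbkeys = set(['hg19', 'mm9'])
    data_param_names = set(['input1', 'input2'])
    error_map = {}
    if len(dbkeys) > 1:
        for name in data_param_names:
            error_map[name] = "All datasets must belong to same genomic build"
    return error_map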
| [((168, 173), 'sets.Set', 'set', ([], {}), '()\n', (171, 173), True, 'from sets import Set as set\n'), ((299, 304), 'sets.Set', 'set', ([], {}), '()\n', (302, 304), True, 'from sets import Set as set\n'), ((328, 333), 'sets.Set', 'set', ([], {}), '()\n', (331, 333), True, 'from sets import Set as set\n')] |
DeftNerd/bitcoinclassic | qa/rpc-tests/listtransactions.py | afff0155e0dd528145818c43f259743f54966d95 | #!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Exercise the listtransactions API
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
def check_array_result(object_array, to_match, expected):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
"""
num_matched = 0
for item in object_array:
all_match = True
for key,value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
for key,value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
num_matched = num_matched+1
if num_matched == 0:
raise AssertionError("No objects matched %s"%(str(to_match)))
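
# Hedged usage sketch (illustrative, values made up): check_array_result works on
# plain dicts, so its matching logic can be shown without a running node.  The
# helper below is not called by the test itself.
def check_array_result_example():
    objects = [{"txid": "ab" * 32, "category": "send", "confirmations": 1}]
    check_array_result(objects, {"txid": "ab" * 32}, {"category": "send"})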
class ListTransactionsTest(BitcoinTestFramework):
def run_test(self):
# Simple send, 0 to 1:
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
self.sync_all()
check_array_result(self.nodes[0].listtransactions(),
{"txid":txid},
{"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":0})
check_array_result(self.nodes[1].listtransactions(),
{"txid":txid},
{"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":0})
# mine a block, confirmations should change:
self.nodes[0].generate(1)
self.sync_all()
check_array_result(self.nodes[0].listtransactions(),
{"txid":txid},
{"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":1})
check_array_result(self.nodes[1].listtransactions(),
{"txid":txid},
{"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":1})
# send-to-self:
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
check_array_result(self.nodes[0].listtransactions(),
{"txid":txid, "category":"send"},
{"amount":Decimal("-0.2")})
check_array_result(self.nodes[0].listtransactions(),
{"txid":txid, "category":"receive"},
{"amount":Decimal("0.2")})
# sendmany from node1: twice to self, twice to node2:
send_to = { self.nodes[0].getnewaddress() : 0.11,
self.nodes[1].getnewaddress() : 0.22,
self.nodes[0].getaccountaddress("from1") : 0.33,
self.nodes[1].getaccountaddress("toself") : 0.44 }
txid = self.nodes[1].sendmany("", send_to)
self.sync_all()
check_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.11")},
{"txid":txid} )
check_array_result(self.nodes[0].listtransactions(),
{"category":"receive","amount":Decimal("0.11")},
{"txid":txid} )
check_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.22")},
{"txid":txid} )
check_array_result(self.nodes[1].listtransactions(),
{"category":"receive","amount":Decimal("0.22")},
{"txid":txid} )
check_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.33")},
{"txid":txid} )
check_array_result(self.nodes[0].listtransactions(),
{"category":"receive","amount":Decimal("0.33")},
{"txid":txid, "account" : "from1"} )
check_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.44")},
{"txid":txid, "account" : ""} )
check_array_result(self.nodes[1].listtransactions(),
{"category":"receive","amount":Decimal("0.44")},
{"txid":txid, "account" : "toself"} )
multisig = self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True)
txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
self.nodes[1].generate(1)
self.sync_all()
assert(len(self.nodes[0].listtransactions("watchonly", 100, 0, False)) == 0)
check_array_result(self.nodes[0].listtransactions("watchonly", 100, 0, True),
{"category":"receive","amount":Decimal("0.1")},
{"txid":txid, "account" : "watchonly"} )
if __name__ == '__main__':
ListTransactionsTest().main()
| [] |
aletourneau/salt | salt/modules/mount.py | d7013a2f64eb4b79592220d76274bc5dde609e08 | # -*- coding: utf-8 -*-
'''
Salt module to manage unix mounts and the fstab file
'''
from __future__ import absolute_import
# Import python libs
import os
import re
import logging
# Import salt libs
import salt.utils
from salt._compat import string_types
from salt.utils import which as _which
from salt.exceptions import CommandNotFoundError, CommandExecutionError
# Set up logger
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'mount'
def __virtual__():
'''
Only load on POSIX-like systems
'''
# Disable on Windows, a specific file module exists:
if salt.utils.is_windows():
return False
return True
def _list_mounts():
ret = {}
if __grains__['os'] in ['MacOS', 'Darwin']:
mounts = __salt__['cmd.run_stdout']('mount')
else:
mounts = __salt__['cmd.run_stdout']('mount -l')
for line in mounts.split('\n'):
comps = re.sub(r"\s+", " ", line).split()
ret[comps[2]] = comps[0]
return ret
def _active_mountinfo(ret):
_list = _list_mounts()
filename = '/proc/self/mountinfo'
if not os.access(filename, os.R_OK):
msg = 'File not readable {0}'
raise CommandExecutionError(msg.format(filename))
blkid_info = __salt__['disk.blkid']()
with salt.utils.fopen(filename) as ifile:
for line in ifile:
comps = line.split()
device = comps[2].split(':')
device_name = comps[8]
device_uuid = None
if device_name:
device_uuid = blkid_info.get(device_name, {}).get('UUID')
device_uuid = device_uuid and device_uuid.lower()
ret[comps[4]] = {'mountid': comps[0],
'parentid': comps[1],
'major': device[0],
'minor': device[1],
'root': comps[3],
'opts': comps[5].split(','),
'fstype': comps[7],
'device': device_name,
'alt_device': _list.get(comps[4], None),
'superopts': comps[9].split(','),
'device_uuid': device_uuid}
return ret
def _active_mounts(ret):
'''
List active mounts on Linux systems
'''
_list = _list_mounts()
filename = '/proc/self/mounts'
if not os.access(filename, os.R_OK):
msg = 'File not readable {0}'
raise CommandExecutionError(msg.format(filename))
with salt.utils.fopen(filename) as ifile:
for line in ifile:
comps = line.split()
ret[comps[1]] = {'device': comps[0],
'alt_device': _list.get(comps[1], None),
'fstype': comps[2],
'opts': comps[3].split(',')}
return ret
def _active_mounts_freebsd(ret):
'''
List active mounts on FreeBSD systems
'''
for line in __salt__['cmd.run_stdout']('mount -p').split('\n'):
comps = re.sub(r"\s+", " ", line).split()
ret[comps[1]] = {'device': comps[0],
'fstype': comps[2],
'opts': comps[3].split(',')}
return ret
def _active_mounts_solaris(ret):
'''
List active mounts on Solaris systems
'''
for line in __salt__['cmd.run_stdout']('mount -v').split('\n'):
comps = re.sub(r"\s+", " ", line).split()
ret[comps[2]] = {'device': comps[0],
'fstype': comps[4],
'opts': comps[5].split('/')}
return ret
def _active_mounts_openbsd(ret):
'''
List active mounts on OpenBSD systems
'''
for line in __salt__['cmd.run_stdout']('mount -v').split('\n'):
comps = re.sub(r"\s+", " ", line).split()
nod = __salt__['cmd.run_stdout']('ls -l {0}'.format(comps[0]))
nod = ' '.join(nod.split()).split(" ")
parens = re.findall(r'\((.*?)\)', line, re.DOTALL)
ret[comps[3]] = {'device': comps[0],
'fstype': comps[5],
'opts': parens[1].split(", "),
'major': str(nod[4].strip(",")),
'minor': str(nod[5]),
'device_uuid': parens[0]}
return ret
def _active_mounts_darwin(ret):
'''
List active mounts on Mac OS systems
'''
for line in __salt__['cmd.run_stdout']('mount').split('\n'):
comps = re.sub(r"\s+", " ", line).split()
parens = re.findall(r'\((.*?)\)', line, re.DOTALL)[0].split(", ")
ret[comps[2]] = {'device': comps[0],
'fstype': parens[0],
'opts': parens[1:]}
return ret
def active(extended=False):
'''
List the active mounts.
CLI Example:
.. code-block:: bash
salt '*' mount.active
'''
ret = {}
if __grains__['os'] == 'FreeBSD':
_active_mounts_freebsd(ret)
elif __grains__['os'] == 'Solaris':
_active_mounts_solaris(ret)
elif __grains__['os'] == 'OpenBSD':
_active_mounts_openbsd(ret)
elif __grains__['os'] in ['MacOS', 'Darwin']:
_active_mounts_darwin(ret)
else:
if extended:
try:
_active_mountinfo(ret)
except CommandExecutionError:
_active_mounts(ret)
else:
_active_mounts(ret)
return ret
def fstab(config='/etc/fstab'):
'''
List the contents of the fstab
CLI Example:
.. code-block:: bash
salt '*' mount.fstab
'''
ret = {}
if not os.path.isfile(config):
return ret
with salt.utils.fopen(config) as ifile:
for line in ifile:
if line.startswith('#'):
# Commented
continue
if not line.strip():
# Blank line
continue
comps = line.split()
if len(comps) != 6:
# Invalid entry
continue
ret[comps[1]] = {'device': comps[0],
'fstype': comps[2],
'opts': comps[3].split(','),
'dump': comps[4],
'pass': comps[5]}
return ret
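
# Hedged sketch (illustrative, not part of the original module): the dict that
# fstab() builds for a single made-up /etc/fstab line.
def _fstab_entry_example():
    line = '/dev/sda1 /data ext4 defaults,noatime 0 2'
    comps = line.split()
    return {comps[1]: {'device': comps[0],
                       'fstype': comps[2],
                       'opts': comps[3].split(','),
                       'dump': comps[4],
                       'pass': comps[5]}}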
def rm_fstab(name, device, config='/etc/fstab'):
'''
Remove the mount point from the fstab
CLI Example:
.. code-block:: bash
salt '*' mount.rm_fstab /mnt/foo
'''
contents = fstab(config)
if name not in contents:
return True
# The entry is present, get rid of it
lines = []
try:
with salt.utils.fopen(config, 'r') as ifile:
for line in ifile:
if line.startswith('#'):
# Commented
lines.append(line)
continue
if not line.strip():
# Blank line
lines.append(line)
continue
comps = line.split()
if len(comps) != 6:
# Invalid entry
lines.append(line)
continue
comps = line.split()
if device:
if comps[1] == name and comps[0] == device:
continue
else:
if comps[1] == name:
continue
lines.append(line)
except (IOError, OSError) as exc:
msg = "Couldn't read from {0}: {1}"
raise CommandExecutionError(msg.format(config, str(exc)))
try:
with salt.utils.fopen(config, 'w+') as ofile:
ofile.writelines(lines)
except (IOError, OSError) as exc:
msg = "Couldn't write to {0}: {1}"
raise CommandExecutionError(msg.format(config, str(exc)))
return True
def set_fstab(
name,
device,
fstype,
opts='defaults',
dump=0,
pass_num=0,
config='/etc/fstab',
test=False,
**kwargs):
'''
Verify that this mount is represented in the fstab, change the mount
to match the data passed, or add the mount if it is not present.
CLI Example:
.. code-block:: bash
salt '*' mount.set_fstab /mnt/foo /dev/sdz1 ext4
'''
# Fix the opts type if it is a list
if isinstance(opts, list):
opts = ','.join(opts)
lines = []
change = False
present = False
if not os.path.isfile(config):
raise CommandExecutionError('Bad config file "{0}"'.format(config))
try:
with salt.utils.fopen(config, 'r') as ifile:
for line in ifile:
if line.startswith('#'):
# Commented
lines.append(line)
continue
if not line.strip():
# Blank line
lines.append(line)
continue
comps = line.split()
if len(comps) != 6:
# Invalid entry
lines.append(line)
continue
if comps[1] == name or comps[0] == device:
# check to see if there are changes
# and fix them if there are any
present = True
if comps[0] != device:
change = True
comps[0] = device
if comps[1] != name:
change = True
comps[1] = name
if comps[2] != fstype:
change = True
comps[2] = fstype
if comps[3] != opts:
change = True
comps[3] = opts
if comps[4] != str(dump):
change = True
comps[4] = str(dump)
if comps[5] != str(pass_num):
change = True
comps[5] = str(pass_num)
if change:
log.debug(
'fstab entry for mount point {0} needs to be '
'updated'.format(name)
)
newline = (
'{0}\t\t{1}\t{2}\t{3}\t{4} {5}\n'.format(
device, name, fstype, opts, dump, pass_num
)
)
lines.append(newline)
else:
lines.append(line)
except (IOError, OSError) as exc:
msg = 'Couldn\'t read from {0}: {1}'
raise CommandExecutionError(msg.format(config, str(exc)))
if change:
if not salt.utils.test_mode(test=test, **kwargs):
try:
with salt.utils.fopen(config, 'w+') as ofile:
# The line was changed, commit it!
ofile.writelines(lines)
except (IOError, OSError):
msg = 'File not writable {0}'
raise CommandExecutionError(msg.format(config))
return 'change'
if not change:
if present:
# The right entry is already here
return 'present'
else:
if not salt.utils.test_mode(test=test, **kwargs):
# The entry is new, add it to the end of the fstab
newline = '{0}\t\t{1}\t{2}\t{3}\t{4} {5}\n'.format(device,
name,
fstype,
opts,
dump,
pass_num)
lines.append(newline)
try:
with salt.utils.fopen(config, 'w+') as ofile:
# The line was changed, commit it!
ofile.writelines(lines)
except (IOError, OSError):
raise CommandExecutionError(
'File not writable {0}'.format(
config
)
)
return 'new'
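
# Hedged usage sketch (illustrative): set_fstab() reports its outcome through the
# returned string, so callers usually branch on it.  The device and mount point
# below are made up, and the helper is never called automatically.
def _set_fstab_example():
    result = set_fstab('/mnt/data', '/dev/sdb1', 'ext4', opts='defaults,noatime')
    if result == 'present':
        return 'entry already correct'
    elif result == 'change':
        return 'existing entry updated'
    return 'new entry appended'  # result == 'new'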
def rm_automaster(name, device, config='/etc/auto_salt'):
'''
Remove the mount point from the auto_master
CLI Example:
.. code-block:: bash
salt '*' mount.rm_automaster /mnt/foo
'''
contents = automaster(config)
if name not in contents:
return True
# The entry is present, get rid of it
lines = []
try:
with salt.utils.fopen(config, 'r') as ifile:
for line in ifile:
if line.startswith('#'):
# Commented
lines.append(line)
continue
if not line.strip():
# Blank line
lines.append(line)
continue
comps = line.split()
if len(comps) != 3:
# Invalid entry
lines.append(line)
continue
comps = line.split()
prefix = "/.."
name_chk = comps[0].replace(prefix, "")
device_fmt = comps[2].split(":")
if device:
if name_chk == name and device_fmt[1] == device:
continue
else:
if name_chk == name:
continue
lines.append(line)
except (IOError, OSError) as exc:
msg = "Couldn't read from {0}: {1}"
raise CommandExecutionError(msg.format(config, str(exc)))
try:
with salt.utils.fopen(config, 'w+') as ofile:
ofile.writelines(lines)
except (IOError, OSError) as exc:
msg = "Couldn't write to {0}: {1}"
raise CommandExecutionError(msg.format(config, str(exc)))
# Update automount
__salt__['cmd.run']('automount -cv')
return True
def set_automaster(
name,
device,
fstype,
opts='',
config='/etc/auto_salt',
test=False,
**kwargs):
'''
Verify that this mount is represented in the auto_salt, change the mount
to match the data passed, or add the mount if it is not present.
CLI Example:
.. code-block:: bash
salt '*' mount.set_automaster /mnt/foo /dev/sdz1 ext4
'''
# Fix the opts type if it is a list
if isinstance(opts, list):
opts = ','.join(opts)
lines = []
change = False
present = False
automaster_file = "/etc/auto_master"
if not os.path.isfile(config):
__salt__['file.touch'](config)
__salt__['file.append'](automaster_file, "/-\t\t\t{0}".format(config))
name = "/..{0}".format(name)
device_fmt = "{0}:{1}".format(fstype, device)
type_opts = "-fstype={0},{1}".format(fstype, opts)
if fstype == 'smbfs':
device_fmt = device_fmt.replace(fstype, "")
try:
with salt.utils.fopen(config, 'r') as ifile:
for line in ifile:
if line.startswith('#'):
# Commented
lines.append(line)
continue
if not line.strip():
# Blank line
lines.append(line)
continue
comps = line.split()
if len(comps) != 3:
# Invalid entry
lines.append(line)
continue
if comps[0] == name or comps[2] == device_fmt:
# check to see if there are changes
# and fix them if there are any
present = True
if comps[0] != name:
change = True
comps[0] = name
if comps[1] != type_opts:
change = True
comps[1] = type_opts
if comps[2] != device_fmt:
change = True
comps[2] = device_fmt
if change:
log.debug(
'auto_master entry for mount point {0} needs to be '
'updated'.format(name)
)
newline = (
'{0}\t{1}\t{2}\n'.format(
name, type_opts, device_fmt)
)
lines.append(newline)
else:
lines.append(line)
except (IOError, OSError) as exc:
msg = 'Couldn\'t read from {0}: {1}'
raise CommandExecutionError(msg.format(config, str(exc)))
if change:
if not salt.utils.test_mode(test=test, **kwargs):
try:
with salt.utils.fopen(config, 'w+') as ofile:
# The line was changed, commit it!
ofile.writelines(lines)
except (IOError, OSError):
msg = 'File not writable {0}'
raise CommandExecutionError(msg.format(config))
return 'change'
if not change:
if present:
# The right entry is already here
return 'present'
else:
if not salt.utils.test_mode(test=test, **kwargs):
# The entry is new, add it to the end of the fstab
newline = (
'{0}\t{1}\t{2}\n'.format(
name, type_opts, device_fmt)
)
lines.append(newline)
try:
with salt.utils.fopen(config, 'w+') as ofile:
# The line was changed, commit it!
ofile.writelines(lines)
except (IOError, OSError):
raise CommandExecutionError(
'File not writable {0}'.format(
config
)
)
return 'new'
def automaster(config='/etc/auto_salt'):
'''
    List the contents of the auto_master
    CLI Example:
    .. code-block:: bash
        salt '*' mount.automaster
'''
ret = {}
if not os.path.isfile(config):
return ret
with salt.utils.fopen(config) as ifile:
for line in ifile:
if line.startswith('#'):
# Commented
continue
if not line.strip():
# Blank line
continue
comps = line.split()
if len(comps) != 3:
# Invalid entry
continue
prefix = "/.."
name = comps[0].replace(prefix, "")
device_fmt = comps[2].split(":")
opts = comps[1].split(',')
ret[name] = {'device': device_fmt[1],
'fstype': opts[0],
'opts': opts[1:]}
return ret
def mount(name, device, mkmnt=False, fstype='', opts='defaults', user=None):
'''
Mount a device
CLI Example:
.. code-block:: bash
salt '*' mount.mount /mnt/foo /dev/sdz1 True
'''
# Darwin doesn't expect defaults when mounting without other options
if 'defaults' in opts and __grains__['os'] in ['MacOS', 'Darwin']:
opts = None
if isinstance(opts, string_types):
opts = opts.split(',')
if not os.path.exists(name) and mkmnt:
__salt__['file.mkdir'](name=name, user=user)
args = ''
if opts is not None:
lopts = ','.join(opts)
args = '-o {0}'.format(lopts)
if fstype:
args += ' -t {0}'.format(fstype)
cmd = 'mount {0} {1} {2} '.format(args, device, name)
out = __salt__['cmd.run_all'](cmd, runas=user)
if out['retcode']:
return out['stderr']
return True
def remount(name, device, mkmnt=False, fstype='', opts='defaults', user=None):
'''
Attempt to remount a device, if the device is not already mounted, mount
is called
CLI Example:
.. code-block:: bash
salt '*' mount.remount /mnt/foo /dev/sdz1 True
'''
force_mount = False
if __grains__['os'] in ['MacOS', 'Darwin']:
if opts == 'defaults':
opts = 'noowners'
if fstype == 'smbfs':
force_mount = True
if isinstance(opts, string_types):
opts = opts.split(',')
mnts = active()
if name in mnts:
# The mount point is mounted, attempt to remount it with the given data
if 'remount' not in opts and __grains__['os'] not in ['OpenBSD', 'MacOS', 'Darwin']:
opts.append('remount')
if force_mount:
# We need to force the mount but first we should unmount
umount(name, device, user=user)
lopts = ','.join(opts)
args = '-o {0}'.format(lopts)
if fstype:
args += ' -t {0}'.format(fstype)
if __grains__['os'] not in ['OpenBSD', 'MacOS', 'Darwin'] or force_mount:
cmd = 'mount {0} {1} {2} '.format(args, device, name)
else:
cmd = 'mount -u {0} {1} {2} '.format(args, device, name)
out = __salt__['cmd.run_all'](cmd, runas=user)
if out['retcode']:
return out['stderr']
return True
# Mount a filesystem that isn't already
return mount(name, device, mkmnt, fstype, opts, user=user)
def umount(name, device=None, user=None):
'''
Attempt to unmount a device by specifying the directory it is mounted on
CLI Example:
.. code-block:: bash
salt '*' mount.umount /mnt/foo
.. versionadded:: Lithium
salt '*' mount.umount /mnt/foo /dev/xvdc1
'''
mnts = active()
if name not in mnts:
return "{0} does not have anything mounted".format(name)
if not device:
cmd = 'umount {0}'.format(name)
else:
cmd = 'umount {0}'.format(device)
out = __salt__['cmd.run_all'](cmd, runas=user)
if out['retcode']:
return out['stderr']
return True
def is_fuse_exec(cmd):
'''
Returns true if the command passed is a fuse mountable application.
CLI Example:
.. code-block:: bash
salt '*' mount.is_fuse_exec sshfs
'''
cmd_path = _which(cmd)
# No point in running ldd on a command that doesn't exist
if not cmd_path:
return False
elif not _which('ldd'):
raise CommandNotFoundError('ldd')
out = __salt__['cmd.run']('ldd {0}'.format(cmd_path))
return 'libfuse' in out
def swaps():
'''
Return a dict containing information on active swap
CLI Example:
.. code-block:: bash
salt '*' mount.swaps
'''
ret = {}
if __grains__['os'] != 'OpenBSD':
with salt.utils.fopen('/proc/swaps') as fp_:
for line in fp_:
if line.startswith('Filename'):
continue
comps = line.split()
ret[comps[0]] = {'type': comps[1],
'size': comps[2],
'used': comps[3],
'priority': comps[4]}
else:
for line in __salt__['cmd.run_stdout']('swapctl -kl').splitlines():
if line.startswith(('Device', 'Total')):
continue
swap_type = "file"
comps = line.split()
if comps[0].startswith('/dev/'):
swap_type = "partition"
ret[comps[0]] = {'type': swap_type,
'size': comps[1],
'used': comps[2],
'priority': comps[5]}
return ret
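
# Hedged sketch (illustrative, values made up): the per-device dict that swaps()
# builds from one /proc/swaps line on Linux.
def _swaps_entry_example():
    line = '/dev/dm-1 partition 2097148 0 -2'
    comps = line.split()
    return {comps[0]: {'type': comps[1],
                       'size': comps[2],
                       'used': comps[3],
                       'priority': comps[4]}}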
def swapon(name, priority=None):
'''
Activate a swap disk
CLI Example:
.. code-block:: bash
salt '*' mount.swapon /root/swapfile
'''
ret = {}
on_ = swaps()
if name in on_:
ret['stats'] = on_[name]
ret['new'] = False
return ret
cmd = 'swapon {0}'.format(name)
if priority:
cmd += ' -p {0}'.format(priority)
__salt__['cmd.run'](cmd)
on_ = swaps()
if name in on_:
ret['stats'] = on_[name]
ret['new'] = True
return ret
return ret
def swapoff(name):
'''
Deactivate a named swap mount
CLI Example:
.. code-block:: bash
salt '*' mount.swapoff /root/swapfile
'''
on_ = swaps()
if name in on_:
if __grains__['os'] != 'OpenBSD':
__salt__['cmd.run']('swapoff {0}'.format(name))
else:
__salt__['cmd.run']('swapctl -d {0}'.format(name))
on_ = swaps()
if name in on_:
return False
return True
return None
def is_mounted(name):
'''
.. versionadded:: 2014.7.0
Provide information if the path is mounted
CLI Example:
.. code-block:: bash
salt '*' mount.is_mounted /mnt/share
'''
active_ = active()
    return name in active_
| [((391, 418), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (408, 418), False, 'import logging\n'), ((22597, 22608), 'salt.utils.which', '_which', (['cmd'], {}), '(cmd)\n', (22603, 22608), True, 'from salt.utils import which as _which\n'), ((1123, 1151), 'os.access', 'os.access', (['filename', 'os.R_OK'], {}), '(filename, os.R_OK)\n', (1132, 1151), False, 'import os\n'), ((2440, 2468), 'os.access', 'os.access', (['filename', 'os.R_OK'], {}), '(filename, os.R_OK)\n', (2449, 2468), False, 'import os\n'), ((4000, 4042), 're.findall', 're.findall', (['"""\\\\((.*?)\\\\)"""', 'line', 're.DOTALL'], {}), "('\\\\((.*?)\\\\)', line, re.DOTALL)\n", (4010, 4042), False, 'import re\n'), ((5672, 5694), 'os.path.isfile', 'os.path.isfile', (['config'], {}), '(config)\n', (5686, 5694), False, 'import os\n'), ((8551, 8573), 'os.path.isfile', 'os.path.isfile', (['config'], {}), '(config)\n', (8565, 8573), False, 'import os\n'), ((14923, 14945), 'os.path.isfile', 'os.path.isfile', (['config'], {}), '(config)\n', (14937, 14945), False, 'import os\n'), ((18576, 18598), 'os.path.isfile', 'os.path.isfile', (['config'], {}), '(config)\n', (18590, 18598), False, 'import os\n'), ((19762, 19782), 'os.path.exists', 'os.path.exists', (['name'], {}), '(name)\n', (19776, 19782), False, 'import os\n'), ((22727, 22740), 'salt.utils.which', '_which', (['"""ldd"""'], {}), "('ldd')\n", (22733, 22740), True, 'from salt.utils import which as _which\n'), ((22756, 22783), 'salt.exceptions.CommandNotFoundError', 'CommandNotFoundError', (['"""ldd"""'], {}), "('ldd')\n", (22776, 22783), False, 'from salt.exceptions import CommandNotFoundError, CommandExecutionError\n'), ((935, 960), 're.sub', 're.sub', (['"""\\\\s+"""', '""" """', 'line'], {}), "('\\\\s+', ' ', line)\n", (941, 960), False, 'import re\n'), ((3091, 3116), 're.sub', 're.sub', (['"""\\\\s+"""', '""" """', 'line'], {}), "('\\\\s+', ' ', line)\n", (3097, 3116), False, 'import re\n'), ((3461, 3486), 're.sub', 're.sub', (['"""\\\\s+"""', '""" """', 'line'], {}), "('\\\\s+', ' ', line)\n", (3467, 3486), False, 'import re\n'), ((3831, 3856), 're.sub', 're.sub', (['"""\\\\s+"""', '""" """', 'line'], {}), "('\\\\s+', ' ', line)\n", (3837, 3856), False, 'import re\n'), ((4531, 4556), 're.sub', 're.sub', (['"""\\\\s+"""', '""" """', 'line'], {}), "('\\\\s+', ' ', line)\n", (4537, 4556), False, 'import re\n'), ((4582, 4624), 're.findall', 're.findall', (['"""\\\\((.*?)\\\\)"""', 'line', 're.DOTALL'], {}), "('\\\\((.*?)\\\\)', line, re.DOTALL)\n", (4592, 4624), False, 'import re\n')] |
ExpertOfNone/expert_of_none | base/admin.py | 9ff4e4279a570712766546122c014c754f753485 | from django.contrib import admin
from base.models import Topic, Photo
class EONBaseAdmin(admin.ModelAdmin):
def get_changeform_initial_data(self, request):
initial = super().get_changeform_initial_data(request)
if 'add' in request.META['PATH_INFO']:
initial['created_by'] = request.user
initial['modified_by'] = request.user
return initial
def save_model(self, request, obj, form, change):
if not obj.created_by:
obj.created_by = request.user
return super().save_model(request, obj, form, change)
class TopicAdmin(EONBaseAdmin):
list_display = [
'name', 'parent_topic', 'top_level', 'modified_by', 'modified', 'created_by', 'created',
]
class PhotoAdmin(EONBaseAdmin):
# TODO Add Proper List Display
pass
admin.site.register(Topic, TopicAdmin)
admin.site.register(Photo, PhotoAdmin)
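

# Hedged sketch (illustrative only): other admins in the project can reuse the
# created_by/modified_by handling by subclassing EONBaseAdmin.  The class below is
# not registered against any model; the field names simply mirror TopicAdmin.
class ExampleAdmin(EONBaseAdmin):
    list_display = ['name', 'created_by', 'created']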
| [((829, 867), 'django.contrib.admin.site.register', 'admin.site.register', (['Topic', 'TopicAdmin'], {}), '(Topic, TopicAdmin)\n', (848, 867), False, 'from django.contrib import admin\n'), ((868, 906), 'django.contrib.admin.site.register', 'admin.site.register', (['Photo', 'PhotoAdmin'], {}), '(Photo, PhotoAdmin)\n', (887, 906), False, 'from django.contrib import admin\n')] |
z1digitalstudio/met | met/metadataparser/models/entity_type.py | 7840e7520bb4c3cb0328d5988468eefe6639f950 | #################################################################
# MET v2 Metadata Explorer Tool
#
# This Software is Open Source. See License: https://github.com/TERENA/met/blob/master/LICENSE.md
# Copyright (c) 2012, TERENA All rights reserved.
#
# This Software is based on MET v1 developed for TERENA by Yaco Sistemas, http://www.yaco.es/
# MET v2 was developed for TERENA by Tamim Ziai, DAASI International GmbH, http://www.daasi.de
# Current version of MET has been revised for performance improvements by Andrea Biancini,
# Consortium GARR, http://www.garr.it
##########################################################################
from django.db import models
from django.utils.translation import ugettext_lazy as _
class EntityType(models.Model):
"""
Model describing the type of an entity.
"""
name = models.CharField(blank=False, max_length=20, unique=True,
verbose_name=_(u'Name'), db_index=True)
xmlname = models.CharField(blank=False, max_length=20, unique=True,
verbose_name=_(u'Name in XML'), db_index=True)
def __unicode__(self):
return self.name
| [((934, 944), 'django.utils.translation.ugettext_lazy', '_', (['u"""Name"""'], {}), "(u'Name')\n", (935, 944), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1078, 1095), 'django.utils.translation.ugettext_lazy', '_', (['u"""Name in XML"""'], {}), "(u'Name in XML')\n", (1079, 1095), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
daimajia/wxpy | wxpy/bot.py | 2b56fb67b9ccb072538fd778a27a8fef8d9c93e6 | import traceback
from pprint import pformat
from threading import Thread
import itchat
import logging
from wxpy.chat import Chat
from wxpy.chats import Chats
from wxpy.friend import Friend
from wxpy.group import Group
from wxpy.message import MessageConfigs, Messages, Message, MessageConfig
from wxpy.mp import MP
from wxpy.response import ResponseError
from wxpy.user import User
from wxpy.utils.constants import SYSTEM
from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list
logger = logging.getLogger('wxpy')
class Robot(object):
"""
    Robot object used to log in to and operate a WeChat account, covering most Web WeChat features
"""
def __init__(
self, save_path=None, console_qr=False, qr_path=None,
qr_callback=None, login_callback=None, logout_callback=None
):
"""
:param save_path:
            | File path for saving or loading the login status, e.g. 'wxpy.pkl'; if empty, no attempt is made to load it.
            | With this parameter set, the login status can be reloaded shortly afterwards, avoiding another QR-code scan; once it has expired, a fresh login is requested.
        :param console_qr: show the login QR code in the terminal; requires the Pillow module
        :param qr_path: path for saving the QR code image
        :param qr_callback: callback invoked when the QR code is obtained; receives: uuid, status, qrcode
        :param login_callback: callback invoked on login; receives the same arguments as above
        :param logout_callback: callback invoked on logout; receives the same arguments as above
"""
self.core = itchat.Core()
itchat.instanceList.append(self)
self.core.auto_login(
hotReload=bool(save_path), statusStorageDir=save_path,
enableCmdQR=console_qr, picDir=qr_path, qrCallback=qr_callback,
loginCallback=login_callback, exitCallback=logout_callback
)
self.message_configs = MessageConfigs(self)
self.messages = Messages(robot=self)
self.file_helper = Chat(wrap_user_name('filehelper'))
self.file_helper.robot = self
self.file_helper.nick_name = '文件传输助手'
self.self = Chat(self.core.loginInfo['User'])
self.self.robot = self
self.save_path = save_path
def __repr__(self):
return '<{}: {}>'.format(self.__class__.__name__, self.self.name)
@handle_response()
def logout(self):
"""
        Log out of the current account
"""
return self.core.logout()
@property
def alive(self):
"""
        Current login status
        :return: True if logged in, otherwise False
"""
return self.core.alive
@alive.setter
def alive(self, value):
self.core.alive = value
def dump_login_status(self, save_path=None):
return self.core.dump_login_status(save_path or self.save_path)
# chats
def except_self(self, chats_or_dicts):
"""
        Exclude the bot's own account from a collection of chat objects or a list of user dicts
        :param chats_or_dicts: collection of chat objects or list of user dicts
        :return: the list with the bot itself excluded
"""
return list(filter(lambda x: get_user_name(x) != self.self.user_name, chats_or_dicts))
def chats(self, update=False):
"""
        Get all chat objects
        :param update: whether to refresh the data from the server
        :return: collection of chat objects
"""
return Chats(self.friends(update) + self.groups(update) + self.mps(update), self)
def friends(self, update=False):
"""
        Get all friends
        :param update: whether to refresh the data from the server
        :return: collection of chat objects
"""
@handle_response(Friend)
def do():
return self.core.get_friends(update=update)
ret = do()
ret.source = self
return ret
@handle_response(Group)
def groups(self, update=False, contact_only=False):
"""
        Get all group chats
        :param update: whether to refresh the data from the server
        :param contact_only: whether to restrict to group chats saved as contacts
        :return: collection of group chats
"""
return self.core.get_chatrooms(update=update, contactOnly=contact_only)
@handle_response(MP)
def mps(self, update=False):
"""
        Get all official (MP) accounts
        :param update: whether to refresh the data from the server
        :return: collection of chat objects
"""
return self.core.get_mps(update=update)
@handle_response(User)
def user_details(self, user_or_users, chunk_size=50):
"""
        Get detailed information (region, gender, signature, etc.) for a single user or a batch of users; not usable for group chat members
        :param user_or_users: one or more user objects or user_name values
        :param chunk_size: batch size used when splitting the request, currently 50
        :return: detailed information for the user or users
"""
def chunks():
total = ensure_list(user_or_users)
for i in range(0, len(total), chunk_size):
yield total[i:i + chunk_size]
@handle_response()
def process_one_chunk(_chunk):
return self.core.update_friend(userName=get_user_name(_chunk))
if isinstance(user_or_users, (list, tuple)):
ret = list()
for chunk in chunks():
chunk_ret = process_one_chunk(chunk)
if isinstance(chunk_ret, list):
ret += chunk_ret
else:
ret.append(chunk_ret)
return ret
else:
return process_one_chunk(user_or_users)
def search(self, name=None, **attributes):
"""
        Search across chat objects of every type
        :param name: name to match (nickname, remark name, etc.)
        :param attributes: attribute key/value pairs; keys can be sex, province, city, etc. For example, province='广东' may be given
        :return: collection of matching chat objects
"""
return self.chats().search(name, **attributes)
# add / create
@handle_response()
def add_friend(self, user, verify_content=''):
"""
        Add a user as a friend
        :param user: user object or user name
        :param verify_content: verification message sent with the request
"""
return self.core.add_friend(
userName=get_user_name(user),
status=2,
verifyContent=verify_content,
autoUpdate=True
)
@handle_response()
def accept_friend(self, user, verify_content=''):
"""
        Accept a user as a friend
        :param user: user object or user name
        :param verify_content: verification message sent with the request
"""
        # Todo: verify that the friend-accepting API works, and return the new friend directly when accepting
return self.core.add_friend(
userName=get_user_name(user),
status=3,
verifyContent=verify_content,
autoUpdate=True
)
def create_group(self, users, topic=None):
"""
        Create a new group chat
        :param users: list of users
        :param topic: group name
        :return: a new group chat object if creation succeeds
"""
@handle_response()
def request():
return self.core.create_chatroom(
memberList=wrap_user_name(users),
topic=topic or ''
)
ret = request()
user_name = ret.get('ChatRoomName')
if user_name:
return Group(self.core.update_chatroom(userName=user_name))
else:
raise ResponseError('Failed to create group:\n{}'.format(pformat(ret)))
# messages
def _process_message(self, msg):
"""
        Process a received message
"""
if not self.alive:
return
func, run_async = self.message_configs.get_func(msg)
if not func:
return
def process():
# noinspection PyBroadException
try:
ret = func(msg)
if ret is not None:
if isinstance(ret, (tuple, list)):
self.core.send(
msg=str(ret[0]),
toUserName=msg.chat.user_name,
mediaId=ret[1]
)
else:
self.core.send(
msg=str(ret),
toUserName=msg.chat.user_name
)
except:
logger.warning(
'An error occurred in registered function, '
'use `Robot().start(debug=True)` to show detailed information')
logger.debug(traceback.format_exc())
if run_async:
Thread(target=process).start()
else:
process()
def register(
self, chats=None, msg_types=None,
except_self=True, run_async=True, enabled=True
):
"""
        Decorator: register a message-handling configuration
        :param chats: one chat object or chat type, or a list of them; if empty, matches all chats
        :param msg_types: one message type or a list of them; if empty, matches all message types (except SYSTEM messages)
        :param except_self: exclude messages you sent from the phone yourself
        :param run_async: run the configured function asynchronously, improving responsiveness
        :param enabled: default enabled state of this configuration; it can be switched on or off dynamically afterwards
"""
def register(func):
self.message_configs.append(MessageConfig(
robot=self, func=func, chats=chats, msg_types=msg_types,
except_self=except_self, run_async=run_async, enabled=enabled
))
return func
return register
def start(self, block=True):
"""
        Start listening for and processing messages
        :param block: whether to block the current thread; if False, run in a new thread
"""
def listen():
logger.info('{} Auto-reply started.'.format(self))
try:
while self.alive:
msg = Message(self.core.msgList.get(), self)
if msg.type is not SYSTEM:
self.messages.append(msg)
self._process_message(msg)
except KeyboardInterrupt:
logger.info('KeyboardInterrupt received, ending...')
self.alive = False
if self.core.useHotReload:
self.dump_login_status()
logger.info('Bye.')
if block:
listen()
else:
t = Thread(target=listen, daemon=True)
t.start()
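

# Hedged usage sketch (illustrative; not executed on import, because creating a
# Robot requires scanning a login QR code).  Only methods defined above are used.
def _example_auto_reply():
    robot = Robot(save_path='wxpy.pkl')

    @robot.register(except_self=True, run_async=True)
    def _reply(msg):
        # Returning a string sends it back to the chat the message came from
        return 'Received a message of type {}'.format(msg.type)

    robot.start(block=True)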
| [((523, 548), 'logging.getLogger', 'logging.getLogger', (['"""wxpy"""'], {}), "('wxpy')\n", (540, 548), False, 'import logging\n'), ((1995, 2012), 'wxpy.utils.tools.handle_response', 'handle_response', ([], {}), '()\n', (2010, 2012), False, 'from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list\n'), ((3267, 3289), 'wxpy.utils.tools.handle_response', 'handle_response', (['Group'], {}), '(Group)\n', (3282, 3289), False, 'from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list\n'), ((3565, 3584), 'wxpy.utils.tools.handle_response', 'handle_response', (['MP'], {}), '(MP)\n', (3580, 3584), False, 'from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list\n'), ((3765, 3786), 'wxpy.utils.tools.handle_response', 'handle_response', (['User'], {}), '(User)\n', (3780, 3786), False, 'from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list\n'), ((5098, 5115), 'wxpy.utils.tools.handle_response', 'handle_response', ([], {}), '()\n', (5113, 5115), False, 'from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list\n'), ((5463, 5480), 'wxpy.utils.tools.handle_response', 'handle_response', ([], {}), '()\n', (5478, 5480), False, 'from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list\n'), ((1213, 1226), 'itchat.Core', 'itchat.Core', ([], {}), '()\n', (1224, 1226), False, 'import itchat\n'), ((1235, 1267), 'itchat.instanceList.append', 'itchat.instanceList.append', (['self'], {}), '(self)\n', (1261, 1267), False, 'import itchat\n'), ((1555, 1575), 'wxpy.message.MessageConfigs', 'MessageConfigs', (['self'], {}), '(self)\n', (1569, 1575), False, 'from wxpy.message import MessageConfigs, Messages, Message, MessageConfig\n'), ((1600, 1620), 'wxpy.message.Messages', 'Messages', ([], {'robot': 'self'}), '(robot=self)\n', (1608, 1620), False, 'from wxpy.message import MessageConfigs, Messages, Message, MessageConfig\n'), ((1789, 1822), 'wxpy.chat.Chat', 'Chat', (["self.core.loginInfo['User']"], {}), "(self.core.loginInfo['User'])\n", (1793, 1822), False, 'from wxpy.chat import Chat\n'), ((3097, 3120), 'wxpy.utils.tools.handle_response', 'handle_response', (['Friend'], {}), '(Friend)\n', (3112, 3120), False, 'from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list\n'), ((4227, 4244), 'wxpy.utils.tools.handle_response', 'handle_response', ([], {}), '()\n', (4242, 4244), False, 'from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list\n'), ((6055, 6072), 'wxpy.utils.tools.handle_response', 'handle_response', ([], {}), '()\n', (6070, 6072), False, 'from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list\n'), ((1654, 1682), 'wxpy.utils.tools.wrap_user_name', 'wrap_user_name', (['"""filehelper"""'], {}), "('filehelper')\n", (1668, 1682), False, 'from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list\n'), ((4089, 4115), 'wxpy.utils.tools.ensure_list', 'ensure_list', (['user_or_users'], {}), '(user_or_users)\n', (4100, 4115), False, 'from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list\n'), ((9255, 9289), 'threading.Thread', 'Thread', ([], {'target': 'listen', 'daemon': '(True)'}), '(target=listen, daemon=True)\n', (9261, 9289), False, 'from threading import Thread\n'), ((5334, 5353), 'wxpy.utils.tools.get_user_name', 'get_user_name', (['user'], {}), '(user)\n', 
(5347, 5353), False, 'from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list\n'), ((5745, 5764), 'wxpy.utils.tools.get_user_name', 'get_user_name', (['user'], {}), '(user)\n', (5758, 5764), False, 'from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list\n'), ((8216, 8353), 'wxpy.message.MessageConfig', 'MessageConfig', ([], {'robot': 'self', 'func': 'func', 'chats': 'chats', 'msg_types': 'msg_types', 'except_self': 'except_self', 'run_async': 'run_async', 'enabled': 'enabled'}), '(robot=self, func=func, chats=chats, msg_types=msg_types,\n except_self=except_self, run_async=run_async, enabled=enabled)\n', (8229, 8353), False, 'from wxpy.message import MessageConfigs, Messages, Message, MessageConfig\n'), ((4336, 4357), 'wxpy.utils.tools.get_user_name', 'get_user_name', (['_chunk'], {}), '(_chunk)\n', (4349, 4357), False, 'from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list\n'), ((6169, 6190), 'wxpy.utils.tools.wrap_user_name', 'wrap_user_name', (['users'], {}), '(users)\n', (6183, 6190), False, 'from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list\n'), ((6486, 6498), 'pprint.pformat', 'pformat', (['ret'], {}), '(ret)\n', (6493, 6498), False, 'from pprint import pformat\n'), ((7648, 7670), 'threading.Thread', 'Thread', ([], {'target': 'process'}), '(target=process)\n', (7654, 7670), False, 'from threading import Thread\n'), ((2679, 2695), 'wxpy.utils.tools.get_user_name', 'get_user_name', (['x'], {}), '(x)\n', (2692, 2695), False, 'from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list\n'), ((7589, 7611), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (7609, 7611), False, 'import traceback\n')] |
HPLegion/glue | glue/__init__.py | 1843787ccb4de852dfe103ff58473da13faccf5f | # Set up configuration variables
__all__ = ['custom_viewer', 'qglue', 'test']
import os
import sys
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution('glue-core').version
except DistributionNotFound:
__version__ = 'undefined'
from ._mpl_backend import MatplotlibBackendSetter
sys.meta_path.append(MatplotlibBackendSetter())
from glue.viewers.custom.helper import custom_viewer
# Load user's configuration file
from .config import load_configuration
env = load_configuration()
from .qglue import qglue
from .main import load_plugins # noqa
def test(no_optional_skip=False):
from pytest import main
root = os.path.abspath(os.path.dirname(__file__))
args = [root, '-x']
if no_optional_skip:
args.append('--no-optional-skip')
return main(args=args)
from glue._settings_helpers import load_settings
load_settings()
# In PyQt 5.5+, PyQt overrides the default exception catching and fatally
# crashes the Qt application without printing out any details about the error.
# Below we revert the exception hook to the original Python one. Note that we
# can't just do sys.excepthook = sys.__excepthook__ otherwise PyQt will detect
# the default excepthook is in place and override it.
def handle_exception(exc_type, exc_value, exc_traceback):
sys.__excepthook__(exc_type, exc_value, exc_traceback)
sys.excepthook = handle_exception
| [((895, 910), 'glue._settings_helpers.load_settings', 'load_settings', ([], {}), '()\n', (908, 910), False, 'from glue._settings_helpers import load_settings\n'), ((828, 843), 'pytest.main', 'main', ([], {'args': 'args'}), '(args=args)\n', (832, 843), False, 'from pytest import main\n'), ((1341, 1395), 'sys.__excepthook__', 'sys.__excepthook__', (['exc_type', 'exc_value', 'exc_traceback'], {}), '(exc_type, exc_value, exc_traceback)\n', (1359, 1395), False, 'import sys\n'), ((192, 221), 'pkg_resources.get_distribution', 'get_distribution', (['"""glue-core"""'], {}), "('glue-core')\n", (208, 221), False, 'from pkg_resources import get_distribution, DistributionNotFound\n'), ((699, 724), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (714, 724), False, 'import os\n')] |
pran01/AlgoVision | run.py | 40e85f3c55266f43ee103dfa0852a63af306a8d4 | from algovision import app
if __name__ == "__main__":
    app.run(debug=True, host='0.0.0.0')
| [((58, 93), 'algovision.app.run', 'app.run', ([], {'debug': '(True)', 'host': '"""0.0.0.0"""'}), "(debug=True, host='0.0.0.0')\n", (65, 93), False, 'from algovision import app\n')] |
rffontenelle/readthedocs.org | readthedocs/settings/proxito/base.py | a7a9072215551156b9ddc22280cc085944eaa4b0 | """
Base settings for Proxito
Some of these settings will eventually be backported into the main settings file,
but currently we have them to be able to run the site with the old middleware for
a staged rollout of the proxito code.
"""
import logging

log = logging.getLogger(__name__)


class CommunityProxitoSettingsMixin:
ROOT_URLCONF = 'readthedocs.proxito.urls'
USE_SUBDOMAIN = True
SECURE_REFERRER_POLICY = "no-referrer-when-downgrade"
# Allow cookies from cross-site requests on subdomains for now.
# As 'Lax' breaks when the page is embedded in an iframe.
SESSION_COOKIE_SAMESITE = None
@property
def DATABASES(self):
# This keeps connections to the DB alive,
# which reduces latency with connecting to postgres
dbs = getattr(super(), 'DATABASES', {})
for db in dbs:
dbs[db]['CONN_MAX_AGE'] = 86400
return dbs
@property
def MIDDLEWARE(self): # noqa
# Use our new middleware instead of the old one
classes = super().MIDDLEWARE
classes = list(classes)
classes.append('readthedocs.proxito.middleware.ProxitoMiddleware')
middleware_to_remove = (
'csp.middleware.CSPMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
for mw in middleware_to_remove:
if mw in classes:
classes.remove(mw)
else:
log.warning('Failed to remove middleware: %s', mw)
return classes
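

# Hedged sketch (illustrative): the mixin is meant to sit in front of the
# project's regular settings class, roughly as below.  _BaseSettings is a
# hypothetical stand-in used only to show the composition order.
class _BaseSettings:
    MIDDLEWARE = ['django.middleware.clickjacking.XFrameOptionsMiddleware']


class _ExampleProxitoSettings(CommunityProxitoSettingsMixin, _BaseSettings):
    pass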
| [] |
jessica-tu/jupyter | model_selection/tests/test_search.py | 917e02bc29e0fa06bd8adb25fe5388ac381ec829 | """Test the search module"""
from collections.abc import Iterable, Sized
from io import StringIO
from itertools import chain, product
from functools import partial
import pickle
import sys
from types import GeneratorType
import re
import numpy as np
import scipy.sparse as sp
import pytest
from sklearn.utils.fixes import sp_version
from sklearn.utils._testing import assert_raises
from sklearn.utils._testing import assert_warns
from sklearn.utils._testing import assert_warns_message
from sklearn.utils._testing import assert_raise_message
from sklearn.utils._testing import assert_array_equal
from sklearn.utils._testing import assert_array_almost_equal
from sklearn.utils._testing import assert_allclose
from sklearn.utils._testing import assert_almost_equal
from sklearn.utils._testing import ignore_warnings
from sklearn.utils._mocking import CheckingClassifier, MockDataFrame
from scipy.stats import bernoulli, expon, uniform
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.base import clone
from sklearn.exceptions import NotFittedError
from sklearn.datasets import make_classification
from sklearn.datasets import make_blobs
from sklearn.datasets import make_multilabel_classification
from sklearn.model_selection import fit_grid_point
from sklearn.model_selection import train_test_split
from sklearn.model_selection import KFold
from sklearn.model_selection import StratifiedKFold
from sklearn.model_selection import StratifiedShuffleSplit
from sklearn.model_selection import LeaveOneGroupOut
from sklearn.model_selection import LeavePGroupsOut
from sklearn.model_selection import GroupKFold
from sklearn.model_selection import GroupShuffleSplit
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import RandomizedSearchCV
from sklearn.model_selection import ParameterGrid
from sklearn.model_selection import ParameterSampler
from sklearn.model_selection._search import BaseSearchCV
from sklearn.model_selection._validation import FitFailedWarning
from sklearn.svm import LinearSVC, SVC
from sklearn.tree import DecisionTreeRegressor
from sklearn.tree import DecisionTreeClassifier
from sklearn.cluster import KMeans
from sklearn.neighbors import KernelDensity
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import f1_score
from sklearn.metrics import recall_score
from sklearn.metrics import accuracy_score
from sklearn.metrics import make_scorer
from sklearn.metrics import roc_auc_score
from sklearn.metrics.pairwise import euclidean_distances
from sklearn.impute import SimpleImputer
from sklearn.pipeline import Pipeline
from sklearn.linear_model import Ridge, SGDClassifier, LinearRegression
from sklearn.experimental import enable_hist_gradient_boosting # noqa
from sklearn.ensemble import HistGradientBoostingClassifier
from sklearn.model_selection.tests.common import OneTimeSplitter
# Neither of the following two estimators inherit from BaseEstimator,
# to test hyperparameter search on user-defined classifiers.
class MockClassifier:
"""Dummy classifier to test the parameter search algorithms"""
def __init__(self, foo_param=0):
self.foo_param = foo_param
def fit(self, X, Y):
assert len(X) == len(Y)
self.classes_ = np.unique(Y)
return self
def predict(self, T):
return T.shape[0]
def transform(self, X):
return X + self.foo_param
def inverse_transform(self, X):
return X - self.foo_param
predict_proba = predict
predict_log_proba = predict
decision_function = predict
def score(self, X=None, Y=None):
if self.foo_param > 1:
score = 1.
else:
score = 0.
return score
def get_params(self, deep=False):
return {'foo_param': self.foo_param}
def set_params(self, **params):
self.foo_param = params['foo_param']
return self
class LinearSVCNoScore(LinearSVC):
"""An LinearSVC classifier that has no score method."""
@property
def score(self):
raise AttributeError
X = np.array([[-1, -1], [-2, -1], [1, 1], [2, 1]])
y = np.array([1, 1, 2, 2])
def assert_grid_iter_equals_getitem(grid):
assert list(grid) == [grid[i] for i in range(len(grid))]
@pytest.mark.parametrize("klass", [ParameterGrid,
partial(ParameterSampler, n_iter=10)])
@pytest.mark.parametrize(
"input, error_type, error_message",
[(0, TypeError, r'Parameter .* is not a dict or a list \(0\)'),
([{'foo': [0]}, 0], TypeError, r'Parameter .* is not a dict \(0\)'),
({'foo': 0}, TypeError, "Parameter.* value is not iterable .*"
r"\(key='foo', value=0\)")]
)
def test_validate_parameter_input(klass, input, error_type, error_message):
with pytest.raises(error_type, match=error_message):
klass(input)
def test_parameter_grid():
# Test basic properties of ParameterGrid.
params1 = {"foo": [1, 2, 3]}
grid1 = ParameterGrid(params1)
assert isinstance(grid1, Iterable)
assert isinstance(grid1, Sized)
assert len(grid1) == 3
assert_grid_iter_equals_getitem(grid1)
params2 = {"foo": [4, 2],
"bar": ["ham", "spam", "eggs"]}
grid2 = ParameterGrid(params2)
assert len(grid2) == 6
# loop to assert we can iterate over the grid multiple times
for i in range(2):
# tuple + chain transforms {"a": 1, "b": 2} to ("a", 1, "b", 2)
points = set(tuple(chain(*(sorted(p.items())))) for p in grid2)
assert (points ==
set(("bar", x, "foo", y)
for x, y in product(params2["bar"], params2["foo"])))
assert_grid_iter_equals_getitem(grid2)
# Special case: empty grid (useful to get default estimator settings)
empty = ParameterGrid({})
assert len(empty) == 1
assert list(empty) == [{}]
assert_grid_iter_equals_getitem(empty)
assert_raises(IndexError, lambda: empty[1])
has_empty = ParameterGrid([{'C': [1, 10]}, {}, {'C': [.5]}])
assert len(has_empty) == 4
assert list(has_empty) == [{'C': 1}, {'C': 10}, {}, {'C': .5}]
assert_grid_iter_equals_getitem(has_empty)
def test_grid_search():
# Test that the best estimator contains the right value for foo_param
clf = MockClassifier()
grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}, cv=3, verbose=3)
# make sure it selects the smallest parameter in case of ties
old_stdout = sys.stdout
sys.stdout = StringIO()
grid_search.fit(X, y)
sys.stdout = old_stdout
assert grid_search.best_estimator_.foo_param == 2
assert_array_equal(grid_search.cv_results_["param_foo_param"].data,
[1, 2, 3])
# Smoke test the score etc:
grid_search.score(X, y)
grid_search.predict_proba(X)
grid_search.decision_function(X)
grid_search.transform(X)
# Test exception handling on scoring
grid_search.scoring = 'sklearn'
assert_raises(ValueError, grid_search.fit, X, y)
def test_grid_search_pipeline_steps():
# check that parameters that are estimators are cloned before fitting
pipe = Pipeline([('regressor', LinearRegression())])
param_grid = {'regressor': [LinearRegression(), Ridge()]}
grid_search = GridSearchCV(pipe, param_grid, cv=2)
grid_search.fit(X, y)
regressor_results = grid_search.cv_results_['param_regressor']
assert isinstance(regressor_results[0], LinearRegression)
assert isinstance(regressor_results[1], Ridge)
assert not hasattr(regressor_results[0], 'coef_')
assert not hasattr(regressor_results[1], 'coef_')
assert regressor_results[0] is not grid_search.best_estimator_
assert regressor_results[1] is not grid_search.best_estimator_
# check that we didn't modify the parameter grid that was passed
assert not hasattr(param_grid['regressor'][0], 'coef_')
assert not hasattr(param_grid['regressor'][1], 'coef_')
@pytest.mark.parametrize("SearchCV", [GridSearchCV, RandomizedSearchCV])
def test_SearchCV_with_fit_params(SearchCV):
X = np.arange(100).reshape(10, 10)
y = np.array([0] * 5 + [1] * 5)
clf = CheckingClassifier(expected_fit_params=['spam', 'eggs'])
searcher = SearchCV(
clf, {'foo_param': [1, 2, 3]}, cv=2, error_score="raise"
)
# The CheckingClassifier generates an assertion error if
# a parameter is missing or has length != len(X).
err_msg = r"Expected fit parameter\(s\) \['eggs'\] not seen."
with pytest.raises(AssertionError, match=err_msg):
searcher.fit(X, y, spam=np.ones(10))
err_msg = "Fit parameter spam has length 1; expected"
with pytest.raises(AssertionError, match=err_msg):
searcher.fit(X, y, spam=np.ones(1), eggs=np.zeros(10))
searcher.fit(X, y, spam=np.ones(10), eggs=np.zeros(10))
@ignore_warnings
def test_grid_search_no_score():
# Test grid-search on classifier that has no score function.
clf = LinearSVC(random_state=0)
X, y = make_blobs(random_state=0, centers=2)
Cs = [.1, 1, 10]
clf_no_score = LinearSVCNoScore(random_state=0)
grid_search = GridSearchCV(clf, {'C': Cs}, scoring='accuracy')
grid_search.fit(X, y)
grid_search_no_score = GridSearchCV(clf_no_score, {'C': Cs},
scoring='accuracy')
# smoketest grid search
grid_search_no_score.fit(X, y)
# check that best params are equal
assert grid_search_no_score.best_params_ == grid_search.best_params_
# check that we can call score and that it gives the correct result
assert grid_search.score(X, y) == grid_search_no_score.score(X, y)
# giving no scoring function raises an error
grid_search_no_score = GridSearchCV(clf_no_score, {'C': Cs})
assert_raise_message(TypeError, "no scoring", grid_search_no_score.fit,
[[1]])
def test_grid_search_score_method():
X, y = make_classification(n_samples=100, n_classes=2, flip_y=.2,
random_state=0)
clf = LinearSVC(random_state=0)
grid = {'C': [.1]}
search_no_scoring = GridSearchCV(clf, grid, scoring=None).fit(X, y)
search_accuracy = GridSearchCV(clf, grid, scoring='accuracy').fit(X, y)
search_no_score_method_auc = GridSearchCV(LinearSVCNoScore(), grid,
scoring='roc_auc'
).fit(X, y)
search_auc = GridSearchCV(clf, grid, scoring='roc_auc').fit(X, y)
# Check warning only occurs in situation where behavior changed:
# estimator requires score method to compete with scoring parameter
score_no_scoring = search_no_scoring.score(X, y)
score_accuracy = search_accuracy.score(X, y)
score_no_score_auc = search_no_score_method_auc.score(X, y)
score_auc = search_auc.score(X, y)
# ensure the test is sane
assert score_auc < 1.0
assert score_accuracy < 1.0
assert score_auc != score_accuracy
assert_almost_equal(score_accuracy, score_no_scoring)
assert_almost_equal(score_auc, score_no_score_auc)
def test_grid_search_groups():
# Check if ValueError (when groups is None) propagates to GridSearchCV
# And also check if groups is correctly passed to the cv object
rng = np.random.RandomState(0)
X, y = make_classification(n_samples=15, n_classes=2, random_state=0)
groups = rng.randint(0, 3, 15)
clf = LinearSVC(random_state=0)
grid = {'C': [1]}
group_cvs = [LeaveOneGroupOut(), LeavePGroupsOut(2),
GroupKFold(n_splits=3), GroupShuffleSplit()]
for cv in group_cvs:
gs = GridSearchCV(clf, grid, cv=cv)
assert_raise_message(ValueError,
"The 'groups' parameter should not be None.",
gs.fit, X, y)
gs.fit(X, y, groups=groups)
non_group_cvs = [StratifiedKFold(), StratifiedShuffleSplit()]
for cv in non_group_cvs:
gs = GridSearchCV(clf, grid, cv=cv)
# Should not raise an error
gs.fit(X, y)
def test_classes__property():
# Test that classes_ property matches best_estimator_.classes_
X = np.arange(100).reshape(10, 10)
y = np.array([0] * 5 + [1] * 5)
Cs = [.1, 1, 10]
grid_search = GridSearchCV(LinearSVC(random_state=0), {'C': Cs})
grid_search.fit(X, y)
assert_array_equal(grid_search.best_estimator_.classes_,
grid_search.classes_)
# Test that regressors do not have a classes_ attribute
grid_search = GridSearchCV(Ridge(), {'alpha': [1.0, 2.0]})
grid_search.fit(X, y)
assert not hasattr(grid_search, 'classes_')
# Test that the grid searcher has no classes_ attribute before it's fit
grid_search = GridSearchCV(LinearSVC(random_state=0), {'C': Cs})
assert not hasattr(grid_search, 'classes_')
# Test that the grid searcher has no classes_ attribute without a refit
grid_search = GridSearchCV(LinearSVC(random_state=0),
{'C': Cs}, refit=False)
grid_search.fit(X, y)
assert not hasattr(grid_search, 'classes_')
def test_trivial_cv_results_attr():
# Test search over a "grid" with only one point.
clf = MockClassifier()
grid_search = GridSearchCV(clf, {'foo_param': [1]}, cv=3)
grid_search.fit(X, y)
assert hasattr(grid_search, "cv_results_")
random_search = RandomizedSearchCV(clf, {'foo_param': [0]}, n_iter=1, cv=3)
random_search.fit(X, y)
assert hasattr(grid_search, "cv_results_")
def test_no_refit():
# Test that GSCV can be used for model selection alone without refitting
clf = MockClassifier()
for scoring in [None, ['accuracy', 'precision']]:
grid_search = GridSearchCV(
clf, {'foo_param': [1, 2, 3]}, refit=False, cv=3
)
grid_search.fit(X, y)
assert not hasattr(grid_search, "best_estimator_") and \
hasattr(grid_search, "best_index_") and \
hasattr(grid_search, "best_params_")
# Make sure the functions predict/transform etc raise meaningful
# error messages
for fn_name in ('predict', 'predict_proba', 'predict_log_proba',
'transform', 'inverse_transform'):
assert_raise_message(NotFittedError,
('refit=False. %s is available only after '
'refitting on the best parameters'
% fn_name), getattr(grid_search, fn_name), X)
# Test that an invalid refit param raises appropriate error messages
for refit in ["", 5, True, 'recall', 'accuracy']:
assert_raise_message(ValueError, "For multi-metric scoring, the "
"parameter refit must be set to a scorer key",
GridSearchCV(clf, {}, refit=refit,
scoring={'acc': 'accuracy',
'prec': 'precision'}
).fit,
X, y)
def test_grid_search_error():
    # Test that grid search captures errors when X and y have inconsistent lengths
X_, y_ = make_classification(n_samples=200, n_features=100, random_state=0)
clf = LinearSVC()
cv = GridSearchCV(clf, {'C': [0.1, 1.0]})
assert_raises(ValueError, cv.fit, X_[:180], y_)
def test_grid_search_one_grid_point():
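    # A grid with a single candidate should fit an estimator equivalent to
    # fitting that candidate directly.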
X_, y_ = make_classification(n_samples=200, n_features=100, random_state=0)
param_dict = {"C": [1.0], "kernel": ["rbf"], "gamma": [0.1]}
clf = SVC(gamma='auto')
cv = GridSearchCV(clf, param_dict)
cv.fit(X_, y_)
clf = SVC(C=1.0, kernel="rbf", gamma=0.1)
clf.fit(X_, y_)
assert_array_equal(clf.dual_coef_, cv.best_estimator_.dual_coef_)
def test_grid_search_when_param_grid_includes_range():
# Test that the best estimator contains the right value for foo_param
clf = MockClassifier()
grid_search = None
grid_search = GridSearchCV(clf, {'foo_param': range(1, 4)}, cv=3)
grid_search.fit(X, y)
assert grid_search.best_estimator_.foo_param == 2
def test_grid_search_bad_param_grid():
param_dict = {"C": 1}
clf = SVC(gamma='auto')
assert_raise_message(
ValueError,
"Parameter grid for parameter (C) needs to"
" be a list or numpy array, but got (<class 'int'>)."
" Single values need to be wrapped in a list"
" with one element.",
GridSearchCV, clf, param_dict)
param_dict = {"C": []}
clf = SVC()
assert_raise_message(
ValueError,
"Parameter values for parameter (C) need to be a non-empty sequence.",
GridSearchCV, clf, param_dict)
param_dict = {"C": "1,2,3"}
clf = SVC(gamma='auto')
assert_raise_message(
ValueError,
"Parameter grid for parameter (C) needs to"
" be a list or numpy array, but got (<class 'str'>)."
" Single values need to be wrapped in a list"
" with one element.",
GridSearchCV, clf, param_dict)
param_dict = {"C": np.ones((3, 2))}
clf = SVC()
assert_raises(ValueError, GridSearchCV, clf, param_dict)
def test_grid_search_sparse():
# Test that grid search works with both dense and sparse matrices
X_, y_ = make_classification(n_samples=200, n_features=100, random_state=0)
clf = LinearSVC()
cv = GridSearchCV(clf, {'C': [0.1, 1.0]})
cv.fit(X_[:180], y_[:180])
y_pred = cv.predict(X_[180:])
C = cv.best_estimator_.C
X_ = sp.csr_matrix(X_)
clf = LinearSVC()
cv = GridSearchCV(clf, {'C': [0.1, 1.0]})
cv.fit(X_[:180].tocoo(), y_[:180])
y_pred2 = cv.predict(X_[180:])
C2 = cv.best_estimator_.C
assert np.mean(y_pred == y_pred2) >= .9
assert C == C2
def test_grid_search_sparse_scoring():
X_, y_ = make_classification(n_samples=200, n_features=100, random_state=0)
clf = LinearSVC()
cv = GridSearchCV(clf, {'C': [0.1, 1.0]}, scoring="f1")
cv.fit(X_[:180], y_[:180])
y_pred = cv.predict(X_[180:])
C = cv.best_estimator_.C
X_ = sp.csr_matrix(X_)
clf = LinearSVC()
cv = GridSearchCV(clf, {'C': [0.1, 1.0]}, scoring="f1")
cv.fit(X_[:180], y_[:180])
y_pred2 = cv.predict(X_[180:])
C2 = cv.best_estimator_.C
assert_array_equal(y_pred, y_pred2)
assert C == C2
# Smoke test the score
# np.testing.assert_allclose(f1_score(cv.predict(X_[:180]), y[:180]),
# cv.score(X_[:180], y[:180]))
# test loss where greater is worse
def f1_loss(y_true_, y_pred_):
return -f1_score(y_true_, y_pred_)
F1Loss = make_scorer(f1_loss, greater_is_better=False)
cv = GridSearchCV(clf, {'C': [0.1, 1.0]}, scoring=F1Loss)
cv.fit(X_[:180], y_[:180])
y_pred3 = cv.predict(X_[180:])
C3 = cv.best_estimator_.C
assert C == C3
assert_array_equal(y_pred, y_pred3)
def test_grid_search_precomputed_kernel():
# Test that grid search works when the input features are given in the
# form of a precomputed kernel matrix
X_, y_ = make_classification(n_samples=200, n_features=100, random_state=0)
# compute the training kernel matrix corresponding to the linear kernel
K_train = np.dot(X_[:180], X_[:180].T)
y_train = y_[:180]
clf = SVC(kernel='precomputed')
cv = GridSearchCV(clf, {'C': [0.1, 1.0]})
cv.fit(K_train, y_train)
assert cv.best_score_ >= 0
# compute the test kernel matrix
K_test = np.dot(X_[180:], X_[:180].T)
y_test = y_[180:]
y_pred = cv.predict(K_test)
assert np.mean(y_pred == y_test) >= 0
# test error is raised when the precomputed kernel is not array-like
# or sparse
assert_raises(ValueError, cv.fit, K_train.tolist(), y_train)
def test_grid_search_precomputed_kernel_error_nonsquare():
# Test that grid search returns an error with a non-square precomputed
# training kernel matrix
K_train = np.zeros((10, 20))
y_train = np.ones((10, ))
clf = SVC(kernel='precomputed')
cv = GridSearchCV(clf, {'C': [0.1, 1.0]})
assert_raises(ValueError, cv.fit, K_train, y_train)
class BrokenClassifier(BaseEstimator):
"""Broken classifier that cannot be fit twice"""
def __init__(self, parameter=None):
self.parameter = parameter
def fit(self, X, y):
assert not hasattr(self, 'has_been_fit_')
self.has_been_fit_ = True
def predict(self, X):
return np.zeros(X.shape[0])
@ignore_warnings
def test_refit():
# Regression test for bug in refitting
# Simulates re-fitting a broken estimator; this used to break with
# sparse SVMs.
X = np.arange(100).reshape(10, 10)
y = np.array([0] * 5 + [1] * 5)
clf = GridSearchCV(BrokenClassifier(), [{'parameter': [0, 1]}],
scoring="precision", refit=True)
clf.fit(X, y)
def test_refit_callable():
"""
Test refit=callable, which adds flexibility in identifying the
"best" estimator.
"""
def refit_callable(cv_results):
"""
        A dummy function to test the `refit=callable` interface.
        Returns the index of the model with the lowest
        `mean_test_score`.
"""
# Fit a dummy clf with `refit=True` to get a list of keys in
# clf.cv_results_.
X, y = make_classification(n_samples=100, n_features=4,
random_state=42)
clf = GridSearchCV(LinearSVC(random_state=42), {'C': [0.01, 0.1, 1]},
scoring='precision', refit=True)
clf.fit(X, y)
# Ensure that `best_index_ != 0` for this dummy clf
assert clf.best_index_ != 0
# Assert every key matches those in `cv_results`
for key in clf.cv_results_.keys():
assert key in cv_results
return cv_results['mean_test_score'].argmin()
X, y = make_classification(n_samples=100, n_features=4,
random_state=42)
clf = GridSearchCV(LinearSVC(random_state=42), {'C': [0.01, 0.1, 1]},
scoring='precision', refit=refit_callable)
clf.fit(X, y)
assert clf.best_index_ == 0
# Ensure `best_score_` is disabled when using `refit=callable`
assert not hasattr(clf, 'best_score_')
def test_refit_callable_invalid_type():
"""
    Test that the implementation catches the error when 'best_index_' returns
    an invalid result.
"""
def refit_callable_invalid_type(cv_results):
"""
        A dummy function to test when the returned 'best_index_' is not an integer.
"""
return None
X, y = make_classification(n_samples=100, n_features=4,
random_state=42)
clf = GridSearchCV(LinearSVC(random_state=42), {'C': [0.1, 1]},
scoring='precision', refit=refit_callable_invalid_type)
with pytest.raises(TypeError,
match='best_index_ returned is not an integer'):
clf.fit(X, y)
@pytest.mark.parametrize('out_bound_value', [-1, 2])
@pytest.mark.parametrize('search_cv', [RandomizedSearchCV, GridSearchCV])
def test_refit_callable_out_bound(out_bound_value, search_cv):
"""
    Test that the implementation catches the error when 'best_index_' returns
    an out-of-bounds result.
"""
def refit_callable_out_bound(cv_results):
"""
        A dummy function to test when the returned 'best_index_' is out of bounds.
"""
return out_bound_value
X, y = make_classification(n_samples=100, n_features=4,
random_state=42)
clf = search_cv(LinearSVC(random_state=42), {'C': [0.1, 1]},
scoring='precision', refit=refit_callable_out_bound)
with pytest.raises(IndexError, match='best_index_ index out of range'):
clf.fit(X, y)
def test_refit_callable_multi_metric():
"""
Test refit=callable in multiple metric evaluation setting
"""
def refit_callable(cv_results):
"""
        A dummy function to test the `refit=callable` interface.
        Returns the index of the model with the lowest
        `mean_test_prec`.
"""
assert 'mean_test_prec' in cv_results
return cv_results['mean_test_prec'].argmin()
X, y = make_classification(n_samples=100, n_features=4,
random_state=42)
scoring = {'Accuracy': make_scorer(accuracy_score), 'prec': 'precision'}
clf = GridSearchCV(LinearSVC(random_state=42), {'C': [0.01, 0.1, 1]},
scoring=scoring, refit=refit_callable)
clf.fit(X, y)
assert clf.best_index_ == 0
# Ensure `best_score_` is disabled when using `refit=callable`
assert not hasattr(clf, 'best_score_')
def test_gridsearch_nd():
    # Pass multi-dimensional X (4d) and y (3d) arrays to GridSearchCV
X_4d = np.arange(10 * 5 * 3 * 2).reshape(10, 5, 3, 2)
y_3d = np.arange(10 * 7 * 11).reshape(10, 7, 11)
check_X = lambda x: x.shape[1:] == (5, 3, 2)
check_y = lambda x: x.shape[1:] == (7, 11)
clf = CheckingClassifier(
check_X=check_X, check_y=check_y, methods_to_check=["fit"],
)
grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]})
grid_search.fit(X_4d, y_3d).score(X, y)
assert hasattr(grid_search, "cv_results_")
def test_X_as_list():
# Pass X as list in GridSearchCV
X = np.arange(100).reshape(10, 10)
y = np.array([0] * 5 + [1] * 5)
clf = CheckingClassifier(
check_X=lambda x: isinstance(x, list), methods_to_check=["fit"],
)
cv = KFold(n_splits=3)
grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}, cv=cv)
grid_search.fit(X.tolist(), y).score(X, y)
assert hasattr(grid_search, "cv_results_")
def test_y_as_list():
# Pass y as list in GridSearchCV
X = np.arange(100).reshape(10, 10)
y = np.array([0] * 5 + [1] * 5)
clf = CheckingClassifier(
check_y=lambda x: isinstance(x, list), methods_to_check=["fit"],
)
cv = KFold(n_splits=3)
grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}, cv=cv)
grid_search.fit(X, y.tolist()).score(X, y)
assert hasattr(grid_search, "cv_results_")
@ignore_warnings
def test_pandas_input():
    # check that GridSearchCV doesn't destroy pandas dataframes
types = [(MockDataFrame, MockDataFrame)]
try:
from pandas import Series, DataFrame
types.append((DataFrame, Series))
except ImportError:
pass
X = np.arange(100).reshape(10, 10)
y = np.array([0] * 5 + [1] * 5)
for InputFeatureType, TargetType in types:
# X dataframe, y series
X_df, y_ser = InputFeatureType(X), TargetType(y)
def check_df(x):
return isinstance(x, InputFeatureType)
def check_series(x):
return isinstance(x, TargetType)
clf = CheckingClassifier(check_X=check_df, check_y=check_series)
grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]})
grid_search.fit(X_df, y_ser).score(X_df, y_ser)
grid_search.predict(X_df)
assert hasattr(grid_search, "cv_results_")
def test_unsupervised_grid_search():
# test grid-search with unsupervised estimator
X, y = make_blobs(n_samples=50, random_state=0)
km = KMeans(random_state=0, init="random", n_init=1)
# Multi-metric evaluation unsupervised
scoring = ['adjusted_rand_score', 'fowlkes_mallows_score']
for refit in ['adjusted_rand_score', 'fowlkes_mallows_score']:
grid_search = GridSearchCV(km, param_grid=dict(n_clusters=[2, 3, 4]),
scoring=scoring, refit=refit)
grid_search.fit(X, y)
# Both ARI and FMS can find the right number :)
assert grid_search.best_params_["n_clusters"] == 3
# Single metric evaluation unsupervised
grid_search = GridSearchCV(km, param_grid=dict(n_clusters=[2, 3, 4]),
scoring='fowlkes_mallows_score')
grid_search.fit(X, y)
assert grid_search.best_params_["n_clusters"] == 3
# Now without a score, and without y
grid_search = GridSearchCV(km, param_grid=dict(n_clusters=[2, 3, 4]))
grid_search.fit(X)
assert grid_search.best_params_["n_clusters"] == 4
def test_gridsearch_no_predict():
# test grid-search with an estimator without predict.
# slight duplication of a test from KDE
def custom_scoring(estimator, X):
return 42 if estimator.bandwidth == .1 else 0
X, _ = make_blobs(cluster_std=.1, random_state=1,
centers=[[0, 1], [1, 0], [0, 0]])
search = GridSearchCV(KernelDensity(),
param_grid=dict(bandwidth=[.01, .1, 1]),
scoring=custom_scoring)
search.fit(X)
assert search.best_params_['bandwidth'] == .1
assert search.best_score_ == 42
def test_param_sampler():
# test basic properties of param sampler
param_distributions = {"kernel": ["rbf", "linear"],
"C": uniform(0, 1)}
sampler = ParameterSampler(param_distributions=param_distributions,
n_iter=10, random_state=0)
samples = [x for x in sampler]
assert len(samples) == 10
for sample in samples:
assert sample["kernel"] in ["rbf", "linear"]
assert 0 <= sample["C"] <= 1
# test that repeated calls yield identical parameters
param_distributions = {"C": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]}
sampler = ParameterSampler(param_distributions=param_distributions,
n_iter=3, random_state=0)
assert [x for x in sampler] == [x for x in sampler]
if sp_version >= (0, 16):
param_distributions = {"C": uniform(0, 1)}
sampler = ParameterSampler(param_distributions=param_distributions,
n_iter=10, random_state=0)
assert [x for x in sampler] == [x for x in sampler]
def check_cv_results_array_types(search, param_keys, score_keys):
# Check if the search `cv_results`'s array are of correct types
cv_results = search.cv_results_
assert all(isinstance(cv_results[param], np.ma.MaskedArray)
for param in param_keys)
assert all(cv_results[key].dtype == object for key in param_keys)
assert not any(isinstance(cv_results[key], np.ma.MaskedArray)
for key in score_keys)
assert all(cv_results[key].dtype == np.float64
for key in score_keys if not key.startswith('rank'))
scorer_keys = search.scorer_.keys() if search.multimetric_ else ['score']
for key in scorer_keys:
assert cv_results['rank_test_%s' % key].dtype == np.int32
def check_cv_results_keys(cv_results, param_keys, score_keys, n_cand):
# Test the search.cv_results_ contains all the required results
assert_array_equal(sorted(cv_results.keys()),
sorted(param_keys + score_keys + ('params',)))
assert all(cv_results[key].shape == (n_cand,)
for key in param_keys + score_keys)
def test_grid_search_cv_results():
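    # Check the keys, dtypes and parameter masking of GridSearchCV.cv_results_.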
X, y = make_classification(n_samples=50, n_features=4,
random_state=42)
n_splits = 3
n_grid_points = 6
params = [dict(kernel=['rbf', ], C=[1, 10], gamma=[0.1, 1]),
dict(kernel=['poly', ], degree=[1, 2])]
param_keys = ('param_C', 'param_degree', 'param_gamma', 'param_kernel')
score_keys = ('mean_test_score', 'mean_train_score',
'rank_test_score',
'split0_test_score', 'split1_test_score',
'split2_test_score',
'split0_train_score', 'split1_train_score',
'split2_train_score',
'std_test_score', 'std_train_score',
'mean_fit_time', 'std_fit_time',
'mean_score_time', 'std_score_time')
n_candidates = n_grid_points
search = GridSearchCV(SVC(), cv=n_splits, param_grid=params,
return_train_score=True)
search.fit(X, y)
cv_results = search.cv_results_
# Check if score and timing are reasonable
assert all(cv_results['rank_test_score'] >= 1)
    assert all(np.all(cv_results[k] >= 0) for k in score_keys
               if k != 'rank_test_score')
    assert all(np.all(cv_results[k] <= 1) for k in score_keys
               if 'time' not in k and
               k != 'rank_test_score')
# Check cv_results structure
check_cv_results_array_types(search, param_keys, score_keys)
check_cv_results_keys(cv_results, param_keys, score_keys, n_candidates)
# Check masking
cv_results = search.cv_results_
n_candidates = len(search.cv_results_['params'])
assert all((cv_results['param_C'].mask[i] and
cv_results['param_gamma'].mask[i] and
not cv_results['param_degree'].mask[i])
for i in range(n_candidates)
if cv_results['param_kernel'][i] == 'linear')
assert all((not cv_results['param_C'].mask[i] and
not cv_results['param_gamma'].mask[i] and
cv_results['param_degree'].mask[i])
for i in range(n_candidates)
if cv_results['param_kernel'][i] == 'rbf')
def test_random_search_cv_results():
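    # Check the keys, dtypes and parameter masking of RandomizedSearchCV.cv_results_.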
X, y = make_classification(n_samples=50, n_features=4, random_state=42)
n_splits = 3
n_search_iter = 30
params = [{'kernel': ['rbf'], 'C': expon(scale=10),
'gamma': expon(scale=0.1)},
{'kernel': ['poly'], 'degree': [2, 3]}]
param_keys = ('param_C', 'param_degree', 'param_gamma', 'param_kernel')
score_keys = ('mean_test_score', 'mean_train_score',
'rank_test_score',
'split0_test_score', 'split1_test_score',
'split2_test_score',
'split0_train_score', 'split1_train_score',
'split2_train_score',
'std_test_score', 'std_train_score',
'mean_fit_time', 'std_fit_time',
'mean_score_time', 'std_score_time')
n_cand = n_search_iter
search = RandomizedSearchCV(SVC(), n_iter=n_search_iter,
cv=n_splits,
param_distributions=params,
return_train_score=True)
search.fit(X, y)
cv_results = search.cv_results_
# Check results structure
check_cv_results_array_types(search, param_keys, score_keys)
check_cv_results_keys(cv_results, param_keys, score_keys, n_cand)
n_candidates = len(search.cv_results_['params'])
assert all((cv_results['param_C'].mask[i] and
cv_results['param_gamma'].mask[i] and
not cv_results['param_degree'].mask[i])
for i in range(n_candidates)
if cv_results['param_kernel'][i] == 'linear')
assert all((not cv_results['param_C'].mask[i] and
not cv_results['param_gamma'].mask[i] and
cv_results['param_degree'].mask[i])
for i in range(n_candidates)
if cv_results['param_kernel'][i] == 'rbf')
@pytest.mark.parametrize(
"SearchCV, specialized_params",
[(GridSearchCV, {'param_grid': {'C': [1, 10]}}),
(RandomizedSearchCV,
{'param_distributions': {'C': [1, 10]}, 'n_iter': 2})]
)
def test_search_default_iid(SearchCV, specialized_params):
    # Check that mean/std test scores are unweighted means over non-iid CV folds
# noise-free simple 2d-data
X, y = make_blobs(centers=[[0, 0], [1, 0], [0, 1], [1, 1]], random_state=0,
cluster_std=0.1, shuffle=False, n_samples=80)
# split dataset into two folds that are not iid
# first one contains data of all 4 blobs, second only from two.
    mask = np.ones(X.shape[0], dtype=bool)
mask[np.where(y == 1)[0][::2]] = 0
mask[np.where(y == 2)[0][::2]] = 0
# this leads to perfect classification on one fold and a score of 1/3 on
# the other
# create "cv" for splits
cv = [[mask, ~mask], [~mask, mask]]
common_params = {'estimator': SVC(), 'cv': cv,
'return_train_score': True}
search = SearchCV(**common_params, **specialized_params)
search.fit(X, y)
test_cv_scores = np.array(
[search.cv_results_['split%d_test_score' % s][0]
for s in range(search.n_splits_)]
)
test_mean = search.cv_results_['mean_test_score'][0]
test_std = search.cv_results_['std_test_score'][0]
train_cv_scores = np.array(
[search.cv_results_['split%d_train_score' % s][0]
for s in range(search.n_splits_)]
)
train_mean = search.cv_results_['mean_train_score'][0]
train_std = search.cv_results_['std_train_score'][0]
assert search.cv_results_['param_C'][0] == 1
# scores are the same as above
assert_allclose(test_cv_scores, [1, 1. / 3.])
assert_allclose(train_cv_scores, [1, 1])
# Unweighted mean/std is used
assert test_mean == pytest.approx(np.mean(test_cv_scores))
assert test_std == pytest.approx(np.std(test_cv_scores))
# For the train scores, we do not take a weighted mean irrespective of
# i.i.d. or not
assert train_mean == pytest.approx(1)
assert train_std == pytest.approx(0)
def test_grid_search_cv_results_multimetric():
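    # Multi-metric cv_results_ should match the corresponding single-metric results.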
X, y = make_classification(n_samples=50, n_features=4, random_state=42)
n_splits = 3
params = [dict(kernel=['rbf', ], C=[1, 10], gamma=[0.1, 1]),
dict(kernel=['poly', ], degree=[1, 2])]
grid_searches = []
for scoring in ({'accuracy': make_scorer(accuracy_score),
'recall': make_scorer(recall_score)},
'accuracy', 'recall'):
grid_search = GridSearchCV(SVC(), cv=n_splits,
param_grid=params,
scoring=scoring, refit=False)
grid_search.fit(X, y)
grid_searches.append(grid_search)
compare_cv_results_multimetric_with_single(*grid_searches)
def test_random_search_cv_results_multimetric():
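    # Same check as above for RandomizedSearchCV, with and without refit.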
X, y = make_classification(n_samples=50, n_features=4, random_state=42)
n_splits = 3
n_search_iter = 30
# Scipy 0.12's stats dists do not accept seed, hence we use param grid
params = dict(C=np.logspace(-4, 1, 3),
gamma=np.logspace(-5, 0, 3, base=0.1))
for refit in (True, False):
random_searches = []
for scoring in (('accuracy', 'recall'), 'accuracy', 'recall'):
# If True, for multi-metric pass refit='accuracy'
if refit:
probability = True
refit = 'accuracy' if isinstance(scoring, tuple) else refit
else:
probability = False
clf = SVC(probability=probability, random_state=42)
random_search = RandomizedSearchCV(clf, n_iter=n_search_iter,
cv=n_splits,
param_distributions=params,
scoring=scoring,
refit=refit, random_state=0)
random_search.fit(X, y)
random_searches.append(random_search)
compare_cv_results_multimetric_with_single(*random_searches)
compare_refit_methods_when_refit_with_acc(
random_searches[0], random_searches[1], refit)
def compare_cv_results_multimetric_with_single(
search_multi, search_acc, search_rec):
"""Compare multi-metric cv_results with the ensemble of multiple
single metric cv_results from single metric grid/random search"""
assert search_multi.multimetric_
assert_array_equal(sorted(search_multi.scorer_),
('accuracy', 'recall'))
cv_results_multi = search_multi.cv_results_
cv_results_acc_rec = {re.sub('_score$', '_accuracy', k): v
for k, v in search_acc.cv_results_.items()}
cv_results_acc_rec.update({re.sub('_score$', '_recall', k): v
for k, v in search_rec.cv_results_.items()})
# Check if score and timing are reasonable, also checks if the keys
# are present
assert all((np.all(cv_results_multi[k] <= 1) for k in (
'mean_score_time', 'std_score_time', 'mean_fit_time',
'std_fit_time')))
# Compare the keys, other than time keys, among multi-metric and
# single metric grid search results. np.testing.assert_equal performs a
# deep nested comparison of the two cv_results dicts
np.testing.assert_equal({k: v for k, v in cv_results_multi.items()
if not k.endswith('_time')},
{k: v for k, v in cv_results_acc_rec.items()
if not k.endswith('_time')})
def compare_refit_methods_when_refit_with_acc(search_multi, search_acc, refit):
"""Compare refit multi-metric search methods with single metric methods"""
assert search_acc.refit == refit
if refit:
assert search_multi.refit == 'accuracy'
else:
assert not search_multi.refit
return # search cannot predict/score without refit
X, y = make_blobs(n_samples=100, n_features=4, random_state=42)
for method in ('predict', 'predict_proba', 'predict_log_proba'):
assert_almost_equal(getattr(search_multi, method)(X),
getattr(search_acc, method)(X))
assert_almost_equal(search_multi.score(X, y), search_acc.score(X, y))
for key in ('best_index_', 'best_score_', 'best_params_'):
assert getattr(search_multi, key) == getattr(search_acc, key)
def test_search_cv_results_rank_tie_breaking():
X, y = make_blobs(n_samples=50, random_state=42)
# The two C values are close enough to give similar models
# which would result in a tie of their mean cv-scores
param_grid = {'C': [1, 1.001, 0.001]}
grid_search = GridSearchCV(SVC(), param_grid=param_grid,
return_train_score=True)
random_search = RandomizedSearchCV(SVC(), n_iter=3,
param_distributions=param_grid,
return_train_score=True)
for search in (grid_search, random_search):
search.fit(X, y)
cv_results = search.cv_results_
# Check tie breaking strategy -
# Check that there is a tie in the mean scores between
# candidates 1 and 2 alone
assert_almost_equal(cv_results['mean_test_score'][0],
cv_results['mean_test_score'][1])
assert_almost_equal(cv_results['mean_train_score'][0],
cv_results['mean_train_score'][1])
assert not np.allclose(cv_results['mean_test_score'][1],
cv_results['mean_test_score'][2])
assert not np.allclose(cv_results['mean_train_score'][1],
cv_results['mean_train_score'][2])
# 'min' rank should be assigned to the tied candidates
assert_almost_equal(search.cv_results_['rank_test_score'], [1, 1, 3])
def test_search_cv_results_none_param():
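    # A parameter value of None should be stored as-is in cv_results_.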
X, y = [[1], [2], [3], [4], [5]], [0, 0, 0, 0, 1]
estimators = (DecisionTreeRegressor(), DecisionTreeClassifier())
est_parameters = {"random_state": [0, None]}
cv = KFold()
for est in estimators:
grid_search = GridSearchCV(est, est_parameters, cv=cv,
).fit(X, y)
assert_array_equal(grid_search.cv_results_['param_random_state'],
[0, None])
@ignore_warnings()
def test_search_cv_timing():
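    # Fit/score timing entries in cv_results_ and refit_time_ should be
    # present and non-negative.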
svc = LinearSVC(random_state=0)
X = [[1, ], [2, ], [3, ], [4, ]]
y = [0, 1, 1, 0]
gs = GridSearchCV(svc, {'C': [0, 1]}, cv=2, error_score=0)
rs = RandomizedSearchCV(svc, {'C': [0, 1]}, cv=2, error_score=0, n_iter=2)
for search in (gs, rs):
search.fit(X, y)
for key in ['mean_fit_time', 'std_fit_time']:
            # NOTE The precision of time.time on Windows is not high
            # enough for the fit/score times to be non-zero for trivial X and y
assert np.all(search.cv_results_[key] >= 0)
assert np.all(search.cv_results_[key] < 1)
for key in ['mean_score_time', 'std_score_time']:
assert search.cv_results_[key][1] >= 0
assert search.cv_results_[key][0] == 0.0
assert np.all(search.cv_results_[key] < 1)
assert hasattr(search, "refit_time_")
assert isinstance(search.refit_time_, float)
assert search.refit_time_ >= 0
def test_grid_search_correct_score_results():
# test that correct scores are used
n_splits = 3
clf = LinearSVC(random_state=0)
X, y = make_blobs(random_state=0, centers=2)
Cs = [.1, 1, 10]
for score in ['f1', 'roc_auc']:
grid_search = GridSearchCV(clf, {'C': Cs}, scoring=score, cv=n_splits)
cv_results = grid_search.fit(X, y).cv_results_
# Test scorer names
result_keys = list(cv_results.keys())
expected_keys = (("mean_test_score", "rank_test_score") +
tuple("split%d_test_score" % cv_i
for cv_i in range(n_splits)))
assert all(np.in1d(expected_keys, result_keys))
cv = StratifiedKFold(n_splits=n_splits)
n_splits = grid_search.n_splits_
for candidate_i, C in enumerate(Cs):
clf.set_params(C=C)
cv_scores = np.array(
list(grid_search.cv_results_['split%d_test_score'
% s][candidate_i]
for s in range(n_splits)))
for i, (train, test) in enumerate(cv.split(X, y)):
clf.fit(X[train], y[train])
if score == "f1":
correct_score = f1_score(y[test], clf.predict(X[test]))
elif score == "roc_auc":
dec = clf.decision_function(X[test])
correct_score = roc_auc_score(y[test], dec)
assert_almost_equal(correct_score, cv_scores[i])
# FIXME remove test_fit_grid_point as the function will be removed on 0.25
@ignore_warnings(category=FutureWarning)
def test_fit_grid_point():
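    # fit_grid_point should return the score, the evaluated parameters and
    # the number of test samples.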
X, y = make_classification(random_state=0)
cv = StratifiedKFold()
svc = LinearSVC(random_state=0)
scorer = make_scorer(accuracy_score)
for params in ({'C': 0.1}, {'C': 0.01}, {'C': 0.001}):
for train, test in cv.split(X, y):
this_scores, this_params, n_test_samples = fit_grid_point(
X, y, clone(svc), params, train, test,
scorer, verbose=False)
est = clone(svc).set_params(**params)
est.fit(X[train], y[train])
expected_score = scorer(est, X[test], y[test])
# Test the return values of fit_grid_point
assert_almost_equal(this_scores, expected_score)
assert params == this_params
assert n_test_samples == test.size
# Should raise an error upon multimetric scorer
assert_raise_message(ValueError, "For evaluating multiple scores, use "
"sklearn.model_selection.cross_validate instead.",
fit_grid_point, X, y, svc, params, train, test,
{'score': scorer}, verbose=True)
# FIXME remove test_fit_grid_point_deprecated as
# fit_grid_point will be removed on 0.25
def test_fit_grid_point_deprecated():
X, y = make_classification(random_state=0)
svc = LinearSVC(random_state=0)
scorer = make_scorer(accuracy_score)
msg = ("fit_grid_point is deprecated in version 0.23 "
"and will be removed in version 0.25")
params = {'C': 0.1}
train, test = next(StratifiedKFold().split(X, y))
with pytest.warns(FutureWarning, match=msg):
fit_grid_point(X, y, svc, params, train, test, scorer, verbose=False)
def test_pickle():
# Test that a fit search can be pickled
clf = MockClassifier()
grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}, refit=True, cv=3)
grid_search.fit(X, y)
grid_search_pickled = pickle.loads(pickle.dumps(grid_search))
assert_array_almost_equal(grid_search.predict(X),
grid_search_pickled.predict(X))
random_search = RandomizedSearchCV(clf, {'foo_param': [1, 2, 3]},
refit=True, n_iter=3, cv=3)
random_search.fit(X, y)
random_search_pickled = pickle.loads(pickle.dumps(random_search))
assert_array_almost_equal(random_search.predict(X),
random_search_pickled.predict(X))
def test_grid_search_with_multioutput_data():
# Test search with multi-output estimator
X, y = make_multilabel_classification(return_indicator=True,
random_state=0)
est_parameters = {"max_depth": [1, 2, 3, 4]}
cv = KFold()
estimators = [DecisionTreeRegressor(random_state=0),
DecisionTreeClassifier(random_state=0)]
# Test with grid search cv
for est in estimators:
grid_search = GridSearchCV(est, est_parameters, cv=cv)
grid_search.fit(X, y)
res_params = grid_search.cv_results_['params']
for cand_i in range(len(res_params)):
est.set_params(**res_params[cand_i])
for i, (train, test) in enumerate(cv.split(X, y)):
est.fit(X[train], y[train])
correct_score = est.score(X[test], y[test])
assert_almost_equal(
correct_score,
grid_search.cv_results_['split%d_test_score' % i][cand_i])
# Test with a randomized search
for est in estimators:
random_search = RandomizedSearchCV(est, est_parameters,
cv=cv, n_iter=3)
random_search.fit(X, y)
res_params = random_search.cv_results_['params']
for cand_i in range(len(res_params)):
est.set_params(**res_params[cand_i])
for i, (train, test) in enumerate(cv.split(X, y)):
est.fit(X[train], y[train])
correct_score = est.score(X[test], y[test])
assert_almost_equal(
correct_score,
random_search.cv_results_['split%d_test_score'
% i][cand_i])
def test_predict_proba_disabled():
# Test predict_proba when disabled on estimator.
X = np.arange(20).reshape(5, -1)
y = [0, 0, 1, 1, 1]
clf = SVC(probability=False)
gs = GridSearchCV(clf, {}, cv=2).fit(X, y)
assert not hasattr(gs, "predict_proba")
def test_grid_search_allows_nans():
# Test GridSearchCV with SimpleImputer
X = np.arange(20, dtype=np.float64).reshape(5, -1)
X[2, :] = np.nan
y = [0, 0, 1, 1, 1]
p = Pipeline([
('imputer', SimpleImputer(strategy='mean', missing_values=np.nan)),
('classifier', MockClassifier()),
])
GridSearchCV(p, {'classifier__foo_param': [1, 2, 3]}, cv=2).fit(X, y)
class FailingClassifier(BaseEstimator):
"""Classifier that raises a ValueError on fit()"""
FAILING_PARAMETER = 2
def __init__(self, parameter=None):
self.parameter = parameter
def fit(self, X, y=None):
if self.parameter == FailingClassifier.FAILING_PARAMETER:
raise ValueError("Failing classifier failed as required")
def predict(self, X):
return np.zeros(X.shape[0])
def score(self, X=None, Y=None):
return 0.
def test_grid_search_failing_classifier():
    # GridSearchCV with error_score != 'raise'
# Ensures that a warning is raised and score reset where appropriate.
X, y = make_classification(n_samples=20, n_features=10, random_state=0)
clf = FailingClassifier()
# refit=False because we only want to check that errors caused by fits
# to individual folds will be caught and warnings raised instead. If
# refit was done, then an exception would be raised on refit and not
# caught by grid_search (expected behavior), and this would cause an
# error in this test.
gs = GridSearchCV(clf, [{'parameter': [0, 1, 2]}], scoring='accuracy',
refit=False, error_score=0.0)
assert_warns(FitFailedWarning, gs.fit, X, y)
n_candidates = len(gs.cv_results_['params'])
# Ensure that grid scores were set to zero as required for those fits
# that are expected to fail.
def get_cand_scores(i):
return np.array(list(gs.cv_results_['split%d_test_score' % s][i]
for s in range(gs.n_splits_)))
assert all((np.all(get_cand_scores(cand_i) == 0.0)
for cand_i in range(n_candidates)
if gs.cv_results_['param_parameter'][cand_i] ==
FailingClassifier.FAILING_PARAMETER))
gs = GridSearchCV(clf, [{'parameter': [0, 1, 2]}], scoring='accuracy',
refit=False, error_score=float('nan'))
assert_warns(FitFailedWarning, gs.fit, X, y)
n_candidates = len(gs.cv_results_['params'])
assert all(np.all(np.isnan(get_cand_scores(cand_i)))
for cand_i in range(n_candidates)
if gs.cv_results_['param_parameter'][cand_i] ==
FailingClassifier.FAILING_PARAMETER)
ranks = gs.cv_results_['rank_test_score']
# Check that succeeded estimators have lower ranks
assert ranks[0] <= 2 and ranks[1] <= 2
# Check that failed estimator has the highest rank
assert ranks[clf.FAILING_PARAMETER] == 3
assert gs.best_index_ != clf.FAILING_PARAMETER
def test_grid_search_failing_classifier_raise():
    # GridSearchCV with error_score == 'raise' raises the error
X, y = make_classification(n_samples=20, n_features=10, random_state=0)
clf = FailingClassifier()
# refit=False because we want to test the behaviour of the grid search part
gs = GridSearchCV(clf, [{'parameter': [0, 1, 2]}], scoring='accuracy',
refit=False, error_score='raise')
# FailingClassifier issues a ValueError so this is what we look for.
assert_raises(ValueError, gs.fit, X, y)
def test_parameters_sampler_replacement():
# raise warning if n_iter is bigger than total parameter space
params = [{'first': [0, 1], 'second': ['a', 'b', 'c']},
{'third': ['two', 'values']}]
sampler = ParameterSampler(params, n_iter=9)
n_iter = 9
grid_size = 8
expected_warning = ('The total space of parameters %d is smaller '
'than n_iter=%d. Running %d iterations. For '
'exhaustive searches, use GridSearchCV.'
% (grid_size, n_iter, grid_size))
assert_warns_message(UserWarning, expected_warning,
list, sampler)
    # degenerates to GridSearchCV if n_iter is the same as grid_size
sampler = ParameterSampler(params, n_iter=8)
samples = list(sampler)
assert len(samples) == 8
for values in ParameterGrid(params):
assert values in samples
# test sampling without replacement in a large grid
params = {'a': range(10), 'b': range(10), 'c': range(10)}
sampler = ParameterSampler(params, n_iter=99, random_state=42)
samples = list(sampler)
assert len(samples) == 99
hashable_samples = ["a%db%dc%d" % (p['a'], p['b'], p['c'])
for p in samples]
assert len(set(hashable_samples)) == 99
# doesn't go into infinite loops
params_distribution = {'first': bernoulli(.5), 'second': ['a', 'b', 'c']}
sampler = ParameterSampler(params_distribution, n_iter=7)
samples = list(sampler)
assert len(samples) == 7
def test_stochastic_gradient_loss_param():
# Make sure the predict_proba works when loss is specified
# as one of the parameters in the param_grid.
param_grid = {
'loss': ['log'],
}
X = np.arange(24).reshape(6, -1)
y = [0, 0, 0, 1, 1, 1]
clf = GridSearchCV(estimator=SGDClassifier(loss='hinge'),
param_grid=param_grid, cv=3)
# When the estimator is not fitted, `predict_proba` is not available as the
# loss is 'hinge'.
assert not hasattr(clf, "predict_proba")
clf.fit(X, y)
clf.predict_proba(X)
clf.predict_log_proba(X)
# Make sure `predict_proba` is not available when setting loss=['hinge']
# in param_grid
param_grid = {
'loss': ['hinge'],
}
clf = GridSearchCV(estimator=SGDClassifier(loss='hinge'),
param_grid=param_grid, cv=3)
assert not hasattr(clf, "predict_proba")
clf.fit(X, y)
assert not hasattr(clf, "predict_proba")
def test_search_train_scores_set_to_false():
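    # Smoke test: fit works when train scores are not computed
    # (return_train_score left at its default of False).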
X = np.arange(6).reshape(6, -1)
y = [0, 0, 0, 1, 1, 1]
clf = LinearSVC(random_state=0)
gs = GridSearchCV(clf, param_grid={'C': [0.1, 0.2]}, cv=3)
gs.fit(X, y)
def test_grid_search_cv_splits_consistency():
# Check if a one time iterable is accepted as a cv parameter.
n_samples = 100
n_splits = 5
X, y = make_classification(n_samples=n_samples, random_state=0)
gs = GridSearchCV(LinearSVC(random_state=0),
param_grid={'C': [0.1, 0.2, 0.3]},
cv=OneTimeSplitter(n_splits=n_splits,
n_samples=n_samples),
return_train_score=True)
gs.fit(X, y)
gs2 = GridSearchCV(LinearSVC(random_state=0),
param_grid={'C': [0.1, 0.2, 0.3]},
cv=KFold(n_splits=n_splits), return_train_score=True)
gs2.fit(X, y)
# Give generator as a cv parameter
assert isinstance(KFold(n_splits=n_splits,
shuffle=True, random_state=0).split(X, y),
GeneratorType)
gs3 = GridSearchCV(LinearSVC(random_state=0),
param_grid={'C': [0.1, 0.2, 0.3]},
cv=KFold(n_splits=n_splits, shuffle=True,
random_state=0).split(X, y),
return_train_score=True)
gs3.fit(X, y)
gs4 = GridSearchCV(LinearSVC(random_state=0),
param_grid={'C': [0.1, 0.2, 0.3]},
cv=KFold(n_splits=n_splits, shuffle=True,
random_state=0), return_train_score=True)
gs4.fit(X, y)
def _pop_time_keys(cv_results):
for key in ('mean_fit_time', 'std_fit_time',
'mean_score_time', 'std_score_time'):
cv_results.pop(key)
return cv_results
# Check if generators are supported as cv and
# that the splits are consistent
np.testing.assert_equal(_pop_time_keys(gs3.cv_results_),
_pop_time_keys(gs4.cv_results_))
    # OneTimeSplitter is a non-re-entrant cv where split can be called only
    # once. If ``cv.split`` were called once per param setting in
    # GridSearchCV.fit, the 2nd and 3rd parameters would not be evaluated, as
    # no train/test indices would be generated for the 2nd and subsequent
    # cv.split calls.
# This is a check to make sure cv.split is not called once per param
# setting.
np.testing.assert_equal({k: v for k, v in gs.cv_results_.items()
if not k.endswith('_time')},
{k: v for k, v in gs2.cv_results_.items()
if not k.endswith('_time')})
# Check consistency of folds across the parameters
gs = GridSearchCV(LinearSVC(random_state=0),
param_grid={'C': [0.1, 0.1, 0.2, 0.2]},
cv=KFold(n_splits=n_splits, shuffle=True),
return_train_score=True)
gs.fit(X, y)
    # As the first two param settings (C=0.1) and the next two param
    # settings (C=0.2) are the same, the test and train scores must also be
    # the same as long as the same train/test indices are generated for all
    # the cv splits, for both param settings
for score_type in ('train', 'test'):
per_param_scores = {}
for param_i in range(4):
per_param_scores[param_i] = list(
gs.cv_results_['split%d_%s_score' % (s, score_type)][param_i]
for s in range(5))
assert_array_almost_equal(per_param_scores[0],
per_param_scores[1])
assert_array_almost_equal(per_param_scores[2],
per_param_scores[3])
def test_transform_inverse_transform_round_trip():
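    # transform followed by inverse_transform on the fitted search should
    # recover the original X.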
clf = MockClassifier()
grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}, cv=3, verbose=3)
grid_search.fit(X, y)
X_round_trip = grid_search.inverse_transform(grid_search.transform(X))
assert_array_equal(X, X_round_trip)
def test_custom_run_search():
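    # A BaseSearchCV subclass implementing _run_search should reproduce the
    # results and attributes of an equivalent GridSearchCV.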
def check_results(results, gscv):
exp_results = gscv.cv_results_
assert sorted(results.keys()) == sorted(exp_results)
for k in results:
if not k.endswith('_time'):
# XXX: results['params'] is a list :|
results[k] = np.asanyarray(results[k])
if results[k].dtype.kind == 'O':
assert_array_equal(exp_results[k], results[k],
err_msg='Checking ' + k)
else:
assert_allclose(exp_results[k], results[k],
err_msg='Checking ' + k)
def fit_grid(param_grid):
return GridSearchCV(clf, param_grid,
return_train_score=True).fit(X, y)
class CustomSearchCV(BaseSearchCV):
def __init__(self, estimator, **kwargs):
super().__init__(estimator, **kwargs)
def _run_search(self, evaluate):
results = evaluate([{'max_depth': 1}, {'max_depth': 2}])
check_results(results, fit_grid({'max_depth': [1, 2]}))
results = evaluate([{'min_samples_split': 5},
{'min_samples_split': 10}])
check_results(results, fit_grid([{'max_depth': [1, 2]},
{'min_samples_split': [5, 10]}]))
# Using regressor to make sure each score differs
clf = DecisionTreeRegressor(random_state=0)
X, y = make_classification(n_samples=100, n_informative=4,
random_state=0)
mycv = CustomSearchCV(clf, return_train_score=True).fit(X, y)
gscv = fit_grid([{'max_depth': [1, 2]},
{'min_samples_split': [5, 10]}])
results = mycv.cv_results_
check_results(results, gscv)
for attr in dir(gscv):
if (attr[0].islower() and attr[-1:] == '_' and
attr not in {'cv_results_', 'best_estimator_',
'refit_time_', 'classes_'}):
assert getattr(gscv, attr) == getattr(mycv, attr), \
"Attribute %s not equal" % attr
def test__custom_fit_no_run_search():
class NoRunSearchSearchCV(BaseSearchCV):
def __init__(self, estimator, **kwargs):
super().__init__(estimator, **kwargs)
def fit(self, X, y=None, groups=None, **fit_params):
return self
# this should not raise any exceptions
NoRunSearchSearchCV(SVC()).fit(X, y)
class BadSearchCV(BaseSearchCV):
def __init__(self, estimator, **kwargs):
super().__init__(estimator, **kwargs)
with pytest.raises(NotImplementedError,
match="_run_search not implemented."):
# this should raise a NotImplementedError
BadSearchCV(SVC()).fit(X, y)
def test_empty_cv_iterator_error():
# Use global X, y
# create cv
cv = KFold(n_splits=3).split(X)
# pop all of it, this should cause the expected ValueError
[u for u in cv]
# cv is empty now
train_size = 100
ridge = RandomizedSearchCV(Ridge(), {'alpha': [1e-3, 1e-2, 1e-1]},
cv=cv, n_jobs=4)
# assert that this raises an error
with pytest.raises(ValueError,
match='No fits were performed. '
'Was the CV iterator empty\\? '
'Were there no candidates\\?'):
ridge.fit(X[:train_size], y[:train_size])
def test_random_search_bad_cv():
# Use global X, y
class BrokenKFold(KFold):
def get_n_splits(self, *args, **kw):
return 1
# create bad cv
cv = BrokenKFold(n_splits=3)
train_size = 100
ridge = RandomizedSearchCV(Ridge(), {'alpha': [1e-3, 1e-2, 1e-1]},
cv=cv, n_jobs=4)
# assert that this raises an error
with pytest.raises(ValueError,
match='cv.split and cv.get_n_splits returned '
'inconsistent results. Expected \\d+ '
'splits, got \\d+'):
ridge.fit(X[:train_size], y[:train_size])
def test_n_features_in():
# make sure grid search and random search delegate n_features_in to the
# best estimator
n_features = 4
X, y = make_classification(n_features=n_features)
gbdt = HistGradientBoostingClassifier()
param_grid = {'max_iter': [3, 4]}
gs = GridSearchCV(gbdt, param_grid)
rs = RandomizedSearchCV(gbdt, param_grid, n_iter=1)
assert not hasattr(gs, 'n_features_in_')
assert not hasattr(rs, 'n_features_in_')
gs.fit(X, y)
rs.fit(X, y)
assert gs.n_features_in_ == n_features
assert rs.n_features_in_ == n_features
def test_search_cv__pairwise_property_delegated_to_base_estimator():
"""
    Test that the BaseSearchCV implementation has a _pairwise property
    which matches the _pairwise property of its estimator.
    This test makes sure _pairwise is delegated to the base estimator.
Non-regression test for issue #13920.
"""
est = BaseEstimator()
attr_message = "BaseSearchCV _pairwise property must match estimator"
for _pairwise_setting in [True, False]:
setattr(est, '_pairwise', _pairwise_setting)
cv = GridSearchCV(est, {'n_neighbors': [10]})
assert _pairwise_setting == cv._pairwise, attr_message
def test_search_cv__pairwise_property_equivalence_of_precomputed():
"""
    Test that the BaseSearchCV implementation has a _pairwise property
    which matches the _pairwise property of its estimator.
This test ensures the equivalence of 'precomputed'.
Non-regression test for issue #13920.
"""
n_samples = 50
n_splits = 2
X, y = make_classification(n_samples=n_samples, random_state=0)
grid_params = {'n_neighbors': [10]}
# defaults to euclidean metric (minkowski p = 2)
clf = KNeighborsClassifier()
cv = GridSearchCV(clf, grid_params, cv=n_splits)
cv.fit(X, y)
preds_original = cv.predict(X)
# precompute euclidean metric to validate _pairwise is working
X_precomputed = euclidean_distances(X)
clf = KNeighborsClassifier(metric='precomputed')
cv = GridSearchCV(clf, grid_params, cv=n_splits)
cv.fit(X_precomputed, y)
preds_precomputed = cv.predict(X_precomputed)
attr_message = "GridSearchCV not identical with precomputed metric"
assert (preds_original == preds_precomputed).all(), attr_message
@pytest.mark.parametrize(
"SearchCV, param_search",
[(GridSearchCV, {'a': [0.1, 0.01]}),
(RandomizedSearchCV, {'a': uniform(1, 3)})]
)
def test_scalar_fit_param(SearchCV, param_search):
# unofficially sanctioned tolerance for scalar values in fit_params
# non-regression test for:
# https://github.com/scikit-learn/scikit-learn/issues/15805
class TestEstimator(BaseEstimator, ClassifierMixin):
def __init__(self, a=None):
self.a = a
def fit(self, X, y, r=None):
self.r_ = r
def predict(self, X):
return np.zeros(shape=(len(X)))
model = SearchCV(TestEstimator(), param_search)
X, y = make_classification(random_state=42)
model.fit(X, y, r=42)
assert model.best_estimator_.r_ == 42
@pytest.mark.parametrize(
"SearchCV, param_search",
[(GridSearchCV, {'alpha': [0.1, 0.01]}),
(RandomizedSearchCV, {'alpha': uniform(0.01, 0.1)})]
)
def test_scalar_fit_param_compat(SearchCV, param_search):
    # check support for scalar values in fit_params, e.g. in LightGBM, which
    # does not exactly respect the scikit-learn API contract but which we do
    # not want to break without an explicit deprecation cycle and API
    # recommendations for implementing early stopping with a user-provided
    # validation set. non-regression test for:
# https://github.com/scikit-learn/scikit-learn/issues/15805
X_train, X_valid, y_train, y_valid = train_test_split(
*make_classification(random_state=42), random_state=42
)
class _FitParamClassifier(SGDClassifier):
def fit(self, X, y, sample_weight=None, tuple_of_arrays=None,
scalar_param=None, callable_param=None):
super().fit(X, y, sample_weight=sample_weight)
assert scalar_param > 0
assert callable(callable_param)
# The tuple of arrays should be preserved as tuple.
assert isinstance(tuple_of_arrays, tuple)
assert tuple_of_arrays[0].ndim == 2
assert tuple_of_arrays[1].ndim == 1
return self
def _fit_param_callable():
pass
model = SearchCV(
_FitParamClassifier(), param_search
)
# NOTE: `fit_params` should be data dependent (e.g. `sample_weight`) which
# is not the case for the following parameters. But this abuse is common in
# popular third-party libraries and we should tolerate this behavior for
# now and be careful not to break support for those without following
# proper deprecation cycle.
fit_params = {
'tuple_of_arrays': (X_valid, y_valid),
'callable_param': _fit_param_callable,
'scalar_param': 42,
}
model.fit(X_train, y_train, **fit_params)
18721), False, 'from sklearn.model_selection import GridSearchCV\n'), ((18842, 18877), 'sklearn.utils._testing.assert_array_equal', 'assert_array_equal', (['y_pred', 'y_pred3'], {}), '(y_pred, y_pred3)\n', (18860, 18877), False, 'from sklearn.utils._testing import assert_array_equal\n'), ((19053, 19119), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_samples': '(200)', 'n_features': '(100)', 'random_state': '(0)'}), '(n_samples=200, n_features=100, random_state=0)\n', (19072, 19119), False, 'from sklearn.datasets import make_classification\n'), ((19211, 19239), 'numpy.dot', 'np.dot', (['X_[:180]', 'X_[:180].T'], {}), '(X_[:180], X_[:180].T)\n', (19217, 19239), True, 'import numpy as np\n'), ((19274, 19299), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""precomputed"""'}), "(kernel='precomputed')\n", (19277, 19299), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((19309, 19345), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', "{'C': [0.1, 1.0]}"], {}), "(clf, {'C': [0.1, 1.0]})\n", (19321, 19345), False, 'from sklearn.model_selection import GridSearchCV\n'), ((19458, 19486), 'numpy.dot', 'np.dot', (['X_[180:]', 'X_[:180].T'], {}), '(X_[180:], X_[:180].T)\n', (19464, 19486), True, 'import numpy as np\n'), ((19919, 19937), 'numpy.zeros', 'np.zeros', (['(10, 20)'], {}), '((10, 20))\n', (19927, 19937), True, 'import numpy as np\n'), ((19952, 19966), 'numpy.ones', 'np.ones', (['(10,)'], {}), '((10,))\n', (19959, 19966), True, 'import numpy as np\n'), ((19978, 20003), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""precomputed"""'}), "(kernel='precomputed')\n", (19981, 20003), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((20013, 20049), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', "{'C': [0.1, 1.0]}"], {}), "(clf, {'C': [0.1, 1.0]})\n", (20025, 20049), False, 'from sklearn.model_selection import GridSearchCV\n'), ((20054, 20105), 'sklearn.utils._testing.assert_raises', 'assert_raises', (['ValueError', 'cv.fit', 'K_train', 'y_train'], {}), '(ValueError, cv.fit, K_train, y_train)\n', (20067, 20105), False, 'from sklearn.utils._testing import assert_raises\n'), ((20666, 20693), 'numpy.array', 'np.array', (['([0] * 5 + [1] * 5)'], {}), '([0] * 5 + [1] * 5)\n', (20674, 20693), True, 'import numpy as np\n'), ((21845, 21910), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_samples': '(100)', 'n_features': '(4)', 'random_state': '(42)'}), '(n_samples=100, n_features=4, random_state=42)\n', (21864, 21910), False, 'from sklearn.datasets import make_classification\n'), ((22574, 22639), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_samples': '(100)', 'n_features': '(4)', 'random_state': '(42)'}), '(n_samples=100, n_features=4, random_state=42)\n', (22593, 22639), False, 'from sklearn.datasets import make_classification\n'), ((23443, 23508), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_samples': '(100)', 'n_features': '(4)', 'random_state': '(42)'}), '(n_samples=100, n_features=4, random_state=42)\n', (23462, 23508), False, 'from sklearn.datasets import make_classification\n'), ((24208, 24273), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_samples': '(100)', 'n_features': '(4)', 'random_state': '(42)'}), '(n_samples=100, n_features=4, random_state=42)\n', (24227, 24273), False, 'from sklearn.datasets import make_classification\n'), ((24961, 25039), 'sklearn.utils._mocking.CheckingClassifier', 'CheckingClassifier', ([], {'check_X': 
'check_X', 'check_y': 'check_y', 'methods_to_check': "['fit']"}), "(check_X=check_X, check_y=check_y, methods_to_check=['fit'])\n", (24979, 25039), False, 'from sklearn.utils._mocking import CheckingClassifier, MockDataFrame\n'), ((25073, 25116), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', "{'foo_param': [1, 2, 3]}"], {}), "(clf, {'foo_param': [1, 2, 3]})\n", (25085, 25116), False, 'from sklearn.model_selection import GridSearchCV\n'), ((25316, 25343), 'numpy.array', 'np.array', (['([0] * 5 + [1] * 5)'], {}), '([0] * 5 + [1] * 5)\n', (25324, 25343), True, 'import numpy as np\n'), ((25463, 25480), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': '(3)'}), '(n_splits=3)\n', (25468, 25480), False, 'from sklearn.model_selection import KFold\n'), ((25499, 25549), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', "{'foo_param': [1, 2, 3]}"], {'cv': 'cv'}), "(clf, {'foo_param': [1, 2, 3]}, cv=cv)\n", (25511, 25549), False, 'from sklearn.model_selection import GridSearchCV\n'), ((25752, 25779), 'numpy.array', 'np.array', (['([0] * 5 + [1] * 5)'], {}), '([0] * 5 + [1] * 5)\n', (25760, 25779), True, 'import numpy as np\n'), ((25899, 25916), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': '(3)'}), '(n_splits=3)\n', (25904, 25916), False, 'from sklearn.model_selection import KFold\n'), ((25935, 25985), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', "{'foo_param': [1, 2, 3]}"], {'cv': 'cv'}), "(clf, {'foo_param': [1, 2, 3]}, cv=cv)\n", (25947, 25985), False, 'from sklearn.model_selection import GridSearchCV\n'), ((26411, 26438), 'numpy.array', 'np.array', (['([0] * 5 + [1] * 5)'], {}), '([0] * 5 + [1] * 5)\n', (26419, 26438), True, 'import numpy as np\n'), ((27111, 27151), 'sklearn.datasets.make_blobs', 'make_blobs', ([], {'n_samples': '(50)', 'random_state': '(0)'}), '(n_samples=50, random_state=0)\n', (27121, 27151), False, 'from sklearn.datasets import make_blobs\n'), ((27161, 27208), 'sklearn.cluster.KMeans', 'KMeans', ([], {'random_state': '(0)', 'init': '"""random"""', 'n_init': '(1)'}), "(random_state=0, init='random', n_init=1)\n", (27167, 27208), False, 'from sklearn.cluster import KMeans\n'), ((28370, 28447), 'sklearn.datasets.make_blobs', 'make_blobs', ([], {'cluster_std': '(0.1)', 'random_state': '(1)', 'centers': '[[0, 1], [1, 0], [0, 0]]'}), '(cluster_std=0.1, random_state=1, centers=[[0, 1], [1, 0], [0, 0]])\n', (28380, 28447), False, 'from sklearn.datasets import make_blobs\n'), ((28923, 29011), 'sklearn.model_selection.ParameterSampler', 'ParameterSampler', ([], {'param_distributions': 'param_distributions', 'n_iter': '(10)', 'random_state': '(0)'}), '(param_distributions=param_distributions, n_iter=10,\n random_state=0)\n', (28939, 29011), False, 'from sklearn.model_selection import ParameterSampler\n'), ((29362, 29449), 'sklearn.model_selection.ParameterSampler', 'ParameterSampler', ([], {'param_distributions': 'param_distributions', 'n_iter': '(3)', 'random_state': '(0)'}), '(param_distributions=param_distributions, n_iter=3,\n random_state=0)\n', (29378, 29449), False, 'from sklearn.model_selection import ParameterSampler\n'), ((30970, 31034), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_samples': '(50)', 'n_features': '(4)', 'random_state': '(42)'}), '(n_samples=50, n_features=4, random_state=42)\n', (30989, 31034), False, 'from sklearn.datasets import make_classification\n'), ((33149, 33213), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_samples': 
'(50)', 'n_features': '(4)', 'random_state': '(42)'}), '(n_samples=50, n_features=4, random_state=42)\n', (33168, 33213), False, 'from sklearn.datasets import make_classification\n'), ((35383, 35501), 'sklearn.datasets.make_blobs', 'make_blobs', ([], {'centers': '[[0, 0], [1, 0], [0, 1], [1, 1]]', 'random_state': '(0)', 'cluster_std': '(0.1)', 'shuffle': '(False)', 'n_samples': '(80)'}), '(centers=[[0, 0], [1, 0], [0, 1], [1, 1]], random_state=0,\n cluster_std=0.1, shuffle=False, n_samples=80)\n', (35393, 35501), False, 'from sklearn.datasets import make_blobs\n'), ((35651, 35685), 'numpy.ones', 'np.ones', (['X.shape[0]'], {'dtype': 'np.bool'}), '(X.shape[0], dtype=np.bool)\n', (35658, 35685), True, 'import numpy as np\n'), ((36704, 36751), 'sklearn.utils._testing.assert_allclose', 'assert_allclose', (['test_cv_scores', '[1, 1.0 / 3.0]'], {}), '(test_cv_scores, [1, 1.0 / 3.0])\n', (36719, 36751), False, 'from sklearn.utils._testing import assert_allclose\n'), ((36754, 36794), 'sklearn.utils._testing.assert_allclose', 'assert_allclose', (['train_cv_scores', '[1, 1]'], {}), '(train_cv_scores, [1, 1])\n', (36769, 36794), False, 'from sklearn.utils._testing import assert_allclose\n'), ((37192, 37256), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_samples': '(50)', 'n_features': '(4)', 'random_state': '(42)'}), '(n_samples=50, n_features=4, random_state=42)\n', (37211, 37256), False, 'from sklearn.datasets import make_classification\n'), ((37954, 38018), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_samples': '(50)', 'n_features': '(4)', 'random_state': '(42)'}), '(n_samples=50, n_features=4, random_state=42)\n', (37973, 38018), False, 'from sklearn.datasets import make_classification\n'), ((41100, 41156), 'sklearn.datasets.make_blobs', 'make_blobs', ([], {'n_samples': '(100)', 'n_features': '(4)', 'random_state': '(42)'}), '(n_samples=100, n_features=4, random_state=42)\n', (41110, 41156), False, 'from sklearn.datasets import make_blobs\n'), ((41616, 41657), 'sklearn.datasets.make_blobs', 'make_blobs', ([], {'n_samples': '(50)', 'random_state': '(42)'}), '(n_samples=50, random_state=42)\n', (41626, 41657), False, 'from sklearn.datasets import make_blobs\n'), ((43260, 43267), 'sklearn.model_selection.KFold', 'KFold', ([], {}), '()\n', (43265, 43267), False, 'from sklearn.model_selection import KFold\n'), ((43578, 43603), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(0)'}), '(random_state=0)\n', (43587, 43603), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((43673, 43726), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['svc', "{'C': [0, 1]}"], {'cv': '(2)', 'error_score': '(0)'}), "(svc, {'C': [0, 1]}, cv=2, error_score=0)\n", (43685, 43726), False, 'from sklearn.model_selection import GridSearchCV\n'), ((43736, 43805), 'sklearn.model_selection.RandomizedSearchCV', 'RandomizedSearchCV', (['svc', "{'C': [0, 1]}"], {'cv': '(2)', 'error_score': '(0)', 'n_iter': '(2)'}), "(svc, {'C': [0, 1]}, cv=2, error_score=0, n_iter=2)\n", (43754, 43805), False, 'from sklearn.model_selection import RandomizedSearchCV\n'), ((44646, 44671), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(0)'}), '(random_state=0)\n', (44655, 44671), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((44683, 44720), 'sklearn.datasets.make_blobs', 'make_blobs', ([], {'random_state': '(0)', 'centers': '(2)'}), '(random_state=0, centers=2)\n', (44693, 44720), False, 'from sklearn.datasets import make_blobs\n'), ((46207, 46242), 
'sklearn.datasets.make_classification', 'make_classification', ([], {'random_state': '(0)'}), '(random_state=0)\n', (46226, 46242), False, 'from sklearn.datasets import make_classification\n'), ((46252, 46269), 'sklearn.model_selection.StratifiedKFold', 'StratifiedKFold', ([], {}), '()\n', (46267, 46269), False, 'from sklearn.model_selection import StratifiedKFold\n'), ((46280, 46305), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(0)'}), '(random_state=0)\n', (46289, 46305), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((46319, 46346), 'sklearn.metrics.make_scorer', 'make_scorer', (['accuracy_score'], {}), '(accuracy_score)\n', (46330, 46346), False, 'from sklearn.metrics import make_scorer\n'), ((47027, 47240), 'sklearn.utils._testing.assert_raise_message', 'assert_raise_message', (['ValueError', '"""For evaluating multiple scores, use sklearn.model_selection.cross_validate instead."""', 'fit_grid_point', 'X', 'y', 'svc', 'params', 'train', 'test', "{'score': scorer}"], {'verbose': '(True)'}), "(ValueError,\n 'For evaluating multiple scores, use sklearn.model_selection.cross_validate instead.'\n , fit_grid_point, X, y, svc, params, train, test, {'score': scorer},\n verbose=True)\n", (47047, 47240), False, 'from sklearn.utils._testing import assert_raise_message\n'), ((47447, 47482), 'sklearn.datasets.make_classification', 'make_classification', ([], {'random_state': '(0)'}), '(random_state=0)\n', (47466, 47482), False, 'from sklearn.datasets import make_classification\n'), ((47493, 47518), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(0)'}), '(random_state=0)\n', (47502, 47518), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((47532, 47559), 'sklearn.metrics.make_scorer', 'make_scorer', (['accuracy_score'], {}), '(accuracy_score)\n', (47543, 47559), False, 'from sklearn.metrics import make_scorer\n'), ((47985, 48046), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', "{'foo_param': [1, 2, 3]}"], {'refit': '(True)', 'cv': '(3)'}), "(clf, {'foo_param': [1, 2, 3]}, refit=True, cv=3)\n", (47997, 48046), False, 'from sklearn.model_selection import GridSearchCV\n'), ((48276, 48353), 'sklearn.model_selection.RandomizedSearchCV', 'RandomizedSearchCV', (['clf', "{'foo_param': [1, 2, 3]}"], {'refit': '(True)', 'n_iter': '(3)', 'cv': '(3)'}), "(clf, {'foo_param': [1, 2, 3]}, refit=True, n_iter=3, cv=3)\n", (48294, 48353), False, 'from sklearn.model_selection import RandomizedSearchCV\n'), ((48717, 48786), 'sklearn.datasets.make_multilabel_classification', 'make_multilabel_classification', ([], {'return_indicator': '(True)', 'random_state': '(0)'}), '(return_indicator=True, random_state=0)\n', (48747, 48786), False, 'from sklearn.datasets import make_multilabel_classification\n'), ((48888, 48895), 'sklearn.model_selection.KFold', 'KFold', ([], {}), '()\n', (48893, 48895), False, 'from sklearn.model_selection import KFold\n'), ((50533, 50555), 'sklearn.svm.SVC', 'SVC', ([], {'probability': '(False)'}), '(probability=False)\n', (50536, 50555), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((51707, 51771), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_samples': '(20)', 'n_features': '(10)', 'random_state': '(0)'}), '(n_samples=20, n_features=10, random_state=0)\n', (51726, 51771), False, 'from sklearn.datasets import make_classification\n'), ((52133, 52233), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', "[{'parameter': [0, 1, 2]}]"], {'scoring': '"""accuracy"""', 'refit': '(False)', 
'error_score': '(0.0)'}), "(clf, [{'parameter': [0, 1, 2]}], scoring='accuracy', refit=\n False, error_score=0.0)\n", (52145, 52233), False, 'from sklearn.model_selection import GridSearchCV\n'), ((52255, 52299), 'sklearn.utils._testing.assert_warns', 'assert_warns', (['FitFailedWarning', 'gs.fit', 'X', 'y'], {}), '(FitFailedWarning, gs.fit, X, y)\n', (52267, 52299), False, 'from sklearn.utils._testing import assert_warns\n'), ((52983, 53027), 'sklearn.utils._testing.assert_warns', 'assert_warns', (['FitFailedWarning', 'gs.fit', 'X', 'y'], {}), '(FitFailedWarning, gs.fit, X, y)\n', (52995, 53027), False, 'from sklearn.utils._testing import assert_warns\n'), ((53719, 53783), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_samples': '(20)', 'n_features': '(10)', 'random_state': '(0)'}), '(n_samples=20, n_features=10, random_state=0)\n', (53738, 53783), False, 'from sklearn.datasets import make_classification\n'), ((53905, 54009), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', "[{'parameter': [0, 1, 2]}]"], {'scoring': '"""accuracy"""', 'refit': '(False)', 'error_score': '"""raise"""'}), "(clf, [{'parameter': [0, 1, 2]}], scoring='accuracy', refit=\n False, error_score='raise')\n", (53917, 54009), False, 'from sklearn.model_selection import GridSearchCV\n'), ((54105, 54144), 'sklearn.utils._testing.assert_raises', 'assert_raises', (['ValueError', 'gs.fit', 'X', 'y'], {}), '(ValueError, gs.fit, X, y)\n', (54118, 54144), False, 'from sklearn.utils._testing import assert_raises\n'), ((54375, 54409), 'sklearn.model_selection.ParameterSampler', 'ParameterSampler', (['params'], {'n_iter': '(9)'}), '(params, n_iter=9)\n', (54391, 54409), False, 'from sklearn.model_selection import ParameterSampler\n'), ((54711, 54777), 'sklearn.utils._testing.assert_warns_message', 'assert_warns_message', (['UserWarning', 'expected_warning', 'list', 'sampler'], {}), '(UserWarning, expected_warning, list, sampler)\n', (54731, 54777), False, 'from sklearn.utils._testing import assert_warns_message\n'), ((54884, 54918), 'sklearn.model_selection.ParameterSampler', 'ParameterSampler', (['params'], {'n_iter': '(8)'}), '(params, n_iter=8)\n', (54900, 54918), False, 'from sklearn.model_selection import ParameterSampler\n'), ((54994, 55015), 'sklearn.model_selection.ParameterGrid', 'ParameterGrid', (['params'], {}), '(params)\n', (55007, 55015), False, 'from sklearn.model_selection import ParameterGrid\n'), ((55183, 55235), 'sklearn.model_selection.ParameterSampler', 'ParameterSampler', (['params'], {'n_iter': '(99)', 'random_state': '(42)'}), '(params, n_iter=99, random_state=42)\n', (55199, 55235), False, 'from sklearn.model_selection import ParameterSampler\n'), ((55573, 55620), 'sklearn.model_selection.ParameterSampler', 'ParameterSampler', (['params_distribution'], {'n_iter': '(7)'}), '(params_distribution, n_iter=7)\n', (55589, 55620), False, 'from sklearn.model_selection import ParameterSampler\n'), ((56777, 56802), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(0)'}), '(random_state=0)\n', (56786, 56802), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((56813, 56866), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf'], {'param_grid': "{'C': [0.1, 0.2]}", 'cv': '(3)'}), "(clf, param_grid={'C': [0.1, 0.2]}, cv=3)\n", (56825, 56866), False, 'from sklearn.model_selection import GridSearchCV\n'), ((57046, 57102), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_samples': 'n_samples', 'random_state': '(0)'}), 
'(n_samples=n_samples, random_state=0)\n', (57065, 57102), False, 'from sklearn.datasets import make_classification\n'), ((60560, 60620), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', "{'foo_param': [1, 2, 3]}"], {'cv': '(3)', 'verbose': '(3)'}), "(clf, {'foo_param': [1, 2, 3]}, cv=3, verbose=3)\n", (60572, 60620), False, 'from sklearn.model_selection import GridSearchCV\n'), ((60727, 60762), 'sklearn.utils._testing.assert_array_equal', 'assert_array_equal', (['X', 'X_round_trip'], {}), '(X, X_round_trip)\n', (60745, 60762), False, 'from sklearn.utils._testing import assert_array_equal\n'), ((62223, 62260), 'sklearn.tree.DecisionTreeRegressor', 'DecisionTreeRegressor', ([], {'random_state': '(0)'}), '(random_state=0)\n', (62244, 62260), False, 'from sklearn.tree import DecisionTreeRegressor\n'), ((62272, 62339), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_samples': '(100)', 'n_informative': '(4)', 'random_state': '(0)'}), '(n_samples=100, n_informative=4, random_state=0)\n', (62291, 62339), False, 'from sklearn.datasets import make_classification\n'), ((65082, 65124), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_features': 'n_features'}), '(n_features=n_features)\n', (65101, 65124), False, 'from sklearn.datasets import make_classification\n'), ((65136, 65168), 'sklearn.ensemble.HistGradientBoostingClassifier', 'HistGradientBoostingClassifier', ([], {}), '()\n', (65166, 65168), False, 'from sklearn.ensemble import HistGradientBoostingClassifier\n'), ((65216, 65246), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['gbdt', 'param_grid'], {}), '(gbdt, param_grid)\n', (65228, 65246), False, 'from sklearn.model_selection import GridSearchCV\n'), ((65256, 65302), 'sklearn.model_selection.RandomizedSearchCV', 'RandomizedSearchCV', (['gbdt', 'param_grid'], {'n_iter': '(1)'}), '(gbdt, param_grid, n_iter=1)\n', (65274, 65302), False, 'from sklearn.model_selection import RandomizedSearchCV\n'), ((65849, 65864), 'sklearn.base.BaseEstimator', 'BaseEstimator', ([], {}), '()\n', (65862, 65864), False, 'from sklearn.base import BaseEstimator, ClassifierMixin\n'), ((66512, 66568), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_samples': 'n_samples', 'random_state': '(0)'}), '(n_samples=n_samples, random_state=0)\n', (66531, 66568), False, 'from sklearn.datasets import make_classification\n'), ((66673, 66695), 'sklearn.neighbors.KNeighborsClassifier', 'KNeighborsClassifier', ([], {}), '()\n', (66693, 66695), False, 'from sklearn.neighbors import KNeighborsClassifier\n'), ((66705, 66748), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', 'grid_params'], {'cv': 'n_splits'}), '(clf, grid_params, cv=n_splits)\n', (66717, 66748), False, 'from sklearn.model_selection import GridSearchCV\n'), ((66889, 66911), 'sklearn.metrics.pairwise.euclidean_distances', 'euclidean_distances', (['X'], {}), '(X)\n', (66908, 66911), False, 'from sklearn.metrics.pairwise import euclidean_distances\n'), ((66922, 66964), 'sklearn.neighbors.KNeighborsClassifier', 'KNeighborsClassifier', ([], {'metric': '"""precomputed"""'}), "(metric='precomputed')\n", (66942, 66964), False, 'from sklearn.neighbors import KNeighborsClassifier\n'), ((66974, 67017), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', 'grid_params'], {'cv': 'n_splits'}), '(clf, grid_params, cv=n_splits)\n', (66986, 67017), False, 'from sklearn.model_selection import GridSearchCV\n'), ((67924, 67960), 
'sklearn.datasets.make_classification', 'make_classification', ([], {'random_state': '(42)'}), '(random_state=42)\n', (67943, 67960), False, 'from sklearn.datasets import make_classification\n'), ((3263, 3275), 'numpy.unique', 'np.unique', (['Y'], {}), '(Y)\n', (3272, 3275), True, 'import numpy as np\n'), ((4781, 4827), 'pytest.raises', 'pytest.raises', (['error_type'], {'match': 'error_message'}), '(error_type, match=error_message)\n', (4794, 4827), False, 'import pytest\n'), ((4345, 4381), 'functools.partial', 'partial', (['ParameterSampler'], {'n_iter': '(10)'}), '(ParameterSampler, n_iter=10)\n', (4352, 4381), False, 'from functools import partial\n'), ((8480, 8524), 'pytest.raises', 'pytest.raises', (['AssertionError'], {'match': 'err_msg'}), '(AssertionError, match=err_msg)\n', (8493, 8524), False, 'import pytest\n'), ((8639, 8683), 'pytest.raises', 'pytest.raises', (['AssertionError'], {'match': 'err_msg'}), '(AssertionError, match=err_msg)\n', (8652, 8683), False, 'import pytest\n'), ((11460, 11478), 'sklearn.model_selection.LeaveOneGroupOut', 'LeaveOneGroupOut', ([], {}), '()\n', (11476, 11478), False, 'from sklearn.model_selection import LeaveOneGroupOut\n'), ((11480, 11498), 'sklearn.model_selection.LeavePGroupsOut', 'LeavePGroupsOut', (['(2)'], {}), '(2)\n', (11495, 11498), False, 'from sklearn.model_selection import LeavePGroupsOut\n'), ((11517, 11539), 'sklearn.model_selection.GroupKFold', 'GroupKFold', ([], {'n_splits': '(3)'}), '(n_splits=3)\n', (11527, 11539), False, 'from sklearn.model_selection import GroupKFold\n'), ((11541, 11560), 'sklearn.model_selection.GroupShuffleSplit', 'GroupShuffleSplit', ([], {}), '()\n', (11558, 11560), False, 'from sklearn.model_selection import GroupShuffleSplit\n'), ((11600, 11630), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', 'grid'], {'cv': 'cv'}), '(clf, grid, cv=cv)\n', (11612, 11630), False, 'from sklearn.model_selection import GridSearchCV\n'), ((11639, 11735), 'sklearn.utils._testing.assert_raise_message', 'assert_raise_message', (['ValueError', '"""The \'groups\' parameter should not be None."""', 'gs.fit', 'X', 'y'], {}), '(ValueError,\n "The \'groups\' parameter should not be None.", gs.fit, X, y)\n', (11659, 11735), False, 'from sklearn.utils._testing import assert_raise_message\n'), ((11848, 11865), 'sklearn.model_selection.StratifiedKFold', 'StratifiedKFold', ([], {}), '()\n', (11863, 11865), False, 'from sklearn.model_selection import StratifiedKFold\n'), ((11867, 11891), 'sklearn.model_selection.StratifiedShuffleSplit', 'StratifiedShuffleSplit', ([], {}), '()\n', (11889, 11891), False, 'from sklearn.model_selection import StratifiedShuffleSplit\n'), ((11935, 11965), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', 'grid'], {'cv': 'cv'}), '(clf, grid, cv=cv)\n', (11947, 11965), False, 'from sklearn.model_selection import GridSearchCV\n'), ((12250, 12275), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(0)'}), '(random_state=0)\n', (12259, 12275), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((12512, 12519), 'sklearn.linear_model.Ridge', 'Ridge', ([], {}), '()\n', (12517, 12519), False, 'from sklearn.linear_model import Ridge, SGDClassifier, LinearRegression\n'), ((12726, 12751), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(0)'}), '(random_state=0)\n', (12735, 12751), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((12920, 12945), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(0)'}), '(random_state=0)\n', (12929, 12945), False, 'from 
sklearn.svm import LinearSVC, SVC\n'), ((13688, 13750), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', "{'foo_param': [1, 2, 3]}"], {'refit': '(False)', 'cv': '(3)'}), "(clf, {'foo_param': [1, 2, 3]}, refit=False, cv=3)\n", (13700, 13750), False, 'from sklearn.model_selection import GridSearchCV\n'), ((17052, 17067), 'numpy.ones', 'np.ones', (['(3, 2)'], {}), '((3, 2))\n', (17059, 17067), True, 'import numpy as np\n'), ((17704, 17730), 'numpy.mean', 'np.mean', (['(y_pred == y_pred2)'], {}), '(y_pred == y_pred2)\n', (17711, 17730), True, 'import numpy as np\n'), ((19554, 19579), 'numpy.mean', 'np.mean', (['(y_pred == y_test)'], {}), '(y_pred == y_test)\n', (19561, 19579), True, 'import numpy as np\n'), ((20428, 20448), 'numpy.zeros', 'np.zeros', (['X.shape[0]'], {}), '(X.shape[0])\n', (20436, 20448), True, 'import numpy as np\n'), ((21283, 21348), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_samples': '(100)', 'n_features': '(4)', 'random_state': '(42)'}), '(n_samples=100, n_features=4, random_state=42)\n', (21302, 21348), False, 'from sklearn.datasets import make_classification\n'), ((21965, 21991), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(42)'}), '(random_state=42)\n', (21974, 21991), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((22695, 22721), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(42)'}), '(random_state=42)\n', (22704, 22721), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((22828, 22900), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""best_index_ returned is not an integer"""'}), "(TypeError, match='best_index_ returned is not an integer')\n", (22841, 22900), False, 'import pytest\n'), ((23561, 23587), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(42)'}), '(random_state=42)\n', (23570, 23587), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((23688, 23753), 'pytest.raises', 'pytest.raises', (['IndexError'], {'match': '"""best_index_ index out of range"""'}), "(IndexError, match='best_index_ index out of range')\n", (23701, 23753), False, 'import pytest\n'), ((24332, 24359), 'sklearn.metrics.make_scorer', 'make_scorer', (['accuracy_score'], {}), '(accuracy_score)\n', (24343, 24359), False, 'from sklearn.metrics import make_scorer\n'), ((24405, 24431), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(42)'}), '(random_state=42)\n', (24414, 24431), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((26743, 26801), 'sklearn.utils._mocking.CheckingClassifier', 'CheckingClassifier', ([], {'check_X': 'check_df', 'check_y': 'check_series'}), '(check_X=check_df, check_y=check_series)\n', (26761, 26801), False, 'from sklearn.utils._mocking import CheckingClassifier, MockDataFrame\n'), ((26825, 26868), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', "{'foo_param': [1, 2, 3]}"], {}), "(clf, {'foo_param': [1, 2, 3]})\n", (26837, 26868), False, 'from sklearn.model_selection import GridSearchCV\n'), ((28495, 28510), 'sklearn.neighbors.KernelDensity', 'KernelDensity', ([], {}), '()\n', (28508, 28510), False, 'from sklearn.neighbors import KernelDensity\n'), ((28894, 28907), 'scipy.stats.uniform', 'uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (28901, 28907), False, 'from scipy.stats import bernoulli, expon, uniform\n'), ((29633, 29721), 'sklearn.model_selection.ParameterSampler', 'ParameterSampler', ([], {'param_distributions': 'param_distributions', 'n_iter': '(10)', 'random_state': '(0)'}), '(param_distributions=param_distributions, 
n_iter=10,\n random_state=0)\n', (29649, 29721), False, 'from sklearn.model_selection import ParameterSampler\n'), ((31818, 31823), 'sklearn.svm.SVC', 'SVC', ([], {}), '()\n', (31821, 31823), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((34001, 34006), 'sklearn.svm.SVC', 'SVC', ([], {}), '()\n', (34004, 34006), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((35961, 35966), 'sklearn.svm.SVC', 'SVC', ([], {}), '()\n', (35964, 35966), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((37074, 37090), 'pytest.approx', 'pytest.approx', (['(1)'], {}), '(1)\n', (37087, 37090), False, 'import pytest\n'), ((37115, 37131), 'pytest.approx', 'pytest.approx', (['(0)'], {}), '(0)\n', (37128, 37131), False, 'import pytest\n'), ((39745, 39778), 're.sub', 're.sub', (['"""_score$"""', '"""_accuracy"""', 'k'], {}), "('_score$', '_accuracy', k)\n", (39751, 39778), False, 'import re\n'), ((41854, 41859), 'sklearn.svm.SVC', 'SVC', ([], {}), '()\n', (41857, 41859), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((41979, 41984), 'sklearn.svm.SVC', 'SVC', ([], {}), '()\n', (41982, 41984), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((42391, 42483), 'sklearn.utils._testing.assert_almost_equal', 'assert_almost_equal', (["cv_results['mean_test_score'][0]", "cv_results['mean_test_score'][1]"], {}), "(cv_results['mean_test_score'][0], cv_results[\n 'mean_test_score'][1])\n", (42410, 42483), False, 'from sklearn.utils._testing import assert_almost_equal\n'), ((42515, 42609), 'sklearn.utils._testing.assert_almost_equal', 'assert_almost_equal', (["cv_results['mean_train_score'][0]", "cv_results['mean_train_score'][1]"], {}), "(cv_results['mean_train_score'][0], cv_results[\n 'mean_train_score'][1])\n", (42534, 42609), False, 'from sklearn.utils._testing import assert_almost_equal\n'), ((42966, 43035), 'sklearn.utils._testing.assert_almost_equal', 'assert_almost_equal', (["search.cv_results_['rank_test_score']", '[1, 1, 3]'], {}), "(search.cv_results_['rank_test_score'], [1, 1, 3])\n", (42985, 43035), False, 'from sklearn.utils._testing import assert_almost_equal\n'), ((43151, 43174), 'sklearn.tree.DecisionTreeRegressor', 'DecisionTreeRegressor', ([], {}), '()\n', (43172, 43174), False, 'from sklearn.tree import DecisionTreeRegressor\n'), ((43176, 43200), 'sklearn.tree.DecisionTreeClassifier', 'DecisionTreeClassifier', ([], {}), '()\n', (43198, 43200), False, 'from sklearn.tree import DecisionTreeClassifier\n'), ((43414, 43490), 'sklearn.utils._testing.assert_array_equal', 'assert_array_equal', (["grid_search.cv_results_['param_random_state']", '[0, None]'], {}), "(grid_search.cv_results_['param_random_state'], [0, None])\n", (43432, 43490), False, 'from sklearn.utils._testing import assert_array_equal\n'), ((44800, 44856), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', "{'C': Cs}"], {'scoring': 'score', 'cv': 'n_splits'}), "(clf, {'C': Cs}, scoring=score, cv=n_splits)\n", (44812, 44856), False, 'from sklearn.model_selection import GridSearchCV\n'), ((45243, 45277), 'sklearn.model_selection.StratifiedKFold', 'StratifiedKFold', ([], {'n_splits': 'n_splits'}), '(n_splits=n_splits)\n', (45258, 45277), False, 'from sklearn.model_selection import StratifiedKFold\n'), ((47757, 47795), 'pytest.warns', 'pytest.warns', (['FutureWarning'], {'match': 'msg'}), '(FutureWarning, match=msg)\n', (47769, 47795), False, 'import pytest\n'), ((47805, 47874), 'sklearn.model_selection.fit_grid_point', 'fit_grid_point', (['X', 'y', 'svc', 'params', 'train', 'test', 'scorer'], {'verbose': 
'(False)'}), '(X, y, svc, params, train, test, scorer, verbose=False)\n', (47819, 47874), False, 'from sklearn.model_selection import fit_grid_point\n'), ((48112, 48137), 'pickle.dumps', 'pickle.dumps', (['grid_search'], {}), '(grid_search)\n', (48124, 48137), False, 'import pickle\n'), ((48462, 48489), 'pickle.dumps', 'pickle.dumps', (['random_search'], {}), '(random_search)\n', (48474, 48489), False, 'import pickle\n'), ((48915, 48952), 'sklearn.tree.DecisionTreeRegressor', 'DecisionTreeRegressor', ([], {'random_state': '(0)'}), '(random_state=0)\n', (48936, 48952), False, 'from sklearn.tree import DecisionTreeRegressor\n'), ((48972, 49010), 'sklearn.tree.DecisionTreeClassifier', 'DecisionTreeClassifier', ([], {'random_state': '(0)'}), '(random_state=0)\n', (48994, 49010), False, 'from sklearn.tree import DecisionTreeClassifier\n'), ((49093, 49133), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['est', 'est_parameters'], {'cv': 'cv'}), '(est, est_parameters, cv=cv)\n', (49105, 49133), False, 'from sklearn.model_selection import GridSearchCV\n'), ((49721, 49777), 'sklearn.model_selection.RandomizedSearchCV', 'RandomizedSearchCV', (['est', 'est_parameters'], {'cv': 'cv', 'n_iter': '(3)'}), '(est, est_parameters, cv=cv, n_iter=3)\n', (49739, 49777), False, 'from sklearn.model_selection import RandomizedSearchCV\n'), ((51455, 51475), 'numpy.zeros', 'np.zeros', (['X.shape[0]'], {}), '(X.shape[0])\n', (51463, 51475), True, 'import numpy as np\n'), ((55517, 55531), 'scipy.stats.bernoulli', 'bernoulli', (['(0.5)'], {}), '(0.5)\n', (55526, 55531), False, 'from scipy.stats import bernoulli, expon, uniform\n'), ((57126, 57151), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(0)'}), '(random_state=0)\n', (57135, 57151), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((57421, 57446), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(0)'}), '(random_state=0)\n', (57430, 57446), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((57819, 57844), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(0)'}), '(random_state=0)\n', (57828, 57844), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((58120, 58145), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(0)'}), '(random_state=0)\n', (58129, 58145), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((59502, 59527), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(0)'}), '(random_state=0)\n', (59511, 59527), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((60250, 60317), 'sklearn.utils._testing.assert_array_almost_equal', 'assert_array_almost_equal', (['per_param_scores[0]', 'per_param_scores[1]'], {}), '(per_param_scores[0], per_param_scores[1])\n', (60275, 60317), False, 'from sklearn.utils._testing import assert_array_almost_equal\n'), ((60360, 60427), 'sklearn.utils._testing.assert_array_almost_equal', 'assert_array_almost_equal', (['per_param_scores[2]', 'per_param_scores[3]'], {}), '(per_param_scores[2], per_param_scores[3])\n', (60385, 60427), False, 'from sklearn.utils._testing import assert_array_almost_equal\n'), ((63418, 63490), 'pytest.raises', 'pytest.raises', (['NotImplementedError'], {'match': '"""_run_search not implemented."""'}), "(NotImplementedError, match='_run_search not implemented.')\n", (63431, 63490), False, 'import pytest\n'), ((63874, 63881), 'sklearn.linear_model.Ridge', 'Ridge', ([], {}), '()\n', (63879, 63881), False, 'from sklearn.linear_model import Ridge, SGDClassifier, LinearRegression\n'), ((64011, 64136), 'pytest.raises', 
'pytest.raises', (['ValueError'], {'match': '"""No fits were performed. Was the CV iterator empty\\\\? Were there no candidates\\\\?"""'}), "(ValueError, match=\n 'No fits were performed. Was the CV iterator empty\\\\? Were there no candidates\\\\?'\n )\n", (64024, 64136), False, 'import pytest\n'), ((64526, 64533), 'sklearn.linear_model.Ridge', 'Ridge', ([], {}), '()\n', (64531, 64533), False, 'from sklearn.linear_model import Ridge, SGDClassifier, LinearRegression\n'), ((64663, 64798), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""cv.split and cv.get_n_splits returned inconsistent results. Expected \\\\d+ splits, got \\\\d+"""'}), "(ValueError, match=\n 'cv.split and cv.get_n_splits returned inconsistent results. Expected \\\\d+ splits, got \\\\d+'\n )\n", (64676, 64798), False, 'import pytest\n'), ((66050, 66090), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['est', "{'n_neighbors': [10]}"], {}), "(est, {'n_neighbors': [10]})\n", (66062, 66090), False, 'from sklearn.model_selection import GridSearchCV\n'), ((7209, 7227), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (7225, 7227), False, 'from sklearn.linear_model import Ridge, SGDClassifier, LinearRegression\n'), ((7229, 7236), 'sklearn.linear_model.Ridge', 'Ridge', ([], {}), '()\n', (7234, 7236), False, 'from sklearn.linear_model import Ridge, SGDClassifier, LinearRegression\n'), ((8059, 8073), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (8068, 8073), True, 'import numpy as np\n'), ((8776, 8787), 'numpy.ones', 'np.ones', (['(10)'], {}), '(10)\n', (8783, 8787), True, 'import numpy as np\n'), ((8794, 8806), 'numpy.zeros', 'np.zeros', (['(10)'], {}), '(10)\n', (8802, 8806), True, 'import numpy as np\n'), ((10084, 10121), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', 'grid'], {'scoring': 'None'}), '(clf, grid, scoring=None)\n', (10096, 10121), False, 'from sklearn.model_selection import GridSearchCV\n'), ((10154, 10197), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', 'grid'], {'scoring': '"""accuracy"""'}), "(clf, grid, scoring='accuracy')\n", (10166, 10197), False, 'from sklearn.model_selection import GridSearchCV\n'), ((10419, 10461), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', 'grid'], {'scoring': '"""roc_auc"""'}), "(clf, grid, scoring='roc_auc')\n", (10431, 10461), False, 'from sklearn.model_selection import GridSearchCV\n'), ((12130, 12144), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (12139, 12144), True, 'import numpy as np\n'), ((18574, 18600), 'sklearn.metrics.f1_score', 'f1_score', (['y_true_', 'y_pred_'], {}), '(y_true_, y_pred_)\n', (18582, 18600), False, 'from sklearn.metrics import f1_score\n'), ((20627, 20641), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (20636, 20641), True, 'import numpy as np\n'), ((21411, 21437), 'sklearn.svm.LinearSVC', 'LinearSVC', ([], {'random_state': '(42)'}), '(random_state=42)\n', (21420, 21437), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((24755, 24780), 'numpy.arange', 'np.arange', (['(10 * 5 * 3 * 2)'], {}), '(10 * 5 * 3 * 2)\n', (24764, 24780), True, 'import numpy as np\n'), ((24813, 24835), 'numpy.arange', 'np.arange', (['(10 * 7 * 11)'], {}), '(10 * 7 * 11)\n', (24822, 24835), True, 'import numpy as np\n'), ((25277, 25291), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (25286, 25291), True, 'import numpy as np\n'), ((25713, 25727), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (25722, 25727), True, 
'import numpy as np\n'), ((26372, 26386), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (26381, 26386), True, 'import numpy as np\n'), ((29600, 29613), 'scipy.stats.uniform', 'uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (29607, 29613), False, 'from scipy.stats import bernoulli, expon, uniform\n'), ((33295, 33310), 'scipy.stats.expon', 'expon', ([], {'scale': '(10)'}), '(scale=10)\n', (33300, 33310), False, 'from scipy.stats import bernoulli, expon, uniform\n'), ((33336, 33352), 'scipy.stats.expon', 'expon', ([], {'scale': '(0.1)'}), '(scale=0.1)\n', (33341, 33352), False, 'from scipy.stats import bernoulli, expon, uniform\n'), ((36867, 36890), 'numpy.mean', 'np.mean', (['test_cv_scores'], {}), '(test_cv_scores)\n', (36874, 36890), True, 'import numpy as np\n'), ((36929, 36951), 'numpy.std', 'np.std', (['test_cv_scores'], {}), '(test_cv_scores)\n', (36935, 36951), True, 'import numpy as np\n'), ((37451, 37478), 'sklearn.metrics.make_scorer', 'make_scorer', (['accuracy_score'], {}), '(accuracy_score)\n', (37462, 37478), False, 'from sklearn.metrics import make_scorer\n'), ((37511, 37536), 'sklearn.metrics.make_scorer', 'make_scorer', (['recall_score'], {}), '(recall_score)\n', (37522, 37536), False, 'from sklearn.metrics import make_scorer\n'), ((37617, 37622), 'sklearn.svm.SVC', 'SVC', ([], {}), '()\n', (37620, 37622), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((38156, 38177), 'numpy.logspace', 'np.logspace', (['(-4)', '(1)', '(3)'], {}), '(-4, 1, 3)\n', (38167, 38177), True, 'import numpy as np\n'), ((38203, 38234), 'numpy.logspace', 'np.logspace', (['(-5)', '(0)', '(3)'], {'base': '(0.1)'}), '(-5, 0, 3, base=0.1)\n', (38214, 38234), True, 'import numpy as np\n'), ((38635, 38680), 'sklearn.svm.SVC', 'SVC', ([], {'probability': 'probability', 'random_state': '(42)'}), '(probability=probability, random_state=42)\n', (38638, 38680), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((38709, 38845), 'sklearn.model_selection.RandomizedSearchCV', 'RandomizedSearchCV', (['clf'], {'n_iter': 'n_search_iter', 'cv': 'n_splits', 'param_distributions': 'params', 'scoring': 'scoring', 'refit': 'refit', 'random_state': '(0)'}), '(clf, n_iter=n_search_iter, cv=n_splits,\n param_distributions=params, scoring=scoring, refit=refit, random_state=0)\n', (38727, 38845), False, 'from sklearn.model_selection import RandomizedSearchCV\n'), ((39883, 39914), 're.sub', 're.sub', (['"""_score$"""', '"""_recall"""', 'k'], {}), "('_score$', '_recall', k)\n", (39889, 39914), False, 'import re\n'), ((40101, 40133), 'numpy.all', 'np.all', (['(cv_results_multi[k] <= 1)'], {}), '(cv_results_multi[k] <= 1)\n', (40107, 40133), True, 'import numpy as np\n'), ((42652, 42731), 'numpy.allclose', 'np.allclose', (["cv_results['mean_test_score'][1]", "cv_results['mean_test_score'][2]"], {}), "(cv_results['mean_test_score'][1], cv_results['mean_test_score'][2])\n", (42663, 42731), True, 'import numpy as np\n'), ((42782, 42868), 'numpy.allclose', 'np.allclose', (["cv_results['mean_train_score'][1]", "cv_results['mean_train_score'][2]"], {}), "(cv_results['mean_train_score'][1], cv_results[\n 'mean_train_score'][2])\n", (42793, 42868), True, 'import numpy as np\n'), ((44082, 44118), 'numpy.all', 'np.all', (['(search.cv_results_[key] >= 0)'], {}), '(search.cv_results_[key] >= 0)\n', (44088, 44118), True, 'import numpy as np\n'), ((44138, 44173), 'numpy.all', 'np.all', (['(search.cv_results_[key] < 1)'], {}), '(search.cv_results_[key] < 1)\n', (44144, 44173), True, 'import numpy as np\n'), ((44356, 44391), 
'numpy.all', 'np.all', (['(search.cv_results_[key] < 1)'], {}), '(search.cv_results_[key] < 1)\n', (44362, 44391), True, 'import numpy as np\n'), ((45192, 45227), 'numpy.in1d', 'np.in1d', (['expected_keys', 'result_keys'], {}), '(expected_keys, result_keys)\n', (45199, 45227), True, 'import numpy as np\n'), ((46833, 46881), 'sklearn.utils._testing.assert_almost_equal', 'assert_almost_equal', (['this_scores', 'expected_score'], {}), '(this_scores, expected_score)\n', (46852, 46881), False, 'from sklearn.utils._testing import assert_almost_equal\n'), ((50470, 50483), 'numpy.arange', 'np.arange', (['(20)'], {}), '(20)\n', (50479, 50483), True, 'import numpy as np\n'), ((50565, 50592), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', '{}'], {'cv': '(2)'}), '(clf, {}, cv=2)\n', (50577, 50592), False, 'from sklearn.model_selection import GridSearchCV\n'), ((50736, 50767), 'numpy.arange', 'np.arange', (['(20)'], {'dtype': 'np.float64'}), '(20, dtype=np.float64)\n', (50745, 50767), True, 'import numpy as np\n'), ((50976, 51035), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['p', "{'classifier__foo_param': [1, 2, 3]}"], {'cv': '(2)'}), "(p, {'classifier__foo_param': [1, 2, 3]}, cv=2)\n", (50988, 51035), False, 'from sklearn.model_selection import GridSearchCV\n'), ((55894, 55907), 'numpy.arange', 'np.arange', (['(24)'], {}), '(24)\n', (55903, 55907), True, 'import numpy as np\n'), ((55983, 56010), 'sklearn.linear_model.SGDClassifier', 'SGDClassifier', ([], {'loss': '"""hinge"""'}), "(loss='hinge')\n", (55996, 56010), False, 'from sklearn.linear_model import Ridge, SGDClassifier, LinearRegression\n'), ((56468, 56495), 'sklearn.linear_model.SGDClassifier', 'SGDClassifier', ([], {'loss': '"""hinge"""'}), "(loss='hinge')\n", (56481, 56495), False, 'from sklearn.linear_model import Ridge, SGDClassifier, LinearRegression\n'), ((56712, 56724), 'numpy.arange', 'np.arange', (['(6)'], {}), '(6)\n', (56721, 56724), True, 'import numpy as np\n'), ((57235, 57290), 'sklearn.model_selection.tests.common.OneTimeSplitter', 'OneTimeSplitter', ([], {'n_splits': 'n_splits', 'n_samples': 'n_samples'}), '(n_splits=n_splits, n_samples=n_samples)\n', (57250, 57290), False, 'from sklearn.model_selection.tests.common import OneTimeSplitter\n'), ((57532, 57556), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': 'n_splits'}), '(n_splits=n_splits)\n', (57537, 57556), False, 'from sklearn.model_selection import KFold\n'), ((58231, 58285), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': 'n_splits', 'shuffle': '(True)', 'random_state': '(0)'}), '(n_splits=n_splits, shuffle=True, random_state=0)\n', (58236, 58285), False, 'from sklearn.model_selection import KFold\n'), ((59616, 59654), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': 'n_splits', 'shuffle': '(True)'}), '(n_splits=n_splits, shuffle=True)\n', (59621, 59654), False, 'from sklearn.model_selection import KFold\n'), ((63688, 63705), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': '(3)'}), '(n_splits=3)\n', (63693, 63705), False, 'from sklearn.model_selection import KFold\n'), ((68731, 68767), 'sklearn.datasets.make_classification', 'make_classification', ([], {'random_state': '(42)'}), '(random_state=42)\n', (68750, 68767), False, 'from sklearn.datasets import make_classification\n'), ((7155, 7173), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (7171, 7173), False, 'from sklearn.linear_model import Ridge, SGDClassifier, LinearRegression\n'), ((8558, 8569), 
'numpy.ones', 'np.ones', (['(10)'], {}), '(10)\n', (8565, 8569), True, 'import numpy as np\n'), ((8717, 8727), 'numpy.ones', 'np.ones', (['(1)'], {}), '(1)\n', (8724, 8727), True, 'import numpy as np\n'), ((8734, 8746), 'numpy.zeros', 'np.zeros', (['(10)'], {}), '(10)\n', (8742, 8746), True, 'import numpy as np\n'), ((14784, 14872), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', '{}'], {'refit': 'refit', 'scoring': "{'acc': 'accuracy', 'prec': 'precision'}"}), "(clf, {}, refit=refit, scoring={'acc': 'accuracy', 'prec':\n 'precision'})\n", (14796, 14872), False, 'from sklearn.model_selection import GridSearchCV\n'), ((43318, 43358), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['est', 'est_parameters'], {'cv': 'cv'}), '(est, est_parameters, cv=cv)\n', (43330, 43358), False, 'from sklearn.model_selection import GridSearchCV\n'), ((46002, 46050), 'sklearn.utils._testing.assert_almost_equal', 'assert_almost_equal', (['correct_score', 'cv_scores[i]'], {}), '(correct_score, cv_scores[i])\n', (46021, 46050), False, 'from sklearn.utils._testing import assert_almost_equal\n'), ((46543, 46553), 'sklearn.base.clone', 'clone', (['svc'], {}), '(svc)\n', (46548, 46553), False, 'from sklearn.base import clone\n'), ((47716, 47733), 'sklearn.model_selection.StratifiedKFold', 'StratifiedKFold', ([], {}), '()\n', (47731, 47733), False, 'from sklearn.model_selection import StratifiedKFold\n'), ((49498, 49596), 'sklearn.utils._testing.assert_almost_equal', 'assert_almost_equal', (['correct_score', "grid_search.cv_results_['split%d_test_score' % i][cand_i]"], {}), "(correct_score, grid_search.cv_results_[\n 'split%d_test_score' % i][cand_i])\n", (49517, 49596), False, 'from sklearn.utils._testing import assert_almost_equal\n'), ((50189, 50289), 'sklearn.utils._testing.assert_almost_equal', 'assert_almost_equal', (['correct_score', "random_search.cv_results_['split%d_test_score' % i][cand_i]"], {}), "(correct_score, random_search.cv_results_[\n 'split%d_test_score' % i][cand_i])\n", (50208, 50289), False, 'from sklearn.utils._testing import assert_almost_equal\n'), ((50867, 50920), 'sklearn.impute.SimpleImputer', 'SimpleImputer', ([], {'strategy': '"""mean"""', 'missing_values': 'np.nan'}), "(strategy='mean', missing_values=np.nan)\n", (50880, 50920), False, 'from sklearn.impute import SimpleImputer\n'), ((57663, 57717), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': 'n_splits', 'shuffle': '(True)', 'random_state': '(0)'}), '(n_splits=n_splits, shuffle=True, random_state=0)\n', (57668, 57717), False, 'from sklearn.model_selection import KFold\n'), ((61082, 61107), 'numpy.asanyarray', 'np.asanyarray', (['results[k]'], {}), '(results[k])\n', (61095, 61107), True, 'import numpy as np\n'), ((61481, 61535), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', 'param_grid'], {'return_train_score': '(True)'}), '(clf, param_grid, return_train_score=True)\n', (61493, 61535), False, 'from sklearn.model_selection import GridSearchCV\n'), ((63254, 63259), 'sklearn.svm.SVC', 'SVC', ([], {}), '()\n', (63257, 63259), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((67370, 67383), 'scipy.stats.uniform', 'uniform', (['(1)', '(3)'], {}), '(1, 3)\n', (67377, 67383), False, 'from scipy.stats import bernoulli, expon, uniform\n'), ((68168, 68186), 'scipy.stats.uniform', 'uniform', (['(0.01)', '(0.1)'], {}), '(0.01, 0.1)\n', (68175, 68186), False, 'from scipy.stats import bernoulli, expon, uniform\n'), ((35695, 35711), 'numpy.where', 'np.where', (['(y == 1)'], {}), '(y == 
1)\n', (35703, 35711), True, 'import numpy as np\n'), ((35734, 35750), 'numpy.where', 'np.where', (['(y == 2)'], {}), '(y == 2)\n', (35742, 35750), True, 'import numpy as np\n'), ((46634, 46644), 'sklearn.base.clone', 'clone', (['svc'], {}), '(svc)\n', (46639, 46644), False, 'from sklearn.base import clone\n'), ((57930, 57984), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': 'n_splits', 'shuffle': '(True)', 'random_state': '(0)'}), '(n_splits=n_splits, shuffle=True, random_state=0)\n', (57935, 57984), False, 'from sklearn.model_selection import KFold\n'), ((61177, 61248), 'sklearn.utils._testing.assert_array_equal', 'assert_array_equal', (['exp_results[k]', 'results[k]'], {'err_msg': "('Checking ' + k)"}), "(exp_results[k], results[k], err_msg='Checking ' + k)\n", (61195, 61248), False, 'from sklearn.utils._testing import assert_array_equal\n'), ((61330, 61398), 'sklearn.utils._testing.assert_allclose', 'assert_allclose', (['exp_results[k]', 'results[k]'], {'err_msg': "('Checking ' + k)"}), "(exp_results[k], results[k], err_msg='Checking ' + k)\n", (61345, 61398), False, 'from sklearn.utils._testing import assert_allclose\n'), ((63585, 63590), 'sklearn.svm.SVC', 'SVC', ([], {}), '()\n', (63588, 63590), False, 'from sklearn.svm import LinearSVC, SVC\n'), ((5621, 5660), 'itertools.product', 'product', (["params2['bar']", "params2['foo']"], {}), "(params2['bar'], params2['foo'])\n", (5628, 5660), False, 'from itertools import chain, product\n'), ((45958, 45985), 'sklearn.metrics.roc_auc_score', 'roc_auc_score', (['y[test]', 'dec'], {}), '(y[test], dec)\n', (45971, 45985), False, 'from sklearn.metrics import roc_auc_score\n')] |
bornhack/bornhack-website | src/economy/migrations/0027_zettlebalance_zettlereceipt.py | 40ed0875f5129a4c8ae1887e33e7dedb4981dadc | # Generated by Django 3.2.7 on 2021-09-13 03:52
import uuid
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("economy", "0026_alter_clearhaussettlement_options"),
]
operations = [
migrations.CreateModel(
name="ZettleBalance",
fields=[
(
"uuid",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("created", models.DateTimeField(auto_now_add=True)),
("updated", models.DateTimeField(auto_now=True)),
(
"statement_time",
models.DateTimeField(
help_text="The date and time this movement was added to the account statement."
),
),
(
"payment_time",
models.DateTimeField(
blank=True,
help_text="The date and time this payment was made. Can be empty if this transaction is not a customer payment.",
null=True,
),
),
(
"payment_reference",
models.IntegerField(
blank=True,
help_text="The reference for this payment. Can be empty if this transaction is not a customer payment.",
null=True,
),
),
(
"description",
models.CharField(
help_text="The description of this transaction.", max_length=100
),
),
(
"amount",
models.DecimalField(
decimal_places=2,
help_text="The amount of this transaction",
max_digits=12,
),
),
(
"balance",
models.DecimalField(
decimal_places=2,
help_text="Our balance in Zettles systems after this transaction.",
max_digits=12,
),
),
],
options={
"ordering": ["-statement_time"],
"get_latest_by": ["statement_time"],
},
),
migrations.CreateModel(
name="ZettleReceipt",
fields=[
(
"uuid",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("created", models.DateTimeField(auto_now_add=True)),
("updated", models.DateTimeField(auto_now=True)),
(
"zettle_created",
models.DateTimeField(
help_text="The date and time this receipt was created in Zettles end"
),
),
(
"receipt_number",
models.IntegerField(help_text="The Zettle receipt number."),
),
(
"vat",
models.DecimalField(
decimal_places=2,
help_text="The part of the total amount which is VAT",
max_digits=12,
),
),
(
"total",
models.DecimalField(
decimal_places=2,
help_text="The total amount the customer paid",
max_digits=12,
),
),
(
"fee",
models.DecimalField(
decimal_places=2,
help_text="The payment fee BornHack has to pay to receive this payment",
max_digits=12,
),
),
(
"net",
models.DecimalField(
decimal_places=2,
help_text="The part of the payment which goes to BornHack after fees have been substracted.",
max_digits=12,
),
),
(
"payment_method",
models.CharField(help_text="The payment method", max_length=100),
),
(
"card_issuer",
models.CharField(
blank=True,
help_text="The card issuer. Can be empty if this was not a card payment.",
max_length=100,
null=True,
),
),
(
"staff",
models.CharField(
help_text="The Zettle account which was used to make this sale.",
max_length=100,
),
),
(
"description",
models.CharField(
help_text="The description of this transaction.", max_length=255
),
),
("sold_via", models.CharField(help_text="Always POS?", max_length=100)),
],
options={
"ordering": ["-zettle_created"],
"get_latest_by": ["zettle_created"],
},
),
]
 | [((408, 499), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n    serialize=False)\n', (424, 499), False, 'from django.db import migrations, models\n'), ((663, 702), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (683, 702), False, 'from django.db import migrations, models\n'), ((733, 768), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (753, 768), False, 'from django.db import migrations, models\n'), ((847, 953), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'help_text': '"""The date and time this movement was added to the account statement."""'}), "(help_text=\n 'The date and time this movement was added to the account statement.')\n", (867, 953), False, 'from django.db import migrations, models\n'), ((1089, 1256), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'help_text': '"""The date and time this payment was made. Can be empty if this transaction is not a customer payment."""', 'null': '(True)'}), "(blank=True, help_text=\n 'The date and time this payment was made. Can be empty if this transaction is not a customer payment.'\n , null=True)\n", (1109, 1256), False, 'from django.db import migrations, models\n'), ((1441, 1598), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'help_text': '"""The reference for this payment. Can be empty if this transaction is not a customer payment."""', 'null': '(True)'}), "(blank=True, help_text=\n 'The reference for this payment. Can be empty if this transaction is not a customer payment.'\n , null=True)\n", (1460, 1598), False, 'from django.db import migrations, models\n'), ((1777, 1863), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""The description of this transaction."""', 'max_length': '(100)'}), "(help_text='The description of this transaction.',\n max_length=100)\n", (1793, 1863), False, 'from django.db import migrations, models\n'), ((1994, 2095), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'help_text': '"""The amount of this transaction"""', 'max_digits': '(12)'}), "(decimal_places=2, help_text=\n 'The amount of this transaction', max_digits=12)\n", (2013, 2095), False, 'from django.db import migrations, models\n'), ((2275, 2400), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'help_text': '"""Our balance in Zettles systems after this transaction."""', 'max_digits': '(12)'}), "(decimal_places=2, help_text=\n 'Our balance in Zettles systems after this transaction.', max_digits=12)\n", (2294, 2400), False, 'from django.db import migrations, models\n'), ((2829, 2920), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n    serialize=False)\n', (2845, 2920), False, 'from django.db import migrations, models\n'), ((3084, 3123), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (3104, 3123), False, 'from django.db import migrations, models\n'), ((3154, 3189), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (3174, 3189), False, 'from django.db import migrations, models\n'), ((3268, 3364), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'help_text': '"""The date and time this receipt was created in Zettles end"""'}), "(help_text=\n 'The date and time this receipt was created in Zettles end')\n", (3288, 3364), False, 'from django.db import migrations, models\n'), ((3502, 3561), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'help_text': '"""The Zettle receipt number."""'}), "(help_text='The Zettle receipt number.')\n", (3521, 3561), False, 'from django.db import migrations, models\n'), ((3647, 3759), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'help_text': '"""The part of the total amount which is VAT"""', 'max_digits': '(12)'}), "(decimal_places=2, help_text=\n 'The part of the total amount which is VAT', max_digits=12)\n", (3666, 3759), False, 'from django.db import migrations, models\n'), ((3937, 4042), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'help_text': '"""The total amount the customer paid"""', 'max_digits': '(12)'}), "(decimal_places=2, help_text=\n 'The total amount the customer paid', max_digits=12)\n", (3956, 4042), False, 'from django.db import migrations, models\n'), ((4218, 4352), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'help_text': '"""The payment fee BornHack has to pay to receive this payment"""', 'max_digits': '(12)'}), "(decimal_places=2, help_text=\n 'The payment fee BornHack has to pay to receive this payment',\n max_digits=12)\n", (4237, 4352), False, 'from django.db import migrations, models\n'), ((4524, 4680), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'help_text': '"""The part of the payment which goes to BornHack after fees have been substracted."""', 'max_digits': '(12)'}), "(decimal_places=2, help_text=\n 'The part of the payment which goes to BornHack after fees have been substracted.'\n , max_digits=12)\n", (4543, 4680), False, 'from django.db import migrations, models\n'), ((4862, 4926), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""The payment method"""', 'max_length': '(100)'}), "(help_text='The payment method', max_length=100)\n", (4878, 4926), False, 'from django.db import migrations, models\n'), ((5020, 5159), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'help_text': '"""The card issuer. Can be empty if this was not a card payment."""', 'max_length': '(100)', 'null': '(True)'}), "(blank=True, help_text=\n 'The card issuer. Can be empty if this was not a card payment.',\n max_length=100, null=True)\n", (5036, 5159), False, 'from django.db import migrations, models\n'), ((5357, 5460), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""The Zettle account which was used to make this sale."""', 'max_length': '(100)'}), "(help_text=\n 'The Zettle account which was used to make this sale.', max_length=100)\n", (5373, 5460), False, 'from django.db import migrations, models\n'), ((5620, 5706), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""The description of this transaction."""', 'max_length': '(255)'}), "(help_text='The description of this transaction.',\n max_length=255)\n", (5636, 5706), False, 'from django.db import migrations, models\n'), ((5798, 5855), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""Always POS?"""', 'max_length': '(100)'}), "(help_text='Always POS?', max_length=100)\n", (5814, 5855), False, 'from django.db import migrations, models\n')] |
vkumarma/Complete-Interpreter | main.py | 5ec15ea84b0e7e735328511cc504efa43638f720 | import re
import sys
class Lexer:
def __init__(self, inp_str):
self.index = 0
self.s = inp_str
def get_char(self):
if self.index < len(self.s):
var = self.s[self.index]
self.index += 1
return var
input_file = open(str(sys.argv[1]), 'r') # Open file for reading
line = input_file.read()
# "if z then while x * 4 - 2 do skip endwhile else x := 7 endif; y := 1"
input_string = line.strip("\n")
lexer = Lexer(input_string)
hashtable = {}
tokens_list = []
def token_check(input):
if re.fullmatch("if|then|else|endif|while|do|endwhile|skip", input):
hashtable[input] = "KEYWORD"
tokens_list.append(input)
elif re.search("([a-z]|[A-Z])([a-z]|[A-Z]|[0-9])*", input):
hashtable[input] = "IDENTIFIER"
tokens_list.append(input)
elif re.search("[0-9]+", input):
hashtable[input] = "NUMBER"
tokens_list.append(input)
elif re.fullmatch("\+|\-|\*|/|\(|\)|:=|;", input):
hashtable[input] = "SYMBOL"
tokens_list.append(input)
else:
hashtable[input] = "ERROR READING"
def digit(curr_char, lexer):
sub = ""
while (curr_char.isdigit()):
sub += curr_char
curr_char = lexer.get_char()
if curr_char == None:
break
new.append(curr_char)
return sub
def longest_sub_string(curr_char, lexer):
sub = ""
while (curr_char.isalpha() or curr_char.isdigit()):
sub += curr_char
curr_char = lexer.get_char()
if curr_char == None:
break
new.append(curr_char)
return sub
def symbol(curr_char, lexer):
# print(curr_char)
sym = curr_char
curr_char = lexer.get_char()
new.append(curr_char)
return sym
def assignment(curr_char, lexer):
sub = curr_char
next_char = lexer.get_char()
if next_char == "=":
sub += next_char
new.append(next_char)
return sub
new.append(lexer.get_char())
return sub
new = [] # keeping track of current char.
curr_char = lexer.get_char()
while (curr_char != None):
while (curr_char == ' ' or curr_char == ''):
curr_char = lexer.get_char()
if (curr_char.isdigit()):
token_check(digit(curr_char, lexer))
curr_char = new.pop()
elif (curr_char.isalpha()):
token_check(longest_sub_string(curr_char, lexer))
curr_char = new.pop()
elif curr_char in "+-/*();":
token_check(symbol(curr_char, lexer))
curr_char = new.pop()
elif curr_char == ":":
token_check(assignment(curr_char, lexer))
curr_char = new.pop()
if curr_char == "=":
curr_char = lexer.get_char()
else:
token_check(curr_char)
curr_char = lexer.get_char()
def tokens():
return hashtable
# print(tokens_list)
# print(tokens())
| [((557, 621), 're.fullmatch', 're.fullmatch', (['"""if|then|else|endif|while|do|endwhile|skip"""', 'input'], {}), "('if|then|else|endif|while|do|endwhile|skip', input)\n", (569, 621), False, 'import re\n'), ((703, 756), 're.search', 're.search', (['"""([a-z]|[A-Z])([a-z]|[A-Z]|[0-9])*"""', 'input'], {}), "('([a-z]|[A-Z])([a-z]|[A-Z]|[0-9])*', input)\n", (712, 756), False, 'import re\n'), ((841, 867), 're.search', 're.search', (['"""[0-9]+"""', 'input'], {}), "('[0-9]+', input)\n", (850, 867), False, 'import re\n'), ((948, 997), 're.fullmatch', 're.fullmatch', (['"""\\\\+|\\\\-|\\\\*|/|\\\\(|\\\\)|:=|;"""', 'input'], {}), "('\\\\+|\\\\-|\\\\*|/|\\\\(|\\\\)|:=|;', input)\n", (960, 997), False, 'import re\n')] |
Ghostofapacket/NewsGrabber-Deduplicate | deduplicate.py | 0b8152af2e1c6c87cf8540970f42084b96a99d9c | import sys
sys.path.append('/usr/local/lib/python3.4/site-packages/')
from warc_dedup import deduplicate
def main():
if len(sys.argv) == 1:
raise Exception('Please provide the WARC file as argument.')
deduplicate.Warc(*sys.argv[1:]).deduplicate()
if __name__ == '__main__':
main()
| [((11, 69), 'sys.path.append', 'sys.path.append', (['"""/usr/local/lib/python3.4/site-packages/"""'], {}), "('/usr/local/lib/python3.4/site-packages/')\n", (26, 69), False, 'import sys\n'), ((220, 251), 'warc_dedup.deduplicate.Warc', 'deduplicate.Warc', (['*sys.argv[1:]'], {}), '(*sys.argv[1:])\n', (236, 251), False, 'from warc_dedup import deduplicate\n')] |
johnjdailey/FinMesh | build/lib/FinMesh/usgov/__init__.py | 64048b02bfec1a24de840877b38e82f4fa813d22 | import os
import requests
import xmltodict
import csv
import json
# # # # # # # # # #
# FRED DATA BELOW #
# # # # # # # # # #
FRED_BASE_URL = 'https://api.stlouisfed.org/fred/'
GEOFRED_BASE_URL = 'https://api.stlouisfed.org/geofred/'
def append_fred_token(url):
token = os.getenv('FRED_TOKEN')
return f'{url}&api_key={token}'
FRED_SERIES_OBS_URL = FRED_BASE_URL + 'series/observations?'
def fred_series(series, file_type=None, realtime_start=None, realtime_end=None, limit=None, offset=None, sort_order=None, observation_start=None, observation_end=None, units=None, frequency=None, aggregation_method=None, output_type=None, vintage_dates=None):
## Returns time series historical data for the requested FRED data.
url = FRED_SERIES_OBS_URL + f'series_id={series}'
if file_type: url += f'&file_type={file_type}'
if realtime_start: url += f'&realtime_start={realtime_start}'
if realtime_end: url += f'&realtime_end={realtime_end}'
if limit: url += f'&limit={limit}'
if offset: url += f'&offset={offset}'
if sort_order: url += f'&sort_order={sort_order}'
if observation_start: url += f'&observation_start={observation_start}'
if observation_end: url += f'&observation_end={observation_end}'
if units: url += f'&units={units}'
if frequency: url += f'&frequency={frequency}'
if aggregation_method: url += f'&aggregation_method={aggregation_method}'
if output_type: url += f'&output_type={output_type}'
if vintage_dates: url += f'&vintage_dates={vintage_dates}'
url = append_fred_token(url)
result = requests.get(url)
return result.text
GEOFRED_SERIES_META_URL = GEOFRED_BASE_URL + 'series/group?'
def geofred_series_meta(series_id, file_type=None):
## Returns meta data for the requested FRED data.
url = GEOFRED_SERIES_META_URL + f'series_id={series_id}'
if file_type: url += f'&file_type={file_type}'
url = append_fred_token(url)
result = requests.get(url)
return result.text
GEOFRED_REGIONAL_SERIES_URL = GEOFRED_BASE_URL + 'series/data?'
def geofred_regional_series(series_id, file_type=None, date=None, start_date=None):
## Returns the historical, geographically organized time series data for the requested FRED data.
url = GEOFRED_REGIONAL_SERIES_URL + f'series_id={series_id}'
if file_type: url += f'&file_type={file_type}'
if date: url += f'&date={date}'
if start_date: url += f'&start_date={start_date}'
url = append_fred_token(url)
result = requests.get(url)
return result.text
# # # # # # # # # # # # # # # #
# GOVERNMENT YIELD CURVE DATA #
# # # # # # # # # # # # # # # #
GOV_YIELD_URL = 'https://data.treasury.gov/feed.svc/DailyTreasuryYieldCurveRateData?$filter=month(NEW_DATE)%20eq%204%20and%20year(NEW_DATE)%20eq%202019'
def get_yield():
## Returns government treasury bond yields. Organized in Python dictionary format by bond length.
# Formatting of XML to Python Dict
curve = requests.get(GOV_YIELD_URL)
parse_curve = xmltodict.parse(curve.content)
# This is based around retrieving the n last dates or average of n days.
feed = parse_curve['feed']
entry = feed['entry']
last_entry = len(entry)-1
content = entry[last_entry]['content']['m:properties']
# Dict that contains the whole yield curve so there is no need to bring in each rate.
yield_curve_values = {
'date' : entry[last_entry]['content']['m:properties']['d:NEW_DATE']['#text'],
'1month' : float(content['d:BC_1MONTH']['#text']),
'2month' : float(content['d:BC_2MONTH']['#text']),
'3month' : float(content['d:BC_3MONTH']['#text']),
'6month' : float(content['d:BC_6MONTH']['#text']),
'1year' : float(content['d:BC_1YEAR']['#text']),
'2year' : float(content['d:BC_2YEAR']['#text']),
'3year' : float(content['d:BC_3YEAR']['#text']),
'5year' : float(content['d:BC_5YEAR']['#text']),
'10year' : float(content['d:BC_10YEAR']['#text']),
'20year' : float(content['d:BC_20YEAR']['#text']),
'30year' : float(content['d:BC_30YEAR']['#text']),
}
return yield_curve_values
| [((277, 300), 'os.getenv', 'os.getenv', (['"""FRED_TOKEN"""'], {}), "('FRED_TOKEN')\n", (286, 300), False, 'import os\n'), ((1575, 1592), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1587, 1592), False, 'import requests\n'), ((1942, 1959), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1954, 1959), False, 'import requests\n'), ((2486, 2503), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (2498, 2503), False, 'import requests\n'), ((2950, 2977), 'requests.get', 'requests.get', (['GOV_YIELD_URL'], {}), '(GOV_YIELD_URL)\n', (2962, 2977), False, 'import requests\n'), ((2996, 3026), 'xmltodict.parse', 'xmltodict.parse', (['curve.content'], {}), '(curve.content)\n', (3011, 3026), False, 'import xmltodict\n')] |
Cradac/mattermost-octane-integration | settings.py | 6a3cb4d2e0854cbf190f66467b604e6e4344a907 | '''
This is the Settings File for the Mattermost-Octane Bridge.
You can change various variables here to customize and set up the client.
'''
'''----------------------Mattermost Webhook Configuration----------------------'''
#URL of the webhook from mattermost. To create one go to `Main Menu -> Integrations -> Incoming Webhooks` and press `Add Incoming Webhook`
mm_webhook_url = 'http://localhost:8065/hooks/yuro8xrfeffj787cj1bwc4ziue'
#Override the channel to send the notifications to, use the channel name as a String
mm_channel = None
#Set a custom Username to display in Mattermost
mm_username = 'Defect Notification'
#Set a custom Profile Image for the Client
mm_profileimage = 'https://i.imgur.com/7Wg3Tgs.png' #Telekom T Image
#The latter two need to be enabled in the settings.json of the Mattermost server
'''----------------------------Flask Configuration----------------------------'''
#set external IP for the Flask Server to create a Webhook for ALM Octane
#local: 127.0.0.1 / False
#default external: 0.0.0.0 (will default to only available external adress)
external_ip = False
#default: 5000
port = 5000
#external webhook verify token can be set here, if set as `None` it will be autogenerated & changed on each startup.
wh_token = None
| [] |
jnthn/intellij-community | python/testData/console/indent7.after.py | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | print(1) | [] |