from PyDAQmx import *
import numpy
def take_data_2chan(num_points, sampling_rate):
"""
Take `num_points` from the NIDAQ
"""
analog_input = Task()
read = int32()
data_size = 2*num_points
data = numpy.zeros((data_size), dtype=numpy.float64)
# DAQmx Configure Code
analog_input.CreateAIVoltageChan("Dev1/ai0:1", "", DAQmx_Val_RSE, -10.0, 10.0, DAQmx_Val_Volts, None)
analog_input.CfgSampClkTiming("", sampling_rate, DAQmx_Val_Rising, DAQmx_Val_FiniteSamps,num_points)
# DAQmx Start Code
analog_input.StartTask()
# DAQmx Read Code
analog_input.ReadAnalogF64(num_points, 10.0, DAQmx_Val_GroupByChannel, data, data_size, byref(read), None)
# stop and clear the task so the device is released for the next acquisition
analog_input.StopTask()
analog_input.ClearTask()
return data
| {
"content_hash": "0e78617e3d82ee163cc680bfb04017f9",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 104,
"avg_line_length": 26.56,
"alnum_prop": 0.7259036144578314,
"repo_name": "BBN-Q/Qlab",
"id": "ce57411c51123263ef0f55f256cc9096c6fff39f",
"size": "664",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "common/+deviceDrivers/@NIDAQ/take_data_2chan.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "25742"
},
{
"name": "C++",
"bytes": "15513"
},
{
"name": "M",
"bytes": "30640"
},
{
"name": "MATLAB",
"bytes": "1630623"
},
{
"name": "Objective-C",
"bytes": "397"
},
{
"name": "Processing",
"bytes": "49601"
},
{
"name": "Python",
"bytes": "63960"
}
],
"symlink_target": ""
} |
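A minimal usage sketch for take_data_2chan above; the device name "Dev1" is wired into the function, and the sample count and rate here are illustrative:

# Acquire 1000 samples per channel at 10 kHz (0.1 s of data).
# The read is grouped by channel, so the first half of the array is ai0
# and the second half is ai1.
data = take_data_2chan(1000, 10000.0)
ch0, ch1 = data[:1000], data[1000:]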
__test__ = {"doctest": """
>>> from testsuite.tsearch2_gis.models import Location
>>> from django.contrib.gis.geos import Point
>>> Location.objects.create(name=u"Mario's Pizza", latlon=Point(12.4604, 43.9420))
<Location: Mario's Pizza>
>>> Location.objects.update_index()
>>> Location.objects.search("mario")
[<Location: Mario's Pizza>]
>>> Location.objects.search("luigi")
[]
"""}
| {
"content_hash": "260d5a25f0e480df3df0eb5434e42576",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 82,
"avg_line_length": 24.25,
"alnum_prop": 0.6855670103092784,
"repo_name": "hcarvalhoalves/django-tsearch2",
"id": "eb612a09f41b12dfd3397161796c36087fc175ee",
"size": "405",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "testsuite/tsearch2_gis/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "17640"
}
],
"symlink_target": ""
} |
import os
import sys
try:
import syslog
except ImportError:
syslog = None
import platform
import types
from .log import DefaultFilter
ENV_PREFIX = 'KAIRA_'
_address_dict = {
'Windows': ('localhost', 514),
'Darwin': '/var/run/syslog',
'Linux': '/dev/log',
'FreeBSD': '/dev/log'
}
LOGGING = {
'version': 1,
'filters': {
'accessFilter': {
'()': DefaultFilter,
'param': [0, 10, 20]
},
'errorFilter': {
'()': DefaultFilter,
'param': [30, 40, 50]
}
},
'formatters': {
'simple': {
'format': '%(asctime)s - (%(name)s)[%(levelname)s]: %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S'
},
'access': {
'format': '%(asctime)s - (%(name)s)[%(levelname)s][%(host)s]: ' +
'%(request)s %(message)s %(status)d %(byte)d',
'datefmt': '%Y-%m-%d %H:%M:%S'
}
},
'handlers': {
'internal': {
'class': 'logging.StreamHandler',
'filters': ['accessFilter'],
'formatter': 'simple',
'stream': sys.stderr
},
'accessStream': {
'class': 'logging.StreamHandler',
'filters': ['accessFilter'],
'formatter': 'access',
'stream': sys.stderr
},
'errorStream': {
'class': 'logging.StreamHandler',
'filters': ['errorFilter'],
'formatter': 'simple',
'stream': sys.stderr
},
},
'loggers': {
'kaira': {
'level': 'DEBUG',
'handlers': ['internal', 'errorStream']
},
'network': {
'level': 'DEBUG',
'handlers': ['accessStream', 'errorStream']
}
}
}
if syslog:
LOGGING['handlers']['accessSysLog'] = {
'class': 'logging.handlers.SysLogHandler',
'address': _address_dict.get(platform.system(),
('localhost', 514)),
'facility': syslog.LOG_DAEMON,
'filters': ['accessFilter'],
'formatter': 'access'
}
LOGGING['handlers']['errorSysLog'] = {
'class': 'logging.handlers.SysLogHandler',
'address': _address_dict.get(platform.system(),
('localhost', 514)),
'facility': syslog.LOG_DAEMON,
'filters': ['errorFilter'],
'formatter': 'simple'
}
# This happens when running in a container or on a system without syslog;
# keeping the handlers in the config would raise a file-not-found error.
_addr = LOGGING['handlers']['accessSysLog']['address']
if isinstance(_addr, str) and not os.path.exists(_addr):
LOGGING['handlers'].pop('accessSysLog')
LOGGING['handlers'].pop('errorSysLog')
class Config(dict):
def __init__(self, defaults=None, load_env=True, keep_alive=True):
super().__init__(defaults or {})
self.REQUEST_MAX_SIZE = 100000000 # 100 megabytes
self.REQUEST_TIMEOUT = 60 # 60 seconds
self.KEEP_ALIVE = keep_alive
if load_env:
self.load_environment_vars()
def __getattr__(self, attr):
try:
return self[attr]
except KeyError as ke:
raise AttributeError("Config has no '{}'".format(ke.args[0]))
def __setattr__(self, attr, value):
self[attr] = value
def from_envvar(self, variable_name):
"""Load a configuration from an environment variable pointing to
a configuration file.
:param variable_name: name of the environment variable
:return: ``True`` if the config was loaded; raises ``RuntimeError``
if the environment variable is not set.
"""
config_file = os.environ.get(variable_name)
if not config_file:
raise RuntimeError('The environment variable %r is not set and '
'thus configuration could not be loaded.' %
variable_name)
return self.from_pyfile(config_file)
def from_pyfile(self, filename):
"""Update the values in the config from a Python file.
Only the uppercase variables in that module are stored in the config.
:param filename: an absolute path to the config file
"""
module = types.ModuleType('config')
module.__file__ = filename
try:
with open(filename) as config_file:
exec(compile(config_file.read(), filename, 'exec'),
module.__dict__)
except IOError as e:
e.strerror = 'Unable to load configuration file (%s)' % e.strerror
raise
self.from_object(module)
return True
def from_object(self, obj):
"""Update the values from the given object.
Objects are usually either modules or classes.
Just the uppercase variables in that object are stored in the config.
Example usage::
from yourapplication import default_config
app.config.from_object(default_config)
You should not use this function to load the actual configuration but
rather configuration defaults. The actual config should be loaded
with :meth:`from_pyfile` and ideally from a location not within the
package because the package might be installed system wide.
:param obj: an object holding the configuration
"""
for key in dir(obj):
if key.isupper():
self[key] = getattr(obj, key)
def load_environment_vars(self):
"""
Looks for any KAIRA_-prefixed environment variables and applies
them to the configuration if present.
"""
for k, v in os.environ.items():
if k.startswith(ENV_PREFIX):
_, config_key = k.split(ENV_PREFIX, 1)
self[config_key] = v
| {
"content_hash": "8769d19e15fbd2fda2543e37d592f8f7",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 78,
"avg_line_length": 33.2752808988764,
"alnum_prop": 0.5353705892284315,
"repo_name": "mulonemartin/kaira",
"id": "88757fad31017f85299d481559b8b3365584a5e9",
"size": "5923",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kaira/config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "118899"
}
],
"symlink_target": ""
} |
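A short sketch of how this Config class might be used; the KAIRA_PORT variable is a hypothetical example:

import os

os.environ['KAIRA_PORT'] = '8080'   # picked up by load_environment_vars()
config = Config()                   # load_env=True strips the KAIRA_ prefix
print(config.PORT)                  # '8080' -- env values stay raw strings
print(config.REQUEST_TIMEOUT)       # 60, one of the built-in defaults
config.DEBUG = True                 # __setattr__ writes into the dict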
from django.contrib import admin
from .models import Link
class LinkAdmin(admin.ModelAdmin):
search_fields = ['title__icontains', 'description']
list_display = ['title', 'user', 'domain', 'active']
list_filter = ['user', 'active']
date_hierarchy = 'created_on'
admin.site.register(Link, LinkAdmin)
| {
"content_hash": "ba9f24a0442584af4fa70e6113c40e2b",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 56,
"avg_line_length": 24.53846153846154,
"alnum_prop": 0.6833855799373041,
"repo_name": "moshthepitt/product.co.ke",
"id": "094d7f606e62915e065e725b89fc3afc6e0caa57",
"size": "319",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "links/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "54411"
},
{
"name": "HTML",
"bytes": "39651"
},
{
"name": "JavaScript",
"bytes": "849"
},
{
"name": "Python",
"bytes": "26102"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.db import models
from django.template.defaultfilters import slugify
class Publicacion(models.Model):
titulo = models.CharField(max_length=50)
contenido = models.TextField()
autor = models.ForeignKey(User, on_delete=models.CASCADE, editable=False)
publicado = models.DateField(auto_now_add=True)
slug = models.SlugField(editable=False)
def __str__(self):
# return text directly; encoding to bytes here would break on Python 3
return self.titulo
class Meta:
ordering = ('titulo',)
verbose_name = "Publicacion"
verbose_name_plural = "Publicaciones"
def save(self, *args, **kwargs):
if not self.id:
self.slug = slugify(self.titulo)
super(Publicacion, self).save(*args, **kwargs)
| {
"content_hash": "e0fee448a556fd9f0879b1dd1d47f002",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 77,
"avg_line_length": 33.24,
"alnum_prop": 0.677496991576414,
"repo_name": "edwar/repositio.com",
"id": "bc81897d11ad5c1defae5f723a544ab0ed87a44b",
"size": "831",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/blog/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "723976"
},
{
"name": "HTML",
"bytes": "2199227"
},
{
"name": "JavaScript",
"bytes": "3359735"
},
{
"name": "PHP",
"bytes": "3916"
},
{
"name": "Python",
"bytes": "140232"
}
],
"symlink_target": ""
} |
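For illustration, the save() override above freezes the slug after the first save; a hypothetical shell session (user and field values invented):

# p = Publicacion(titulo="Mi primer post", contenido="hola", autor=some_user)
# p.save()
# p.slug               -> "mi-primer-post" (set because self.id was None)
# p.titulo = "Otro titulo"; p.save()
# p.slug               -> still "mi-primer-post"; slugify only runs once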
"""Google Cloud Platform library - Cloud Storage Functionality."""
from __future__ import absolute_import
from ._bucket import Bucket, Buckets
from ._object import Object, Objects
__all__ = ['Bucket', 'Buckets', 'Object', 'Objects']
| {
"content_hash": "f1424519cda80e0c38cf8669467c1467",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 66,
"avg_line_length": 33.57142857142857,
"alnum_prop": 0.7148936170212766,
"repo_name": "supriyagarg/pydatalab",
"id": "f68f853b415b5f99fae1de77581b07a69cabc29d",
"size": "824",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "google/datalab/storage/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3798"
},
{
"name": "Python",
"bytes": "767068"
},
{
"name": "Shell",
"bytes": "2456"
},
{
"name": "TypeScript",
"bytes": "50852"
}
],
"symlink_target": ""
} |
import os
import django
# Calculated paths for django and the site
# Used as starting points for various other paths
# Thanks to Gareth Rushgrove:
# http://www.morethanseven.net/2009/02/11/django-settings-tip-setting-relative-paths/
DJANGO_ROOT = os.path.dirname(os.path.realpath(django.__file__))
SITE_ROOT = os.path.dirname(os.path.realpath(__file__))
# Django settings for omfraf project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'u@31@@7+*9xer#n3=3!@4bqct^j=2$t4jty)_@*@2*7zrsli_!'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
'omfraf.middleware.logging_middleware.LoggingMiddleware',
'omfraf.middleware.django-crossdomainxhr-middleware.XsSharing',
)
ROOT_URLCONF = 'omfraf.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'omfraf.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'omfraf.main',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'simple': {
'format': '[%(asctime)s] %(message)s',
'datefmt' : "%d/%b/%Y %H:%M:%S"
},
},
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'null': {
'level': 'DEBUG',
'class': 'logging.NullHandler',
},
'console':{
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
'logfile': {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': SITE_ROOT + "/log/debug.log",
'maxBytes': 1000000,
'backupCount': 2,
'formatter': 'simple',
},
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'': {
'handlers': ['null', 'console', 'logfile'],
'propagate': True,
'level': 'DEBUG',
},
'django': {
'handlers': ['null', 'console'],
'propagate': True,
'level': 'INFO',
},
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| {
"content_hash": "fd450fd2b93547a9cf33711471aa8379",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 101,
"avg_line_length": 33.53658536585366,
"alnum_prop": 0.645090909090909,
"repo_name": "jimivdw/OMFraF",
"id": "90631633a66c286b0a9292f741f3c816bb7f1484",
"size": "6875",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/omfraf/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "29610"
}
],
"symlink_target": ""
} |
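A sketch of how the LOGGING config above behaves once Django loads it; the logger name here is arbitrary:

import logging

# The root ('') logger accepts DEBUG and up, writing to the console and to
# SITE_ROOT/log/debug.log via the rotating handler; module-level loggers
# propagate up to it, and unhandled request errors reach ADMINS through
# 'django.request' -> 'mail_admins'.
logger = logging.getLogger('omfraf.main.views')
logger.debug("goes to the console and the rotating logfile")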
from __future__ import unicode_literals
from django.db import models
# Create your models here.
class Registrado(models.Model):
nombre = models.CharField(max_length=120, blank=True, null=True)
email = models.EmailField()
codigo_postal = models.IntegerField(blank=True, null=True)
timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
actualizado = models.DateTimeField(auto_now_add=False, auto_now=True)
media = models.FileField(upload_to='myfolder/', blank=True, null=True)  # trailing slash, NOT a leading one
def __unicode__(self):  # becomes __str__ on Python 3
return self.email
| {
"content_hash": "6cd9560ba36ffee118779a0cb8d5af0e",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 95,
"avg_line_length": 34.411764705882355,
"alnum_prop": 0.7521367521367521,
"repo_name": "probardjango/Probar-Django-1.9",
"id": "adf9ea91782a8b2ecc810927eef58a04a149cd81",
"size": "585",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/boletin/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "45809"
},
{
"name": "HTML",
"bytes": "917"
},
{
"name": "JavaScript",
"bytes": "88987"
},
{
"name": "Python",
"bytes": "12473"
}
],
"symlink_target": ""
} |
from flask_webtest import SessionScope
from pytest import fixture
from portal.database import db
from portal.models.questionnaire_bank import (
QuestionnaireBank,
QuestionnaireBankQuestionnaire
)
from portal.models.recur import Recur
@fixture
def initialized_with_ss_qb(
initialized_with_ss_protocol, initialized_with_ss_q):
rp_id = db.session.merge(initialized_with_ss_protocol).id
ss_qb = QuestionnaireBank(
name='substudy_qb_baseline',
start='{"days": 0}',
expired='{"months": 1}',
research_protocol_id=rp_id)
qbq = QuestionnaireBankQuestionnaire(
questionnaire=initialized_with_ss_q, rank=0)
ss_qb.questionnaires.append(qbq)
with SessionScope(db):
db.session.add(ss_qb)
db.session.commit()
return db.session.merge(ss_qb)
@fixture
def initialized_with_ss_recur_qb(
initialized_with_ss_protocol, initialized_with_ss_q):
rp_id = db.session.merge(initialized_with_ss_protocol).id
monthly_recur = Recur(
start='{"months": 1}', cycle_length='{"months": 1}',
termination='{"months": 11}')
ss_qb = QuestionnaireBank(
name='substudy_qb_monthly',
start='{"days": 0}',
expired='{"months": 1, "days": -1}',
recurs=[monthly_recur],
research_protocol_id=rp_id)
qbq = QuestionnaireBankQuestionnaire(
questionnaire=initialized_with_ss_q, rank=0)
ss_qb.questionnaires.append(qbq)
with SessionScope(db):
db.session.add(ss_qb)
db.session.commit()
return db.session.merge(ss_qb)
| {
"content_hash": "9975e6d5097e431fdf5bfc55b00328bd",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 61,
"avg_line_length": 31.019607843137255,
"alnum_prop": 0.6599241466498104,
"repo_name": "uwcirg/true_nth_usa_portal",
"id": "a54cff346a634f29eab4ba4fe8683c7f7fe77c5a",
"size": "1582",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tests/fixtures/quesionnaire_bank.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1733344"
},
{
"name": "Dockerfile",
"bytes": "947"
},
{
"name": "HTML",
"bytes": "435596"
},
{
"name": "JavaScript",
"bytes": "588006"
},
{
"name": "Mako",
"bytes": "414"
},
{
"name": "Python",
"bytes": "1837126"
},
{
"name": "Shell",
"bytes": "13976"
},
{
"name": "Vue",
"bytes": "62901"
}
],
"symlink_target": ""
} |
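A sketch of a test consuming the baseline fixture above, assuming the upstream fixtures are available via conftest:

def test_ss_qb_baseline(initialized_with_ss_qb):
    # pytest resolves the chain: protocol -> questionnaire -> questionnaire bank
    assert initialized_with_ss_qb.name == 'substudy_qb_baseline'
    assert len(initialized_with_ss_qb.questionnaires) == 1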
import ConfigParser
import os
import tarfile
import urllib2
# Default client libs
import glanceclient as glance_client
import keystoneclient.v2_0.client as keystone_client
# Import Openstack exceptions
import glanceclient.exc as glance_exception
import keystoneclient.exceptions as keystone_exception
TEMPEST_TEMP_DIR = os.getenv("TEMPEST_TEMP_DIR", "/tmp").rstrip('/')
TEMPEST_ROOT_DIR = os.getenv("TEMPEST_ROOT_DIR", os.getenv("HOME")).rstrip('/')
# Environment variables override defaults
TEMPEST_CONFIG_DIR = os.getenv("TEMPEST_CONFIG_DIR",
"%s%s" % (TEMPEST_ROOT_DIR, "/etc")).rstrip('/')
TEMPEST_CONFIG_FILE = os.getenv("TEMPEST_CONFIG_FILE",
"%s%s" % (TEMPEST_CONFIG_DIR, "/tempest.conf"))
TEMPEST_CONFIG_SAMPLE = os.getenv("TEMPEST_CONFIG_SAMPLE",
"%s%s" % (TEMPEST_CONFIG_DIR,
"/tempest.conf.sample"))
# Image references
IMAGE_DOWNLOAD_CHUNK_SIZE = 8 * 1024
IMAGE_UEC_SOURCE_URL = os.getenv("IMAGE_UEC_SOURCE_URL",
"http://download.cirros-cloud.net/0.3.1/"
"cirros-0.3.1-x86_64-uec.tar.gz")
TEMPEST_IMAGE_ID = os.getenv('IMAGE_ID')
TEMPEST_IMAGE_ID_ALT = os.getenv('IMAGE_ID_ALT')
IMAGE_STATUS_ACTIVE = 'active'
class ClientManager(object):
"""
Manager that provides access to the official python clients for
calling various OpenStack APIs.
"""
def __init__(self):
self.identity_client = None
self.image_client = None
self.network_client = None
self.compute_client = None
self.volume_client = None
def get_identity_client(self, **kwargs):
"""
Returns the openstack identity python client
:param username: a string representing the username
:param password: a string representing the user's password
:param tenant_name: a string representing the tenant name of the user
:param auth_url: a string representing the auth url of the identity
:param insecure: True if we wish to disable ssl certificate validation,
False otherwise
:returns an instance of openstack identity python client
"""
if not self.identity_client:
self.identity_client = keystone_client.Client(**kwargs)
return self.identity_client
def get_image_client(self, version="1", *args, **kwargs):
"""
This method returns Openstack glance python client
:param version: a string representing the version of the glance client
to use.
:param string endpoint: A user-supplied endpoint URL for the glance
service.
:param string token: Token for authentication.
:param integer timeout: Allows customization of the timeout for client
http requests. (optional)
:return: a Client object representing the glance client
"""
if not self.image_client:
self.image_client = glance_client.Client(version, *args, **kwargs)
return self.image_client
def get_tempest_config(path_to_config):
"""
Gets the tempest configuration file as a ConfigParser object
:param path_to_config: path to the config file
:return: a ConfigParser object representing the tempest configuration file
"""
# parse the config file
config = ConfigParser.ConfigParser()
with open(path_to_config) as config_file:
config.readfp(config_file)
return config
def update_config_admin_credentials(config, config_section):
"""
Updates the tempest config with the admin credentials
:param config: a ConfigParser object representing the tempest config file
:param config_section: the section name where the admin credentials are
"""
# Check if credentials are present, default uses the config credentials
OS_USERNAME = os.getenv('OS_USERNAME',
config.get(config_section, "admin_username"))
OS_PASSWORD = os.getenv('OS_PASSWORD',
config.get(config_section, "admin_password"))
OS_TENANT_NAME = os.getenv('OS_TENANT_NAME',
config.get(config_section, "admin_tenant_name"))
OS_AUTH_URL = os.getenv('OS_AUTH_URL', config.get(config_section, "uri"))
if not (OS_AUTH_URL and
OS_USERNAME and
OS_PASSWORD and
OS_TENANT_NAME):
raise Exception("Admin environment variables not found.")
# TODO(tkammer): Add support for uri_v3
config_identity_params = {'uri': OS_AUTH_URL,
'admin_username': OS_USERNAME,
'admin_password': OS_PASSWORD,
'admin_tenant_name': OS_TENANT_NAME}
update_config_section_with_params(config,
config_section,
config_identity_params)
def update_config_section_with_params(config, config_section, params):
"""
Updates a given config object with given params
:param config: a ConfigParser object representing the tempest config file
:param config_section: the section we would like to update
:param params: the parameters we wish to update for that section
"""
for option, value in params.items():
config.set(config_section, option, value)
def get_identity_client_kwargs(config, config_section):
"""
Get the required arguments for the identity python client
:param config: a ConfigParser object representing the tempest config file
:param config_section: the section name in the configuration where the
arguments can be found
:return: a dictionary representing the needed arguments for the identity
client
"""
username = config.get(config_section, 'admin_username')
password = config.get(config_section, 'admin_password')
tenant_name = config.get(config_section, 'admin_tenant_name')
auth_url = config.get(config_section, 'uri')
dscv = config.get(config_section, 'disable_ssl_certificate_validation')
kwargs = {'username': username,
'password': password,
'tenant_name': tenant_name,
'auth_url': auth_url,
'insecure': dscv}
return kwargs
def create_user_with_tenant(identity_client, username, password, tenant_name):
"""
Creates a user using a given identity client
:param identity_client: openstack identity python client
:param username: a string representing the username
:param password: a string representing the user's password
:param tenant_name: a string representing the tenant name of the user
"""
# Try to create the necessary tenant
tenant_id = None
try:
tenant_description = "Tenant for Tempest %s user" % username
tenant = identity_client.tenants.create(tenant_name,
tenant_description)
tenant_id = tenant.id
except keystone_exception.Conflict:
# if already exist, use existing tenant
tenant_list = identity_client.tenants.list()
for tenant in tenant_list:
if tenant.name == tenant_name:
tenant_id = tenant.id
# Try to create the user
try:
email = "%s@test.com" % username
identity_client.users.create(name=username,
password=password,
email=email,
tenant_id=tenant_id)
except keystone_exception.Conflict:
# if already exist, use existing user
pass
def create_users_and_tenants(identity_client,
config,
config_section):
"""
Creates the two non admin users and tenants for tempest
:param identity_client: openstack identity python client
:param config: a ConfigParser object representing the tempest config file
:param config_section: the section name of identity in the config
"""
# Get the necessary params from the config file
tenant_name = config.get(config_section, 'tenant_name')
username = config.get(config_section, 'username')
password = config.get(config_section, 'password')
alt_tenant_name = config.get(config_section, 'alt_tenant_name')
alt_username = config.get(config_section, 'alt_username')
alt_password = config.get(config_section, 'alt_password')
# Create the necessary users for the test runs
create_user_with_tenant(identity_client, username, password, tenant_name)
create_user_with_tenant(identity_client, alt_username, alt_password,
alt_tenant_name)
def get_image_client_kwargs(identity_client, config, config_section):
"""
Get the required arguments for the image python client
:param identity_client: openstack identity python client
:param config: a ConfigParser object representing the tempest config file
:param config_section: the section name of identity in the config
:return: a dictionary representing the needed arguments for the image
client
"""
token = identity_client.auth_token
endpoint = identity_client.service_catalog.url_for(
service_type='image', endpoint_type='publicURL')
dscv = config.get(config_section, 'disable_ssl_certificate_validation')
kwargs = {'endpoint': endpoint,
'token': token,
'insecure': dscv}
return kwargs
def images_exist(image_client):
"""
Checks whether the images ID's located in the environment variable are
indeed registered
:param image_client: the openstack python client representing the image
client
"""
exist = True
if not TEMPEST_IMAGE_ID or not TEMPEST_IMAGE_ID_ALT:
exist = False
else:
try:
image_client.images.get(TEMPEST_IMAGE_ID)
image_client.images.get(TEMPEST_IMAGE_ID_ALT)
except glance_exception.HTTPNotFound:
exist = False
return exist
def download_and_register_uec_images(image_client, download_url,
download_folder):
"""
Downloads and registered the UEC AKI/AMI/ARI images
:param image_client:
:param download_url: the url of the uec tar file
:param download_folder: the destination folder we wish to save the file to
"""
basename = os.path.basename(download_url)
path = os.path.join(download_folder, basename)
request = urllib2.urlopen(download_url)
# First, download the file
with open(path, "wb") as fp:
while True:
chunk = request.read(IMAGE_DOWNLOAD_CHUNK_SIZE)
if not chunk:
break
fp.write(chunk)
# Then extract and register images
tar = tarfile.open(path, "r")
for name in tar.getnames():
file_obj = tar.extractfile(name)
format = "aki"
if file_obj.name.endswith(".img"):
format = "ami"
if file_obj.name.endswith("initrd"):
format = "ari"
# Register images in image client
image_client.images.create(name=file_obj.name, disk_format=format,
container_format=format, data=file_obj,
is_public="true")
tar.close()
def create_images(image_client, config, config_section,
download_url=IMAGE_UEC_SOURCE_URL,
download_folder=TEMPEST_TEMP_DIR):
"""
Creates images for tempest's use and registers the environment variables
IMAGE_ID and IMAGE_ID_ALT with registered images
:param image_client: Openstack python image client
:param config: a ConfigParser object representing the tempest config file
:param config_section: the section name where the IMAGE ids are set
:param download_url: the URL from which we should download the UEC tar
:param download_folder: the place where we want to save the download file
"""
if not images_exist(image_client):
# Falls down to the default uec images
download_and_register_uec_images(image_client, download_url,
download_folder)
image_ids = []
for image in image_client.images.list():
image_ids.append(image.id)
os.environ["IMAGE_ID"] = image_ids[0]
os.environ["IMAGE_ID_ALT"] = image_ids[1]
params = {'image_ref': os.getenv("IMAGE_ID"),
'image_ref_alt': os.getenv("IMAGE_ID_ALT")}
update_config_section_with_params(config, config_section, params)
def main():
"""
Main module to control the script
"""
# Check if config file exists or fall to the default sample otherwise
path_to_config = TEMPEST_CONFIG_SAMPLE
if os.path.isfile(TEMPEST_CONFIG_FILE):
path_to_config = TEMPEST_CONFIG_FILE
config = get_tempest_config(path_to_config)
update_config_admin_credentials(config, 'identity')
client_manager = ClientManager()
# Set the identity related info for tempest
identity_client_kwargs = get_identity_client_kwargs(config,
'identity')
identity_client = client_manager.get_identity_client(
**identity_client_kwargs)
# Create the necessary users and tenants for tempest run
create_users_and_tenants(identity_client, config, 'identity')
# Set the image related info for tempest
image_client_kwargs = get_image_client_kwargs(identity_client,
config,
'identity')
image_client = client_manager.get_image_client(**image_client_kwargs)
# Create the necessary users and tenants for tempest run
create_images(image_client, config, 'compute')
# TODO(tkammer): add network implementation
if __name__ == "__main__":
main()
| {
"content_hash": "0df2e3d2e381db3ea5b0a85942582e3d",
"timestamp": "",
"source": "github",
"line_count": 370,
"max_line_length": 79,
"avg_line_length": 37.95135135135135,
"alnum_prop": 0.6306793904002279,
"repo_name": "eltonkevani/tempest_el_env",
"id": "fe9f5afef19ae90f5d96369d452a27d49a1756b9",
"size": "15134",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tools/tempest_auto_config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1871339"
},
{
"name": "Shell",
"bytes": "5748"
}
],
"symlink_target": ""
} |
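For illustration, the config helpers above compose like this; the paths and image IDs are hypothetical:

config = get_tempest_config('/etc/tempest.conf.sample')
update_config_section_with_params(
    config, 'compute', {'image_ref': 'deadbeef', 'image_ref_alt': 'cafebabe'})
with open('/etc/tempest.conf', 'w') as out:
    config.write(out)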
from __future__ import print_function
import core
import numpy
import six.moves as six
from framework import Variable, default_main_program
__all__ = ['DataFeeder']
class DataToLoDTensorConverter(object):
def __init__(self, place, lod_level, shape, dtype):
self.place = place
self.lod_level = lod_level
self.shape = shape
if dtype == core.VarDesc.VarType.FP32:
self.dtype = 'float32'
elif dtype == core.VarDesc.VarType.INT64:
self.dtype = 'int64'
elif dtype == core.VarDesc.VarType.FP64:
self.dtype = 'float64'
elif dtype == core.VarDesc.VarType.INT32:
self.dtype = 'int32'
else:
raise ValueError("dtype must be any of [int32, float32, int64, "
"float64]")
self.data = []
self.lod = []
for i in six.range(lod_level):
self.lod.append([0])
def feed(self, data):
self._feed_impl_(data, self.lod, self.lod_level)
def _feed_impl_(self, data, lod, lod_level):
if lod_level == 0:
self.data.append(data)
else:
cur_lod_len = len(data)
lod[-1].append(lod[-1][-1] + cur_lod_len)
for each_data in data:
self._feed_impl_(each_data, lod[:-1], lod_level - 1)
def done(self):
arr = numpy.array(self.data, dtype=self.dtype).reshape(self.shape)
t = core.LoDTensor()
t.set(arr, self.place)
if self.lod_level > 0:
t.set_lod(self.lod)
return t
class DataFeeder(object):
def __init__(self, feed_list, place, program=None):
self.feed_dtypes = []
self.feed_names = []
self.feed_shapes = []
self.feed_lod_level = []
if program is None:
program = default_main_program()
for each_var in feed_list:
if isinstance(each_var, basestring):
each_var = program.block(0).var(each_var)
if not isinstance(each_var, Variable):
raise TypeError("Feed list should contain a list of variable")
self.feed_dtypes.append(each_var.dtype)
self.feed_names.append(each_var.name)
shape = each_var.shape
batch_size_dim = -1
for i, s in enumerate(shape):
if s < 0:
batch_size_dim = i
break
if batch_size_dim == -1:
raise ValueError("Variable {0} must has a batch size dimension",
each_var.name)
self.feed_lod_level.append(each_var.lod_level)
self.feed_shapes.append(shape)
self.place = place
def feed(self, iterable):
converter = []
for lod_level, shape, dtype in six.zip(
self.feed_lod_level, self.feed_shapes, self.feed_dtypes):
converter.append(
DataToLoDTensorConverter(
place=self.place,
lod_level=lod_level,
shape=shape,
dtype=dtype))
for each_sample in iterable:
assert len(each_sample) == len(converter), (
"The number of fields in data (%s) does not match " +
"len(feed_list) (%s)") % (len(each_sample), len(converter))
for each_converter, each_slot in six.zip(converter, each_sample):
each_converter.feed(each_slot)
ret_dict = {}
for each_name, each_converter in six.zip(self.feed_names, converter):
ret_dict[each_name] = each_converter.done()
return ret_dict
| {
"content_hash": "2871619d46da6f1e9f9d35e90146574d",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 80,
"avg_line_length": 35.25,
"alnum_prop": 0.5365521003818876,
"repo_name": "lcy-seso/Paddle",
"id": "ac02401c79b787716b2e5f43e0d1c5686cf2bd13",
"size": "4279",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/data_feeder.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "278852"
},
{
"name": "C++",
"bytes": "7213431"
},
{
"name": "CMake",
"bytes": "258158"
},
{
"name": "Cuda",
"bytes": "1077180"
},
{
"name": "Go",
"bytes": "109501"
},
{
"name": "Perl",
"bytes": "11456"
},
{
"name": "Python",
"bytes": "3337838"
},
{
"name": "Shell",
"bytes": "147571"
}
],
"symlink_target": ""
} |
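A minimal sketch of feeding a mini-batch with the DataFeeder above, loosely following the fluid API of that era; layer names and shapes are assumptions:

import numpy
import paddle.fluid as fluid

place = fluid.CPUPlace()
img = fluid.layers.data(name='image', shape=[784], dtype='float32')
lbl = fluid.layers.data(name='label', shape=[1], dtype='int64')
feeder = fluid.DataFeeder(feed_list=[img, lbl], place=place)
# One tuple per sample, one entry per variable in feed_list.
minibatch = [(numpy.random.rand(784).astype('float32'), [1]),
             (numpy.random.rand(784).astype('float32'), [0])]
feed_dict = feeder.feed(minibatch)  # {'image': LoDTensor, 'label': LoDTensor}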
"""
A simple gauge with a label that is suitable for use in places like a
status bar. It can be used either in an automatic indeterminate
(pulse) mode or in a determinate mode where the programmer will need to
update the position of the progress bar. The indicator can be set to
hide itself when it is not active.
"""
import wx
#----------------------------------------------------------------------
# Supported Styles
PI_PULSEMODE = 1
PI_HIDEINACTIVE = 2
#----------------------------------------------------------------------
class ProgressIndicator(wx.Panel):
def __init__(self, *args, **kw):
wx.Panel.__init__(self, *args, **kw)
self.label = wx.StaticText(self)
self.gauge = wx.Gauge(self, range=100,
style=wx.GA_HORIZONTAL|wx.GA_SMOOTH)
self._startCount = 0
self.Sizer = wx.BoxSizer(wx.HORIZONTAL)
self.Sizer.Add(self.label, 0, wx.ALIGN_CENTER_VERTICAL)
self.Sizer.Add(self.gauge, 1, wx.EXPAND)
size = wx.DefaultSize
if 'size' in kw:
size = kw['size']
elif len(args) >= 4:
size=args[3] # parent, id, pos, size, style, name
self.SetInitialSize(size)
if self.HasFlag(PI_PULSEMODE):
self.timer = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.onTimer)
if self.HasFlag(PI_HIDEINACTIVE):
self.Hide()
def __del__(self):
if hasattr(self, 'timer'):
self.timer.Stop()
del self.timer
def Start(self, label=None):
"""
Show (if necessary) and begin displaying progress
"""
self._startCount += 1
if label is not None:
self.SetLabel(label)
if self.HasFlag(PI_HIDEINACTIVE):
self.Show()
self.Layout()
if self.HasFlag(PI_PULSEMODE):
self.gauge.Pulse()
self.timer.Start(250)
else:
self.gauge.SetValue(0)
def Stop(self, clearLabel=False):
"""
Stop showing progress
"""
# Make sure Stop is called as many times as Start was. Only really
# stop when the count reaches zero.
if self._startCount == 0:
return # should be already stopped...
self._startCount -= 1
if self._startCount:
return # there's still more starts than stops...
if self.HasFlag(PI_HIDEINACTIVE):
self.Hide()
if self.HasFlag(PI_PULSEMODE):
self.timer.Stop()
if clearLabel:
self.label.SetLabel("")
def SetLabel(self, text):
"""
Set the text displayed in the label.
"""
self.label.SetLabel(text)
self.Layout()
def SetValue(self, value, label=None):
"""
For determinate (non-pulse) mode, update the progress indicator to the
given value. For example, if the job is 45% done then pass 45 to this
method (as long as the range is still set to 100.)
"""
if label is not None:
self.SetLabel(label)
self.gauge.SetValue(value)
def SetRange(self, maxval):
"""
For determinate (non-pulse) mode, set the max value that the gauge can
be set to. Defaults to 100.
"""
self.gauge.SetRange(maxval)
def onTimer(self, evt):
self.gauge.Pulse()
#----------------------------------------------------------------------
if __name__ == '__main__':
app = wx.App(redirect=False)
frm = wx.Frame(None, title="ProgressIndicator")
pnl = wx.Panel(frm)
pi1 = ProgressIndicator(pnl, pos=(20,20), size=(150,-1),
style=PI_HIDEINACTIVE|PI_PULSEMODE)
pi2 = ProgressIndicator(pnl, pos=(20,60), size=(150,-1),
style=PI_HIDEINACTIVE)
import wx.lib.inspection
wx.lib.inspection.InspectionTool().Show()
frm.Show()
app.MainLoop()
| {
"content_hash": "fc97d47278b3e12da3b310c57aba158c",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 78,
"avg_line_length": 30.382978723404257,
"alnum_prop": 0.4992997198879552,
"repo_name": "ktan2020/legacy-automation",
"id": "30ab13edf8c885d1e41419bc99095d636bf9aa92",
"size": "4657",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "win/Lib/site-packages/wx-3.0-msw/wx/lib/progressindicator.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "913"
},
{
"name": "Ada",
"bytes": "289"
},
{
"name": "Assembly",
"bytes": "687"
},
{
"name": "Boo",
"bytes": "540"
},
{
"name": "C",
"bytes": "40116"
},
{
"name": "C#",
"bytes": "474"
},
{
"name": "C++",
"bytes": "393"
},
{
"name": "CSS",
"bytes": "70883"
},
{
"name": "ColdFusion",
"bytes": "1012"
},
{
"name": "Common Lisp",
"bytes": "1034"
},
{
"name": "D",
"bytes": "1858"
},
{
"name": "Eiffel",
"bytes": "426"
},
{
"name": "Erlang",
"bytes": "9243"
},
{
"name": "FORTRAN",
"bytes": "1810"
},
{
"name": "Forth",
"bytes": "182"
},
{
"name": "Groovy",
"bytes": "2366"
},
{
"name": "Haskell",
"bytes": "816"
},
{
"name": "Haxe",
"bytes": "455"
},
{
"name": "Java",
"bytes": "1155"
},
{
"name": "JavaScript",
"bytes": "69444"
},
{
"name": "Lua",
"bytes": "795"
},
{
"name": "Matlab",
"bytes": "1278"
},
{
"name": "OCaml",
"bytes": "350"
},
{
"name": "Objective-C++",
"bytes": "885"
},
{
"name": "PHP",
"bytes": "1411"
},
{
"name": "Pascal",
"bytes": "388"
},
{
"name": "Perl",
"bytes": "252651"
},
{
"name": "Pike",
"bytes": "589"
},
{
"name": "Python",
"bytes": "42085780"
},
{
"name": "R",
"bytes": "1156"
},
{
"name": "Ruby",
"bytes": "480"
},
{
"name": "Scheme",
"bytes": "282"
},
{
"name": "Shell",
"bytes": "30518"
},
{
"name": "Smalltalk",
"bytes": "926"
},
{
"name": "Squirrel",
"bytes": "697"
},
{
"name": "Stata",
"bytes": "302"
},
{
"name": "SystemVerilog",
"bytes": "3145"
},
{
"name": "Tcl",
"bytes": "1039"
},
{
"name": "TeX",
"bytes": "1746"
},
{
"name": "VHDL",
"bytes": "985"
},
{
"name": "Vala",
"bytes": "664"
},
{
"name": "Verilog",
"bytes": "439"
},
{
"name": "Visual Basic",
"bytes": "2142"
},
{
"name": "XSLT",
"bytes": "152770"
},
{
"name": "ooc",
"bytes": "890"
},
{
"name": "xBase",
"bytes": "769"
}
],
"symlink_target": ""
} |
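In determinate mode the caller drives the gauge; building on the pi2 instance from the demo above (values are illustrative):

pi2.SetRange(100)
pi2.Start("Copying files...")  # shows the panel (PI_HIDEINACTIVE) and zeroes the gauge
pi2.SetValue(45)               # job is 45% done
pi2.Stop(clearLabel=True)      # hides the panel and clears the label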
from django.core.management.base import BaseCommand
from optparse import make_option
from sam.models import Quote
import os
class Command(BaseCommand):
help = 'This creates Quote objects from text file input or writes Quotes to text file'
option_list = BaseCommand.option_list + (
make_option('-r',
'--read',
dest='read',
default=False,
action='store_true',
help='This will create Quote objects from a text file.'),
make_option('-w',
'--write',
dest='write',
default=False,
action='store_true',
help='This will create a text file with Quote objects.'),
)
def handle(self, *args, **options):
write = options['write']
module_dir = os.path.dirname(__file__)
file_path = os.path.join(module_dir, 'quotes.txt')
if not write:
quote_file = open(file_path, 'r')
for line in quote_file:
if len(line.strip()) > 0:
parts = line.split("-=-")
quote = parts[0].strip()
author = parts[1].strip()
Quote(quote=quote, author=author).save()
else:
quote_file = open(file_path, 'w')
for quote in Quote.objects.all():
quote_file.write(quote.quote + " -=- " + quote.author + "\n")
| {
"content_hash": "a87bc1bbcb639c13273f6732655f4398",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 90,
"avg_line_length": 37.35,
"alnum_prop": 0.500669344042838,
"repo_name": "samolds/samster",
"id": "5e37ab7b25882c7bc3a265e6b0218dbb3b55644b",
"size": "1494",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sam/management/commands/quotes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "9741"
},
{
"name": "HTML",
"bytes": "38115"
},
{
"name": "JavaScript",
"bytes": "12171"
},
{
"name": "Python",
"bytes": "75966"
}
],
"symlink_target": ""
} |
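The command above expects quotes.txt to separate quote and author with " -=- "; hypothetical file contents and invocations:

# quotes.txt, one record per line:
#   Simplicity is the soul of efficiency. -=- Austin Freeman
#   Talk is cheap. Show me the code. -=- Linus Torvalds
#
# import into the database:    python manage.py quotes --read
# dump Quote objects to file:  python manage.py quotes --write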
"""Grow.dev setup."""
import re
from setuptools import find_packages
from setuptools import setup
DEP_RE = re.compile(r'([\S]*)\s?=\s? [\"\']?([^\'\"]*)[\"\']?', re.IGNORECASE)
INSTALL_REQ = []
with open('Pipfile') as pipfile:
in_dep_section = False
for line in pipfile.readlines():
line = line.strip()
if not line or line.startswith('#'):
continue
if in_dep_section:
if line.startswith('['):
in_dep_section = False
continue
line_match = DEP_RE.match(line)
if line_match:
INSTALL_REQ.append(
'{}{}'.format(line_match.group(1).strip('"'), line_match.group(2)))
elif line == '[packages]':
in_dep_section = True
setup(
name='grow',
version='2.2.3',
description=(
'Develop everywhere and deploy anywhere: a declarative '
'site generator for rapid, high-quality web site production.'
),
long_description=open('description.txt').read().strip(),
url='https://grow.dev',
zip_safe=False,
license='MIT',
author='Grow.dev Authors',
author_email='code@grow.dev',
include_package_data=True,
packages=find_packages(exclude=[
'lib*',
'node_modules',
]),
install_requires=INSTALL_REQ,
python_requires='>=3.3',
entry_points="""
[console_scripts]
grow=grow.cli:main
""",
keywords=[
'grow',
'cms',
'static site generator',
'content management'
],
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
])
| {
"content_hash": "c32c63a0aef3df61c495c7e43b7de964",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 87,
"avg_line_length": 29.191176470588236,
"alnum_prop": 0.5546599496221662,
"repo_name": "grow/grow",
"id": "199e2ed5543d4da0c60d840c75a44290666b7aba",
"size": "1985",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "377"
},
{
"name": "Dockerfile",
"bytes": "2301"
},
{
"name": "HTML",
"bytes": "12074"
},
{
"name": "JavaScript",
"bytes": "5183"
},
{
"name": "Makefile",
"bytes": "2962"
},
{
"name": "Python",
"bytes": "1102607"
},
{
"name": "Sass",
"bytes": "7080"
},
{
"name": "Shell",
"bytes": "1835"
}
],
"symlink_target": ""
} |
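To make the Pipfile parsing above concrete, here is what DEP_RE extracts from a typical [packages] line (the pin is hypothetical):

>>> m = DEP_RE.match('click = ">=7.0"')
>>> '{}{}'.format(m.group(1).strip('"'), m.group(2))
'click>=7.0'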
import json
import os
import pstats
import shutil
import sys
import tempfile
import unittest
from io import StringIO
from subprocess import Popen, PIPE
from scrapy.utils.test import get_testenv
class CmdlineTest(unittest.TestCase):
def setUp(self):
self.env = get_testenv()
self.env['SCRAPY_SETTINGS_MODULE'] = 'tests.test_cmdline.settings'
def _execute(self, *new_args, **kwargs):
encoding = getattr(sys.stdout, 'encoding') or 'utf-8'
args = (sys.executable, '-m', 'scrapy.cmdline') + new_args
proc = Popen(args, stdout=PIPE, stderr=PIPE, env=self.env, **kwargs)
comm = proc.communicate()[0].strip()
return comm.decode(encoding)
def test_default_settings(self):
self.assertEqual(self._execute('settings', '--get', 'TEST1'), 'default')
def test_override_settings_using_set_arg(self):
self.assertEqual(self._execute('settings', '--get', 'TEST1', '-s',
'TEST1=override'), 'override')
def test_override_settings_using_envvar(self):
self.env['SCRAPY_TEST1'] = 'override'
self.assertEqual(self._execute('settings', '--get', 'TEST1'), 'override')
def test_profiling(self):
path = tempfile.mkdtemp()
filename = os.path.join(path, 'res.prof')
try:
self._execute('version', '--profile', filename)
self.assertTrue(os.path.exists(filename))
out = StringIO()
stats = pstats.Stats(filename, stream=out)
stats.print_stats()
out.seek(0)
stats = out.read()
self.assertIn(os.path.join('scrapy', 'commands', 'version.py'),
stats)
self.assertIn('tottime', stats)
finally:
shutil.rmtree(path)
def test_override_dict_settings(self):
EXT_PATH = "tests.test_cmdline.extensions.DummyExtension"
EXTENSIONS = {EXT_PATH: 200}
settingsstr = self._execute('settings', '--get', 'EXTENSIONS', '-s',
'EXTENSIONS=' + json.dumps(EXTENSIONS))
# XXX: There's gotta be a smarter way to do this...
self.assertNotIn("...", settingsstr)
for char in ("'", "<", ">", 'u"'):
settingsstr = settingsstr.replace(char, '"')
settingsdict = json.loads(settingsstr)
self.assertCountEqual(settingsdict.keys(), EXTENSIONS.keys())
self.assertEqual(200, settingsdict[EXT_PATH])
| {
"content_hash": "0ad22808ff5ce08a57ce405cb91362b4",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 81,
"avg_line_length": 37.68181818181818,
"alnum_prop": 0.5930840369923602,
"repo_name": "eLRuLL/scrapy",
"id": "da99a6be8289471b0ae8337d0795263ae633fa05",
"size": "2487",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_cmdline/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "2800"
},
{
"name": "Python",
"bytes": "1518287"
},
{
"name": "Roff",
"bytes": "2010"
},
{
"name": "Shell",
"bytes": "259"
}
],
"symlink_target": ""
} |
from plantcv.plantcv._debug import _debug
from plantcv.plantcv import dilate
from plantcv.plantcv import params
from plantcv.plantcv import fatal_error
from skimage import feature
import numpy as np
import cv2
import os
def canny_edge_detect(img, mask=None, sigma=1.0, low_thresh=None, high_thresh=None, thickness=1,
mask_color=None, use_quantiles=False):
"""
Edge filter an image using the Canny algorithm.
Inputs:
img = RGB or grayscale image data
mask = Mask to limit the application of Canny to a certain area, takes a binary img. (OPTIONAL)
sigma = Standard deviation of the Gaussian filter
low_thresh = Lower bound for hysteresis thresholding (linking edges). If None (default) then low_thresh is set to
10% of the image's max (OPTIONAL)
high_thresh = Upper bound for hysteresis thresholding (linking edges). If None (default) then high_thresh is set
to 20% of the image's max (OPTIONAL)
thickness = How thick the edges should appear, default thickness=1 (OPTIONAL)
mask_color = Color of the mask provided; either None (default), 'white', or 'black'
use_quantiles = Default is False, if True then treat low_thresh and high_thresh as quantiles of the edge magnitude
image, rather than the absolute edge magnitude values. If True then thresholds range is [0,1].
(OPTIONAL)
Returns:
bin_img = Thresholded, binary image
:param img: numpy.ndarray
:param mask: numpy.ndarray
:param sigma: float
:param low_thresh: float
:param high_thresh: float
:param thickness: int
:param mask_color: str
:param use_quantiles: bool
:return bin_img: numpy.ndarray
Reference: Canny, J., A Computational Approach To Edge Detection, IEEE Trans.
Pattern Analysis and Machine Intelligence, 8:679-714, 1986
Originally part of CellProfiler, code licensed under both GPL and BSD licenses.
Website: http://www.cellprofiler.org
Copyright (c) 2003-2009 Massachusetts Institute of Technology
Copyright (c) 2009-2011 Broad Institute
All rights reserved.
Original author: Lee Kamentsky
"""
# Check if the image is grayscale; if color img then make it grayscale
dimensions = np.shape(img)
if len(dimensions) == 3:
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# skimage needs a bool mask
if mask is not None:
if mask_color.upper() == 'WHITE':
mask = np.array(mask, bool)
elif mask_color.upper() == 'BLACK':
mask = cv2.bitwise_not(mask)
mask = np.array(mask, bool)
else:
fatal_error('Mask was provided but mask_color ' + str(mask_color) + ' is not "white" or "black"!')
# Run Canny edge detection on the grayscale image
bool_img = feature.canny(img, sigma, low_thresh, high_thresh, mask, use_quantiles)
# Skimage returns a bool image so convert it
bin_img = np.copy(bool_img.astype(np.uint8) * 255)
# Adjust line thickness
if thickness != 1:
debug = params.debug
params.debug = None
bin_img = dilate(bin_img, thickness, 1)
params.debug = debug
# Print or plot the binary image
_debug(visual=bin_img,
filename=os.path.join(params.debug_outdir, (str(params.device) + '_canny_edge_detect.png')),
cmap='gray')
return bin_img
| {
"content_hash": "120b978c95e45612f5c2423de97473e3",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 120,
"avg_line_length": 40.71764705882353,
"alnum_prop": 0.6579023403640566,
"repo_name": "danforthcenter/plantcv",
"id": "753dab985d5ba58e2b82411c42ec95d611a6388e",
"size": "3485",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "plantcv/plantcv/canny_edge_detect.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "1112"
},
{
"name": "Python",
"bytes": "898011"
},
{
"name": "R",
"bytes": "1327"
},
{
"name": "Shell",
"bytes": "3348"
}
],
"symlink_target": ""
} |
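A short usage sketch for canny_edge_detect; the input file and parameter values are assumptions:

import cv2
from plantcv import plantcv as pcv

img = cv2.imread("leaf.png")                 # hypothetical RGB input
edges = pcv.canny_edge_detect(img,
                              sigma=2.0,     # smoother gradient, fewer edges
                              thickness=2)   # dilate detected edges to 2 px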
import sys
import os
import bpy
from bpy_extras.io_utils import ExportHelper
bl_info = {
"name": "treeface exporter",
"author": "Xi Yang",
"location": "File > Export",
"description": "export scene to treeface JSON format",
"category": "Import-Export",
}
def format_matrix(mat):
# flatten in column-major (OpenGL-style) order, hence mat[j][i]
result_list = []
for i in [0, 1, 2, 3]:
for j in [0, 1, 2, 3]:
result_list.append(mat[j][i])
return result_list
def is_identity_matrix(mat):
return (mat[0][0] == 1 and mat[0][1] == 0 and mat[0][2] == 0 and mat[0][3] == 0 and
mat[1][0] == 0 and mat[1][1] == 1 and mat[1][2] == 0 and mat[1][3] == 0 and
mat[2][0] == 0 and mat[2][1] == 0 and mat[2][2] == 1 and mat[2][3] == 0 and
mat[3][0] == 0 and mat[3][1] == 0 and mat[3][2] == 0 and mat[3][3] == 1)
def get_scene_toplevel_objects(scene):
toplevels = []
for object in scene.objects:
if not object.parent:
toplevels.append(object)
return toplevels
def collect_object_by_type(objects):
nodes = []
visuals = []
for obj in objects:
if obj.type == 'EMPTY' and obj.is_treeface_scene_node:
nodes.append(obj)
elif obj.type == 'MESH' and obj.is_treeface_visual_object:
visuals.append(obj)
return nodes, visuals
def export_geometry(mesh, dir):
# determine whether we have some attributes
normal_is_split = False
has_uv = False
has_tangent = False
if mesh.use_auto_smooth:
normal_is_split = True
if len(mesh.uv_layers.items()) > 0:
has_uv = True
if normal_is_split and has_uv:
has_tangent = True
# build-in-calculate some attributes
mesh.calc_normals()
if normal_is_split:
mesh.calc_normals_split(mesh.auto_smooth_angle)
if has_tangent:
mesh.calc_tangents()
file_out = os.path.join(dir, "mesh_" + mesh.name + ".json")
fh = open(file_out, 'w')
fh.write("{\n")
# write vertex attributes
fh.write(" \"primitive\": \"triangle_fans\",\n")
fh.write(" \"attributes\":\n")
fh.write(" [\n")
fh.write(" {\n")
fh.write(" \"name\": \"position\",\n")
fh.write(" \"n_elem\": 3,\n")
fh.write(" \"type\": float,\n")
fh.write(" },\n")
fh.write(" {\n")
fh.write(" \"name\": \"normal\",\n")
fh.write(" \"n_elem\": 3,\n")
fh.write(" \"type\": float,\n")
fh.write(" },\n")
if has_tangent:
fh.write(" {\n")
fh.write(" \"name\": \"tangent\",\n")
fh.write(" \"n_elem\": 3,\n")
fh.write(" \"type\": float,\n")
fh.write(" },\n")
if has_uv:
fh.write(" {\n")
fh.write(" \"name\": \"tex_coord\",\n")
fh.write(" \"n_elem\": 2,\n")
fh.write(" \"type\": float,\n")
fh.write(" },\n")
fh.write(" ],\n")
# gather vertices
vertices = []
indices = []
index_seed = 0
for poly in mesh.polygons:
for i_loop in range(poly.loop_start, poly.loop_start+poly.loop_total):
data = []
loop = mesh.loops[i_loop]
vtx = mesh.vertices[loop.vertex_index]
# position
data.extend(vtx.co)
# normal
if normal_is_split:
data.extend(loop.normal)
else:
data.extend(vtx.normal)
# tangent
if has_tangent:
data.extend(loop.tangent)
# uv
if has_uv:
data.extend(mesh.uv_layers.active.data[i_loop].uv)
indices.append(index_seed)
vertices.append(data)
index_seed += 1
indices.append(65535)
# write vertices
fh.write(" \"vertices\":\n");
fh.write(" [\n");
for vtx_data in vertices:
fh.write(" [ " + ', '.join(map(str,vtx_data)) + " ],\n");
fh.write(" ],\n");
# write indices
fh.write(" \"indices\":\n");
fh.write(" [\n");
for idx in indices:
fh.write(" " + str(idx) + ",\n");
fh.write(" ],\n");
# finalize
fh.write("}\n")
fh.close();
return True;
def write_visual_object(object, fh, indent):
fh.write(' ' * (indent) + "{\n")
fh.write(' ' * (indent + 4) + "\"geometry\": \"mesh_" + object.data.name + ".json\",\n")
if object.active_material:
fh.write(' ' * (indent + 4) + "\"material\": \"material_" + object.active_material.name + ".json\",\n")
else:
fh.write(' ' * (indent + 4) + "\"material\": \"material_DEFAULT.json\",\n")
fh.write(' ' * (indent) + "},\n")
def write_node_recur(object, fh, indent):
# collect visual objects and child nodes
child_nodes, visuals = collect_object_by_type(object.children)
# write head
fh.write(' ' * indent + "{\n")
fh.write(' ' * (indent + 4) + "\"id\": \"" + object.name + "\",\n")
fh.write(' ' * (indent + 4) + "\"transform\": [ " + ', '.join(format_matrix(object.matrix_local)) + " ],\n")
# write visual items
fh.write(' ' * (indent + 4) + "\"visual\":\n")
fh.write(' ' * (indent + 4) + "[\n")
for visual_obj in visuals:
write_visual_object(visual_obj, fh, indent + 4)
fh.write(' ' * (indent + 4) + "],\n")
# write children
fh.write(' ' * (indent + 4) + "\"children\":\n")
fh.write(' ' * (indent + 4) + "[\n")
for child in child_nodes:
write_node_recur(child, fh, indent + 4)
fh.write(' ' * (indent + 4) + "],\n")
# finalize
fh.write(' ' * indent + "}\n")
def write_global_light(light, fh, indent):
fh.write(' ' * indent + "\"global_light_direction\": [" + + "],\n")
fh.write(' ' * indent + "\"global_light_position\": [" + + "],\n")
fh.write(' ' * indent + "\"global_light_intensity\": [" + + "],\n")
def write_widget_recur(object, fh, indent):
pos_x = object.matrix_local[0][3] - object.treeface_widget_width / 2
pos_y = object.matrix_local[1][3] - object.treeface_widget_height / 2
fh.write(' ' * indent + "{\n")
fh.write(' ' * (indent + 4) + "\"id\": \"" + object.name + "\",\n")
fh.write(' ' * (indent + 4) + "\"type\": \"" + object.treeface_widget_type + "\",\n")
fh.write(' ' * (indent + 4) + "\"position\": [ " + str(pos_x) + ', ' + str(pos_y) + " ],\n")
fh.write(' ' * (indent + 4) + "\"size\": [ " + str(object.treeface_widget_width) + ', ' + str(object.treeface_widget_height) + " ],\n")
fh.write(' ' * (indent + 4) + "\"node\": \"" + object.name + "\",\n")
# TODO object custom nodes
fh.write(' ' * indent + "},\n")
def export_scene(scene, dir):
scene_toplevel_objects = get_scene_toplevel_objects(scene)
nodes, visuals = collect_object_by_type(scene_toplevel_objects)
file_scene = os.path.join(dir, "scene_" + scene.name + ".json")
fh = open(file_scene, 'w')
fh.write("{\n")
write_global_light(bpy.data.lamps.get(scene.treeface_global_light), fh, 4)
fh.write(" \"node\":\n")
fh.write(" {\n")
fh.write(" \"transform\": [ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 ],\n")
# write visual objects at root
fh.write(" \"visual\":\n")
fh.write(" [\n")
for visual in visuals:
write_visual_object(visual, fh, 12)
fh.write(" ],\n")
# write nodes
fh.write(" \"children\":\n")
fh.write(" [\n")
for node in nodes:
write_node_recur(node, fh, 12)
fh.write(" ],\n")
fh.write(" },\n")
# finalize
fh.write("}\n")
fh.close()
def export_widget(scene, dir):
file_widget = os.path.join(dir, "scene_widget_" + scene.name + ".json")
fh = open(file_widget, 'w')
fh.write("{\n")
fh.write(" \"position\": [0, 0],")
fh.write("}\n")
fh.close()
class TreefaceExporter(bpy.types.Operator, ExportHelper):
"""Export the scene to treeface JSON file"""
bl_idname = "treeface.export"
bl_label = "Export as treeface to folder"
filename_ext = "."
use_filter_folder = True
def execute(self, context):
filepath = self.filepath
if not os.path.isdir(filepath):
self.report({'WARNING'}, "Path " + filepath + " is not a directory\n")
return {'CANCELLED'}
# validate scene
# get meshes and materials
        project_ok, used_meshes, used_materials = self.validate_everything()
if not project_ok:
return {'CANCELLED'}
# do export
for mesh in used_meshes:
export_geometry(mesh, filepath)
for mat in used_materials:
export_material(mat, filepath)
export_scene(bpy.data.scenes[0], filepath)
        export_widget(bpy.data.scenes[0], filepath)
        return {'FINISHED'}
def validate_everything(self):
all_valid = True
used_mesh_names = set()
used_material_names = set()
# we should have only one scene
if len(bpy.data.scenes) == 0:
self.report({'ERROR_INVALID_INPUT'}, "no scene in current project")
all_valid = False
if len(bpy.data.scenes) > 1:
self.report({'ERROR_INVALID_INPUT'}, "more than one scene in current project")
all_valid = False
# validate visual objects
# collect meshes
for object in bpy.data.scenes[0].objects:
if object.type == 'MESH' and object.is_treeface_visual_object:
if not is_identity_matrix(object.matrix_local):
self.report({'ERROR_INVALID_INPUT'}, "visual object " + object.name + " local matrix is not identity")
all_valid = False
if len(object.children) > 0:
self.report({'ERROR_INVALID_INPUT'}, "visual object " + object.name + " has child objects, which is not wanted")
all_valid = False
used_mesh_names.add(object.data.name)
if object.active_material:
used_material_names.add(object.active_material.name)
# visual object's parent must be a scene node, or has no parent
if object.parent:
if not object.parent.type == 'EMPTY' or not object.parent.is_treeface_scene_node:
self.report({'ERROR_INVALID_INPUT'}, "visual object " + object.name + "'s parent " + object.parent.name + " is not a scene node")
all_valid = False
elif object.type == 'EMPTY' and object.is_treeface_scene_node:
if object.parent:
if not object.parent.type == 'EMPTY' or not object.parent.is_treeface_scene_node:
self.report({'ERROR_INVALID_INPUT'}, "scene node object " + object.name + "'s parent " + object.parent.name + " is not a scene node")
all_valid = False
if object.is_treeface_widget:
if not object.parent.type == 'EMPTY' or not object.parent.is_treeface_scene_node or not object.parent.is_treeface_widget:
self.report({'ERROR_INVALID_INPUT'}, "widget object " + object.name + "'s parent " + object.parent.name + " is not a widget object")
all_valid = False
if object.is_treeface_widget:
if not object.is_treeface_scene_node:
self.report({'ERROR_INVALID_INPUT'}, "widget object " + object.name + " is not a scene node object")
all_valid = False
# validate meshes
        used_meshes = []
for mesh in bpy.data.meshes:
if not mesh.name in used_mesh_names:
continue
if len(mesh.materials) > 1:
self.report({'ERROR_INVALID_INPUT'}, "mesh " + mesh.name + " has more than one materials")
all_valid = False
if len(mesh.uv_layers) > 1:
self.report({'ERROR_INVALID_INPUT'}, "mesh " + mesh.name + " has more than one UV layers")
all_valid = False
used_meshes.append(mesh)
# validate materials
used_materials = []
for mat in bpy.data.materials:
if not mat.name in used_material_names:
continue
used_materials.append(mat)
return all_valid, used_meshes, used_materials
def menu_func(self, context):
self.layout.operator(TreefaceExporter.bl_idname)
def register():
bpy.utils.register_class(TreefaceExporter)
bpy.types.INFO_MT_file_export.append(menu_func)
#bpy.types.VIEW3D_PT_tools_objectmode.prepend(menu_func)
def unregister():
bpy.utils.unregister_class(TreefaceExporter)
bpy.types.INFO_MT_file_export.remove(menu_func)
#bpy.types.VIEW3D_PT_tools_objectmode.remove(menu_func)
if __name__ == "__main__":
register()
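# Usage note: run this script from Blender's text editor or install it as an
# add-on; register() hooks TreefaceExporter into the File > Export menu via
# menu_func.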
| {
"content_hash": "0f4024c1dd29e7ba7dfec3a4eacd08e0",
"timestamp": "",
"source": "github",
"line_count": 383,
"max_line_length": 160,
"avg_line_length": 34.67624020887728,
"alnum_prop": 0.516301483322039,
"repo_name": "jiandingzhe/treeface",
"id": "5554b0f4f1e3a5312823aff9ca24aff30959ddd6",
"size": "13281",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blender_plugins/treeface_export.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "755470"
},
{
"name": "CMake",
"bytes": "26107"
},
{
"name": "Perl",
"bytes": "1932"
},
{
"name": "Python",
"bytes": "28620"
}
],
"symlink_target": ""
} |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('reddit', '0005_auto_20180726_2126'),
]
operations = [
migrations.AlterField(
model_name='post',
name='status',
field=models.CharField(choices=[('accepted', 'accepted'), ('pending', 'pending'), ('almost', 'almost'), ('rejected', 'rejected')], default='accepted', max_length=200),
),
]
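# Applied with the standard Django migration command (sketch):
#   python manage.py migrate reddit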
| {
"content_hash": "003a439ca6428c2cbd4673405679442c",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 179,
"avg_line_length": 28.9375,
"alnum_prop": 0.5831533477321814,
"repo_name": "vaniakosmos/memes-reposter",
"id": "105a198e3e7922ca84631193c5207200ed840353",
"size": "512",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/reddit/migrations/0006_auto_20180729_2102.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1067"
},
{
"name": "Dockerfile",
"bytes": "203"
},
{
"name": "HTML",
"bytes": "11813"
},
{
"name": "JavaScript",
"bytes": "6442"
},
{
"name": "Makefile",
"bytes": "247"
},
{
"name": "Python",
"bytes": "81075"
},
{
"name": "Shell",
"bytes": "614"
}
],
"symlink_target": ""
} |
"""
Gateway for Binance Crypto Exchange.
"""
import urllib
import hashlib
import hmac
import time
from copy import copy
from datetime import datetime
from enum import Enum
from threading import Lock
from vnpy.api.rest import RestClient, Request
from vnpy.api.websocket import WebsocketClient
from vnpy.trader.constant import (
Direction,
Exchange,
Product,
Status,
OrderType
)
from vnpy.trader.gateway import BaseGateway
from vnpy.trader.object import (
TickData,
OrderData,
TradeData,
AccountData,
ContractData,
OrderRequest,
CancelRequest,
SubscribeRequest
)
from vnpy.trader.event import EVENT_TIMER
from vnpy.event import Event
REST_HOST = "https://www.binance.com"
WEBSOCKET_TRADE_HOST = "wss://stream.binance.com:9443/ws/"
WEBSOCKET_DATA_HOST = "wss://stream.binance.com:9443/stream?streams="
STATUS_BINANCE2VT = {
"NEW": Status.NOTTRADED,
"PARTIALLY_FILLED": Status.PARTTRADED,
"FILLED": Status.ALLTRADED,
"CANCELED": Status.CANCELLED,
"REJECTED": Status.REJECTED
}
ORDERTYPE_VT2BINANCE = {
OrderType.LIMIT: "LIMIT",
OrderType.MARKET: "MARKET"
}
ORDERTYPE_BINANCE2VT = {v: k for k, v in ORDERTYPE_VT2BINANCE.items()}
DIRECTION_VT2BINANCE = {
Direction.LONG: "BUY",
Direction.SHORT: "SELL"
}
DIRECTION_BINANCE2VT = {v: k for k, v in DIRECTION_VT2BINANCE.items()}
class Security(Enum):
NONE = 0
SIGNED = 1
API_KEY = 2
symbol_name_map = {}
class BinanceGateway(BaseGateway):
"""
VN Trader Gateway for Binance connection.
"""
default_setting = {
"key": "",
"secret": "",
"session_number": 3,
"proxy_host": "",
"proxy_port": 0,
}
exchanges = [Exchange.BINANCE]
def __init__(self, event_engine):
"""Constructor"""
super().__init__(event_engine, "BINANCE")
self.trade_ws_api = BinanceTradeWebsocketApi(self)
self.market_ws_api = BinanceDataWebsocketApi(self)
self.rest_api = BinanceRestApi(self)
self.event_engine.register(EVENT_TIMER, self.process_timer_event)
def connect(self, setting: dict):
""""""
key = setting["key"]
secret = setting["secret"]
session_number = setting["session_number"]
proxy_host = setting["proxy_host"]
proxy_port = setting["proxy_port"]
self.rest_api.connect(key, secret, session_number,
proxy_host, proxy_port)
self.market_ws_api.connect(proxy_host, proxy_port)
def subscribe(self, req: SubscribeRequest):
""""""
self.market_ws_api.subscribe(req)
def send_order(self, req: OrderRequest):
""""""
return self.rest_api.send_order(req)
def cancel_order(self, req: CancelRequest):
""""""
self.rest_api.cancel_order(req)
def query_account(self):
""""""
pass
def query_position(self):
""""""
pass
def close(self):
""""""
self.rest_api.stop()
self.trade_ws_api.stop()
self.market_ws_api.stop()
def process_timer_event(self, event: Event):
""""""
self.rest_api.keep_user_stream()
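# Minimal connection sketch (key/secret are placeholders; EventEngine is the
# standard vn.py event loop):
#
#   from vnpy.event import EventEngine
#   engine = EventEngine()
#   gateway = BinanceGateway(engine)
#   gateway.connect({
#       "key": "YOUR_KEY", "secret": "YOUR_SECRET",
#       "session_number": 3, "proxy_host": "", "proxy_port": 0,
#   })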
class BinanceRestApi(RestClient):
"""
BINANCE REST API
"""
def __init__(self, gateway: BinanceGateway):
""""""
super().__init__()
self.gateway = gateway
self.gateway_name = gateway.gateway_name
self.trade_ws_api = self.gateway.trade_ws_api
self.key = ""
self.secret = ""
self.user_stream_key = ""
self.keep_alive_count = 0
self.recv_window = 5000
self.time_offset = 0
self.order_count = 1_000_000
self.order_count_lock = Lock()
self.connect_time = 0
def sign(self, request):
"""
Generate BINANCE signature.
"""
if request.params:
path = request.path + "?" + urllib.parse.urlencode(request.params)
else:
request.params = dict()
path = request.path
security = request.data["security"]
if security == Security.SIGNED:
timestamp = int(time.time() * 1000)
if self.time_offset > 0:
timestamp -= abs(self.time_offset)
elif self.time_offset < 0:
timestamp += abs(self.time_offset)
request.params["timestamp"] = timestamp
# request.params["recv_window"] = self.recv_window
query = urllib.parse.urlencode(sorted(request.params.items()))
signature = hmac.new(self.secret, query.encode(
"utf-8"), hashlib.sha256).hexdigest()
query += "&signature={}".format(signature)
path = request.path + "?" + query
request.path = path
request.params = {}
request.data = {}
# Add headers
headers = {
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "application/json",
"X-MBX-APIKEY": self.key
}
if security == Security.SIGNED or security == Security.API_KEY:
request.headers = headers
return request
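    # For a SIGNED request the rewritten path takes the form (illustrative):
    #   /api/v3/account?timestamp=1565350000000&signature=<hex HMAC-SHA256 of query>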
def connect(
self,
key: str,
secret: str,
session_number: int,
proxy_host: str,
proxy_port: int
):
"""
Initialize connection to REST server.
"""
self.key = key
self.secret = secret.encode()
self.proxy_port = proxy_port
self.proxy_host = proxy_host
self.connect_time = (
int(datetime.now().strftime("%y%m%d%H%M%S")) * self.order_count
)
self.init(REST_HOST, proxy_host, proxy_port)
self.start(session_number)
self.gateway.write_log("REST API启动成功")
self.query_time()
self.query_account()
self.query_order()
self.query_contract()
self.start_user_stream()
def query_time(self):
""""""
data = {
"security": Security.NONE
}
path = "/api/v1/time"
return self.add_request(
"GET",
path,
callback=self.on_query_time,
data=data
)
def query_account(self):
""""""
data = {"security": Security.SIGNED}
self.add_request(
method="GET",
path="/api/v3/account",
callback=self.on_query_account,
data=data
)
def query_order(self):
""""""
data = {"security": Security.SIGNED}
self.add_request(
method="GET",
path="/api/v3/openOrders",
callback=self.on_query_order,
data=data
)
def query_contract(self):
""""""
data = {
"security": Security.NONE
}
self.add_request(
method="GET",
path="/api/v1/exchangeInfo",
callback=self.on_query_contract,
data=data
)
def _new_order_id(self):
""""""
with self.order_count_lock:
self.order_count += 1
return self.order_count
def send_order(self, req: OrderRequest):
""""""
orderid = str(self.connect_time + self._new_order_id())
order = req.create_order_data(
orderid,
self.gateway_name
)
self.gateway.on_order(order)
data = {
"security": Security.SIGNED
}
params = {
"symbol": req.symbol,
"timeInForce": "GTC",
"side": DIRECTION_VT2BINANCE[req.direction],
"type": ORDERTYPE_VT2BINANCE[req.type],
"price": str(req.price),
"quantity": str(req.volume),
"newClientOrderId": orderid,
"newOrderRespType": "ACK"
}
self.add_request(
method="POST",
path="/api/v3/order",
callback=self.on_send_order,
data=data,
params=params,
extra=order,
on_error=self.on_send_order_error,
on_failed=self.on_send_order_failed
)
return order.vt_orderid
def cancel_order(self, req: CancelRequest):
""""""
data = {
"security": Security.SIGNED
}
params = {
"symbol": req.symbol,
"origClientOrderId": req.orderid
}
self.add_request(
method="DELETE",
path="/api/v3/order",
callback=self.on_cancel_order,
params=params,
data=data,
extra=req
)
def start_user_stream(self):
""""""
data = {
"security": Security.API_KEY
}
self.add_request(
method="POST",
path="/api/v1/userDataStream",
callback=self.on_start_user_stream,
data=data
)
def keep_user_stream(self):
""""""
self.keep_alive_count += 1
if self.keep_alive_count < 1800:
return
data = {
"security": Security.SIGNED
}
params = {
"listenKey": self.user_stream_key
}
self.add_request(
method="PUT",
path="/api/v1/userDataStream",
callback=self.on_keep_user_stream,
params=params,
data=data
)
def on_query_time(self, data, request):
""""""
local_time = int(time.time() * 1000)
server_time = int(data["serverTime"])
self.time_offset = local_time - server_time
def on_query_account(self, data, request):
""""""
for account_data in data["balances"]:
account = AccountData(
accountid=account_data["asset"],
balance=float(account_data["free"]) + float(account_data["locked"]),
frozen=float(account_data["locked"]),
gateway_name=self.gateway_name
)
if account.balance:
self.gateway.on_account(account)
self.gateway.write_log("账户资金查询成功")
def on_query_order(self, data, request):
""""""
for d in data:
dt = datetime.fromtimestamp(d["time"] / 1000)
time = dt.strftime("%Y-%m-%d %H:%M:%S")
order = OrderData(
orderid=d["clientOrderId"],
symbol=d["symbol"],
exchange=Exchange.BINANCE,
price=float(d["price"]),
volume=float(d["origQty"]),
type=ORDERTYPE_BINANCE2VT[d["type"]],
direction=DIRECTION_BINANCE2VT[d["side"]],
traded=float(d["executedQty"]),
status=STATUS_BINANCE2VT.get(d["status"], None),
time=time,
gateway_name=self.gateway_name,
)
self.gateway.on_order(order)
self.gateway.write_log("委托信息查询成功")
def on_query_contract(self, data, request):
""""""
for d in data["symbols"]:
base_currency = d["baseAsset"]
quote_currency = d["quoteAsset"]
name = f"{base_currency.upper()}/{quote_currency.upper()}"
pricetick = 0
min_volume = 0
for f in d["filters"]:
if f["filterType"] == "PRICE_FILTER":
pricetick = f["tickSize"]
elif f["filterType"] == "LOT_SIZE":
min_volume = f["stepSize"]
contract = ContractData(
symbol=d["symbol"],
exchange=Exchange.BINANCE,
name=name,
pricetick=pricetick,
size=1,
min_volume=min_volume,
product=Product.SPOT,
gateway_name=self.gateway_name,
)
self.gateway.on_contract(contract)
symbol_name_map[contract.symbol] = contract.name
self.gateway.write_log("合约信息查询成功")
def on_send_order(self, data, request):
""""""
pass
def on_send_order_failed(self, status_code: str, request: Request):
"""
Callback when sending order failed on server.
"""
order = request.extra
order.status = Status.REJECTED
self.gateway.on_order(order)
msg = f"委托失败,状态码:{status_code},信息:{request.response.text}"
self.gateway.write_log(msg)
def on_send_order_error(
self, exception_type: type, exception_value: Exception, tb, request: Request
):
"""
Callback when sending order caused exception.
"""
order = request.extra
order.status = Status.REJECTED
self.gateway.on_order(order)
# Record exception if not ConnectionError
if not issubclass(exception_type, ConnectionError):
self.on_error(exception_type, exception_value, tb, request)
def on_cancel_order(self, data, request):
""""""
pass
def on_start_user_stream(self, data, request):
""""""
self.user_stream_key = data["listenKey"]
self.keep_alive_count = 0
url = WEBSOCKET_TRADE_HOST + self.user_stream_key
self.trade_ws_api.connect(url, self.proxy_host, self.proxy_port)
def on_keep_user_stream(self, data, request):
""""""
pass
class BinanceTradeWebsocketApi(WebsocketClient):
""""""
def __init__(self, gateway):
""""""
super().__init__()
self.gateway = gateway
self.gateway_name = gateway.gateway_name
def connect(self, url, proxy_host, proxy_port):
""""""
self.init(url, proxy_host, proxy_port)
self.start()
def on_connected(self):
""""""
self.gateway.write_log("交易Websocket API连接成功")
def on_packet(self, packet: dict): # type: (dict)->None
""""""
if packet["e"] == "outboundAccountInfo":
self.on_account(packet)
else:
self.on_order(packet)
def on_account(self, packet):
""""""
for d in packet["B"]:
account = AccountData(
accountid=d["a"],
balance=float(d["f"]) + float(d["l"]),
frozen=float(d["l"]),
gateway_name=self.gateway_name
)
if account.balance:
self.gateway.on_account(account)
def on_order(self, packet: dict):
""""""
dt = datetime.fromtimestamp(packet["O"] / 1000)
time = dt.strftime("%Y-%m-%d %H:%M:%S")
if packet["C"] == "null":
orderid = packet["c"]
else:
orderid = packet["C"]
order = OrderData(
symbol=packet["s"],
exchange=Exchange.BINANCE,
orderid=orderid,
type=ORDERTYPE_BINANCE2VT[packet["o"]],
direction=DIRECTION_BINANCE2VT[packet["S"]],
price=float(packet["p"]),
volume=float(packet["q"]),
traded=float(packet["z"]),
status=STATUS_BINANCE2VT[packet["X"]],
time=time,
gateway_name=self.gateway_name
)
self.gateway.on_order(order)
# Push trade event
trade_volume = float(packet["l"])
if not trade_volume:
return
trade_dt = datetime.fromtimestamp(packet["T"] / 1000)
trade_time = trade_dt.strftime("%Y-%m-%d %H:%M:%S")
trade = TradeData(
symbol=order.symbol,
exchange=order.exchange,
orderid=order.orderid,
tradeid=packet["t"],
direction=order.direction,
price=float(packet["L"]),
volume=trade_volume,
time=trade_time,
gateway_name=self.gateway_name,
)
self.gateway.on_trade(trade)
class BinanceDataWebsocketApi(WebsocketClient):
""""""
def __init__(self, gateway):
""""""
super().__init__()
self.gateway = gateway
self.gateway_name = gateway.gateway_name
self.ticks = {}
def connect(self, proxy_host: str, proxy_port: int):
""""""
self.proxy_host = proxy_host
self.proxy_port = proxy_port
def on_connected(self):
""""""
self.gateway.write_log("行情Websocket API连接刷新")
def subscribe(self, req: SubscribeRequest):
""""""
if req.symbol not in symbol_name_map:
self.gateway.write_log(f"找不到该合约代码{req.symbol}")
return
# Create tick buf data
tick = TickData(
symbol=req.symbol,
name=symbol_name_map.get(req.symbol, ""),
exchange=Exchange.BINANCE,
datetime=datetime.now(),
gateway_name=self.gateway_name,
)
self.ticks[req.symbol.lower()] = tick
# Close previous connection
if self._active:
self.stop()
self.join()
# Create new connection
channels = []
for ws_symbol in self.ticks.keys():
channels.append(ws_symbol + "@ticker")
channels.append(ws_symbol + "@depth5")
url = WEBSOCKET_DATA_HOST + "/".join(channels)
self.init(url, self.proxy_host, self.proxy_port)
self.start()
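        # Illustrative resulting URL for a single btcusdt subscription:
        #   wss://stream.binance.com:9443/stream?streams=btcusdt@ticker/btcusdt@depth5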
def on_packet(self, packet):
""""""
stream = packet["stream"]
data = packet["data"]
symbol, channel = stream.split("@")
tick = self.ticks[symbol]
if channel == "ticker":
tick.volume = float(data['v'])
tick.open_price = float(data['o'])
tick.high_price = float(data['h'])
tick.low_price = float(data['l'])
tick.last_price = float(data['c'])
tick.datetime = datetime.fromtimestamp(float(data['E']) / 1000)
else:
bids = data["bids"]
for n in range(5):
price, volume = bids[n]
tick.__setattr__("bid_price_" + str(n + 1), float(price))
tick.__setattr__("bid_volume_" + str(n + 1), float(volume))
asks = data["asks"]
for n in range(5):
price, volume = asks[n]
tick.__setattr__("ask_price_" + str(n + 1), float(price))
tick.__setattr__("ask_volume_" + str(n + 1), float(volume))
if tick.last_price:
self.gateway.on_tick(copy(tick))
| {
"content_hash": "49a61d37fb5c47ffa15adf5c22686103",
"timestamp": "",
"source": "github",
"line_count": 679,
"max_line_length": 84,
"avg_line_length": 27.197349042709867,
"alnum_prop": 0.5262359885200628,
"repo_name": "andrewchenshx/vnpy",
"id": "b3454d711384882394cb2a7214c2804e588924e5",
"size": "18589",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vnpy/gateway/binance/binance_gateway.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3611917"
},
{
"name": "C++",
"bytes": "9732272"
},
{
"name": "CMake",
"bytes": "44488"
},
{
"name": "Jupyter Notebook",
"bytes": "43109"
},
{
"name": "Makefile",
"bytes": "99693"
},
{
"name": "Objective-C",
"bytes": "143589"
},
{
"name": "Python",
"bytes": "6568979"
},
{
"name": "R",
"bytes": "1354"
},
{
"name": "Shell",
"bytes": "5999"
}
],
"symlink_target": ""
} |
import typing
from ... import exc
from ... import util
from ...sql import coercions
from ...sql import elements
from ...sql import operators
from ...sql import roles
from ...sql.base import _generative
from ...sql.base import Generative
Selfmatch = typing.TypeVar("Selfmatch", bound="match")
class match(Generative, elements.BinaryExpression):
"""Produce a ``MATCH (X, Y) AGAINST ('TEXT')`` clause.
E.g.::
from sqlalchemy import desc
from sqlalchemy.dialects.mysql import match
match_expr = match(
users_table.c.firstname,
users_table.c.lastname,
against="Firstname Lastname",
)
stmt = (
select(users_table)
.where(match_expr.in_boolean_mode())
.order_by(desc(match_expr))
)
Would produce SQL resembling::
SELECT id, firstname, lastname
FROM user
WHERE MATCH(firstname, lastname) AGAINST (:param_1 IN BOOLEAN MODE)
ORDER BY MATCH(firstname, lastname) AGAINST (:param_2) DESC
The :func:`_mysql.match` function is a standalone version of the
:meth:`_sql.ColumnElement.match` method available on all
SQL expressions, as when :meth:`_expression.ColumnElement.match` is
    used, but allows passing multiple columns.
:param cols: column expressions to match against
:param against: expression to be compared towards
:param in_boolean_mode: boolean, set "boolean mode" to true
    :param in_natural_language_mode: boolean, set "natural language" to true
:param with_query_expansion: boolean, set "query expansion" to true
.. versionadded:: 1.4.19
.. seealso::
:meth:`_expression.ColumnElement.match`
"""
__visit_name__ = "mysql_match"
inherit_cache = True
def __init__(self, *cols, **kw):
if not cols:
raise exc.ArgumentError("columns are required")
against = kw.pop("against", None)
if against is None:
raise exc.ArgumentError("against is required")
against = coercions.expect(
roles.ExpressionElementRole,
against,
)
left = elements.BooleanClauseList._construct_raw(
operators.comma_op,
clauses=cols,
)
left.group = False
flags = util.immutabledict(
{
"mysql_boolean_mode": kw.pop("in_boolean_mode", False),
"mysql_natural_language": kw.pop(
"in_natural_language_mode", False
),
"mysql_query_expansion": kw.pop("with_query_expansion", False),
}
)
if kw:
raise exc.ArgumentError("unknown arguments: %s" % (", ".join(kw)))
super().__init__(left, against, operators.match_op, modifiers=flags)
@_generative
def in_boolean_mode(self: Selfmatch) -> Selfmatch:
"""Apply the "IN BOOLEAN MODE" modifier to the MATCH expression.
:return: a new :class:`_mysql.match` instance with modifications
applied.
"""
self.modifiers = self.modifiers.union({"mysql_boolean_mode": True})
return self
@_generative
def in_natural_language_mode(self: Selfmatch) -> Selfmatch:
"""Apply the "IN NATURAL LANGUAGE MODE" modifier to the MATCH
expression.
:return: a new :class:`_mysql.match` instance with modifications
applied.
"""
self.modifiers = self.modifiers.union({"mysql_natural_language": True})
return self
@_generative
def with_query_expansion(self: Selfmatch) -> Selfmatch:
"""Apply the "WITH QUERY EXPANSION" modifier to the MATCH expression.
:return: a new :class:`_mysql.match` instance with modifications
applied.
"""
self.modifiers = self.modifiers.union({"mysql_query_expansion": True})
return self
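# Usage sketch (hypothetical table ``articles`` with text columns):
#   expr = match(articles.c.title, articles.c.body, against="sqlalchemy")
#   stmt = select(articles).where(expr.in_boolean_mode())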
| {
"content_hash": "454cdafb4f0031b90dcd00d02e8bd09b",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 79,
"avg_line_length": 28.764705882352942,
"alnum_prop": 0.6083844580777096,
"repo_name": "zzzeek/sqlalchemy",
"id": "561803a78d8bdf98ddd37a17e3f8b925287c5303",
"size": "4154",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "lib/sqlalchemy/dialects/mysql/expression.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cython",
"bytes": "21698"
},
{
"name": "Python",
"bytes": "16838583"
}
],
"symlink_target": ""
} |
import pickle
from datetime import timedelta
from redis import Redis
from werkzeug.datastructures import CallbackDict
from flask.sessions import SessionInterface, SessionMixin
from util import random_base64
"""
From http://flask.pocoo.org/snippets/75/
modified session ID generation to ensure better randomness
"""
class RedisSession(CallbackDict, SessionMixin):
def __init__(self, initial=None, sid=None, new=False):
def on_update(self_):
self_.modified = True
CallbackDict.__init__(self, initial, on_update)
self.sid = sid
self.new = new
self.modified = False
class RedisSessionInterface(SessionInterface):
serializer = pickle
session_class = RedisSession
def __init__(self, redis=None, prefix='session:'):
if redis is None:
redis = Redis()
self.redis = redis
self.prefix = prefix
@staticmethod
def generate_sid():
return random_base64(64)
@staticmethod
def get_redis_expiration_time(app, session):
if session.permanent:
return app.permanent_session_lifetime
return timedelta(days=1)
def open_session(self, app, request):
sid = request.cookies.get(app.session_cookie_name)
if not sid:
sid = self.generate_sid()
return self.session_class(sid=sid, new=True)
val = self.redis.get(self.prefix + sid)
if val is not None:
data = self.serializer.loads(val)
return self.session_class(data, sid=sid)
return self.session_class(sid=sid, new=True)
def save_session(self, app, session, response):
domain = self.get_cookie_domain(app)
if not session:
self.redis.delete(self.prefix + session.sid)
if session.modified:
response.delete_cookie(app.session_cookie_name,
domain=domain)
return
redis_exp = self.get_redis_expiration_time(app, session)
cookie_exp = self.get_expiration_time(app, session)
val = self.serializer.dumps(dict(session))
        # NOTE: (name, value, time) is the legacy redis-py (<3.0) setex argument
        # order this snippet was written against; redis-py >= 3.0 expects
        # setex(name, time, value).
        self.redis.setex(self.prefix + session.sid, val,
                         int(redis_exp.total_seconds()))
response.set_cookie(app.session_cookie_name, session.sid,
expires=cookie_exp, httponly=True,
domain=domain)
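# Typical wiring sketch (standard Flask app assumed):
#   from flask import Flask
#   app = Flask(__name__)
#   app.session_interface = RedisSessionInterface()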
| {
"content_hash": "2df9b9077c1b4a524b359ed51dadaa5f",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 65,
"avg_line_length": 33.901408450704224,
"alnum_prop": 0.6186123805567096,
"repo_name": "icedevml/webcrypto-rsa-login",
"id": "2081584ec05d6d808148ad0e5f5c52c2427e7876",
"size": "2407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "redis_session.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "137"
},
{
"name": "HTML",
"bytes": "3856"
},
{
"name": "JavaScript",
"bytes": "8616"
},
{
"name": "Python",
"bytes": "8448"
}
],
"symlink_target": ""
} |
"""
Takes care of reading from BAM files and scattering (parallelizing) pipelines
See docs/bamtoolz.md for algorithm details"""
from collections import OrderedDict
import time
from multiprocessing import Process, Queue
import queue
import pysam
import cytoolz.curried as cyt
import logging
logger = logging.getLogger(__name__)
def get_seq_dict(bam_fname):
"""Return us a dictionary converting reference_id to sequence name
:param bam_fname:
:return:
"""
fp = pysam.AlignmentFile(bam_fname)
contig_dict = {n: cn['SN'] for n, cn in enumerate(fp.header['SQ'])}
contig_dict[-1] = '*'
return contig_dict
def read_bam_st(bam_fname):
"""Vanilla single thread single read iterator
:param bam_fname:
:return: iterator over single read tuples (read,)
"""
for read in pysam.AlignmentFile(bam_fname).fetch(until_eof=True):
if read.flag & 0b100100000000: continue # Skip supplementary or secondary alignments
yield (read,)
def read_bam_paired_st(bam_fname):
"""Vanilla single thread paired read iterator
:param bam_fname:
:return: iterator over paired read tuples (read1, read2)
"""
singles = {}
for read in pysam.AlignmentFile(bam_fname).fetch(until_eof=True):
if read.flag & 0b100100000000: continue # Skip supplementary or secondary alignments
key = read.qname[:20] # Is this enough?
if key not in singles:
singles[key] = read
else: # You complete me
yield (read, singles[key]) if read.is_read1 else (singles[key], read)
del singles[key]
if len(singles):
logger.error('{} unpaired reads left over!'.format(len(singles)))
logger.error(singles.keys())
def read_iter(fp, contig_q):
"""Returns read objects from contigs until someone passes None as a contig
:param fp: BAM file pointer (pysam.AlignmentFile)
:param contig_q: a queue into which we put contig information
(contig, eof_true) - eof_true is set if this is
the last non-empty contig and we want to pull out
all the trailing unmapped reads right after the contig
:return: a generator
"""
for contig in iter(contig_q.get, None):
logger.debug(contig[0])
for read in fp.fetch(contig[0]):
if read.flag & 0b100100000000: continue # Skip supplementary or secondary alignments
yield read
if contig[1]: # Now want the trailing reads - fp is positioned just before them
for read in fp.fetch(until_eof=contig[1]):
if read.flag & 0b100100000000: continue # Skip supplementary or secondary alignments
yield read
def unpaired_read_iter(fp, contig_q):
"""Almost identical to read_iter, except it returns tuples (read,)
This enables us to write processing code that operates both on single
reads as well as pairs since they both come as tuples
:param fp: BAM file pointer (pysam.AlignmentFile)
:param contig_q: a queue into which we put contig information
(contig, eof_true) - eof_true is set if this is
the last non-empty contig and we want to pull out
all the trailing unmapped reads
:return: a generator that yields (read,) tuples
"""
for read in read_iter(fp, contig_q):
yield (read,)
def paired_read_iter(fp, contig_q, singles_q, max_singles=1000,
is_singles_mixer=False, single_src_cnt=None):
"""
:param fp: pysam.AlignmentFile()
:param contig_q: Messages are of the form (ref, eof)
ref is the name of the contig to fetch
eof is T/F and indicates whether we should fetch till eof
the sender should set this to T only if this is the last
non-empty contig and is followed by the unmapped reads
that sit at the end of the file.
:param singles_q: Messages are SAM strings of reads converted using tostring()
                    reads are received and mixed in with the from-disk stream
if this is the singles mixer, else, unpaired reads are
sent to this Q
  :param max_singles: When we have these many singles, start passing them to the
singles mixer
:param is_singles_mixer: Set True if this is also the "singles mixer" that
receives unpaired reads from other workers
:param single_src_cnt: How many processes are out there sending singles?
Used if this is a singles mixer
:return: a generator that yields paired read tuples (read1, read2)
"""
ref_dict = dict([(r, n) for n, r in enumerate(fp.references)] + [('*', -1)])
# unmapped with no contig
ri = read_iter(fp, contig_q)
singles = OrderedDict()
while 1:
if is_singles_mixer:
try:
read_str = singles_q.get_nowait() # Any singles hanging about?
if read_str is None: # One process says they are done with singles
single_src_cnt -= 1
if single_src_cnt == 0: # Everyone says they've sent all their singles
read = None
else:
continue # At least one more source of singles about
else:
read = fromstring(read_str, ref_dict)
except queue.Empty:
read = next(ri, None)
if read is None:
time.sleep(0.01) # We are out of contigs and we should keep checking the singles Q
continue
else:
read = next(ri, None)
if read is None: # Out of reads from contigs and, if we are a mixer, out of reads from singles_q
break
key = read.qname[:20] # Is this enough?
if key not in singles:
singles[key] = read
else: # You complete me
yield (read, singles[key]) if read.is_read1 else (singles[key], read)
del singles[key]
if not is_singles_mixer:
if len(singles) > max_singles: # Flush earliest singles
        singles_q.put(singles.popitem(last=False)[1].tostring(fp))
# We need to send the remaining singles to the mixer
if not is_singles_mixer:
for read in singles.values():
singles_q.put(read.tostring(fp))
singles_q.put(None) # No more singles from us
def worker(pipeline, bam_fname, result_q, contig_q,
paired=False, singles_q=None, max_singles=1000,
is_singles_mixer=False, single_src_cnt=None):
"""Given a pipeline, run it with reads from the given bam taken from contigs supplied
over the contig_q.
This expects the pipeline to yield one final result which it can then return.
It expects the last element of pipeline to be a function that consumes a read iterator and returns a result.
This is more flexible than you think, since the result can be an iterator, so this can be
used to filter reads in parallel. See examples in the filter analysis tutorial
:param pipeline: A list of pipelines
:param bam_fname: Source BAM file
:param result_q: The result is put here.
:param contig_q: messages are of the form (ref, True/False)
ref is the name of the contig
True/False indicates if eof should be set T/F
This controls whether we read to end of file including all the
unmapped reads. The caller figures out if this is that last
contig that sits just before that tail of unmapped reads at the end
of the BAM file
:param paired: Do we pair the reads before passing them to the pipeline?
:param singles_q: messages are SAM strings of reads converted using tostring().
This is only used/relevant if paired=True because we use that to
collect the singles from all contigs and pair them up
  :param max_singles: When we have these many singles, start passing them to the
singles mixer
:param is_singles_mixer: Set True if this is also the "singles mixer" that
receives unpaired reads from other workers
:param single_src_cnt: How many sources of singles we have
                         (used when this worker is the singles mixer)
:return:
"""
if paired and singles_q is None:
raise RuntimeError('Need singles_q to be defined if using paired reads')
fp = pysam.AlignmentFile(bam_fname)
if paired:
t1 = paired_read_iter(fp, contig_q,
singles_q=singles_q, max_singles=max_singles,
is_singles_mixer=is_singles_mixer, single_src_cnt=single_src_cnt)
else:
t1 = unpaired_read_iter(fp, contig_q)
sink = pipeline[-1]
result_q.put(sink(cyt.pipe(t1, *pipeline[:-1])))
def scatter(pipeline, bam_fname, paired=False, ncpus=2, max_singles=1000):
"""Given a pipeline and a source bam file use multiprocessing to run the pipeline
via multiple workers splitting up the work by contig
python multiprocessing will be used for running the pipelines in parallel and care
must be taken to ensure the individual pipeline nodes are parallelizable
This expects the pipeline to yield one final result which it can then return.
It expects the last element of pipeline to be a function that consumes a read iterator and returns a result.
This is more flexible than you think, since the result can be an iterator, so this can be
used to filter reads in parallel. See examples in the filter analysis tutorial
:param bam_fname:
:param pipeline:
:param paired: When run in parallel, paired vs unpaired pipelines work differently
So we have to tell scatter if we want to source paired or unpaired reads
:param ncpus:
:param max_singles:
:return:
"""
assert ncpus > 1, "ncpus = 1 can't use scatter!"
result_q = Queue()
contig_q = Queue()
if paired:
singles_q = Queue()
is_mixer = [False] * (ncpus - 1) + [True]
else:
    singles_q = None
is_mixer = [False] * ncpus
p_list = []
for i in range(ncpus):
p_list += [
Process(target=worker,
args=(pipeline, bam_fname, result_q, contig_q,
paired, singles_q, max_singles,
is_mixer[i], ncpus - 1))
]
for p in p_list:
p.start()
_contigs = find_non_empty_contigs(bam_fname)
contigs = [(c, False) for c in _contigs[:-1]] + [(_contigs[-1], True)]
# This ensures that we read till EOF for the last contig and thereby fetch all of the trailing unmapped reads
for contig in contigs:
contig_q.put(contig)
# Tell child processes to stop
for i in range(ncpus):
contig_q.put(None)
for i in range(ncpus):
yield result_q.get()
# Orderly exit
for p in p_list:
p.join()
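# Illustrative pipeline (file name hypothetical): the final stage must consume
# the read-tuple iterator and return a single result, here a read count.
#
#   def count_reads(read_iter):
#     return sum(1 for _ in read_iter)
#
#   pipeline = [cyt.filter(lambda r: not r[0].is_unmapped), count_reads]
#   per_worker_counts = list(scatter(pipeline, 'sample.bam', ncpus=4))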
def find_non_empty_contigs(bam_fname):
# Thanks to Güneş Bayir for suggesting a proper algorithm to pull the unmapped reads
contigs = []
fp = pysam.AlignmentFile(bam_fname)
for ref in fp.references:
for _ in fp.fetch(ref):
contigs += [ref]
break
return contigs
def fromstring(s, ref_dict):
"""Inverse of pysam.AlignedSegment.tostring(): given a string, create an aligned segment
:param s:
:param ref_dict: ref_dict = dict([(r, n) for n, r in enumerate(fp.references)] + [('*', -1)])
:return:
"""
def _split(_s):
qname, flag, rname, pos, \
mapping_quality, cigarstring, \
rnext, pnext, template_length, seq, qual, *_tg = _s.split('\t')
flag = int(flag)
rname = ref_dict[rname] # dict must have '*': -1 entry too
pos = int(pos)
mapping_quality = int(mapping_quality)
rnext = rname if rnext == '=' else ref_dict[rnext]
pnext = int(pnext)
template_length = int(template_length)
return qname, flag, rname, pos, \
mapping_quality, cigarstring, \
rnext, pnext, template_length, seq, qual, _tg
# So close, pysam.tostring, so close
def _tags(_t):
_tl = _t.split(':')
if _tl[1] == 'i':
_tl[2] = int(_tl[2])
elif _tl[1] == 'f':
_tl[2] = float(_tl[2])
return _tl[0], _tl[2], _tl[1]
r = pysam.AlignedSegment()
r.qname, r.flag, r.rname, r.pos, \
r.mapping_quality, r.cigarstring, \
r.rnext, r.pnext, r.template_length, r.seq, r.qual, tags = _split(s)
r.set_tags([_tags(t) for t in tags])
return r | {
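# Round-trip sketch (file name hypothetical):
#   fp = pysam.AlignmentFile('sample.bam')
#   ref_dict = dict([(r, n) for n, r in enumerate(fp.references)] + [('*', -1)])
#   read = next(fp)
#   assert fromstring(read.tostring(fp), ref_dict).qname == read.qname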
"content_hash": "90498d9e94927427f1f945fe611b023a",
"timestamp": "",
"source": "github",
"line_count": 339,
"max_line_length": 111,
"avg_line_length": 36.36873156342183,
"alnum_prop": 0.6381701678968286,
"repo_name": "sbg/Mitty",
"id": "3e6139fe1a7c9a3042638b070028f7c3e52b865e",
"size": "12331",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mitty/analysis/bamtoolz.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "147223"
},
{
"name": "Python",
"bytes": "239645"
},
{
"name": "Shell",
"bytes": "834"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from moto.core.responses import BaseResponse
from moto.ec2.utils import sequence_from_querystring
class ElasticIPAddresses(BaseResponse):
def allocate_address(self):
if "Domain" in self.querystring:
domain = self.querystring.get('Domain')[0]
else:
domain = "standard"
if self.is_not_dryrun('AllocateAddress'):
address = self.ec2_backend.allocate_address(domain)
template = self.response_template(ALLOCATE_ADDRESS_RESPONSE)
return template.render(address=address)
def associate_address(self):
instance = eni = None
if "InstanceId" in self.querystring:
instance = self.ec2_backend.get_instance(
self.querystring['InstanceId'][0])
elif "NetworkInterfaceId" in self.querystring:
eni = self.ec2_backend.get_network_interface(
self.querystring['NetworkInterfaceId'][0])
else:
self.ec2_backend.raise_error(
"MissingParameter", "Invalid request, expect InstanceId/NetworkId parameter.")
reassociate = False
if "AllowReassociation" in self.querystring:
reassociate = self.querystring['AllowReassociation'][0] == "true"
if self.is_not_dryrun('AssociateAddress'):
if instance or eni:
if "PublicIp" in self.querystring:
eip = self.ec2_backend.associate_address(instance=instance, eni=eni, address=self.querystring[
'PublicIp'][0], reassociate=reassociate)
elif "AllocationId" in self.querystring:
eip = self.ec2_backend.associate_address(instance=instance, eni=eni, allocation_id=self.querystring[
'AllocationId'][0], reassociate=reassociate)
else:
self.ec2_backend.raise_error(
"MissingParameter", "Invalid request, expect PublicIp/AllocationId parameter.")
else:
self.ec2_backend.raise_error(
"MissingParameter", "Invalid request, expect either instance or ENI.")
template = self.response_template(ASSOCIATE_ADDRESS_RESPONSE)
return template.render(address=eip)
def describe_addresses(self):
template = self.response_template(DESCRIBE_ADDRESS_RESPONSE)
if "Filter.1.Name" in self.querystring:
filter_by = sequence_from_querystring(
"Filter.1.Name", self.querystring)[0]
filter_value = sequence_from_querystring(
"Filter.1.Value", self.querystring)
if filter_by == 'instance-id':
addresses = filter(lambda x: x.instance.id == filter_value[
0], self.ec2_backend.describe_addresses())
else:
raise NotImplementedError(
"Filtering not supported in describe_address.")
elif "PublicIp.1" in self.querystring:
public_ips = sequence_from_querystring(
"PublicIp", self.querystring)
addresses = self.ec2_backend.address_by_ip(public_ips)
elif "AllocationId.1" in self.querystring:
allocation_ids = sequence_from_querystring(
"AllocationId", self.querystring)
addresses = self.ec2_backend.address_by_allocation(allocation_ids)
else:
addresses = self.ec2_backend.describe_addresses()
return template.render(addresses=addresses)
def disassociate_address(self):
if self.is_not_dryrun('DisAssociateAddress'):
if "PublicIp" in self.querystring:
self.ec2_backend.disassociate_address(
address=self.querystring['PublicIp'][0])
elif "AssociationId" in self.querystring:
self.ec2_backend.disassociate_address(
association_id=self.querystring['AssociationId'][0])
else:
self.ec2_backend.raise_error(
"MissingParameter", "Invalid request, expect PublicIp/AssociationId parameter.")
return self.response_template(DISASSOCIATE_ADDRESS_RESPONSE).render()
def release_address(self):
if self.is_not_dryrun('ReleaseAddress'):
if "PublicIp" in self.querystring:
self.ec2_backend.release_address(
address=self.querystring['PublicIp'][0])
elif "AllocationId" in self.querystring:
self.ec2_backend.release_address(
allocation_id=self.querystring['AllocationId'][0])
else:
self.ec2_backend.raise_error(
"MissingParameter", "Invalid request, expect PublicIp/AllocationId parameter.")
return self.response_template(RELEASE_ADDRESS_RESPONSE).render()
ALLOCATE_ADDRESS_RESPONSE = """<AllocateAddressResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<publicIp>{{ address.public_ip }}</publicIp>
<domain>{{ address.domain }}</domain>
{% if address.allocation_id %}
<allocationId>{{ address.allocation_id }}</allocationId>
{% endif %}
</AllocateAddressResponse>"""
ASSOCIATE_ADDRESS_RESPONSE = """<AssociateAddressResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
{% if address.association_id %}
<associationId>{{ address.association_id }}</associationId>
{% endif %}
</AssociateAddressResponse>"""
DESCRIBE_ADDRESS_RESPONSE = """<DescribeAddressesResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<addressesSet>
{% for address in addresses %}
<item>
<publicIp>{{ address.public_ip }}</publicIp>
<domain>{{ address.domain }}</domain>
{% if address.instance %}
<instanceId>{{ address.instance.id }}</instanceId>
{% else %}
<instanceId/>
{% endif %}
{% if address.eni %}
<networkInterfaceId>{{ address.eni.id }}</networkInterfaceId>
{% else %}
<networkInterfaceId/>
{% endif %}
{% if address.allocation_id %}
<allocationId>{{ address.allocation_id }}</allocationId>
{% endif %}
{% if address.association_id %}
<associationId>{{ address.association_id }}</associationId>
{% endif %}
</item>
{% endfor %}
</addressesSet>
</DescribeAddressesResponse>"""
DISASSOCIATE_ADDRESS_RESPONSE = """<DisassociateAddressResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</DisassociateAddressResponse>"""
RELEASE_ADDRESS_RESPONSE = """<ReleaseAddressResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</ReleaseAddressResponse>"""
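# Exercised end-to-end through moto's EC2 mock (sketch; boto3 assumed installed):
#   import boto3
#   from moto import mock_ec2
#
#   @mock_ec2
#   def test_allocate_address():
#       client = boto3.client("ec2", region_name="us-east-1")
#       assert "PublicIp" in client.allocate_address(Domain="standard")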
| {
"content_hash": "6929b1211559f16c235f75cb335ce3e7",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 120,
"avg_line_length": 44.82608695652174,
"alnum_prop": 0.6128585284744353,
"repo_name": "gjtempleton/moto",
"id": "a64a33bb5dcbd2dcb3ee2abf8b0110ca90495458",
"size": "7217",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "moto/ec2/responses/elastic_ip_addresses.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "5848"
},
{
"name": "Java",
"bytes": "1688"
},
{
"name": "JavaScript",
"bytes": "756"
},
{
"name": "Makefile",
"bytes": "630"
},
{
"name": "Python",
"bytes": "2603223"
},
{
"name": "Ruby",
"bytes": "188"
}
],
"symlink_target": ""
} |
AUTHFAILURE = 'AuthFailure'
# DryRun operation: the request would have succeeded, but the DryRun parameter was passed.
DRYRUNOPERATION = 'DryRunOperation'
# Operation failed.
FAILEDOPERATION = 'FailedOperation'
# Internal error.
INTERNALERROR = 'InternalError'
# Parameter error.
INVALIDPARAMETER = 'InvalidParameter'
# Invalid parameter value.
INVALIDPARAMETERVALUE = 'InvalidParameterValue'
# Quota limit exceeded.
LIMITEXCEEDED = 'LimitExceeded'
# Missing parameter error.
MISSINGPARAMETER = 'MissingParameter'
# Operation denied.
OPERATIONDENIED = 'OperationDenied'
# Request frequency limit exceeded.
REQUESTLIMITEXCEEDED = 'RequestLimitExceeded'
# Resource is in use.
RESOURCEINUSE = 'ResourceInUse'
# Insufficient resources.
RESOURCEINSUFFICIENT = 'ResourceInsufficient'
# Resource does not exist.
RESOURCENOTFOUND = 'ResourceNotFound'
# Resource unavailable.
RESOURCEUNAVAILABLE = 'ResourceUnavailable'
# Resources sold out.
RESOURCESSOLDOUT = 'ResourcesSoldOut'
# Unauthorized operation.
UNAUTHORIZEDOPERATION = 'UnauthorizedOperation'
# Permission not enabled / no valid package / account in arrears.
UNAUTHORIZEDOPERATION_UNAUTHORIZED = 'UnauthorizedOperation.Unauthorized'
# Unknown parameter error.
UNKNOWNPARAMETER = 'UnknownParameter'
# Operation not supported.
UNSUPPORTEDOPERATION = 'UnsupportedOperation'
| {
"content_hash": "7d986fe5f2f818673a8ce0f8bea30d5e",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 73,
"avg_line_length": 18.254545454545454,
"alnum_prop": 0.7878486055776892,
"repo_name": "tzpBingo/github-trending",
"id": "94a1c8ef71f603c85dd9fbed3f02d0c8a5f5eba6",
"size": "1967",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "codespace/python/tencentcloud/vm/v20201229/errorcodes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "11470"
},
{
"name": "HTML",
"bytes": "1543"
},
{
"name": "Python",
"bytes": "49985109"
},
{
"name": "Shell",
"bytes": "18039"
}
],
"symlink_target": ""
} |
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(1375, 827)
self.verticalLayout = QtWidgets.QVBoxLayout(Dialog)
self.verticalLayout.setObjectName("verticalLayout")
self.tabWidget = QtWidgets.QTabWidget(Dialog)
self.tabWidget.setObjectName("tabWidget")
self.specular_tab = QtWidgets.QWidget()
self.specular_tab.setObjectName("specular_tab")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.specular_tab)
self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.specular_compare_widget = CompareWidget(self.specular_tab)
self.specular_compare_widget.setObjectName("specular_compare_widget")
self.verticalLayout_2.addWidget(self.specular_compare_widget)
self.tabWidget.addTab(self.specular_tab, "")
self.offspecular_tab = QtWidgets.QWidget()
self.offspecular_tab.setObjectName("offspecular_tab")
self.gridLayout = QtWidgets.QGridLayout(self.offspecular_tab)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setObjectName("gridLayout")
self.gridLayout_2 = QtWidgets.QGridLayout()
self.gridLayout_2.setVerticalSpacing(6)
self.gridLayout_2.setObjectName("gridLayout_2")
self.offspec_mm_plot = MPLWidget(self.offspecular_tab)
self.offspec_mm_plot.setObjectName("offspec_mm_plot")
self.gridLayout_2.addWidget(self.offspec_mm_plot, 0, 1, 1, 1)
self.offspec_pp_plot = MPLWidget(self.offspecular_tab)
self.offspec_pp_plot.setObjectName("offspec_pp_plot")
self.gridLayout_2.addWidget(self.offspec_pp_plot, 0, 0, 1, 1)
self.offspec_pm_plot = MPLWidget(self.offspecular_tab)
self.offspec_pm_plot.setObjectName("offspec_pm_plot")
self.gridLayout_2.addWidget(self.offspec_pm_plot, 1, 0, 1, 1)
self.offspec_mp_plot = MPLWidget(self.offspecular_tab)
self.offspec_mp_plot.setObjectName("offspec_mp_plot")
self.gridLayout_2.addWidget(self.offspec_mp_plot, 1, 1, 1, 1)
self.gridLayout.addLayout(self.gridLayout_2, 1, 0, 1, 1)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setSizeConstraint(QtWidgets.QLayout.SetMinimumSize)
self.horizontalLayout.setContentsMargins(5, 5, 5, 5)
self.horizontalLayout.setObjectName("horizontalLayout")
self.label = QtWidgets.QLabel(self.offspecular_tab)
self.label.setObjectName("label")
self.horizontalLayout.addWidget(self.label)
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.label_2 = QtWidgets.QLabel(self.offspecular_tab)
self.label_2.setObjectName("label_2")
self.horizontalLayout.addWidget(self.label_2)
self.offspec_intensity_min = QtWidgets.QDoubleSpinBox(self.offspecular_tab)
self.offspec_intensity_min.setMinimum(-20.0)
self.offspec_intensity_min.setMaximum(2.0)
self.offspec_intensity_min.setSingleStep(0.25)
self.offspec_intensity_min.setProperty("value", -6.0)
self.offspec_intensity_min.setObjectName("offspec_intensity_min")
self.horizontalLayout.addWidget(self.offspec_intensity_min)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem1)
self.label_3 = QtWidgets.QLabel(self.offspecular_tab)
self.label_3.setObjectName("label_3")
self.horizontalLayout.addWidget(self.label_3)
self.offspec_intensity_max = QtWidgets.QDoubleSpinBox(self.offspecular_tab)
self.offspec_intensity_max.setMinimum(-10.0)
self.offspec_intensity_max.setMaximum(10.0)
self.offspec_intensity_max.setSingleStep(0.25)
self.offspec_intensity_max.setObjectName("offspec_intensity_max")
self.horizontalLayout.addWidget(self.offspec_intensity_max)
spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem2)
self.offspec_apply_crop_button = QtWidgets.QPushButton(self.offspecular_tab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.offspec_apply_crop_button.sizePolicy().hasHeightForWidth())
self.offspec_apply_crop_button.setSizePolicy(sizePolicy)
self.offspec_apply_crop_button.setObjectName("offspec_apply_crop_button")
self.horizontalLayout.addWidget(self.offspec_apply_crop_button)
self.offset_reset_crop_button = QtWidgets.QPushButton(self.offspecular_tab)
self.offset_reset_crop_button.setObjectName("offset_reset_crop_button")
self.horizontalLayout.addWidget(self.offset_reset_crop_button)
self.gridLayout.addLayout(self.horizontalLayout, 0, 0, 1, 1)
self.tabWidget.addTab(self.offspecular_tab, "")
self.gisans_tab = QtWidgets.QWidget()
self.gisans_tab.setObjectName("gisans_tab")
self.verticalLayout_4 = QtWidgets.QVBoxLayout(self.gisans_tab)
self.verticalLayout_4.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
self.horizontalLayout_2.setContentsMargins(5, 5, 5, 5)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.label_6 = QtWidgets.QLabel(self.gisans_tab)
self.label_6.setObjectName("label_6")
self.horizontalLayout_2.addWidget(self.label_6)
spacerItem3 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem3)
self.label_5 = QtWidgets.QLabel(self.gisans_tab)
self.label_5.setObjectName("label_5")
self.horizontalLayout_2.addWidget(self.label_5)
self.gisans_intensity_min = QtWidgets.QDoubleSpinBox(self.gisans_tab)
self.gisans_intensity_min.setMinimum(-20.0)
self.gisans_intensity_min.setMaximum(2.0)
self.gisans_intensity_min.setSingleStep(0.25)
self.gisans_intensity_min.setProperty("value", -6.0)
self.gisans_intensity_min.setObjectName("gisans_intensity_min")
self.horizontalLayout_2.addWidget(self.gisans_intensity_min)
spacerItem4 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem4)
self.label_4 = QtWidgets.QLabel(self.gisans_tab)
self.label_4.setObjectName("label_4")
self.horizontalLayout_2.addWidget(self.label_4)
self.gisans_intensity_max = QtWidgets.QDoubleSpinBox(self.gisans_tab)
self.gisans_intensity_max.setMinimum(-10.0)
self.gisans_intensity_max.setMaximum(10.0)
self.gisans_intensity_max.setSingleStep(0.25)
self.gisans_intensity_max.setObjectName("gisans_intensity_max")
self.horizontalLayout_2.addWidget(self.gisans_intensity_max)
spacerItem5 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem5)
self.gisans_apply_crop_button = QtWidgets.QPushButton(self.gisans_tab)
self.gisans_apply_crop_button.setObjectName("gisans_apply_crop_button")
self.horizontalLayout_2.addWidget(self.gisans_apply_crop_button)
self.gisans_reset_crop_button = QtWidgets.QPushButton(self.gisans_tab)
self.gisans_reset_crop_button.setObjectName("gisans_reset_crop_button")
self.horizontalLayout_2.addWidget(self.gisans_reset_crop_button)
self.verticalLayout_4.addLayout(self.horizontalLayout_2)
self.scrollArea = QtWidgets.QScrollArea(self.gisans_tab)
self.scrollArea.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOn)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setObjectName("scrollArea")
self.scrollAreaWidgetContents = QtWidgets.QWidget()
self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 1319, 685))
self.scrollAreaWidgetContents.setObjectName("scrollAreaWidgetContents")
self.gridLayout_6 = QtWidgets.QGridLayout(self.scrollAreaWidgetContents)
self.gridLayout_6.setContentsMargins(0, 0, 0, 0)
self.gridLayout_6.setObjectName("gridLayout_6")
self.gridLayout_4 = QtWidgets.QGridLayout()
self.gridLayout_4.setObjectName("gridLayout_4")
self.gisans_pp_frame = QtWidgets.QFrame(self.scrollAreaWidgetContents)
self.gisans_pp_frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.gisans_pp_frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.gisans_pp_frame.setObjectName("gisans_pp_frame")
self.gridLayout_7 = QtWidgets.QGridLayout(self.gisans_pp_frame)
self.gridLayout_7.setObjectName("gridLayout_7")
self.gisans_pp_layout = QtWidgets.QGridLayout()
self.gisans_pp_layout.setObjectName("gisans_pp_layout")
self.gridLayout_7.addLayout(self.gisans_pp_layout, 0, 0, 1, 1)
self.gridLayout_4.addWidget(self.gisans_pp_frame, 0, 0, 1, 1)
self.gisans_mm_frame = QtWidgets.QFrame(self.scrollAreaWidgetContents)
self.gisans_mm_frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.gisans_mm_frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.gisans_mm_frame.setObjectName("gisans_mm_frame")
self.gridLayout_8 = QtWidgets.QGridLayout(self.gisans_mm_frame)
self.gridLayout_8.setObjectName("gridLayout_8")
self.gisans_mm_layout = QtWidgets.QGridLayout()
self.gisans_mm_layout.setObjectName("gisans_mm_layout")
self.gridLayout_8.addLayout(self.gisans_mm_layout, 0, 0, 1, 1)
self.gridLayout_4.addWidget(self.gisans_mm_frame, 0, 1, 1, 1)
self.gisans_pm_frame = QtWidgets.QFrame(self.scrollAreaWidgetContents)
self.gisans_pm_frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.gisans_pm_frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.gisans_pm_frame.setObjectName("gisans_pm_frame")
self.gridLayout_11 = QtWidgets.QGridLayout(self.gisans_pm_frame)
self.gridLayout_11.setObjectName("gridLayout_11")
self.gisans_pm_layout = QtWidgets.QGridLayout()
self.gisans_pm_layout.setObjectName("gisans_pm_layout")
self.gridLayout_11.addLayout(self.gisans_pm_layout, 0, 0, 1, 1)
self.gridLayout_4.addWidget(self.gisans_pm_frame, 1, 0, 1, 1)
self.gisans_mp_frame = QtWidgets.QFrame(self.scrollAreaWidgetContents)
self.gisans_mp_frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.gisans_mp_frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.gisans_mp_frame.setObjectName("gisans_mp_frame")
self.gridLayout_12 = QtWidgets.QGridLayout(self.gisans_mp_frame)
self.gridLayout_12.setObjectName("gridLayout_12")
self.gisans_mp_layout = QtWidgets.QGridLayout()
self.gisans_mp_layout.setObjectName("gisans_mp_layout")
self.gridLayout_12.addLayout(self.gisans_mp_layout, 0, 0, 1, 1)
self.gridLayout_4.addWidget(self.gisans_mp_frame, 1, 1, 1, 1)
self.gridLayout_6.addLayout(self.gridLayout_4, 0, 0, 1, 1)
self.scrollArea.setWidget(self.scrollAreaWidgetContents)
self.verticalLayout_4.addWidget(self.scrollArea)
self.tabWidget.addTab(self.gisans_tab, "")
self.verticalLayout.addWidget(self.tabWidget)
self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Close)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout.addWidget(self.buttonBox)
self.retranslateUi(Dialog)
self.tabWidget.setCurrentIndex(0)
self.buttonBox.accepted.connect(Dialog.accept)
self.buttonBox.rejected.connect(Dialog.reject)
self.tabWidget.currentChanged['int'].connect(Dialog.update_active_tab)
self.offset_reset_crop_button.clicked.connect(Dialog.reset_offspec_crop)
self.offspec_apply_crop_button.clicked.connect(Dialog.apply_offspec_crop)
self.gisans_apply_crop_button.clicked.connect(Dialog.apply_gisans_crop)
self.gisans_reset_crop_button.clicked.connect(Dialog.reset_gisans_crop)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "Result Viewer"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.specular_tab), _translate("Dialog", "Specular"))
self.label.setText(_translate("Dialog", "Intensity range:"))
self.label_2.setText(_translate("Dialog", "I_min 10^"))
self.label_3.setText(_translate("Dialog", "I_max 10^"))
self.offspec_apply_crop_button.setText(_translate("Dialog", "Apply cropping to all"))
self.offset_reset_crop_button.setText(_translate("Dialog", "Refresh"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.offspecular_tab), _translate("Dialog", "Off-Specular"))
self.label_6.setText(_translate("Dialog", "Intensity range:"))
self.label_5.setText(_translate("Dialog", "I_min 10^"))
self.label_4.setText(_translate("Dialog", "I_max 10^"))
self.gisans_apply_crop_button.setText(_translate("Dialog", "Apply cropping to all"))
self.gisans_reset_crop_button.setText(_translate("Dialog", "Refresh"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.gisans_tab), _translate("Dialog", "GISANS"))
from .compare_plots import CompareWidget
from .mplwidget import MPLWidget
| {
"content_hash": "e86cdd3dd0fb0886d0744855e4de6dba",
"timestamp": "",
"source": "github",
"line_count": 222,
"max_line_length": 117,
"avg_line_length": 63.9054054054054,
"alnum_prop": 0.7168534573905688,
"repo_name": "mdoucet/reflectivity_ui",
"id": "b77f9d2c37ba1bb310c6c3884b806c38da89ce78",
"size": "14398",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "reflectivity_ui/interfaces/generated/ui_result_viewer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "13410291"
},
{
"name": "Makefile",
"bytes": "331"
},
{
"name": "Python",
"bytes": "1158055"
},
{
"name": "Shell",
"bytes": "208"
}
],
"symlink_target": ""
} |
from heat.openstack.common.notifier import api as notifier_api
from heat.engine import api as engine_api
from heat.engine import notification
def send(stack):
"""Send usage notifications to the configured notification driver."""
# The current notifications have a start/end:
# see: https://wiki.openstack.org/wiki/SystemUsageData
# so to be consistent we translate our status into a known start/end/error
# suffix.
level = notification.get_default_level()
if stack.status == stack.IN_PROGRESS:
suffix = 'start'
elif stack.status == stack.COMPLETE:
suffix = 'end'
else:
suffix = 'error'
level = notifier_api.ERROR
event_type = '%s.%s.%s' % ('stack',
stack.action.lower(),
suffix)
notification.notify(stack.context, event_type, level,
engine_api.format_notification_body(stack))
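# Minimal usage sketch (assumes a heat Stack object with context, action and
# status set): send(stack) emits e.g. 'stack.create.start' while IN_PROGRESS
# and 'stack.create.end' once COMPLETE.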
| {
"content_hash": "610de7b789cbe99953f3cb443921aed1",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 78,
"avg_line_length": 33.75,
"alnum_prop": 0.6275132275132275,
"repo_name": "ntt-sic/heat",
"id": "516090ca7c4a95415c053d11d88cee76390daf62",
"size": "1520",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "heat/engine/notification/stack.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3336181"
},
{
"name": "Shell",
"bytes": "22168"
}
],
"symlink_target": ""
} |
print('Enter your age:')
age = int(input())
# if age >= 21:
#     print('You are legal here.')
# else:
#     print('You are illegal here.')
print('You are legal here.') if age >= 21 else print('You are illegal here.')
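# A slightly more idiomatic form keeps the conditional inside the call:
# print('You are legal here.' if age >= 21 else 'You are illegal here.')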
| {
"content_hash": "3387ddc999c96625168857935231e49f",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 78,
"avg_line_length": 27.625,
"alnum_prop": 0.6153846153846154,
"repo_name": "KristianMariyanov/PythonPlayground",
"id": "5bc188092f2c5159c1da98a0bde2db71fcd0b517",
"size": "221",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hack-bulgaria-course/python01/week1/legal.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5811"
}
],
"symlink_target": ""
} |
import CsHelper
from CsDatabag import CsCmdLine
import logging
class CsChain(object):
def __init__(self):
self.chain = {}
self.last_added = ''
self.count = {}
def add(self, table, chain):
        # dict.setdefault covers both a brand new table and an existing one
        self.chain.setdefault(table, []).append(chain)
if self.last_added != chain:
self.last_added = chain
self.count[chain] = 0
def add_rule(self, chain):
self.count[chain] += 1
def get(self, table):
if table not in self.chain.keys():
return {}
return self.chain[table]
def get_count(self, chain):
return self.count[chain]
def last(self):
return self.last_added
def has_chain(self, table, chain):
if table not in self.chain.keys():
return False
if chain not in self.chain[table]:
return False
return True
class CsTable(object):
def __init__(self):
self.table = []
self.last_added = ''
def add(self, name):
if name not in self.table:
self.table.append(name)
self.last_added = name
def get(self):
return self.table
def last(self):
return self.last_added
class CsNetfilters(object):
def __init__(self, load=True):
self.rules = []
self.table = CsTable()
self.chain = CsChain()
if load:
self.get_all_rules()
def get_all_rules(self):
for i in CsHelper.execute("iptables-save"):
if i.startswith('*'): # Table
self.table.add(i[1:])
if i.startswith(':'): # Chain
self.chain.add(self.table.last(), i[1:].split(' ')[0])
if i.startswith('-A'): # Rule
self.chain.add_rule(i.split()[1])
rule = CsNetfilter()
rule.parse(i)
rule.set_table(self.table.last())
rule.set_chain(i.split()[1])
rule.set_count(self.chain.get_count(i.split()[1]))
self.save(rule)
def save(self, rule):
self.rules.append(rule)
def get(self):
return self.rules
def has_table(self, table):
return table in self.table.get()
def has_chain(self, table, chain):
return self.chain.has_chain(table, chain)
def has_rule(self, new_rule):
for r in self.get():
if new_rule == r:
if new_rule.get_count() > 0:
continue
r.mark_seen()
return True
return False
def get_unseen(self):
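        # Mark-and-sweep cleanup: compare() marks every still-required rule
        # as seen via has_rule(); anything left unseen here is stale and is
        # deleted from the firewall.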
del_list = [x for x in self.rules if x.unseen()]
for r in del_list:
cmd = "iptables -t %s %s" % (r.get_table(), r.to_str(True))
logging.debug("unseen cmd: %s ", cmd)
CsHelper.execute(cmd)
# print "Delete rule %s from table %s" % (r.to_str(True), r.get_table())
logging.info("Delete rule %s from table %s", r.to_str(True), r.get_table())
def compare(self, list):
""" Compare reality with what is needed """
# PASS 1: Ensure all chains are present
for fw in list:
new_rule = CsNetfilter()
new_rule.parse(fw[2])
new_rule.set_table(fw[0])
self.add_chain(new_rule)
ruleSet = set()
# PASS 2: Create rules
for fw in list:
tupledFw = tuple(fw)
if tupledFw in ruleSet:
logging.debug("Already processed : %s", tupledFw)
continue
new_rule = CsNetfilter()
new_rule.parse(fw[2])
new_rule.set_table(fw[0])
if isinstance(fw[1], int):
new_rule.set_count(fw[1])
rule_chain = new_rule.get_chain()
logging.debug("Checking if the rule already exists: rule=%s table=%s chain=%s", new_rule.get_rule(), new_rule.get_table(), new_rule.get_chain())
if self.has_rule(new_rule):
logging.debug("Exists: rule=%s table=%s", fw[2], new_rule.get_table())
else:
# print "Add rule %s in table %s" % ( fw[2], new_rule.get_table())
logging.info("Add: rule=%s table=%s", fw[2], new_rule.get_table())
# front means insert instead of append
cpy = fw[2]
if fw[1] == "front":
cpy = cpy.replace('-A', '-I')
if isinstance(fw[1], int):
# if the rule is for ACLs, we want to insert them in order, right before the DROP all
if rule_chain.startswith("ACL_INBOUND") or rule_chain.startswith("ACL_OUTBOUND"):
rule_count = self.chain.get_count(rule_chain) if self.chain.get_count(rule_chain) > 0 else 1
cpy = cpy.replace("-A %s" % new_rule.get_chain(), '-I %s %s' % (new_rule.get_chain(), rule_count))
else:
cpy = cpy.replace("-A %s" % new_rule.get_chain(), '-I %s %s' % (new_rule.get_chain(), fw[1]))
ret = CsHelper.execute2("iptables -t %s %s" % (new_rule.get_table(), cpy))
# There are some issues in this framework causing failures .. like adding a chain without checking it is present causing
# the failures. Also some of the rule like removeFromLoadBalancerRule is deleting rule and deleteLoadBalancerRule
# trying to delete which causes the failure.
# For now raising the log.
# TODO: Need to fix in the framework.
if ret.returncode != 0:
                    error = ret.communicate()[0]
                    logging.debug("iptables command failed: %s ... continuing", error)
ruleSet.add(tupledFw)
self.chain.add_rule(rule_chain)
self.del_standard()
self.get_unseen()
def add_chain(self, rule):
""" Add the given chain if it is not already present """
if not self.has_chain(rule.get_table(), rule.get_chain()):
if rule.get_chain():
CsHelper.execute("iptables -t %s -N %s" % (rule.get_table(), rule.get_chain()))
self.chain.add(rule.get_table(), rule.get_chain())
def del_standard(self):
""" Del rules that are there but should not be deleted
These standard firewall rules vary according to the device type
"""
type = CsCmdLine("cmdline").get_type()
try:
table = ''
for i in open("/etc/iptables/iptables-%s" % type):
if i.startswith('*'): # Table
table = i[1:].strip()
if i.startswith('-A'): # Rule
self.del_rule(table, i.strip())
except IOError:
logging.debug("Exception in del_standard, returning")
# Nothing can be done
return
def del_rule(self, table, rule):
nr = CsNetfilter()
nr.parse(rule)
nr.set_table(table)
self.delete(nr)
def delete(self, rule):
""" Delete a rule from the list of configured rules
The rule will not actually be removed on the host """
self.rules[:] = [x for x in self.rules if not x == rule]
class CsNetfilter(object):
def __init__(self):
self.rule = {}
self.table = ''
self.chain = ''
self.seen = False
self.count = 0
def parse(self, rule):
self.rule = self.__convert_to_dict(rule)
def unseen(self):
return self.seen is False
def mark_seen(self):
self.seen = True
def __convert_to_dict(self, rule):
rule = unicode(rule.lstrip())
rule = rule.replace('! -', '!_-')
rule = rule.replace('-p all', '')
rule = rule.replace(' ', ' ')
rule = rule.replace('bootpc', '68')
# Ugly hack no.23 split this or else I will have an odd number of parameters
rule = rule.replace('--checksum-fill', '--checksum fill')
# -m can appear twice in a string
rule = rule.replace('-m state', '-m2 state')
rule = rule.replace('ESTABLISHED,RELATED', 'RELATED,ESTABLISHED')
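        # Tokens alternate flag/value, so zipping the even- and odd-indexed
        # slices builds the dict, e.g. "-A INPUT -p tcp" ->
        # {'-A': 'INPUT', '-p': 'tcp'}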
bits = rule.split(' ')
rule = dict(zip(bits[0::2], bits[1::2]))
if "-A" in rule.keys():
self.chain = rule["-A"]
return rule
def set_table(self, table):
if table == '':
table = "filter"
self.table = table
def get_table(self):
return self.table
def set_chain(self, chain):
self.chain = chain
def set_count(self, count=0):
self.count = count
def get_count(self):
return self.count
def get_chain(self):
return self.chain
def get_rule(self):
return self.rule
def to_str(self, delete=False):
""" Convert the rule back into aynactically correct iptables command """
# Order is important
order = ['-A', '-s', '-d', '!_-d', '-i', '!_-i', '-p', '-m', '-m2', '--icmp-type', '--state',
'--dport', '--destination-port', '-o', '!_-o', '-j', '--set-xmark', '--checksum',
'--to-source', '--to-destination', '--mark']
str = ''
for k in order:
if k in self.rule.keys():
printable = k.replace('-m2', '-m')
printable = printable.replace('!_-', '! -')
if delete:
printable = printable.replace('-A', '-D')
if str == '':
str = "%s %s" % (printable, self.rule[k])
else:
str = "%s %s %s" % (str, printable, self.rule[k])
str = str.replace("--checksum fill", "--checksum-fill")
return str
def __eq__(self, rule):
if rule.get_table() != self.get_table():
return False
if rule.get_chain() != self.get_chain():
return False
if len(rule.get_rule().items()) != len(self.get_rule().items()):
return False
common = set(rule.get_rule().items()) & set(self.get_rule().items())
if len(common) != len(rule.get_rule()):
return False
return True
| {
"content_hash": "7456b53eba9f115f9ad344bc1c01611d",
"timestamp": "",
"source": "github",
"line_count": 297,
"max_line_length": 156,
"avg_line_length": 34.66329966329966,
"alnum_prop": 0.5159786304031083,
"repo_name": "DaanHoogland/cloudstack",
"id": "01dfa7cac39917f8dceb6630105b56ab93c8672a",
"size": "11100",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "systemvm/debian/opt/cloud/bin/cs/CsNetfilter.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "9979"
},
{
"name": "C#",
"bytes": "2356211"
},
{
"name": "CSS",
"bytes": "343148"
},
{
"name": "Dockerfile",
"bytes": "2375"
},
{
"name": "FreeMarker",
"bytes": "4887"
},
{
"name": "Groovy",
"bytes": "146420"
},
{
"name": "HTML",
"bytes": "153560"
},
{
"name": "Java",
"bytes": "36818077"
},
{
"name": "JavaScript",
"bytes": "8264908"
},
{
"name": "Python",
"bytes": "12533840"
},
{
"name": "Ruby",
"bytes": "22732"
},
{
"name": "SCSS",
"bytes": "362625"
},
{
"name": "Shell",
"bytes": "708848"
},
{
"name": "XSLT",
"bytes": "57835"
}
],
"symlink_target": ""
} |
class ChatterboxException(Exception):
pass
class RateLimitException(ChatterboxException):
pass
class KeyInvalidationException(ChatterboxException):
pass
| {
"content_hash": "b5adb67fad365e27b6f3d834f344638d",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 52,
"avg_line_length": 16.9,
"alnum_prop": 0.7988165680473372,
"repo_name": "blitzagency/django-chatterbox",
"id": "5b0cfd74366e54241c85a10f1c75b9126a4ba72f",
"size": "169",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chatterbox/exceptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1745"
},
{
"name": "HTML",
"bytes": "10759"
},
{
"name": "JavaScript",
"bytes": "38264"
},
{
"name": "Makefile",
"bytes": "1540"
},
{
"name": "Python",
"bytes": "209440"
},
{
"name": "Ruby",
"bytes": "3342"
},
{
"name": "SaltStack",
"bytes": "2743"
},
{
"name": "Scheme",
"bytes": "615"
}
],
"symlink_target": ""
} |
import os
import pytest
from datadog_checks.dev import docker_run
from datadog_checks.dev.conditions import CheckDockerLogs
from datadog_checks.dev.utils import load_jmx_config
from . import common
@pytest.fixture(scope='session')
def dd_environment():
compose_file = os.path.join(common.HERE, 'docker', 'docker-compose.yaml')
with docker_run(
compose_file, mount_logs=True, conditions=[CheckDockerLogs(compose_file, ['Started HiveMQ in'], matches='all')]
):
config = load_jmx_config()
config['instances'] = [common.INSTANCE]
yield config, {'use_jmx': True}
| {
"content_hash": "0059197e8643431973d051227bcfc9f9",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 119,
"avg_line_length": 30.35,
"alnum_prop": 0.7051070840197694,
"repo_name": "DataDog/integrations-core",
"id": "4aff5f384f932a889e33dae6eed8707c7f17b5c4",
"size": "722",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hivemq/tests/conftest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "578"
},
{
"name": "COBOL",
"bytes": "12312"
},
{
"name": "Dockerfile",
"bytes": "22998"
},
{
"name": "Erlang",
"bytes": "15518"
},
{
"name": "Go",
"bytes": "6988"
},
{
"name": "HCL",
"bytes": "4080"
},
{
"name": "HTML",
"bytes": "1318"
},
{
"name": "JavaScript",
"bytes": "1817"
},
{
"name": "Kotlin",
"bytes": "430"
},
{
"name": "Lua",
"bytes": "3489"
},
{
"name": "PHP",
"bytes": "20"
},
{
"name": "PowerShell",
"bytes": "2398"
},
{
"name": "Python",
"bytes": "13020828"
},
{
"name": "Roff",
"bytes": "359"
},
{
"name": "Ruby",
"bytes": "241"
},
{
"name": "Scala",
"bytes": "7000"
},
{
"name": "Shell",
"bytes": "83227"
},
{
"name": "Swift",
"bytes": "203"
},
{
"name": "TSQL",
"bytes": "29972"
},
{
"name": "TypeScript",
"bytes": "1019"
}
],
"symlink_target": ""
} |
"""Tests for the Atag climate platform."""
from unittest.mock import PropertyMock, patch
from homeassistant.components.atag.climate import DOMAIN, PRESET_MAP
from homeassistant.components.climate import (
ATTR_HVAC_ACTION,
ATTR_HVAC_MODE,
ATTR_PRESET_MODE,
HVAC_MODE_HEAT,
SERVICE_SET_HVAC_MODE,
SERVICE_SET_PRESET_MODE,
SERVICE_SET_TEMPERATURE,
)
from homeassistant.components.climate.const import CURRENT_HVAC_IDLE, PRESET_AWAY
from homeassistant.components.homeassistant import DOMAIN as HA_DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_TEMPERATURE,
STATE_UNKNOWN,
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.setup import async_setup_component
from tests.components.atag import UID, init_integration
from tests.test_util.aiohttp import AiohttpClientMocker
CLIMATE_ID = f"{Platform.CLIMATE}.{DOMAIN}"
async def test_climate(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the creation and values of Atag climate device."""
await init_integration(hass, aioclient_mock)
entity_registry = er.async_get(hass)
assert entity_registry.async_is_registered(CLIMATE_ID)
entity = entity_registry.async_get(CLIMATE_ID)
assert entity.unique_id == f"{UID}-{Platform.CLIMATE}"
assert hass.states.get(CLIMATE_ID).attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_IDLE
async def test_setting_climate(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test setting the climate device."""
await init_integration(hass, aioclient_mock)
with patch("pyatag.entities.Climate.set_temp") as mock_set_temp:
await hass.services.async_call(
Platform.CLIMATE,
SERVICE_SET_TEMPERATURE,
{ATTR_ENTITY_ID: CLIMATE_ID, ATTR_TEMPERATURE: 15},
blocking=True,
)
await hass.async_block_till_done()
mock_set_temp.assert_called_once_with(15)
with patch("pyatag.entities.Climate.set_preset_mode") as mock_set_preset:
await hass.services.async_call(
Platform.CLIMATE,
SERVICE_SET_PRESET_MODE,
{ATTR_ENTITY_ID: CLIMATE_ID, ATTR_PRESET_MODE: PRESET_AWAY},
blocking=True,
)
await hass.async_block_till_done()
mock_set_preset.assert_called_once_with(PRESET_MAP[PRESET_AWAY])
with patch("pyatag.entities.Climate.set_hvac_mode") as mock_set_hvac:
await hass.services.async_call(
Platform.CLIMATE,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: CLIMATE_ID, ATTR_HVAC_MODE: HVAC_MODE_HEAT},
blocking=True,
)
await hass.async_block_till_done()
mock_set_hvac.assert_called_once_with(HVAC_MODE_HEAT)
async def test_incorrect_modes(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test incorrect values are handled correctly."""
with patch(
"pyatag.entities.Climate.hvac_mode",
new_callable=PropertyMock(return_value="bug"),
):
await init_integration(hass, aioclient_mock)
assert hass.states.get(CLIMATE_ID).state == STATE_UNKNOWN
async def test_update_failed(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test data is not destroyed on update failure."""
entry = await init_integration(hass, aioclient_mock)
await async_setup_component(hass, HA_DOMAIN, {})
assert hass.states.get(CLIMATE_ID).state == HVAC_MODE_HEAT
coordinator = hass.data[DOMAIN][entry.entry_id]
with patch("pyatag.AtagOne.update", side_effect=TimeoutError) as updater:
await coordinator.async_refresh()
await hass.async_block_till_done()
updater.assert_called_once()
assert not coordinator.last_update_success
assert coordinator.data.id == UID
| {
"content_hash": "8ae143096a2e07a6792e40a00f07fef3",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 88,
"avg_line_length": 36.398148148148145,
"alnum_prop": 0.6932078351564488,
"repo_name": "home-assistant/home-assistant",
"id": "ba6bc892e40c2805083532dc3155b04acfc4bf72",
"size": "3931",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "tests/components/atag/test_climate.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "20557383"
},
{
"name": "Shell",
"bytes": "6671"
}
],
"symlink_target": ""
} |
from attendly import *
VERSION='0.2.3' | {
"content_hash": "742920d114a50645413dd286c13ef828",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 22,
"avg_line_length": 13,
"alnum_prop": 0.717948717948718,
"repo_name": "Attendly/attendly-python",
"id": "c7a60043ce1277de2d104acb6085938eeaf3e922",
"size": "39",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "attendly/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3152"
}
],
"symlink_target": ""
} |
'''
Author: sapatel91
Date: March 30, 2014
File: TestSendSOAP.py
Purpose: Send SOAP Commands to C4
Disclaimer: USE AT YOUR RISK, I TAKE NO RESPONSIBILITY
Most likely there won't be any though
'''
from Modules.PyControl4 import *
# Establish Connection
# NOTE: IP Address will be different for your system
conn = C4SoapConn('192.168.1.10', 5020)
# Pulse Volume Down in Family Room
Message = '<c4soap name="SendToDeviceAsync" async="1" seq="1615"><param name="iddevice" type="number">10</param><param name="data" type="string"><devicecommand><command>PULSE_VOL_DOWN</command><params></params></devicecommand></param></c4soap>'
conn.Send(Message)
"content_hash": "2d3b3e94ec94912f611add8e264d658c",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 244,
"avg_line_length": 33.9,
"alnum_prop": 0.7168141592920354,
"repo_name": "sapatel91/pyControl4",
"id": "5929eaecb01bfdec6572ac623db110cf7a3ddc1d",
"size": "678",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "TestSendSOAP.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9239"
}
],
"symlink_target": ""
} |
from parsimonious import Grammar
from parsimonious.exceptions import ParseError
from parsimonious.nodes import NodeVisitor, VisitationError
from turing.utils.normalize import get_state_name_norm
norm_state = get_state_name_norm()
class TuringSyntaxError(VisitationError):
def __init__(self, text, pos):
self.text = text
self.pos = pos
def line(self):
pos = self.pos
for i, line in enumerate(self.text.split("\n")):
if pos <= len(line):
return i
else:
pos -= len(line) + 1
return 0
def column(self):
pos = self.pos
for i, line in enumerate(self.text.split("\n")):
if pos <= len(line):
return pos + 1
else:
pos -= len(line) + 1
return 0
rules = """
program = state*
_ = ~"\s*"
initial_state_modifier = "initial"
final_state_modifier = "final"
state_modifier = initial_state_modifier / final_state_modifier
state_name = ~"[ a-zA-Z0-9_]+"
state_code = statement*
state = _ state_modifier* _ state_modifier* _ "state" _ state_name _ "{" _ state_code _ "}" _
literal = ~"[a-zA-Z0-9]{1}"
self = _ "self" _
head = _ "head" _
ref = self / head
junk_direction = ~".*"
direction = "right" / "left" / junk_direction
move = _ "move" _ direction _
no_move = _ "no" _ "move" _
movement = move / no_move
write = _ "do"* _ "write" _ literal _
erase = _ "do"* _ "erase" _
no_action = _ "do" _ "nothing" _
action = write / erase / no_action
state_name = ~"[a-zA-Z0-9 ,.]+"u
state_ref = self / state_name
assume = _ "assume" _ state_ref _
eq = "is"
eq_neq = eq
condition = _ head _ eq_neq _ literal _
if_branch = _ "if" _ condition _ "then" _ state_code _
elif_branch = _ "else" _ "if" _ condition _ "then" _ state_code _
elif_branches = elif_branch*
else_branch = _ "else" _ state_code _
if_block = _ if_branch _ elif_branches _ else_branch? _ "endif" _
statement = movement / action / assume / if_block
"""
def indent(level=0):
def wrapper(visit_to_wrap):
def indent_visit(self, *args):
text = visit_to_wrap(self, *args)
indent_text = ""
for line in text.split("\n"):
indent_text += (' ' * 4 * level) + line + "\n"
return indent_text[:-1]
return indent_visit
return wrapper
class TuringSyntaxVisitor(NodeVisitor):
def generic_visit(self, node, child):
return child
def visit_program(self, node, child):
return child
def visit_initial_state_modifier(self, node, child):
return "InitialMixin"
def visit_final_state_modifier(self, node, child):
return "FinalMixin"
def visit_state_modifier(self, node, child):
return child[0]
@indent(level=2)
def visit_state_code(self, node, child):
return "\n".join(child)
def visit_state_name(self, node, child):
return node.text.strip()
def visit_state(self, node, child):
_, modifier1, _, modifier2, _, _, _, name, _, _, _, code, _, _, _ = child
class_name = norm_state(name)
modifiers = ["UserState"]
if modifier1:
modifiers.append(modifier1[0])
if modifier2:
modifiers.append(modifier2[0])
return """
class _%(class_name)s(%(modifiers)s):
name = '%(name)s'
def _resolve(self, machine):
%(code)s
pass # handle empty state as well
_states.add(_%(class_name)s()) """ % {
'class_name': class_name,
'name': name,
'modifiers': ', '.join(modifiers),
'code': code,
}
def visit_number(self, node, child):
_, number, _ = node
return "'" + number.text.strip() + "'"
def visit_char(self, node, child):
_, char, _ = node
return "'" + char.text + "'"
def visit_literal(self, node, child):
literal = node.text.strip("'\"")
return "'" + literal + "'"
def visit_statement(self, node, child):
return child[0]
def visit_movement(self, node, child):
return child[0]
def visit_move(self, node, child):
_, _, _, direction, _ = node
move = 'machine.move(%s)'
if direction.text == 'left':
there = "Move.LEFT"
elif direction.text == 'right':
there = "Move.RIGHT"
else:
assert 0
return move % there
def visit_no_move(self, node, child):
return 'machine.move(Move.NONE)'
def visit_action(self, node, child):
return child[0]
def visit_write(self, node, child):
_, _, _, _, _, what, _ = node
return "machine.do(Action.WRITE, '%s')" % what.text
def visit_erase(self, node, child):
return "machine.do(Action.ERASE)"
def visit_no_action(self, node, child):
return "machine.do(Action.NONE)"
def visit_assume(self, node, child):
_, _, _, state_ref, _ = node
return "machine.assume('%s')" % state_ref.text
def visit_self(self, node, child):
return 'self'
def visit_head(self, node, child):
return 'machine.head'
def visit_eq(self, node, child):
return '=='
def visit_eq_neq(self, node, child):
return child[0]
def visit_condition(self, node, child):
_, head, _, eq_neq, _, literal, _ = child
return head + ' ' + eq_neq + ' ' + literal
def visit_if_branch(self, node, child):
_, _, _, condition, _, _, _, code, _ = child
return """\
if %(condition)s:
%(code)s""" % {'condition': condition, 'code': code}
def visit_elif_branch(self, node, child):
_, _, _, _, _, condition, _, _, _, code, _ = child
return """
elif %(condition)s:
%(code)s""" % {'condition': condition, 'code': code}
def visit_elif_branches(self, node, child):
return ''.join(child)
def visit_else_branch(self, node, child):
_, _, _, code, _ = child
return """
else:
%(code)s""" % {'code': code}
def visit_if_block(self, node, child):
_, if_branch, _, elif_branches, _, else_branch, _, _, _ = child
return "%(if_branch)s%(elif_branches)s%(else_branch)s" % {
'if_branch': if_branch,
'elif_branches': elif_branches,
'else_branch': else_branch[0] if else_branch else "",
}
def visit_junk_direction(self, node, child):
raise TuringSyntaxError(node.full_text, node.start)
def parse(src):
root = Grammar(rules)["program"].parse(src)
return TuringSyntaxVisitor().visit(root)
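# Usage sketch (hypothetical DSL source): parse() returns the visitor output,
# i.e. generated Python source fragments, one per state:
#     fragments = parse("initial state start { move right }")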
| {
"content_hash": "d111164d946ab053e6be040a37bd5139",
"timestamp": "",
"source": "github",
"line_count": 265,
"max_line_length": 97,
"avg_line_length": 25.381132075471697,
"alnum_prop": 0.5422242045792447,
"repo_name": "myaskevich/turing",
"id": "aced871c841e30d50ace24ef933246510fce571a",
"size": "6727",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "turing/syntax.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25273"
},
{
"name": "VimL",
"bytes": "1436"
}
],
"symlink_target": ""
} |
from django.contrib.contenttypes.models import ContentType
from django.test.client import RequestFactory
from kitsune.access.helpers import has_perm, has_perm_or_owns
from kitsune.access.tests import permission
from kitsune.forums.tests import ForumTestCase, forum, thread
from kitsune.sumo.urlresolvers import reverse
from kitsune.users.tests import user, group
class ForumTestPermissions(ForumTestCase):
"""Make sure access helpers work on the forums."""
def setUp(self):
url = reverse('forums.threads', args=[u'test-forum'])
self.context = {'request': RequestFactory().get(url)}
self.group = group(save=True)
# Set up forum_1
f = self.forum_1 = forum(save=True)
ct = ContentType.objects.get_for_model(self.forum_1)
permission(codename='forums_forum.thread_edit_forum', content_type=ct,
object_id=f.id, group=self.group, save=True)
permission(codename='forums_forum.post_edit_forum', content_type=ct,
object_id=f.id, group=self.group, save=True)
permission(codename='forums_forum.post_delete_forum', content_type=ct,
object_id=f.id, group=self.group, save=True)
permission(codename='forums_forum.thread_delete_forum',
content_type=ct, object_id=f.id, group=self.group,
save=True)
permission(codename='forums_forum.thread_sticky_forum',
content_type=ct, object_id=f.id, group=self.group,
save=True)
permission(codename='forums_forum.thread_move_forum', content_type=ct,
object_id=f.id, group=self.group, save=True)
# Set up forum_2
f = self.forum_2 = forum(save=True)
permission(codename='forums_forum.thread_move_forum', content_type=ct,
object_id=f.id, group=self.group, save=True)
def test_has_perm_thread_edit(self):
"""User in group can edit thread in forum_1, but not in forum_2."""
u = user(save=True)
self.group.user_set.add(u)
self.context['request'].user = u
assert has_perm(self.context, 'forums_forum.thread_edit_forum',
self.forum_1)
assert not has_perm(self.context, 'forums_forum.thread_edit_forum',
self.forum_2)
def test_has_perm_or_owns_thread_edit(self):
"""Users can edit their own threads."""
my_t = thread(save=True)
me = my_t.creator
other_t = thread(save=True)
self.context['request'].user = me
perm = 'forums_forum.thread_edit_forum'
assert has_perm_or_owns(self.context, perm, my_t, self.forum_1)
assert not has_perm_or_owns(self.context, perm, other_t, self.forum_1)
def test_has_perm_thread_delete(self):
"""User in group can delete thread in forum_1, but not in forum_2."""
u = user(save=True)
self.group.user_set.add(u)
self.context['request'].user = u
assert has_perm(self.context, 'forums_forum.thread_delete_forum',
self.forum_1)
assert not has_perm(self.context, 'forums_forum.thread_delete_forum',
self.forum_2)
def test_has_perm_thread_sticky(self):
# User in group can change sticky status of thread in forum_1,
# but not in forum_2.
u = user(save=True)
self.group.user_set.add(u)
self.context['request'].user = u
assert has_perm(self.context, 'forums_forum.thread_sticky_forum',
self.forum_1)
assert not has_perm(self.context, 'forums_forum.thread_sticky_forum',
self.forum_2)
def test_has_perm_thread_locked(self):
# Sanity check: user in group has no permission to change
# locked status in forum_1.
u = user(save=True)
self.group.user_set.add(u)
self.context['request'].user = u
assert not has_perm(self.context, 'forums_forum.thread_locked_forum',
self.forum_1)
def test_has_perm_post_edit(self):
"""User in group can edit any post in forum_1, but not in forum_2."""
u = user(save=True)
self.group.user_set.add(u)
self.context['request'].user = u
assert has_perm(self.context, 'forums_forum.post_edit_forum',
self.forum_1)
assert not has_perm(self.context, 'forums_forum.post_edit_forum',
self.forum_2)
def test_has_perm_post_delete(self):
"""User in group can delete posts in forum_1, but not in forum_2."""
u = user(save=True)
self.group.user_set.add(u)
self.context['request'].user = u
assert has_perm(self.context, 'forums_forum.post_delete_forum',
self.forum_1)
assert not has_perm(self.context, 'forums_forum.post_delete_forum',
self.forum_2)
def test_no_perm_thread_delete(self):
"""User not in group cannot delete thread in any forum."""
self.context['request'].user = user(save=True)
assert not has_perm(self.context, 'forums_forum.thread_delete_forum',
self.forum_1)
assert not has_perm(self.context, 'forums_forum.thread_delete_forum',
self.forum_2)
| {
"content_hash": "be31bf0a8daa221d63b51a173dfad635",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 78,
"avg_line_length": 43.136,
"alnum_prop": 0.6008902077151336,
"repo_name": "silentbob73/kitsune",
"id": "bef841ca25962804b119bed1be5079a2d3bef000",
"size": "5392",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "kitsune/forums/tests/test_permissions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "2694"
},
{
"name": "CSS",
"bytes": "283895"
},
{
"name": "HTML",
"bytes": "615299"
},
{
"name": "JavaScript",
"bytes": "762650"
},
{
"name": "Python",
"bytes": "2739371"
},
{
"name": "Shell",
"bytes": "11190"
},
{
"name": "Smarty",
"bytes": "2062"
}
],
"symlink_target": ""
} |
"""
Cisco Zone Driver is responsible to manage access control using FC zoning
for Cisco FC fabrics.
This is a concrete implementation of FCZoneDriver interface implementing
add_connection and delete_connection interfaces.
**Related Flags**
:zone_activate: Used by: class: 'FCZoneDriver'. Defaults to True
:zone_name_prefix: Used by: class: 'FCZoneDriver'. Defaults to 'openstack'
"""
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import importutils
import six
from cinder import exception
from cinder.i18n import _, _LE, _LI
from cinder.zonemanager.drivers.cisco import cisco_fabric_opts as fabric_opts
from cinder.zonemanager.drivers import fc_zone_driver
from cinder.zonemanager import utils as zm_utils
LOG = logging.getLogger(__name__)
cisco_opts = [
cfg.StrOpt('cisco_sb_connector',
default='cinder.zonemanager.drivers.cisco'
'.cisco_fc_zone_client_cli.CiscoFCZoneClientCLI',
help='Southbound connector for zoning operation'),
]
CONF = cfg.CONF
CONF.register_opts(cisco_opts, 'fc-zone-manager')
class CiscoFCZoneDriver(fc_zone_driver.FCZoneDriver):
"""Cisco FC zone driver implementation.
OpenStack Fibre Channel zone driver to manage FC zoning in
Cisco SAN fabrics.
Version history:
1.0 - Initial Cisco FC zone driver
"""
VERSION = "1.0.0"
def __init__(self, **kwargs):
super(CiscoFCZoneDriver, self).__init__(**kwargs)
self.configuration = kwargs.get('configuration', None)
if self.configuration:
self.configuration.append_config_values(cisco_opts)
# Adding a hack to handle parameters from super classes
# in case configured with multi backends.
fabric_names = self.configuration.safe_get('fc_fabric_names')
activate = self.configuration.safe_get('cisco_zone_activate')
prefix = self.configuration.safe_get('cisco_zone_name_prefix')
base_san_opts = []
if not fabric_names:
base_san_opts.append(
cfg.StrOpt('fc_fabric_names', default=None,
help='Comma separated list of fibre channel '
'fabric names. This list of names is used to'
' retrieve other SAN credentials for connecting'
' to each SAN fabric'
))
if not activate:
base_san_opts.append(
cfg.BoolOpt('cisco_zone_activate',
default=True,
help='Indicates whether zone should '
'be activated or not'))
if not prefix:
base_san_opts.append(
cfg.StrOpt('cisco_zone_name_prefix',
default="openstack",
help="A prefix to be used when naming zone"))
if len(base_san_opts) > 0:
CONF.register_opts(base_san_opts)
self.configuration.append_config_values(base_san_opts)
fabric_names = [x.strip() for x in self.
configuration.fc_fabric_names.split(',')]
# There can be more than one SAN in the network and we need to
# get credentials for each SAN.
if fabric_names:
self.fabric_configs = fabric_opts.load_fabric_configurations(
fabric_names)
@lockutils.synchronized('cisco', 'fcfabric-', True)
def add_connection(self, fabric, initiator_target_map):
"""Concrete implementation of add_connection.
Based on zoning policy and state of each I-T pair, list of zone
members are created and pushed to the fabric to add zones. The
new zones created or zones updated are activated based on isActivate
flag set in cinder.conf returned by volume driver after attach
operation.
:param fabric: Fabric name from cinder.conf file
:param initiator_target_map: Mapping of initiator to list of targets
"""
LOG.debug("Add connection for Fabric:%s", fabric)
LOG.info(_LI("CiscoFCZoneDriver - Add connection "
"for I-T map: %s"), initiator_target_map)
fabric_ip = self.fabric_configs[fabric].safe_get(
'cisco_fc_fabric_address')
fabric_user = self.fabric_configs[fabric].safe_get(
'cisco_fc_fabric_user')
fabric_pwd = self.fabric_configs[fabric].safe_get(
'cisco_fc_fabric_password')
fabric_port = self.fabric_configs[fabric].safe_get(
'cisco_fc_fabric_port')
zoning_policy = self.configuration.zoning_policy
zoning_policy_fab = self.fabric_configs[fabric].safe_get(
'cisco_zoning_policy')
if zoning_policy_fab:
zoning_policy = zoning_policy_fab
zoning_vsan = self.fabric_configs[fabric].safe_get('cisco_zoning_vsan')
LOG.info(_LI("Zoning policy for Fabric %s"), zoning_policy)
statusmap_from_fabric = self.get_zoning_status(
fabric_ip, fabric_user, fabric_pwd, fabric_port, zoning_vsan)
if statusmap_from_fabric.get('session') == 'none':
cfgmap_from_fabric = self.get_active_zone_set(
fabric_ip, fabric_user, fabric_pwd, fabric_port, zoning_vsan)
zone_names = []
if cfgmap_from_fabric.get('zones'):
zone_names = cfgmap_from_fabric['zones'].keys()
# based on zoning policy, create zone member list and
# push changes to fabric.
for initiator_key in initiator_target_map.keys():
zone_map = {}
initiator = initiator_key.lower()
t_list = initiator_target_map[initiator_key]
if zoning_policy == 'initiator-target':
for t in t_list:
target = t.lower()
zone_members = [
zm_utils.get_formatted_wwn(initiator),
zm_utils.get_formatted_wwn(target)]
zone_name = (self.
configuration.cisco_zone_name_prefix
+ initiator.replace(':', '')
+ target.replace(':', ''))
if (len(cfgmap_from_fabric) == 0 or (
zone_name not in zone_names)):
zone_map[zone_name] = zone_members
else:
# This is I-T zoning, skip if zone exists.
LOG.info(_LI("Zone exists in I-T mode. "
"Skipping zone creation %s"),
zone_name)
elif zoning_policy == 'initiator':
zone_members = [
zm_utils.get_formatted_wwn(initiator)]
for t in t_list:
target = t.lower()
zone_members.append(
zm_utils.get_formatted_wwn(target))
zone_name = self.configuration.cisco_zone_name_prefix \
+ initiator.replace(':', '')
if len(zone_names) > 0 and (zone_name in zone_names):
zone_members = zone_members + filter(
lambda x: x not in zone_members,
cfgmap_from_fabric['zones'][zone_name])
zone_map[zone_name] = zone_members
else:
msg = _("Zoning Policy: %s, not"
" recognized") % zoning_policy
LOG.error(msg)
raise exception.FCZoneDriverException(msg)
LOG.info(_LI("Zone map to add: %s"), zone_map)
if len(zone_map) > 0:
conn = None
try:
conn = importutils.import_object(
self.configuration.cisco_sb_connector,
ipaddress=fabric_ip,
username=fabric_user,
password=fabric_pwd,
port=fabric_port,
vsan=zoning_vsan)
conn.add_zones(
zone_map, self.configuration.cisco_zone_activate,
zoning_vsan, cfgmap_from_fabric,
statusmap_from_fabric)
conn.cleanup()
except exception.CiscoZoningCliException as cisco_ex:
msg = _("Exception: %s") % six.text_type(cisco_ex)
raise exception.FCZoneDriverException(msg)
except Exception as e:
LOG.error(_LE("Exception: %s") % six.text_type(e))
msg = (_("Failed to add zoning configuration %s") %
six.text_type(e))
raise exception.FCZoneDriverException(msg)
LOG.debug("Zones added successfully: %s", zone_map)
else:
LOG.debug("Zoning session exists VSAN: %s", zoning_vsan)
@lockutils.synchronized('cisco', 'fcfabric-', True)
def delete_connection(self, fabric, initiator_target_map):
"""Concrete implementation of delete_connection.
Based on zoning policy and state of each I-T pair, list of zones
are created for deletion. The zones are either updated deleted based
on the policy and attach/detach state of each I-T pair.
:param fabric: Fabric name from cinder.conf file
:param initiator_target_map: Mapping of initiator to list of targets
"""
LOG.debug("Delete connection for fabric:%s", fabric)
LOG.info(_LI("CiscoFCZoneDriver - Delete connection for I-T map: %s"),
initiator_target_map)
fabric_ip = self.fabric_configs[fabric].safe_get(
'cisco_fc_fabric_address')
fabric_user = self.fabric_configs[fabric].safe_get(
'cisco_fc_fabric_user')
fabric_pwd = self.fabric_configs[fabric].safe_get(
'cisco_fc_fabric_password')
fabric_port = self.fabric_configs[fabric].safe_get(
'cisco_fc_fabric_port')
zoning_policy = self.configuration.zoning_policy
zoning_policy_fab = self.fabric_configs[fabric].safe_get(
'cisco_zoning_policy')
if zoning_policy_fab:
zoning_policy = zoning_policy_fab
zoning_vsan = self.fabric_configs[fabric].safe_get('cisco_zoning_vsan')
LOG.info(_LI("Zoning policy for fabric %s"), zoning_policy)
statusmap_from_fabric = self.get_zoning_status(
fabric_ip, fabric_user, fabric_pwd, fabric_port, zoning_vsan)
if statusmap_from_fabric.get('session') == 'none':
cfgmap_from_fabric = self.get_active_zone_set(
fabric_ip, fabric_user, fabric_pwd, fabric_port, zoning_vsan)
zone_names = []
if cfgmap_from_fabric.get('zones'):
zone_names = cfgmap_from_fabric['zones'].keys()
# Based on zoning policy, get zone member list and push
# changes to fabric. This operation could result in an update
# for zone config with new member list or deleting zones from
# active cfg.
LOG.debug("zone config from Fabric: %s", cfgmap_from_fabric)
for initiator_key in initiator_target_map.keys():
initiator = initiator_key.lower()
formatted_initiator = zm_utils.get_formatted_wwn(initiator)
zone_map = {}
zones_to_delete = []
t_list = initiator_target_map[initiator_key]
if zoning_policy == 'initiator-target':
# In this case, zone needs to be deleted.
for t in t_list:
target = t.lower()
zone_name = (
self.configuration.cisco_zone_name_prefix
+ initiator.replace(':', '')
+ target.replace(':', ''))
LOG.debug("Zone name to del: %s", zone_name)
if (len(zone_names) > 0 and (zone_name in zone_names)):
# delete zone.
LOG.debug("Added zone to delete to list: %s",
zone_name)
zones_to_delete.append(zone_name)
elif zoning_policy == 'initiator':
zone_members = [formatted_initiator]
for t in t_list:
target = t.lower()
zone_members.append(
zm_utils.get_formatted_wwn(target))
zone_name = self.configuration.cisco_zone_name_prefix \
+ initiator.replace(':', '')
if (zone_names and (zone_name in zone_names)):
filtered_members = filter(
lambda x: x not in zone_members,
cfgmap_from_fabric['zones'][zone_name])
# The assumption here is that initiator is always
# there in the zone as it is 'initiator' policy.
# We find the filtered list and if it is non-empty,
# add initiator to it and update zone if filtered
# list is empty, we remove that zone.
LOG.debug("Zone delete - I mode: filtered targets:%s",
filtered_members)
if filtered_members:
filtered_members.append(formatted_initiator)
LOG.debug("Filtered zone members to update: %s",
filtered_members)
zone_map[zone_name] = filtered_members
LOG.debug("Filtered zone Map to update: %s",
zone_map)
else:
zones_to_delete.append(zone_name)
else:
LOG.info(_LI("Zoning Policy: %s, not recognized"),
zoning_policy)
LOG.debug("Final Zone map to update: %s", zone_map)
LOG.debug("Final Zone list to delete: %s", zones_to_delete)
conn = None
try:
conn = importutils.import_object(
self.configuration.cisco_sb_connector,
ipaddress=fabric_ip,
username=fabric_user,
password=fabric_pwd,
port=fabric_port,
vsan=zoning_vsan)
# Update zone membership.
if zone_map:
conn.add_zones(
zone_map, self.configuration.cisco_zone_activate,
zoning_vsan, cfgmap_from_fabric,
statusmap_from_fabric)
                # Delete zones.
if zones_to_delete:
zone_name_string = ''
num_zones = len(zones_to_delete)
for i in range(0, num_zones):
if i == 0:
zone_name_string = ('%s%s' % (
zone_name_string,
zones_to_delete[i]))
else:
zone_name_string = ('%s%s%s' % (
zone_name_string, ';',
zones_to_delete[i]))
conn.delete_zones(zone_name_string,
self.configuration.
cisco_zone_activate,
zoning_vsan, cfgmap_from_fabric,
statusmap_from_fabric)
conn.cleanup()
except Exception as e:
msg = _("Exception: %s") % six.text_type(e)
LOG.error(msg)
msg = _("Failed to update or delete zoning configuration")
raise exception.FCZoneDriverException(msg)
LOG.debug("Zones deleted successfully: %s", zone_map)
else:
LOG.debug("Zoning session exists VSAN: %s", zoning_vsan)
def get_san_context(self, target_wwn_list):
"""Lookup SAN context for visible end devices.
Look up each SAN configured and return a map of SAN (fabric IP) to
list of target WWNs visible to the fabric.
"""
formatted_target_list = []
fabric_map = {}
fabrics = [x.strip() for x in self.
configuration.fc_fabric_names.split(',')]
LOG.debug("Fabric List: %s", fabrics)
LOG.debug("Target wwn List: %s", target_wwn_list)
if len(fabrics) > 0:
for t in target_wwn_list:
formatted_target_list.append(
zm_utils.get_formatted_wwn(t.lower()))
LOG.debug("Formatted Target wwn List: %s", formatted_target_list)
for fabric_name in fabrics:
fabric_ip = self.fabric_configs[fabric_name].safe_get(
'cisco_fc_fabric_address')
fabric_user = self.fabric_configs[fabric_name].safe_get(
'cisco_fc_fabric_user')
fabric_pwd = self.fabric_configs[fabric_name].safe_get(
'cisco_fc_fabric_password')
fabric_port = self.fabric_configs[fabric_name].safe_get(
'cisco_fc_fabric_port')
zoning_vsan = self.fabric_configs[fabric_name].safe_get(
'cisco_zoning_vsan')
# Get name server data from fabric and get the targets
# logged in.
nsinfo = None
try:
conn = importutils.import_object(
self.configuration.cisco_sb_connector,
ipaddress=fabric_ip,
username=fabric_user,
password=fabric_pwd, port=fabric_port,
vsan=zoning_vsan)
nsinfo = conn.get_nameserver_info()
LOG.debug("show fcns database info from fabric:%s", nsinfo)
conn.cleanup()
except exception.CiscoZoningCliException as ex:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Error getting show fcns database "
"info: %s"), six.text_type(ex))
except Exception as e:
msg = (_("Failed to get show fcns database info:%s") %
six.text_type(e))
LOG.error(msg)
raise exception.FCZoneDriverException(msg)
visible_targets = filter(
lambda x: x in formatted_target_list, nsinfo)
if visible_targets:
LOG.info(_LI("Filtered targets for SAN is: %s"),
{fabric_name: visible_targets})
# getting rid of the ':' before returning
for idx, elem in enumerate(visible_targets):
visible_targets[idx] = six.text_type(
visible_targets[idx]).replace(':', '')
fabric_map[fabric_name] = visible_targets
else:
LOG.debug("No targets are in the fcns info for SAN %s",
fabric_name)
LOG.debug("Return SAN context output:%s", fabric_map)
return fabric_map
def get_active_zone_set(self, fabric_ip,
fabric_user, fabric_pwd, fabric_port,
zoning_vsan):
"""Gets active zoneset config for vsan."""
cfgmap = {}
conn = None
try:
LOG.debug("Southbound connector: %s",
self.configuration.cisco_sb_connector)
conn = importutils.import_object(
self.configuration.cisco_sb_connector,
ipaddress=fabric_ip, username=fabric_user,
password=fabric_pwd, port=fabric_port, vsan=zoning_vsan)
cfgmap = conn.get_active_zone_set()
conn.cleanup()
except Exception as e:
msg = (_("Failed to access active zoning configuration:%s") %
six.text_type(e))
LOG.error(msg)
raise exception.FCZoneDriverException(msg)
LOG.debug("Active zone set from fabric: %s", cfgmap)
return cfgmap
def get_zoning_status(self, fabric_ip, fabric_user, fabric_pwd,
fabric_port, zoning_vsan):
"""Gets zoneset status and mode."""
statusmap = {}
conn = None
try:
LOG.debug("Southbound connector: %s",
self.configuration.cisco_sb_connector)
conn = importutils.import_object(
self.configuration.cisco_sb_connector,
ipaddress=fabric_ip, username=fabric_user,
password=fabric_pwd, port=fabric_port, vsan=zoning_vsan)
statusmap = conn.get_zoning_status()
conn.cleanup()
except Exception as e:
msg = (_("Failed to access zoneset status:%s") %
six.text_type(e))
LOG.error(msg)
raise exception.FCZoneDriverException(msg)
LOG.debug("Zoneset status from fabric: %s", statusmap)
return statusmap
| {
"content_hash": "29914cb36a720776baeb988f30c2dc3b",
"timestamp": "",
"source": "github",
"line_count": 476,
"max_line_length": 79,
"avg_line_length": 47.42226890756302,
"alnum_prop": 0.5017498781730386,
"repo_name": "tmenjo/cinder-2015.1.0",
"id": "2878ce3a51579e70afaf52ab2d8fd79bedc99d52",
"size": "23220",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "cinder/zonemanager/drivers/cisco/cisco_fc_zone_driver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PLpgSQL",
"bytes": "2511"
},
{
"name": "Python",
"bytes": "10650346"
},
{
"name": "Shell",
"bytes": "8111"
}
],
"symlink_target": ""
} |
import subprocess
import os
import shutil  # used by SharedDirectoryEngine.clear_directory
import sys
import time
import re
import socket
import socketserver
import threading
import copy
from pynhost import constants, objutils
from pynhost.platforms import platformhandler
class BaseEngine:
def __init__(self):
pass
def get_lines(self):
'''This should always be overridden'''
assert False
def cleanup(self):
pass
class SphinxEngine(BaseEngine):
def __init__(self, hmm_directory=None, lm_filename=None, dictionary=None):
self.hmm_directory = hmm_directory
self.lm_filename = lm_filename
self.dictionary = dictionary
self.loaded = False
print('Loading PocketSphinx Speech Engine...')
def get_lines(self):
full_command = ['pocketsphinx_continuous']
commands = {
'-hmm': self.hmm_directory,
'-lm': self.lm_filename,
'-dict': self.dictionary,
}
for cmd, config_name in commands.items():
if config_name is not None:
full_command.extend([cmd, config_name])
null = open(os.devnull)
with subprocess.Popen(full_command, stdout=subprocess.PIPE, stderr=null,
bufsize=1, universal_newlines=True) as p:
for line in p.stdout:
split_line = line.rstrip('\n').split(' ')
if split_line[0] == 'READY....' and not self.loaded:
self.loaded = True
print('Ready!')
if len(split_line) > 1 and split_line[0][0].isdigit():
yield ' '.join(split_line[1:])
class SharedDirectoryEngine(BaseEngine):
def __init__(self, shared_dir, filter_on=True):
self.shared_dir = shared_dir
self.filter_on = filter_on
if not os.path.isdir(shared_dir):
os.mkdir(shared_dir)
self.clear_directory()
def get_lines(self):
lines = self.get_buffer_lines()
for line in lines:
if self.filter_on:
line = self.filter_duplicate_letters(line)
yield line
def get_buffer_lines(self):
files = sorted([f for f in os.listdir(self.shared_dir) if not os.path.isdir(f) and re.match(r'o\d+$', f)])
lines = []
for fname in files:
with open(os.path.join(self.shared_dir, fname)) as fobj:
for line in fobj:
lines.append(line.rstrip('\n'))
os.remove(os.path.join(self.shared_dir, fname))
return lines
def filter_duplicate_letters(self, line):
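        # Drops a doubled letter when an uppercase character (not first or
        # last in the word) is immediately followed by its own lowercase
        # twin, e.g. 'AAa' -> 'Aa' (apparently to clean up engine output).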
line_list = []
for word in line.split():
new_word = ''
for i, char in enumerate(word):
if (char.islower() or i in [0, len(word) - 1] or
char.lower() != word[i + 1] or
not char.isalpha()):
new_word += char
line_list.append(new_word)
return ' '.join(line_list)
def clear_directory(self):
while os.listdir(self.shared_dir):
for file_path in os.listdir(self.shared_dir):
full_path = os.path.join(self.shared_dir, file_path)
try:
if os.path.isfile(full_path):
os.unlink(full_path)
else:
shutil.rmtree(full_path)
except FileNotFoundError:
pass
class DebugEngine(BaseEngine):
def __init__(self, delay=constants.DEFAULT_DEBUG_DELAY):
self.delay = delay
def get_lines(self):
lines = [input('\n> ')]
time.sleep(self.delay)
return lines
class SubprocessEngine(BaseEngine):
def __init__(self, process_cmd, filter_func=None):
self.p = subprocess.Popen(process_cmd, stdout=subprocess.PIPE)
self.filter_func = filter_func
def get_lines(self):
for line in self.p.stdout:
line = self.filter_func(line) if line is not None else line
if line:
yield line.decode('utf8')
class SocketEngine(BaseEngine):
def __init__(self, host=socket.gethostname(), port=constants.DEFAULT_PORT_NUMBER):
self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.server.bind((host, port))
self.server.listen(10)
def get_lines(self):
conn, addr = self.server.accept()
try:
line = conn.recv(16384).decode('utf8')
finally:
conn.close()
return [line]
def cleanup(self):
self.server.close()
class HTTPEngine(BaseEngine):
def __init__(self, host=socket.gethostname(), port=constants.DEFAULT_PORT_NUMBER):
self.host = host
self.port = port
self.t = threading.Thread(target=self.run_server)
self.t.daemon = True
self.t.start()
def get_lines(self):
lines = copy.copy(self.server.messages)
        # avoid possible threading weirdness
self.server.messages = self.server.messages[len(lines):]
return lines
def run_server(self):
self.server = objutils.MyServer((self.host, self.port), objutils.WebSocketsHandler)
self.server.serve_forever()
def cleanup(self):
self.server.shutdown()
| {
"content_hash": "de4ffa2a31bf9c0c82568d77142cc240",
"timestamp": "",
"source": "github",
"line_count": 159,
"max_line_length": 114,
"avg_line_length": 33.57861635220126,
"alnum_prop": 0.572017231691328,
"repo_name": "evfredericksen/pynacea",
"id": "90e6d7cc9685109b9a44986bc55335bb74ca0439",
"size": "5339",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pynhost/pynhost/engineio.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "93054"
}
],
"symlink_target": ""
} |
from setuptools import setup
setup(
name='vmfusion',
version='0.2.0',
author='Mario Steinhoff',
author_email='steinhoff.mario@gmail.com',
packages=['vmfusion'],
url='https://github.com/msteinhoff/vmfusion-python',
license='LICENSE.txt',
description='A python API for the VMware Fusion CLI tools.',
long_description=open('README.md').read(),
install_requires=[
"pyparsing >= 2.0.1"
]
)
| {
"content_hash": "c843da40d4481daa58e219b4084daa61",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 64,
"avg_line_length": 27.375,
"alnum_prop": 0.6484018264840182,
"repo_name": "msteinhoff/vmfusion-python",
"id": "de3f1d246a61a4f5da0e11f2f0994ae5a077cf87",
"size": "438",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12843"
}
],
"symlink_target": ""
} |
import re
import sys
class Configuration:
''' This class provides storage for the application configuration
and an interface to a configuration file
'''
def __init__(self, filepath):
        ''' At instantiation time the regexes for matching the
            properties in the configuration file are compiled.
            The filepath is stored and the configuration file is
            read immediately
'''
# Color definitions
self.color = {
'RESET' : "\033[0m",
'RED' : "\033[31m",
'BLACK' : "\033[30m",
'GREEN' : "\033[32m",
'YELLOW' : "\033[33m",
'BLUE' : "\033[34m",
'PURPLE' : "\033[35m",
'CYAN' : "\033[36m",
}
self.configuration = { }
self.configuration['RESET'] = 'RESET'
self.filepath = filepath
# regex definitions for configurations
self.re_username = re.compile("username=")
self.re_password = re.compile("password=")
self.re_refreshtime = re.compile("refreshtime=")
        self.re_prompt = re.compile("prompt=")
        self.re_timestamp = re.compile("timestamp=")
        self.re_screennames = re.compile("screennames=")
# read data from configfile
self.read_config_file(self.filepath)
def read_config_file(self, filepath):
''' The config file specified via filepath is opened and
the values are read and stored in the object variables
'''
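        # Expected file format (assumed): one key=value pair per line, e.g.
        #     username=alice
        #     refreshtime=60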
try:
self.f = open(self.filepath, 'r')
for line in self.f:
if self.re_username.match(line):
self.configuration['username'] = self.re_username.sub("",line).rstrip(' \n')
elif self.re_password.match(line):
self.configuration['password'] = self.re_password.sub("",line).rstrip(' \n')
elif self.re_refreshtime.match(line):
self.configuration['refreshtime'] = self.re_refreshtime.sub("",line).rstrip(' \n')
elif self.re_prompt.match(line):
self.configuration['prompt'] = self.re_prompt.sub("",line).rstrip(' \n').upper()
elif self.re_timestamp.match(line):
self.configuration['timestamp'] = self.re_timestamp.sub("",line).rstrip(' \n').upper()
elif self.re_screennames.match(line):
self.configuration['screennames'] = self.re_screennames.sub("",line).rstrip(' \n').upper()
else:
pass
self.f.close()
        except Exception:
            print "Unexpected error:", sys.exc_info()[0]
def get_configuration(self):
''' This function returns the configuration data
'''
return self.configuration
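# Minimal usage sketch (hypothetical config path; not part of this module).
# Assuming a file of key=value lines such as "username=alice" and
# "refreshtime=60", the parsed values come back as a plain dict:
#
#   config = Configuration("/home/alice/.twshrc")
#   settings = config.get_configuration()
#   print settings.get("username")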
| {
"content_hash": "00187ac4916336b58563fba110bbdc46",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 110,
"avg_line_length": 43.55223880597015,
"alnum_prop": 0.526045236463331,
"repo_name": "mrtazz/twsh",
"id": "089184be3408edfec9e4916620614721550e1692",
"size": "2918",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "configuration.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10255"
}
],
"symlink_target": ""
} |
import pandas as pd
import numpy as np
import nltk as nlp
import matplotlib.pyplot as plt
import importlib
import re
# Custom modules
import database
# Reload custom modules
def reloadall():
global database
database = importlib.reload(database)
print("Begin Main")
# Initialize variables
db = database.InitializeDB()
# Import all stopwords from nltk
stopwords = nlp.corpus.stopwords.words()
# Derive the Tag table from Ancestor
subset = db.ancestor.head(10)
# Create connection character removing regex
chars_removed = ["-","_","/"]
# Compile improves speed through precompiling
# re.escape escapes all characters
# The list needs to be a string which looks like this [/_-]
rx_cr = re.compile("["+re.escape("".join(chars_removed))+"]")
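# Illustrative: with chars_removed above, rx_cr is equivalent to
# re.compile("[\-_/]"), so re.sub(rx_cr, " ", "foo-bar_baz/qux") -> "foo bar baz qux"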
# String processing and splitting
# re.sub uses the precompiled regex and replaces those chars with spaces
temp = subset["Name"].apply(lambda x: re.sub(rx_cr," ",x.lower()).split())
# Generating AT table data
# Using nested list comprehension to expand the lists
# stack and reset_index merge the multi-index back into ordinary columns
# drop removes temp index
linking = pd.DataFrame([[w for w in row] for row in temp], index=temp.index).stack().reset_index().drop("level_1",1)
linking.columns = ["AncestorID","Name"]
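# Illustrative: an ancestor named "deep-learning" with AncestorID 3 expands
# to two linking rows: (3, "deep") and (3, "learning")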
# Generating Tag table data
# groupby().count() gives the total occurrence and reset_index creates a new index
# shifting index to from 0:n to 1:x
# Adding new column with TagID info
tag = linking.groupby(["Name"]).count().reset_index()
tag.index += 1
tag.columns = ["Name","Occurrence"]
tag['TagID'] = pd.Series(tag.index, index=tag.index)
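# Illustrative: a token occurring in three ancestor names yields one tag row
# with Occurrence == 3; TagID simply mirrors the 1-based row index.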
# Filling Tag table
db.insertTable(db.Tag, tag[["Name","Occurrence"]])
#Preparing at table data
at = pd.merge(linking,tag)[["AncestorID","TagID"]]
at.index += 1
# Filling AT table
db.insertTable(db.AT, at.astype(object))
| {
"content_hash": "12a45330731e1a8e38712acf23ad6f10",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 116,
"avg_line_length": 26.897058823529413,
"alnum_prop": 0.7282668124658284,
"repo_name": "Christoph/PythonTest",
"id": "38dbe2db601409796c2bd7876155c1f20283b88b",
"size": "1843",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tagrefinery/main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "154118"
},
{
"name": "HTML",
"bytes": "1283"
},
{
"name": "JavaScript",
"bytes": "45505"
},
{
"name": "Python",
"bytes": "8554"
},
{
"name": "Shell",
"bytes": "343"
}
],
"symlink_target": ""
} |
from unittest import TestCase
from unittest import main
from pylegos.core import Inspector
#USE CASE 1: Called from the different types of methods that can be contained inside a class
class sut1(object):
#USE CASE 2: Called from a nested class
class sut1nested(object):
@staticmethod
def getCallerFromNestedStaticMethod(sutMethod):
return sutMethod()
@classmethod
def getCallerFromNestedClassMethod(cls,sutMethod):
return sutMethod()
def getCallerFromNestedObjMethod(self,sutMethod):
return sutMethod()
@staticmethod
def getCallerFromStaticMethod(sutMethod):
return sutMethod()
@classmethod
def getCallerFromClassMethod(cls,sutMethod):
return sutMethod()
def getCallerFromObjMethod(self,sutMethod):
return sutMethod()
#USE CASE 3: Called from a module level method
def test_getCallerFromModMethod(sutMethod):
return sutMethod()
#USE CASE 4: Caller is from different types of methods in a class
class sut2(object):
#USE CASE 5: Caller is from a nested class
# This will also have calls from this nested class to
# the sut1nested class. Will come as as the sut1method arg
class sut2nested(object):
@staticmethod
def calledFromNestedStaticMethod(sut1Method,sutTestMethod):
return sut1Method(sutMethod=sutTestMethod)
@classmethod
def calledFromNestedClassMethod(cls,sut1Method,sutTestMethod):
return sut1Method(sutMethod=sutTestMethod)
def calledFromNestedObjMethod(self,sut1Method,sutTestMethod):
return sut1Method(sutMethod=sutTestMethod)
@staticmethod
def calledFromStaticMethod(sut1Method,sutTestMethod):
return sut1Method(sutMethod=sutTestMethod)
@classmethod
def calledFromClassMethod(cls,sut1Method,sutTestMethod):
return sut1Method(sutMethod=sutTestMethod)
#USE CASE 6: Caller is a module level method
def test_calledFromModMethod():
# THIS NEEDS TO BE IN THE FORMAT OF Inspector.<methodToTest>:'<ExpectedValueToReturn>
sutTestMethods = {Inspector.getCallerFQN:'test_inspector.test_calledFromModMethod',
Inspector.getCallerClass:None,
Inspector.getCallerMod:'test_inspector',
Inspector.getCallerFunc:'test_calledFromModMethod'}
for testFx,expectVal in sutTestMethods.items():
for sutTest in [sut1.getCallerFromStaticMethod,
sut1.getCallerFromClassMethod,
sut1().getCallerFromObjMethod,
sut1.sut1nested.getCallerFromNestedStaticMethod,
sut1.sut1nested.getCallerFromNestedClassMethod,
sut1.sut1nested().getCallerFromNestedObjMethod]:
testRetVal = sutTest(sutMethod=testFx)
TestCase().assertEqual(expectVal,testRetVal,'Failed on call to |::|'+str(testFx)+'|::| for function type |::|'+str(sutTest)+'|::| from module method |::|test_calledFromModMethod|::|')
        testRetVal = test_getCallerFromModMethod(sutMethod=testFx)
        TestCase().assertEqual(expectVal,testRetVal,'Failed on call to |::|'+str(testFx)+'|::| from function type |::|test_getCallerFromModMethod|::| from module method |::|test_calledFromModMethod|::|')
class TestInspector(TestCase):
def test_Inspector(self):
# THIS WILL CALL ALL CALLER TESTS FOR NORMAL USE CASE WHERE CALLER WILL COME FROM
# A STANDARD CLASS/METHOD
inspector = Inspector.Instance()
# THIS NEEDS TO BE IN THE FORMAT OF Inspector.<methodToTest>:'<ExpectedValueToReturn>
sutTestMethods = {inspector.getCallerFQN:'test_inspector.TestInspector.test_Inspector',
inspector.getCallerClass:'TestInspector',
inspector.getCallerMod:'test_inspector',
inspector.getCallerFunc:'test_Inspector'}
for testFx,expectVal in sutTestMethods.items():
for sutTest in [sut1.getCallerFromStaticMethod,
sut1.getCallerFromClassMethod,
sut1().getCallerFromObjMethod,
sut1.sut1nested.getCallerFromNestedStaticMethod,
sut1.sut1nested.getCallerFromNestedClassMethod,
sut1.sut1nested().getCallerFromNestedObjMethod]:
testRetVal = sutTest(sutMethod=testFx)
self.assertEqual(expectVal,testRetVal,'Failed on call to <'+str(testFx)+'> for function type ['+str(sutTest)+']')
            testRetVal = test_getCallerFromModMethod(sutMethod=testFx)
            self.assertEqual(expectVal,testRetVal,'Failed on call to <'+str(testFx)+'> for function type [test_getCallerFromModMethod]')
# NOW CALL ALL TESTS FOR USE CASES FOR DIFFERENT TYPES OF CALLERS
test_calledFromModMethod()
#rv = sut2.calledFromStaticMethod(sut1Method=sut1.getCallerFromStaticMethod,sutTestMethod=Inspector.getCallerFQN)
#self.assertEqual('test_inspector.sut2.calledFromStaticMethod',rv,'failed on poc')
# TODO FIX CALL TO GET CLASS WHEN CALLED FROM STATIC METHOD
#rv = sut2.calledFromStaticMethod(sut1Method=sut1.getCallerFromStaticMethod,sutTestMethod=Inspector.getCallerClass)
#self.assertEqual('calledFromStaticMethod',rv,'failed on poc')
@classmethod
def test_InspectorFromStatic(cls):
inspector = Inspector.Instance()
rv = sut1().getCallerFromStaticMethod(sutMethod=inspector.getCallerClass)
TestCase().assertEqual('TestInspector',rv)
if __name__ == '__main__':
#TestInspector.test_InspectorStatics()
main()
| {
"content_hash": "9c849c2cb63df92e191181fa0fa4db52",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 204,
"avg_line_length": 47.61666666666667,
"alnum_prop": 0.6825341267063353,
"repo_name": "velexio/pyLegos",
"id": "50470837b38c1d876838c23e8664e85075605158",
"size": "5714",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_inspector.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "456342"
},
{
"name": "Shell",
"bytes": "3694"
},
{
"name": "Smarty",
"bytes": "22363"
}
],
"symlink_target": ""
} |
import sys
import pybindgen
from pybindgen import ReturnValue, Parameter, Module, Function, FileCodeSink
from pybindgen import CppMethod, CppConstructor, CppClass, Enum
def my_module_gen(out_file):
mod = Module('b')
mod.add_include('"b.h"')
B = mod.add_class('B')
B.add_constructor([])
B.add_copy_constructor()
B.add_instance_attribute('b_a', ReturnValue.new('uint32_t'))
B.add_instance_attribute('b_b', ReturnValue.new('uint32_t'))
mod.add_function('BDoA', None, [Parameter.new('B', 'b')])
mod.add_function('BDoB', ReturnValue.new('B'), [])
    mod.generate(FileCodeSink(out_file))
if __name__ == '__main__':
my_module_gen(sys.stdout)
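# Assumed workflow (not part of this example): redirect the generated binding
# source to a file and build it as a Python extension against b.h, e.g.
#
#   python modulegen.py > b-binding.cc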
| {
"content_hash": "c6f6eab59c859033096a7d5d5262912b",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 76,
"avg_line_length": 27.4,
"alnum_prop": 0.6613138686131387,
"repo_name": "softDi/clusim",
"id": "7c65ef0eff5e5355c8af9896061de17571a57565",
"size": "709",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "ns3/pybindgen-0.17.0.post57+nga6376f2/examples/b/modulegen.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3021"
},
{
"name": "C",
"bytes": "365226"
},
{
"name": "C++",
"bytes": "24340132"
},
{
"name": "CSS",
"bytes": "3775"
},
{
"name": "Click",
"bytes": "19348"
},
{
"name": "Gnuplot",
"bytes": "9919"
},
{
"name": "HTML",
"bytes": "7942"
},
{
"name": "JavaScript",
"bytes": "7698"
},
{
"name": "Makefile",
"bytes": "92131"
},
{
"name": "Matlab",
"bytes": "39069"
},
{
"name": "Perl",
"bytes": "302716"
},
{
"name": "Perl 6",
"bytes": "151"
},
{
"name": "Python",
"bytes": "44191047"
},
{
"name": "QMake",
"bytes": "6602"
},
{
"name": "Shell",
"bytes": "146434"
}
],
"symlink_target": ""
} |
class MarkdAsDriverException(Exception):
"""
Wrap any error as DriverException
"""
def __init__(self, wrapped_exc):
super().__init__()
self.wrapped_exc = wrapped_exc
| {
"content_hash": "7b445c343791c15b92a591cd7d1de676",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 40,
"avg_line_length": 28.285714285714285,
"alnum_prop": 0.601010101010101,
"repo_name": "neo4j/neo4j-python-driver",
"id": "042eab8c02dda052664bed07917dfe260864c4cf",
"size": "841",
"binary": false,
"copies": "1",
"ref": "refs/heads/5.0",
"path": "testkitbackend/exceptions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2068"
},
{
"name": "Python",
"bytes": "1654566"
},
{
"name": "Shell",
"bytes": "4165"
}
],
"symlink_target": ""
} |
import string
def sanitiseLine(line):
if line[-1:] == '\n': line = line[:-1]
if string.find(line, "##") != -1:
line = line[:string.find(line, "##")]
line = string.strip(line)
return line
def decodeFunction(featureVal):
retType, rest = string.split(featureVal, " ", 1)
nameIdent, params = string.split(rest, "(")
name, value = string.split(nameIdent, "=")
params, rest = string.split(params, ")")
param1, param2 = string.split(params, ",")[0:2]
return retType, name, value, param1, param2
def decodeEvent(featureVal):
retType, rest = string.split(featureVal, " ", 1)
nameIdent, params = string.split(rest, "(")
name, value = string.split(nameIdent, "=")
return retType, name, value
def decodeParam(p):
param = string.strip(p)
type = ""
name = ""
value = ""
if " " in param:
type, nv = string.split(param, " ")
if "=" in nv:
name, value = string.split(nv, "=")
else:
name = nv
return type, name, value
class Face:
def __init__(self):
self.order = []
self.features = {}
self.values = {}
self.events = {}
def ReadFromFile(self, name):
currentCategory = ""
currentComment = []
currentCommentFinished = 0
file = open(name)
for line in file.readlines():
line = sanitiseLine(line)
if line:
if line[0] == "#":
if line[1] == " ":
if currentCommentFinished:
currentComment = []
currentCommentFinished = 0
currentComment.append(line[2:])
else:
currentCommentFinished = 1
featureType, featureVal = string.split(line, " ", 1)
if featureType in ["fun", "get", "set"]:
retType, name, value, param1, param2 = decodeFunction(featureVal)
p1 = decodeParam(param1)
p2 = decodeParam(param2)
self.features[name] = {
"FeatureType": featureType,
"ReturnType": retType,
"Value": value,
"Param1Type": p1[0], "Param1Name": p1[1], "Param1Value": p1[2],
"Param2Type": p2[0], "Param2Name": p2[1], "Param2Value": p2[2],
"Category": currentCategory, "Comment": currentComment
}
                        if value in self.values:
                            raise ValueError("Duplicate value " + value + " " + name)
self.values[value] = 1
self.order.append(name)
elif featureType == "evt":
retType, name, value = decodeEvent(featureVal)
self.features[name] = {
"FeatureType": featureType,
"ReturnType": retType,
"Value": value,
"Category": currentCategory, "Comment": currentComment
}
                        if value in self.events:
                            raise ValueError("Duplicate event " + value + " " + name)
self.events[value] = 1
self.order.append(name)
elif featureType == "cat":
currentCategory = featureVal
elif featureType == "val":
name, value = string.split(featureVal, "=", 1)
self.features[name] = {
"FeatureType": featureType,
"Category": currentCategory,
"Value": value }
self.order.append(name)
elif featureType == "enu" or featureType == "lex":
name, value = string.split(featureVal, "=", 1)
self.features[name] = {
"FeatureType": featureType,
"Category": currentCategory,
"Value": value }
self.order.append(name)
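# Minimal usage sketch (hypothetical interface file name; not part of this
# module):
#
#   face = Face()
#   face.ReadFromFile("Scintilla.iface")
#   for name in face.order:
#       print name, face.features[name]["FeatureType"]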
| {
"content_hash": "be3ada268d7393478c2b7352b545b044",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 71,
"avg_line_length": 31.17924528301887,
"alnum_prop": 0.5851739788199698,
"repo_name": "segafan/wme1_jankavan_tlc_edition-repo",
"id": "59ada3dce3dcc18462df9529f3866d68f90ab3a8",
"size": "3360",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "src/scite/scintilla/include/Face.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ada",
"bytes": "91920"
},
{
"name": "Assembly",
"bytes": "154110"
},
{
"name": "Batchfile",
"bytes": "23640"
},
{
"name": "C",
"bytes": "8163065"
},
{
"name": "C#",
"bytes": "927846"
},
{
"name": "C++",
"bytes": "8758239"
},
{
"name": "CLIPS",
"bytes": "7056"
},
{
"name": "CSS",
"bytes": "2132"
},
{
"name": "Clarion",
"bytes": "4766"
},
{
"name": "DIGITAL Command Language",
"bytes": "24020"
},
{
"name": "FLUX",
"bytes": "1895"
},
{
"name": "Groff",
"bytes": "224601"
},
{
"name": "HTML",
"bytes": "3072563"
},
{
"name": "Inno Setup",
"bytes": "21811"
},
{
"name": "Lex",
"bytes": "7731"
},
{
"name": "Logos",
"bytes": "2079048"
},
{
"name": "Makefile",
"bytes": "243377"
},
{
"name": "Module Management System",
"bytes": "16278"
},
{
"name": "Objective-C",
"bytes": "75691"
},
{
"name": "PHP",
"bytes": "1007"
},
{
"name": "Pascal",
"bytes": "41721"
},
{
"name": "Perl",
"bytes": "34085"
},
{
"name": "Python",
"bytes": "48923"
},
{
"name": "SAS",
"bytes": "15827"
},
{
"name": "Shell",
"bytes": "836963"
},
{
"name": "Smalltalk",
"bytes": "2735"
},
{
"name": "TeX",
"bytes": "336604"
},
{
"name": "XSLT",
"bytes": "1834"
},
{
"name": "Yacc",
"bytes": "10988"
}
],
"symlink_target": ""
} |
"""Bijector base."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import collections
import contextlib
import re
import numpy as np
import six
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.util.tf_export import tf_export
__all__ = [
"Bijector",
]
class _Mapping(collections.namedtuple(
"_Mapping", ["x", "y", "ildj", "kwargs"])):
"""Helper class to make it easier to manage caching in `Bijector`."""
def __new__(cls, x=None, y=None, ildj=None, kwargs=None):
"""Custom __new__ so namedtuple items have defaults.
Args:
x: `Tensor`. Forward.
y: `Tensor`. Inverse.
ildj: `Tensor`. Inverse log det Jacobian.
kwargs: Python dictionary. Extra args supplied to
forward/inverse/etc functions.
Returns:
mapping: New instance of _Mapping.
"""
return super(_Mapping, cls).__new__(cls, x, y, ildj, kwargs)
@property
def x_key(self):
"""Returns key used for caching Y=g(X)."""
return (self.x,) + self._deep_tuple(tuple(sorted(self.kwargs.items())))
@property
def y_key(self):
"""Returns key used for caching X=g^{-1}(Y)."""
return (self.y,) + self._deep_tuple(tuple(sorted(self.kwargs.items())))
def merge(self, x=None, y=None, ildj=None, kwargs=None, mapping=None):
"""Returns new _Mapping with args merged with self.
Args:
x: `Tensor`. Forward.
y: `Tensor`. Inverse.
ildj: `Tensor`. Inverse log det Jacobian.
kwargs: Python dictionary. Extra args supplied to
forward/inverse/etc functions.
mapping: Instance of _Mapping to merge. Can only be specified if no other
arg is specified.
Returns:
mapping: New instance of `_Mapping` which has inputs merged with self.
Raises:
ValueError: if mapping and any other arg is not `None`.
"""
if mapping is None:
mapping = _Mapping(x=x, y=y, ildj=ildj, kwargs=kwargs)
elif not all(arg is None for arg in [x, y, ildj, kwargs]):
raise ValueError("Cannot specify mapping and individual args.")
return _Mapping(
x=self._merge(self.x, mapping.x),
y=self._merge(self.y, mapping.y),
ildj=self._merge(self.ildj, mapping.ildj),
kwargs=self._merge(self.kwargs, mapping.kwargs))
def _merge(self, old, new):
"""Helper to merge which handles merging one value."""
if old is None:
return new
elif new is not None and old != new:
raise ValueError("Incompatible values: %s != %s" % (old, new))
return old
def _deep_tuple(self, x):
"""Converts lists of lists to tuples of tuples."""
return (tuple(map(self._deep_tuple, x))
if isinstance(x, (list, tuple)) else x)
@six.add_metaclass(abc.ABCMeta)
@tf_export("distributions.bijectors.Bijector")
class Bijector(object):
"""Interface for transformations of a `Distribution` sample.
Bijectors can be used to represent any differentiable and injective
(one to one) function defined on an open subset of `R^n`. Some non-injective
transformations are also supported (see "Non Injective Transforms" below).
#### Mathematical Details
A `Bijector` implements a
[diffeomorphism](https://en.wikipedia.org/wiki/Diffeomorphism), i.e., a
bijective, differentiable function. A `Bijector` is used by
`TransformedDistribution` but can be generally used for transforming a
`Distribution` generated `Tensor`. A `Bijector` is characterized by three
operations:
1. Forward Evaluation
Useful for turning one random outcome into another random outcome from a
different distribution.
2. Inverse Evaluation
Useful for "reversing" a transformation to compute one probability in
terms of another.
3. (log o det o Jacobian o inverse)(x)
"The log of the determinant of the matrix of all first-order partial
derivatives of the inverse function."
Useful for inverting a transformation to compute one probability in terms
of another. Geometrically, the det(Jacobian) is the volume of the
transformation and is used to scale the probability.
By convention, transformations of random variables are named in terms of the
  forward transformation. The forward transformation creates samples; the
  inverse is useful for computing probabilities.
#### Example Uses
- Basic properties:
```python
x = ... # A tensor.
# Evaluate forward transformation.
fwd_x = my_bijector.forward(x)
x == my_bijector.inverse(fwd_x)
x != my_bijector.forward(fwd_x) # Not equal because x != g(g(x)).
```
- Computing a log-likelihood:
```python
def transformed_log_prob(bijector, log_prob, x):
return (bijector.inverse_log_det_jacobian(x) +
log_prob(bijector.inverse(x)))
```
- Transforming a random outcome:
```python
def transformed_sample(bijector, x):
return bijector.forward(x)
```
#### Example Bijectors
- "Exponential"
```none
Y = g(X) = exp(X)
X ~ Normal(0, 1) # Univariate.
```
Implies:
```none
g^{-1}(Y) = log(Y)
|Jacobian(g^{-1})(y)| = 1 / y
Y ~ LogNormal(0, 1), i.e.,
prob(Y=y) = |Jacobian(g^{-1})(y)| * prob(X=g^{-1}(y))
= (1 / y) Normal(log(y); 0, 1)
```
Here is an example of how one might implement the `Exp` bijector:
```python
class Exp(Bijector):
def __init__(self, event_ndims=0, validate_args=False, name="exp"):
super(Exp, self).__init__(
event_ndims=event_ndims, validate_args=validate_args, name=name)
def _forward(self, x):
return math_ops.exp(x)
def _inverse(self, y):
return math_ops.log(y)
def _inverse_log_det_jacobian(self, y):
return -self._forward_log_det_jacobian(self._inverse(y))
def _forward_log_det_jacobian(self, x):
if self.event_ndims is None:
raise ValueError("Jacobian requires known event_ndims.")
      event_dims = self._event_dims_tensor(x)  # the last `event_ndims` axis indices
      return math_ops.reduce_sum(x, axis=event_dims)
```
- "Affine"
```none
Y = g(X) = sqrtSigma * X + mu
X ~ MultivariateNormal(0, I_d)
```
Implies:
```none
g^{-1}(Y) = inv(sqrtSigma) * (Y - mu)
|Jacobian(g^{-1})(y)| = det(inv(sqrtSigma))
Y ~ MultivariateNormal(mu, sqrtSigma) , i.e.,
prob(Y=y) = |Jacobian(g^{-1})(y)| * prob(X=g^{-1}(y))
= det(sqrtSigma)^(-d) *
MultivariateNormal(inv(sqrtSigma) * (y - mu); 0, I_d)
```
#### Jacobian
The Jacobian is a reduction over event dims. To see this, consider the `Exp`
`Bijector` applied to a `Tensor` which has sample, batch, and event (S, B, E)
shape semantics. Suppose the `Tensor`'s partitioned-shape is `(S=[4], B=[2],
E=[3, 3])`. The shape of the `Tensor` returned by `forward` and `inverse` is
unchanged, i.e., `[4, 2, 3, 3]`. However the shape returned by
`inverse_log_det_jacobian` is `[4, 2]` because the Jacobian is a reduction
over the event dimensions.
It is sometimes useful to implement the inverse Jacobian as the negative
forward Jacobian. For example,
```python
def _inverse_log_det_jacobian(self, y):
return -self._forward_log_det_jac(self._inverse(y)) # Note negation.
```
The correctness of this approach can be seen from the following claim.
- Claim:
Assume `Y = g(X)` is a bijection whose derivative exists and is nonzero
for its domain, i.e., `dY/dX = d/dX g(X) != 0`. Then:
```none
(log o det o jacobian o g^{-1})(Y) = -(log o det o jacobian o g)(X)
```
- Proof:
From the bijective, nonzero differentiability of `g`, the
[inverse function theorem](
https://en.wikipedia.org/wiki/Inverse_function_theorem)
implies `g^{-1}` is differentiable in the image of `g`.
Applying the chain rule to `y = g(x) = g(g^{-1}(y))` yields
`I = g'(g^{-1}(y))*g^{-1}'(y)`.
The same theorem also implies `g^{-1}'` is non-singular therefore:
`inv[ g'(g^{-1}(y)) ] = g^{-1}'(y)`.
The claim follows from [properties of determinant](
https://en.wikipedia.org/wiki/Determinant#Multiplicativity_and_matrix_groups).
  Generally it's preferable to directly implement the inverse Jacobian. This
should have superior numerical stability and will often share subgraphs with
the `_inverse` implementation.
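  A quick numeric check of the claim, using the contrib `Exp` bijector
  (illustrative; assumes `tf.contrib.distributions.bijectors` is importable):
  ```python
  exp = tf.contrib.distributions.bijectors.Exp(event_ndims=0)
  y = exp.forward(1.)                            # y == e
  exp.inverse_log_det_jacobian(y)                # == log(1/e) == -1.
  -exp.forward_log_det_jacobian(exp.inverse(y))  # == -1. as well
  ```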
#### Subclass Requirements
- Subclasses typically implement:
- `_forward`,
- `_inverse`,
- `_inverse_log_det_jacobian`,
- `_forward_log_det_jacobian` (optional).
The `_forward_log_det_jacobian` is called when the bijector is inverted via
    the `Invert` bijector. If undefined, a slightly less efficient
    calculation, `-1 * _inverse_log_det_jacobian`, is used.
If the bijector changes the shape of the input, you must also implement:
- _forward_event_shape_tensor,
- _forward_event_shape (optional),
- _inverse_event_shape_tensor,
- _inverse_event_shape (optional).
By default the event-shape is assumed unchanged from input.
- If the `Bijector`'s use is limited to `TransformedDistribution` (or friends
like `QuantizedDistribution`) then depending on your use, you may not need
to implement all of `_forward` and `_inverse` functions.
Examples:
1. Sampling (e.g., `sample`) only requires `_forward`.
2. Probability functions (e.g., `prob`, `cdf`, `survival`) only require
`_inverse` (and related).
3. Only calling probability functions on the output of `sample` means
`_inverse` can be implemented as a cache lookup.
See "Example Uses" [above] which shows how these functions are used to
transform a distribution. (Note: `_forward` could theoretically be
implemented as a cache lookup but this would require controlling the
underlying sample generation mechanism.)
#### Non Injective Transforms
  **WARNING** Handling of non-injective transforms is subject to change.
Non injective maps `g` are supported, provided their domain `D` can be
partitioned into `k` disjoint subsets, `Union{D1, ..., Dk}`, such that,
ignoring sets of measure zero, the restriction of `g` to each subset is a
  differentiable bijection onto `g(D)`. In particular, this implies that for
`y in g(D)`, the set inverse, i.e. `g^{-1}(y) = {x in D : g(x) = y}`, always
contains exactly `k` distinct points.
The property, `_is_injective` is set to `False` to indicate that the bijector
is not injective, yet satisfies the above condition.
The usual bijector API is modified in the case `_is_injective is False` (see
method docstrings for specifics). Here we show by example the `AbsoluteValue`
  bijector. In this case, the domain `D = (-inf, inf)` can be partitioned
into `D1 = (-inf, 0)`, `D2 = {0}`, and `D3 = (0, inf)`. Let `gi` be the
restriction of `g` to `Di`, then both `g1` and `g3` are bijections onto
`(0, inf)`, with `g1^{-1}(y) = -y`, and `g3^{-1}(y) = y`. We will use
`g1` and `g3` to define bijector methods over `D1` and `D3`. `D2 = {0}` is
an oddball in that `g2` is one to one, and the derivative is not well defined.
Fortunately, when considering transformations of probability densities
(e.g. in `TransformedDistribution`), sets of measure zero have no effect in
theory, and only a small effect in 32 or 64 bit precision. For that reason,
we define `inverse(0)` and `inverse_log_det_jacobian(0)` both as `[0, 0]`,
which is convenient and results in a left-semicontinuous pdf.
```python
abs = tf.contrib.distributions.bijectors.AbsoluteValue()
abs.forward(-1.)
==> 1.
abs.forward(1.)
==> 1.
abs.inverse(1.)
==> (-1., 1.)
# The |dX/dY| is constant, == 1. So Log|dX/dY| == 0.
abs.inverse_log_det_jacobian(1.)
==> (0., 0.)
# Special case handling of 0.
abs.inverse(0.)
==> (0., 0.)
abs.inverse_log_det_jacobian(0.)
==> (0., 0.)
```
"""
@abc.abstractmethod
def __init__(self,
event_ndims=None,
graph_parents=None,
is_constant_jacobian=False,
validate_args=False,
dtype=None,
name=None):
"""Constructs Bijector.
A `Bijector` transforms random variables into new random variables.
Examples:
```python
# Create the Y = g(X) = X transform which operates on vector events.
identity = Identity(event_ndims=1)
# Create the Y = g(X) = exp(X) transform which operates on matrices.
exp = Exp(event_ndims=2)
```
See `Bijector` subclass docstring for more details and specific examples.
Args:
event_ndims: number of dimensions associated with event coordinates.
graph_parents: Python list of graph prerequisites of this `Bijector`.
is_constant_jacobian: Python `bool` indicating that the Jacobian is not a
function of the input.
validate_args: Python `bool`, default `False`. Whether to validate input
with asserts. If `validate_args` is `False`, and the inputs are invalid,
correct behavior is not guaranteed.
dtype: `tf.dtype` supported by this `Bijector`. `None` means dtype is not
enforced.
name: The name to give Ops created by the initializer.
Raises:
ValueError: If a member of `graph_parents` is not a `Tensor`.
"""
self._event_ndims = (
ops.convert_to_tensor(event_ndims, dtype=dtypes.int32)
if event_ndims is not None else None)
self._graph_parents = graph_parents or []
self._is_constant_jacobian = is_constant_jacobian
self._validate_args = validate_args
self._dtype = dtype
self._from_y = {}
self._from_x = {}
# Using abbreviation ildj for "inverse log det Jacobian."
# This variable is not `None` iff is_constant_jacobian is `True`.
self._constant_ildj = None
if name:
self._name = name
else:
# We want the default convention to be snake_case rather than CamelCase
# since `Chain` uses bijector.name as the kwargs dictionary key.
def camel_to_snake(name):
s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name)
return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower()
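      # e.g. camel_to_snake("SoftmaxCentered") -> "softmax_centered"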
self._name = camel_to_snake(type(self).__name__.lstrip("_"))
for i, t in enumerate(self._graph_parents):
if t is None or not tensor_util.is_tensor(t):
raise ValueError("Graph parent item %d is not a Tensor; %s." % (i, t))
@property
def event_ndims(self):
"""Returns then number of event dimensions this bijector operates on."""
return self._event_ndims
@property
def graph_parents(self):
"""Returns this `Bijector`'s graph_parents as a Python list."""
return self._graph_parents
@property
def is_constant_jacobian(self):
"""Returns true iff the Jacobian is not a function of x.
Note: Jacobian is either constant for both forward and inverse or neither.
Returns:
is_constant_jacobian: Python `bool`.
"""
return self._is_constant_jacobian
@property
def _is_injective(self):
"""Returns true iff the forward map `g` is injective (one-to-one function).
**WARNING** This hidden property and its behavior are subject to change.
Note: Non-injective maps `g` are supported, provided their domain `D` can
be partitioned into `k` disjoint subsets, `Union{D1, ..., Dk}`, such that,
ignoring sets of measure zero, the restriction of `g` to each subset is a
differentiable bijection onto `g(D)`.
Returns:
is_injective: Python `bool`.
"""
return True
@property
def validate_args(self):
"""Returns True if Tensor arguments will be validated."""
return self._validate_args
@property
def dtype(self):
"""dtype of `Tensor`s transformable by this distribution."""
return self._dtype
@property
def name(self):
"""Returns the string name of this `Bijector`."""
return self._name
def _forward_event_shape_tensor(self, input_shape):
"""Subclass implementation for `forward_event_shape_tensor` function."""
# By default, we assume event_shape is unchanged.
return input_shape
def forward_event_shape_tensor(self,
input_shape,
name="forward_event_shape_tensor"):
"""Shape of a single sample from a single batch as an `int32` 1D `Tensor`.
Args:
input_shape: `Tensor`, `int32` vector indicating event-portion shape
passed into `forward` function.
name: name to give to the op
Returns:
forward_event_shape_tensor: `Tensor`, `int32` vector indicating
event-portion shape after applying `forward`.
"""
with self._name_scope(name, [input_shape]):
input_shape = ops.convert_to_tensor(input_shape, dtype=dtypes.int32,
name="input_shape")
return self._forward_event_shape_tensor(input_shape)
def _forward_event_shape(self, input_shape):
"""Subclass implementation for `forward_event_shape` public function."""
# By default, we assume event_shape is unchanged.
return input_shape
def forward_event_shape(self, input_shape):
"""Shape of a single sample from a single batch as a `TensorShape`.
Same meaning as `forward_event_shape_tensor`. May be only partially defined.
Args:
input_shape: `TensorShape` indicating event-portion shape passed into
`forward` function.
Returns:
forward_event_shape_tensor: `TensorShape` indicating event-portion shape
after applying `forward`. Possibly unknown.
"""
return self._forward_event_shape(tensor_shape.TensorShape(input_shape))
def _inverse_event_shape_tensor(self, output_shape):
"""Subclass implementation for `inverse_event_shape_tensor` function."""
# By default, we assume event_shape is unchanged.
return output_shape
def inverse_event_shape_tensor(self,
output_shape,
name="inverse_event_shape_tensor"):
"""Shape of a single sample from a single batch as an `int32` 1D `Tensor`.
Args:
output_shape: `Tensor`, `int32` vector indicating event-portion shape
passed into `inverse` function.
name: name to give to the op
Returns:
inverse_event_shape_tensor: `Tensor`, `int32` vector indicating
event-portion shape after applying `inverse`.
"""
with self._name_scope(name, [output_shape]):
output_shape = ops.convert_to_tensor(output_shape, dtype=dtypes.int32,
name="output_shape")
return self._inverse_event_shape_tensor(output_shape)
def _inverse_event_shape(self, output_shape):
"""Subclass implementation for `inverse_event_shape` public function."""
# By default, we assume event_shape is unchanged.
return tensor_shape.TensorShape(output_shape)
def inverse_event_shape(self, output_shape):
"""Shape of a single sample from a single batch as a `TensorShape`.
Same meaning as `inverse_event_shape_tensor`. May be only partially defined.
Args:
output_shape: `TensorShape` indicating event-portion shape passed into
`inverse` function.
Returns:
inverse_event_shape_tensor: `TensorShape` indicating event-portion shape
after applying `inverse`. Possibly unknown.
"""
return self._inverse_event_shape(output_shape)
def _forward(self, x):
"""Subclass implementation for `forward` public function."""
raise NotImplementedError("forward not implemented.")
def _call_forward(self, x, name, **kwargs):
with self._name_scope(name, [x]):
x = ops.convert_to_tensor(x, name="x")
self._maybe_assert_dtype(x)
if not self._is_injective: # No caching for non-injective
return self._forward(x, **kwargs)
mapping = self._lookup(x=x, kwargs=kwargs)
if mapping.y is not None:
return mapping.y
mapping = mapping.merge(y=self._forward(x, **kwargs))
self._cache(mapping)
return mapping.y
def forward(self, x, name="forward"):
"""Returns the forward `Bijector` evaluation, i.e., X = g(Y).
Args:
x: `Tensor`. The input to the "forward" evaluation.
name: The name to give this op.
Returns:
`Tensor`.
Raises:
TypeError: if `self.dtype` is specified and `x.dtype` is not
`self.dtype`.
NotImplementedError: if `_forward` is not implemented.
"""
return self._call_forward(x, name)
def _inverse(self, y):
"""Subclass implementation for `inverse` public function."""
raise NotImplementedError("inverse not implemented")
def _call_inverse(self, y, name, **kwargs):
with self._name_scope(name, [y]):
y = ops.convert_to_tensor(y, name="y")
self._maybe_assert_dtype(y)
if not self._is_injective: # No caching for non-injective
return self._inverse(y, **kwargs)
mapping = self._lookup(y=y, kwargs=kwargs)
if mapping.x is not None:
return mapping.x
mapping = mapping.merge(x=self._inverse(y, **kwargs))
self._cache(mapping)
return mapping.x
def inverse(self, y, name="inverse"):
"""Returns the inverse `Bijector` evaluation, i.e., X = g^{-1}(Y).
Args:
y: `Tensor`. The input to the "inverse" evaluation.
name: The name to give this op.
Returns:
`Tensor`, if this bijector is injective.
If not injective, returns the k-tuple containing the unique
`k` points `(x1, ..., xk)` such that `g(xi) = y`.
Raises:
TypeError: if `self.dtype` is specified and `y.dtype` is not
`self.dtype`.
NotImplementedError: if `_inverse` is not implemented.
"""
return self._call_inverse(y, name)
def _inverse_log_det_jacobian(self, y):
"""Subclass implementation of `inverse_log_det_jacobian` public function."""
raise NotImplementedError("inverse_log_det_jacobian not implemented.")
def _call_inverse_log_det_jacobian(self, y, name, **kwargs):
with self._name_scope(name, [y]):
if self._constant_ildj is not None:
return self._constant_ildj
y = ops.convert_to_tensor(y, name="y")
self._maybe_assert_dtype(y)
if not self._is_injective: # No caching for non-injective
return self._inverse_log_det_jacobian(y, **kwargs)
mapping = self._lookup(y=y, kwargs=kwargs)
if mapping.ildj is not None:
return mapping.ildj
try:
x = None # Not needed; leave cache as is.
ildj = self._inverse_log_det_jacobian(y, **kwargs)
except NotImplementedError as original_exception:
try:
x = mapping.x if mapping.x is not None else self._inverse(y, **kwargs)
ildj = -self._forward_log_det_jacobian(x, **kwargs)
except NotImplementedError:
raise original_exception
mapping = mapping.merge(x=x, ildj=ildj)
self._cache(mapping)
if self.is_constant_jacobian:
self._constant_ildj = mapping.ildj
return mapping.ildj
def inverse_log_det_jacobian(self, y, name="inverse_log_det_jacobian"):
"""Returns the (log o det o Jacobian o inverse)(y).
Mathematically, returns: `log(det(dX/dY))(Y)`. (Recall that: `X=g^{-1}(Y)`.)
Note that `forward_log_det_jacobian` is the negative of this function,
evaluated at `g^{-1}(y)`.
Args:
y: `Tensor`. The input to the "inverse" Jacobian evaluation.
name: The name to give this op.
Returns:
`Tensor`, if this bijector is injective.
If not injective, returns the tuple of local log det
Jacobians, `log(det(Dg_i^{-1}(y)))`, where `g_i` is the restriction
of `g` to the `ith` partition `Di`.
Raises:
TypeError: if `self.dtype` is specified and `y.dtype` is not
`self.dtype`.
NotImplementedError: if `_inverse_log_det_jacobian` is not implemented.
"""
return self._call_inverse_log_det_jacobian(y, name)
def _forward_log_det_jacobian(self, x):
"""Subclass implementation of `forward_log_det_jacobian`."""
raise NotImplementedError(
"forward_log_det_jacobian not implemented.")
def _call_forward_log_det_jacobian(self, x, name, **kwargs):
with self._name_scope(name, [x]):
if self._constant_ildj is not None:
# Need "-1. *" to avoid invalid-unary-operand-type linter warning.
return -1. * self._constant_ildj
x = ops.convert_to_tensor(x, name="x")
self._maybe_assert_dtype(x)
if not self._is_injective:
return self._forward_log_det_jacobian(x, **kwargs) # No caching.
mapping = self._lookup(x=x, kwargs=kwargs)
if mapping.ildj is not None:
return -mapping.ildj
try:
y = None # Not needed; leave cache as is.
ildj = -self._forward_log_det_jacobian(x, **kwargs)
except NotImplementedError as original_exception:
try:
y = mapping.y if mapping.y is not None else self._forward(x, **kwargs)
ildj = self._inverse_log_det_jacobian(y, **kwargs)
except NotImplementedError:
raise original_exception
mapping = mapping.merge(y=y, ildj=ildj)
self._cache(mapping)
if self.is_constant_jacobian:
self._constant_ildj = mapping.ildj
return -mapping.ildj
def forward_log_det_jacobian(self, x, name="forward_log_det_jacobian"):
"""Returns both the forward_log_det_jacobian.
Args:
x: `Tensor`. The input to the "forward" Jacobian evaluation.
name: The name to give this op.
Returns:
`Tensor`, if this bijector is injective.
If not injective this is not implemented.
Raises:
TypeError: if `self.dtype` is specified and `y.dtype` is not
`self.dtype`.
NotImplementedError: if neither `_forward_log_det_jacobian`
nor {`_inverse`, `_inverse_log_det_jacobian`} are implemented, or
this is a non-injective bijector.
"""
if not self._is_injective:
raise NotImplementedError(
"forward_log_det_jacobian cannot be implemented for non-injective "
"transforms.")
return self._call_forward_log_det_jacobian(x, name)
@contextlib.contextmanager
def _name_scope(self, name=None, values=None):
"""Helper function to standardize op scope."""
with ops.name_scope(self.name):
with ops.name_scope(
name, values=(values or []) + self.graph_parents) as scope:
yield scope
def _maybe_assert_dtype(self, x):
"""Helper to check dtype when self.dtype is known."""
if self.dtype is not None and self.dtype.base_dtype != x.dtype.base_dtype:
raise TypeError("Input had dtype %s but expected %s." %
(self.dtype, x.dtype))
def _cache(self, mapping):
"""Helper which stores mapping info in forward/inverse dicts."""
if self._constant_ildj is not None:
# Fold in ildj if known constant Jacobian.
mapping = mapping.merge(ildj=self._constant_ildj)
# Merging from lookup is an added check that we're not overwriting anything
# which is not None.
mapping = mapping.merge(mapping=self._lookup(
mapping.x, mapping.y, mapping.kwargs))
if mapping.x is None and mapping.y is None:
raise ValueError("Caching expects at least one of (x,y) to be known, "
"i.e., not None.")
self._from_x[mapping.x_key] = mapping
self._from_y[mapping.y_key] = mapping
def _lookup(self, x=None, y=None, kwargs=None):
"""Helper which retrieves mapping info from forward/inverse dicts."""
mapping = _Mapping(x=x, y=y, kwargs=kwargs)
# Since _cache requires both x,y to be set, we only need to do one cache
# lookup since the mapping is always in both or neither.
if mapping.x is not None:
return self._from_x.get(mapping.x_key, mapping)
if mapping.y is not None:
return self._from_y.get(mapping.y_key, mapping)
return mapping
def _event_dims_tensor(self, sample):
"""Return a 1D `int32` tensor: `range(rank(sample))[-event_ndims:]`."""
if self.event_ndims is None:
raise ValueError("Jacobian cannot be computed with unknown event_ndims")
static_event_ndims = tensor_util.constant_value(self.event_ndims)
static_rank = sample.get_shape().ndims
if static_event_ndims is not None and static_rank is not None:
return ops.convert_to_tensor(
static_rank + np.arange(-static_event_ndims, 0).astype(np.int32))
if static_event_ndims is not None:
event_range = np.arange(-static_event_ndims, 0).astype(np.int32)
else:
event_range = math_ops.range(-self.event_ndims, 0, dtype=dtypes.int32)
if static_rank is not None:
return event_range + static_rank
else:
return event_range + array_ops.rank(sample)
| {
"content_hash": "1ddbb5f3441919d229db3ce89720d274",
"timestamp": "",
"source": "github",
"line_count": 813,
"max_line_length": 80,
"avg_line_length": 35.779827798277985,
"alnum_prop": 0.6465674309876586,
"repo_name": "rabipanda/tensorflow",
"id": "44d64070ce48c0c115ea7edb1237124bc6698e90",
"size": "29778",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tensorflow/python/ops/distributions/bijector_impl.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "9008"
},
{
"name": "C",
"bytes": "332772"
},
{
"name": "C++",
"bytes": "36554246"
},
{
"name": "CMake",
"bytes": "190994"
},
{
"name": "Go",
"bytes": "1058787"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "543556"
},
{
"name": "Jupyter Notebook",
"bytes": "1940884"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "49545"
},
{
"name": "Objective-C",
"bytes": "12456"
},
{
"name": "Objective-C++",
"bytes": "94526"
},
{
"name": "PHP",
"bytes": "1487"
},
{
"name": "Perl",
"bytes": "6179"
},
{
"name": "Perl 6",
"bytes": "1357"
},
{
"name": "PureBasic",
"bytes": "24932"
},
{
"name": "Python",
"bytes": "32129142"
},
{
"name": "Ruby",
"bytes": "547"
},
{
"name": "Shell",
"bytes": "412473"
}
],
"symlink_target": ""
} |
import pytest
from parameterized import parameterized
from airflow.plugins_manager import AirflowPlugin
from airflow.security import permissions
from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user
from tests.test_utils.config import conf_vars
from tests.test_utils.mock_plugins import mock_plugin_manager
@pytest.fixture(scope="module")
def configured_app(minimal_app_for_api):
app = minimal_app_for_api
create_user(
app, # type: ignore
username="test",
role_name="Test",
permissions=[(permissions.ACTION_CAN_READ, permissions.RESOURCE_PLUGIN)],
)
create_user(app, username="test_no_permissions", role_name="TestNoPermissions") # type: ignore
yield app
delete_user(app, username="test") # type: ignore
delete_user(app, username="test_no_permissions") # type: ignore
class TestPluginsEndpoint:
@pytest.fixture(autouse=True)
def setup_attrs(self, configured_app) -> None:
"""
        Setup for plugins endpoint test cases
"""
self.app = configured_app
self.client = self.app.test_client() # type:ignore
class TestGetPlugins(TestPluginsEndpoint):
def test_get_plugins_return_200(self):
mock_plugin = AirflowPlugin()
mock_plugin.name = "test_plugin"
with mock_plugin_manager(plugins=[mock_plugin]):
response = self.client.get("api/v1/plugins", environ_overrides={'REMOTE_USER': "test"})
assert response.status_code == 200
assert response.json == {
'plugins': [
{
'appbuilder_menu_items': [],
'appbuilder_views': [],
'executors': [],
'flask_blueprints': [],
'global_operator_extra_links': [],
'hooks': [],
'macros': [],
'operator_extra_links': [],
'source': None,
'name': 'test_plugin',
}
],
'total_entries': 1,
}
def test_get_plugins_works_with_more_plugins(self):
mock_plugin = AirflowPlugin()
mock_plugin.name = "test_plugin"
mock_plugin_2 = AirflowPlugin()
mock_plugin_2.name = "test_plugin2"
with mock_plugin_manager(plugins=[mock_plugin, mock_plugin_2]):
response = self.client.get("api/v1/plugins", environ_overrides={'REMOTE_USER': "test"})
assert response.status_code == 200
assert response.json["total_entries"] == 2
def test_get_plugins_return_200_if_no_plugins(self):
with mock_plugin_manager(plugins=[]):
response = self.client.get("api/v1/plugins", environ_overrides={'REMOTE_USER': "test"})
assert response.status_code == 200
    def test_should_raise_401_unauthenticated(self):
response = self.client.get("/api/v1/plugins")
assert_401(response)
def test_should_raise_403_forbidden(self):
response = self.client.get(
"/api/v1/plugins", environ_overrides={'REMOTE_USER': "test_no_permissions"}
)
assert response.status_code == 403
class TestGetPluginsPagination(TestPluginsEndpoint):
@parameterized.expand(
[
("/api/v1/plugins?limit=1", ['TEST_PLUGIN_1']),
("/api/v1/plugins?limit=2", ['TEST_PLUGIN_1', "TEST_PLUGIN_2"]),
(
"/api/v1/plugins?offset=5",
[
"TEST_PLUGIN_6",
"TEST_PLUGIN_7",
"TEST_PLUGIN_8",
"TEST_PLUGIN_9",
"TEST_PLUGIN_10",
],
),
(
"/api/v1/plugins?offset=0",
[
"TEST_PLUGIN_1",
"TEST_PLUGIN_2",
"TEST_PLUGIN_3",
"TEST_PLUGIN_4",
"TEST_PLUGIN_5",
"TEST_PLUGIN_6",
"TEST_PLUGIN_7",
"TEST_PLUGIN_8",
"TEST_PLUGIN_9",
"TEST_PLUGIN_10",
],
),
("/api/v1/plugins?limit=1&offset=5", ["TEST_PLUGIN_6"]),
("/api/v1/plugins?limit=1&offset=1", ["TEST_PLUGIN_2"]),
(
"/api/v1/plugins?limit=2&offset=2",
["TEST_PLUGIN_3", "TEST_PLUGIN_4"],
),
]
)
def test_handle_limit_offset(self, url, expected_plugin_names):
plugins = self._create_plugins(10)
with mock_plugin_manager(plugins=plugins):
response = self.client.get(url, environ_overrides={'REMOTE_USER': "test"})
assert response.status_code == 200
assert response.json["total_entries"] == 10
plugin_names = [plugin['name'] for plugin in response.json['plugins'] if plugin]
assert plugin_names == expected_plugin_names
def test_should_respect_page_size_limit_default(self):
plugins = self._create_plugins(200)
with mock_plugin_manager(plugins=plugins):
response = self.client.get("/api/v1/plugins", environ_overrides={'REMOTE_USER': "test"})
assert response.status_code == 200
assert response.json["total_entries"] == 200
assert len(response.json["plugins"]) == 100
def test_limit_of_zero_should_return_default(self):
plugins = self._create_plugins(200)
with mock_plugin_manager(plugins=plugins):
response = self.client.get("/api/v1/plugins?limit=0", environ_overrides={'REMOTE_USER': "test"})
assert response.status_code == 200
assert response.json["total_entries"] == 200
assert len(response.json["plugins"]) == 100
@conf_vars({("api", "maximum_page_limit"): "150"})
def test_should_return_conf_max_if_req_max_above_conf(self):
plugins = self._create_plugins(200)
with mock_plugin_manager(plugins=plugins):
response = self.client.get("/api/v1/plugins?limit=180", environ_overrides={'REMOTE_USER': "test"})
assert response.status_code == 200
assert len(response.json['plugins']) == 150
def _create_plugins(self, count):
plugins = []
for i in range(1, count + 1):
mock_plugin = AirflowPlugin()
mock_plugin.name = f"TEST_PLUGIN_{i}"
plugins.append(mock_plugin)
return plugins
| {
"content_hash": "ca549f689f212677644bb790042cc92a",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 110,
"avg_line_length": 38.5748502994012,
"alnum_prop": 0.5605402049053089,
"repo_name": "Acehaidrey/incubator-airflow",
"id": "669a2f31e294e2c19c0324c5bb89ca54080299ff",
"size": "7229",
"binary": false,
"copies": "8",
"ref": "refs/heads/main",
"path": "tests/api_connexion/endpoints/test_plugin_endpoint.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "25785"
},
{
"name": "Dockerfile",
"bytes": "76693"
},
{
"name": "HCL",
"bytes": "3786"
},
{
"name": "HTML",
"bytes": "164512"
},
{
"name": "JavaScript",
"bytes": "236992"
},
{
"name": "Jinja",
"bytes": "37155"
},
{
"name": "Jupyter Notebook",
"bytes": "2929"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "21727510"
},
{
"name": "R",
"bytes": "313"
},
{
"name": "Shell",
"bytes": "495253"
},
{
"name": "TypeScript",
"bytes": "326556"
}
],
"symlink_target": ""
} |
from __future__ import print_function
try:
# Python 2
import ConfigParser
except ImportError:
# Python 3
import configparser as ConfigParser
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), 'swift_build_support'))
# E402 means module level import not at top of file
from swift_build_support import diagnostics # noqa (E402)
from swift_build_support import shell # noqa (E402)
HOME = os.environ.get("HOME", "/")
def _get_default_source_root():
result = ""
# Are we in a Swift checkout? Start from this file and check its parent
# directories.
#
# $SWIFT_SOURCE_ROOT/swift/utils/SwiftBuildSupport.py
(swift_path, parent_dirname) = os.path.split(os.path.dirname(__file__))
if parent_dirname != "utils":
return result
if not os.path.exists(os.path.join(swift_path, 'CMakeLists.txt')):
return result
result = os.path.dirname(swift_path)
# Are we in an LLVM checkout? Start from the Swift checkout and check /its/
# parent directories.
#
# $SWIFT_SOURCE_ROOT/llvm/tools/swift/utils/SwiftBuildSupport.py
(llvm_path, parent_dirname) = os.path.split(result)
if parent_dirname != "tools":
return result
if not os.path.exists(os.path.join(llvm_path, 'CMakeLists.txt')):
return result
result = os.path.dirname(llvm_path)
return result
# Set SWIFT_SOURCE_ROOT in your environment to control where the sources
# are found.
SWIFT_SOURCE_ROOT = os.environ.get(
"SWIFT_SOURCE_ROOT", _get_default_source_root())
# Set SWIFT_BUILD_ROOT to a directory that will contain a subdirectory
# for each build configuration
SWIFT_BUILD_ROOT = os.environ.get(
"SWIFT_BUILD_ROOT", os.path.join(SWIFT_SOURCE_ROOT, "build"))
def _load_preset_files_impl(preset_file_names, substitutions={}):
config = ConfigParser.SafeConfigParser(substitutions, allow_no_value=True)
if config.read(preset_file_names) == []:
diagnostics.fatal(
"preset file not found (tried " + str(preset_file_names) + ")")
return config
_PRESET_PREFIX = "preset: "
def _get_preset_options_impl(config, substitutions, preset_name):
section_name = _PRESET_PREFIX + preset_name
if section_name not in config.sections():
return (None, None, None)
build_script_opts = []
build_script_impl_opts = []
missing_opts = []
dash_dash_seen = False
for o in config.options(section_name):
try:
a = config.get(section_name, o)
except ConfigParser.InterpolationMissingOptionError as e:
# e.reference contains the correctly formatted option
missing_opts.append(e.reference)
continue
if not a:
a = ""
if o in substitutions:
continue
opt = None
if o == "mixin-preset":
# Split on newlines and filter out empty lines.
mixins = filter(None, [m.strip() for m in a.splitlines()])
for mixin in mixins:
(base_build_script_opts,
base_build_script_impl_opts,
base_missing_opts) = \
_get_preset_options_impl(config, substitutions, mixin)
build_script_opts += base_build_script_opts
build_script_impl_opts += base_build_script_impl_opts
missing_opts += base_missing_opts
elif o == "dash-dash":
dash_dash_seen = True
elif a == "":
opt = "--" + o
else:
opt = "--" + o + "=" + a
if opt:
if not dash_dash_seen:
build_script_opts.append(opt)
else:
build_script_impl_opts.append(opt)
return (build_script_opts, build_script_impl_opts, missing_opts)
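# Illustrative preset section (assumed shape; matches the parser above).
# Options before "dash-dash" become build-script arguments, options after it
# become build-script-impl arguments, and a bare key K expands to "--K":
#
#   [preset: buildbot_incremental]
#   release
#   test
#   dash-dash
#   verbose-build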
def get_preset_options(substitutions, preset_file_names, preset_name):
config = _load_preset_files_impl(preset_file_names, substitutions)
(build_script_opts, build_script_impl_opts, missing_opts) = \
_get_preset_options_impl(config, substitutions, preset_name)
if not build_script_opts and not build_script_impl_opts:
diagnostics.fatal("preset '" + preset_name + "' not found")
if missing_opts:
diagnostics.fatal("missing option(s) for preset '" + preset_name +
"': " + ", ".join(missing_opts))
# Migrate 'swift-sdks' parameter to 'stdlib-deployment-targets'
for opt in build_script_impl_opts:
if opt.startswith("--swift-sdks"):
sdks_to_configure = opt.split("=")[1].split(";")
tgts = []
# Expand SDKs in to their deployment targets
from swift_build_support.targets import StdlibDeploymentTarget
for sdk in sdks_to_configure:
if sdk == "OSX":
tgts += StdlibDeploymentTarget.OSX.targets
elif sdk == "IOS":
tgts += StdlibDeploymentTarget.iOS.targets
elif sdk == "IOS_SIMULATOR":
tgts += StdlibDeploymentTarget.iOSSimulator.targets
elif sdk == "TVOS":
tgts += StdlibDeploymentTarget.AppleTV.targets
elif sdk == "TVOS_SIMULATOR":
tgts += StdlibDeploymentTarget.AppleTVSimulator.targets
elif sdk == "WATCHOS":
tgts += StdlibDeploymentTarget.AppleWatch.targets
elif sdk == "WATCHOS_SIMULATOR":
tgts += StdlibDeploymentTarget.AppleWatchSimulator.targets
build_script_opts.append("--stdlib-deployment-targets=" +
" ".join([tgt.name for tgt in tgts]))
# Filter the swift-sdks parameter
build_script_impl_opts = [opt for opt in build_script_impl_opts
if not opt.startswith("--swift-sdks")]
return build_script_opts + ["--"] + build_script_impl_opts
def get_all_preset_names(preset_file_names):
config = _load_preset_files_impl(preset_file_names)
return [name[len(_PRESET_PREFIX):] for name in config.sections()
if name.startswith(_PRESET_PREFIX)]
| {
"content_hash": "274e7f4a5b78ceb61b21cab25a44d3e9",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 79,
"avg_line_length": 36.22352941176471,
"alnum_prop": 0.6070152646963299,
"repo_name": "russbishop/swift",
"id": "bf5a117108389cd0fef2932fc2cc53d47a27e20c",
"size": "6583",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "utils/SwiftBuildSupport.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "2142"
},
{
"name": "C",
"bytes": "46250"
},
{
"name": "C++",
"bytes": "19989180"
},
{
"name": "CMake",
"bytes": "266500"
},
{
"name": "D",
"bytes": "1686"
},
{
"name": "DTrace",
"bytes": "1857"
},
{
"name": "Emacs Lisp",
"bytes": "39337"
},
{
"name": "LLVM",
"bytes": "59306"
},
{
"name": "Makefile",
"bytes": "1841"
},
{
"name": "Objective-C",
"bytes": "207722"
},
{
"name": "Objective-C++",
"bytes": "166358"
},
{
"name": "Perl",
"bytes": "2216"
},
{
"name": "Python",
"bytes": "536347"
},
{
"name": "Ruby",
"bytes": "2091"
},
{
"name": "Shell",
"bytes": "153110"
},
{
"name": "Swift",
"bytes": "14298185"
},
{
"name": "VimL",
"bytes": "13394"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('teams', '0001_initial'),
('substitutes', '0001_initial'),
('invites', '0004_auto_20180105_0213'),
]
operations = [
migrations.AlterUniqueTogether(
name='sessioneventinvite',
unique_together=set([('sub', 'team')]),
),
]
| {
"content_hash": "fe6c234ae29e11092d2f73a96c768971",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 51,
"avg_line_length": 22.894736842105264,
"alnum_prop": 0.5839080459770115,
"repo_name": "eSmelser/SnookR",
"id": "9eefdb3de321effc9d9e668b40c9019f33d82a58",
"size": "508",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "SnookR/invites/migrations/0005_auto_20180106_0333.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "42731"
},
{
"name": "HTML",
"bytes": "83044"
},
{
"name": "JavaScript",
"bytes": "7810344"
},
{
"name": "PHP",
"bytes": "6093"
},
{
"name": "Python",
"bytes": "115524"
}
],
"symlink_target": ""
} |
PILE_OF_POO = u"\U0001F4A9"
def pytest_addoption(parser):
group = parser.getgroup('Poo', 'Poo')
group._addoption('--poo',
action="store_true", dest="poo", default=False,
help="Show crappy tests.")
def pytest_report_teststatus(report, config):
if (not config.option.poo) or ('poo' not in report.keywords) or (report.when != 'call'):
return
if (config.option.verbose == -1 and report.passed) or config.option.verbose >= 0:
return (report.outcome, PILE_OF_POO, '%s (%s)' % (report.outcome.upper(), PILE_OF_POO))
def pytest_configure(config):
config.addinivalue_line(
'markers',
'poo: Mark the test as crappy. When using --poo, pile of poo '
'will be shown with the test outcome.')
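# Minimal usage sketch (hypothetical test module; not part of this plugin):
#
#   import pytest
#
#   @pytest.mark.poo
#   def test_legacy_behaviour():
#       assert 1 + 1 == 2
#
# Running `pytest --poo` then reports this test with the pile-of-poo marker.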
| {
"content_hash": "b4246ecd619e1c167574492c5da0ed4a",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 95,
"avg_line_length": 37.38095238095238,
"alnum_prop": 0.6127388535031847,
"repo_name": "pelme/pytest-poo",
"id": "9fa6f1edaad3118be712c8f157f9fbf2557848cb",
"size": "786",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pytest_poo.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "4162"
}
],
"symlink_target": ""
} |
import os
PROJECT_DIR = os.path.dirname(os.path.realpath(__file__))
ENVIRONMENT_DIR = os.path.abspath(os.path.join(PROJECT_DIR, "..", "..", ".."))
HTDOCS_DIR = os.path.join(ENVIRONMENT_DIR, 'htdocs',)
LOG_DIR = os.path.join(ENVIRONMENT_DIR, 'var', 'log')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ("Example", "example@example.com"),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'psafe.db',
# The following settings are not used with sqlite3:
'USER': 'psafe',
'PASSWORD': '',
'HOST': 'localhost',
'PORT': '',
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
LANGUAGE_COOKIE_NAME = "language"
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = os.path.join(HTDOCS_DIR, 'media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = os.path.join(HTDOCS_DIR, 'static')
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(PROJECT_DIR, 'static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'p@%8m9m6!-zg=9muwc=t&*inii7a_(p!k>33ozv5=h9@!hca'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'psafe.score.context_processors.site',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'psafe.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'psafe.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(PROJECT_DIR, 'templates'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'django_extensions',
'mptt',
'safe',
'psafe.score',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
'file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': os.path.join(LOG_DIR, "psafe.log"),
'formatter': 'verbose'
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'psafe': {
'handlers': ['file', 'console'],
'level': 'DEBUG',
'propagate': True,
},
}
}
try:
from local_settings import *
except ImportError:
pass
| {
"content_hash": "732621c29b46f791c428a8f95c4065b0",
"timestamp": "",
"source": "github",
"line_count": 212,
"max_line_length": 108,
"avg_line_length": 32.02358490566038,
"alnum_prop": 0.6640153188982177,
"repo_name": "JoeJasinski/password-safe",
"id": "db3e3d3bfe2ecb44bffbd14beaceb18bd1ac7aef",
"size": "6827",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "psafe/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3984"
},
{
"name": "HTML",
"bytes": "50515"
},
{
"name": "JavaScript",
"bytes": "731350"
},
{
"name": "Makefile",
"bytes": "4583"
},
{
"name": "Nginx",
"bytes": "1549"
},
{
"name": "Python",
"bytes": "33895"
},
{
"name": "Shell",
"bytes": "196"
}
],
"symlink_target": ""
} |
import utils
import sys
import sortClass
class InsertionSort(sortClass.SortClass):
@staticmethod
    def findSlot(myList, value, max):
        # Return the index of the first element greater than value, or max
        # if value belongs at the end of the sorted prefix.
        for i in range(max):
            if (value.compareKeys(myList[i]) < 0):
                return i
        return max
@staticmethod
def basicSort(myList):
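        # Classic insertion sort: keep myList[0:i] sorted; whenever element i
        # is out of order, pop it and reinsert it at the slot found by findSlot.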
# If the length is one or zero
# Nothing more to do.
# List is already sorted.
size = len(myList)
if size <= 1:
            return myList
for i in range(1,size):
if (myList[i].compareKeys(myList[i-1]) < 0):
value = myList.pop(i)
slot = InsertionSort.findSlot(myList, value, i)
myList.insert(slot, value)
return myList
#end
if __name__ == "__main__":
sys.setrecursionlimit(10000)
sizes = [5, 100]
for size in sizes:
myList = utils.createInputArray(size)
print myList
InsertionSort.basicSort(myList)
print myList | {
"content_hash": "0c1156c01db81c660502cab36b2f86d0",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 51,
"avg_line_length": 21.026315789473685,
"alnum_prop": 0.6795994993742178,
"repo_name": "bhagatyj/algorithms",
"id": "1c4bb4446a5d7975051bf48b0d599b0bc6f51ac0",
"size": "819",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sorting/python/insertionSort.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "268434"
},
{
"name": "C++",
"bytes": "168902"
},
{
"name": "Makefile",
"bytes": "5400"
},
{
"name": "Python",
"bytes": "50556"
},
{
"name": "Shell",
"bytes": "1193"
}
],
"symlink_target": ""
} |
from datetime import timedelta
from mock import patch
import re
from django.test import TestCase
from django.test.utils import override_settings
from django.utils.timezone import now
import requests
from .factories import ProxyGrantingTicketFactory
from .factories import ProxyTicketFactory
from .factories import ServiceTicketFactory
from .factories import UserFactory
from mama_cas.models import ProxyGrantingTicket
from mama_cas.models import ServiceTicket
from mama_cas.exceptions import InvalidProxyCallback
from mama_cas.exceptions import InvalidRequest
from mama_cas.exceptions import InvalidService
from mama_cas.exceptions import InvalidTicket
from mama_cas.exceptions import UnauthorizedServiceProxy
class TicketManagerTests(TestCase):
"""
Test the ``TicketManager`` model manager.
"""
url = 'http://www.example.com/'
def setUp(self):
self.user = UserFactory()
def test_create_ticket(self):
"""
A ticket ought to be created with a generated ticket string.
"""
st = ServiceTicket.objects.create_ticket(user=self.user)
self.assertTrue(re.search(st.TICKET_RE, st.ticket))
def test_create_ticket_ticket(self):
"""
A ticket ought to be created with a provided ticket string,
if present.
"""
ticket = 'ST-0000000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
st = ServiceTicket.objects.create_ticket(ticket=ticket, user=self.user)
self.assertEqual(st.ticket, ticket)
def test_create_ticket_service(self):
"""
If a service is provided, it should be cleaned.
"""
service = 'http://www.example.com/test?test3=blue#green'
st = ServiceTicket.objects.create_ticket(service=service, user=self.user)
self.assertEqual(st.service, 'http://www.example.com/test')
def test_create_ticket_no_expires(self):
"""
A ticket ought to be created with a calculated expiry value.
"""
st = ServiceTicket.objects.create_ticket(user=self.user)
self.assertTrue(st.expires > now())
def test_create_ticket_expires(self):
"""
A ticket ought to be created with a provided expiry value,
if present.
"""
expires = now() + timedelta(seconds=30)
st = ServiceTicket.objects.create_ticket(expires=expires, user=self.user)
self.assertEqual(st.expires, expires)
def test_create_ticket_str(self):
"""
A ticket string should be created with the appropriate model
prefix and format.
"""
str = ServiceTicket.objects.create_ticket_str()
self.assertTrue(re.search('^ST-[0-9]{10,}-[a-zA-Z0-9]{32}$', str))
def test_create_ticket_str_prefix(self):
"""
A ticket string should be created with the provided prefix
string and format.
"""
str = ProxyGrantingTicket.objects.create_ticket_str(prefix='PGTIOU')
self.assertTrue(re.search('^PGTIOU-[0-9]{10,}-[a-zA-Z0-9]{32}$', str))
def test_validate_ticket(self):
"""
Validation ought to succeed when provided with a valid ticket
string and data. The ticket ought to be consumed in the process.
"""
st = ServiceTicketFactory()
ticket = ServiceTicket.objects.validate_ticket(st.ticket, self.url)
self.assertEqual(ticket, st)
self.assertTrue(ticket.is_consumed())
def test_validate_ticket_no_ticket(self):
"""
The validation process ought to fail when no ticket string is
provided.
"""
with self.assertRaises(InvalidRequest):
ServiceTicket.objects.validate_ticket(None, self.url)
def test_validate_ticket_invalid_ticket(self):
"""
The validation process ought to fail when an invalid ticket
string is provided.
"""
with self.assertRaises(InvalidTicket):
ServiceTicket.objects.validate_ticket('12345', self.url)
def test_validate_ticket_does_not_exist(self):
"""
The validation process ought to fail when a valid ticket string
cannot be found in the database.
"""
ticket = 'ST-0000000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
with self.assertRaises(InvalidTicket):
ServiceTicket.objects.validate_ticket(ticket, self.url)
def test_validate_ticket_consumed_ticket(self):
"""
The validation process ought to fail when a consumed ticket
is provided.
"""
st = ServiceTicketFactory(consume=True)
with self.assertRaises(InvalidTicket):
ServiceTicket.objects.validate_ticket(st.ticket, self.url)
def test_validate_ticket_expired_ticket(self):
"""
The validation process ought to fail when an expired ticket
is provided.
"""
st = ServiceTicketFactory(expire=True)
with self.assertRaises(InvalidTicket):
ServiceTicket.objects.validate_ticket(st.ticket, self.url)
def test_validate_ticket_no_service(self):
"""
The validation process ought to fail when no service identifier
is provided. The ticket ought to be consumed in the process.
"""
st = ServiceTicketFactory()
with self.assertRaises(InvalidRequest):
ServiceTicket.objects.validate_ticket(st.ticket, None)
st = ServiceTicket.objects.get(ticket=st.ticket)
self.assertTrue(st.is_consumed())
def test_validate_ticket_invalid_service(self):
"""
The validation process ought to fail when an invalid service
identifier is provided.
"""
service = 'http://www.example.org'
st = ServiceTicketFactory()
with self.assertRaises(InvalidService):
ServiceTicket.objects.validate_ticket(st.ticket, service)
def test_validate_ticket_service_mismatch(self):
"""
The validation process ought to fail when the provided service
identifier does not match the ticket's service.
"""
service = 'http://sub.example.com/'
st = ServiceTicketFactory()
with self.assertRaises(InvalidService):
ServiceTicket.objects.validate_ticket(st.ticket, service)
def test_validate_ticket_renew(self):
"""
When ``renew`` is set, the validation process should succeed
if the ticket was issued from the presentation of the user's
primary credentials.
"""
st = ServiceTicketFactory(primary=True)
ticket = ServiceTicket.objects.validate_ticket(st.ticket, self.url,
renew=True)
self.assertEqual(ticket, st)
def test_validate_ticket_renew_secondary(self):
"""
When ``renew`` is set, the validation process should fail if
the ticket was not issued from the presentation of the user's
primary credentials.
"""
st = ServiceTicketFactory()
with self.assertRaises(InvalidTicket):
ServiceTicket.objects.validate_ticket(st.ticket, self.url,
renew=True)
def test_delete_invalid_tickets(self):
"""
Expired or consumed tickets should be deleted. Invalid tickets
referenced by other tickets should not be deleted.
"""
ServiceTicketFactory() # Should not be deleted
expired = ServiceTicketFactory(expire=True)
consumed = ServiceTicketFactory(consume=True)
referenced = ServiceTicketFactory(consume=True) # Should not be deleted
ProxyGrantingTicketFactory(granted_by_st=referenced)
ServiceTicket.objects.delete_invalid_tickets()
self.assertEqual(ServiceTicket.objects.count(), 2)
self.assertRaises(ServiceTicket.DoesNotExist,
ServiceTicket.objects.get,
ticket=expired.ticket)
self.assertRaises(ServiceTicket.DoesNotExist,
ServiceTicket.objects.get,
ticket=consumed.ticket)
def test_consume_tickets(self):
"""
All tickets belonging to the specified user should be consumed.
"""
st1 = ServiceTicketFactory()
st2 = ServiceTicketFactory()
ServiceTicket.objects.consume_tickets(self.user)
        self.assertTrue(ServiceTicket.objects.get(ticket=st1.ticket).is_consumed())
        self.assertTrue(ServiceTicket.objects.get(ticket=st2.ticket).is_consumed())
class TicketTests(TestCase):
"""
Test the ``Ticket`` abstract model.
"""
def test_ticket_consumed(self):
"""
``is_consumed()`` should return ``True`` for a consumed ticket.
"""
st = ServiceTicketFactory()
st.consume()
st = ServiceTicket.objects.get(ticket=st.ticket)
self.assertTrue(st.is_consumed())
def test_ticket_not_consumed(self):
"""
``is_consumed()`` should return ``False`` for a valid ticket.
"""
st = ServiceTicketFactory()
self.assertFalse(st.is_consumed())
def test_ticket_expired(self):
"""
``is_expired()`` should return ``True`` for an expired ticket.
"""
st = ServiceTicketFactory(expire=True)
self.assertTrue(st.is_expired())
def test_ticket_not_expired(self):
"""
``is_expired()`` should return ``False`` for a valid ticket.
"""
st = ServiceTicketFactory()
self.assertFalse(st.is_expired())
@override_settings(MAMA_CAS_ENABLE_SINGLE_SIGN_OUT=True)
class ServiceTicketManagerTests(TestCase):
"""
Test the ``ServiceTicketManager`` model manager.
"""
def setUp(self):
self.user = UserFactory()
def test_request_sign_out(self):
"""
Calling the ``request_sign_out()`` manager method should
issue a POST request for each consumed ticket for the
provided user.
"""
ServiceTicketFactory(consume=True)
ServiceTicketFactory(consume=True)
with patch('requests.Session.post') as mock:
mock.return_value.status_code = 200
ServiceTicket.objects.request_sign_out(self.user)
self.assertEqual(mock.call_count, 2)
class ServiceTicketTests(TestCase):
"""
Test the ``ServiceTicket`` model.
"""
def test_create_service_ticket(self):
"""
A ``ServiceTicket`` ought to be created with an appropriate
prefix.
"""
st = ServiceTicketFactory()
self.assertTrue(st.ticket.startswith(st.TICKET_PREFIX))
def test_primary(self):
"""
``is_primary()`` should return ``True`` if the ``ServiceTicket``
was created from the presentation of a user's credentials.
"""
st = ServiceTicketFactory(primary=True)
self.assertTrue(st.is_primary())
def test_secondary(self):
"""
``is_primary()`` should return ``False`` if the ``ServiceTicket``
was not created from the presentation of a user's credentials.
"""
st = ServiceTicketFactory()
self.assertFalse(st.is_primary())
def test_request_sign_out(self):
"""
A successful sign-out request to a service should not
cause any side-effects.
"""
st = ServiceTicketFactory()
with patch('requests.Session.post') as mock:
mock.return_value.status_code = 200
st.request_sign_out()
def test_request_sign_out_exception(self):
"""
If a sign-out request to a service raises an exception,
it should be handled.
"""
st = ServiceTicketFactory()
with patch('requests.Session.post') as mock:
mock.side_effect = requests.exceptions.RequestException
st.request_sign_out()
def test_request_sign_out_invalid_status(self):
"""
If a sign-out request to a service returns an invalid
status code, the resulting exception should be handled.
"""
st = ServiceTicketFactory()
with patch('requests.Session.post') as mock:
mock.return_value.status_code = 500
st.request_sign_out()
def test_request_sign_out_logout_allow_false(self):
"""
If SLO requests are disabled for a service, the logout
request should not be sent.
"""
st = ServiceTicketFactory(service='http://example.com')
with patch('requests.Session.post') as mock:
mock.return_value.status_code = 500
st.request_sign_out()
self.assertEqual(mock.call_count, 0)
class ProxyTicketTests(TestCase):
"""
Test the ``ProxyTicket`` model.
"""
def test_create_proxy_ticket(self):
"""
A ``ProxyTicket`` ought to be created with an appropriate
prefix.
"""
pt = ProxyTicketFactory()
self.assertTrue(pt.ticket.startswith(pt.TICKET_PREFIX))
class ProxyGrantingTicketManager(TestCase):
"""
Test the ``ProxyGrantingTicketManager`` model manager.
"""
def setUp(self):
self.user = UserFactory()
self.pt = ProxyTicketFactory()
self.pgtid = ProxyGrantingTicket.objects.create_ticket_str()
self.pgtiou = ProxyGrantingTicket.objects.create_ticket_str(prefix=ProxyGrantingTicket.IOU_PREFIX)
def test_create_ticket(self):
"""
A ``ProxyGrantingTicket`` ought to be created with the
appropriate ticket strings.
"""
with patch('requests.get') as mock:
mock.return_value.status_code = 200
pgt = ProxyGrantingTicket.objects.create_ticket('https://www.example.com', 'https://www.example.com/',
user=self.user, granted_by_pt=self.pt)
self.assertTrue(re.search(pgt.TICKET_RE, pgt.ticket))
self.assertTrue(pgt.iou.startswith(pgt.IOU_PREFIX))
def test_create_ticket_invalid_pgturl(self):
"""
If callback validation fails, ``None`` should be returned
instead of a ``ProxyGrantingTicket``.
"""
with patch('requests.get') as mock:
mock.side_effect = requests.exceptions.ConnectionError
pgt = ProxyGrantingTicket.objects.create_ticket('https://www.example.com', 'https://www.example.com/',
user=self.user, granted_by_pt=self.pt)
self.assertEqual(mock.call_count, 1)
self.assertIsNone(pgt)
def test_validate_callback(self):
"""
If a valid PGTURL is provided, an exception should not be raised.
"""
with patch('requests.get') as mock:
mock.return_value.status_code = 200
try:
ProxyGrantingTicket.objects.validate_callback('https://www.example.com', 'https://www.example.com/',
self.pgtid, self.pgtiou)
except InvalidProxyCallback:
self.fail("Exception raised validating proxy callback URL")
self.assertEqual(mock.call_count, 2)
def test_validate_callback_unauthorized_service(self):
"""
If an unauthorized service is provided, `UnauthorizedServiceProxy`
should be raised.
"""
with self.assertRaises(UnauthorizedServiceProxy):
ProxyGrantingTicket.objects.validate_callback('http://example.com/', 'https://www.example.com/',
self.pgtid, self.pgtiou)
def test_validate_callback_http_pgturl(self):
"""
If an HTTP PGTURL is provided, InvalidProxyCallback should be raised.
"""
with self.assertRaises(InvalidProxyCallback):
ProxyGrantingTicket.objects.validate_callback('http://www.example.com/', 'http://www.example.com/',
self.pgtid, self.pgtiou)
def test_validate_callback_invalid_pgturl(self):
"""If an invalid PGTURL is provided, InvalidProxyCallback should be raised."""
with self.assertRaises(InvalidProxyCallback):
ProxyGrantingTicket.objects.validate_callback('http://www.example.com/', 'https://www.example.org/',
self.pgtid, self.pgtiou)
def test_validate_callback_ssl_error(self):
"""
If the validation request encounters an SSL error, an
InvalidProxyCallback should be raised.
"""
with patch('requests.get') as mock:
mock.side_effect = requests.exceptions.SSLError
with self.assertRaises(InvalidProxyCallback):
ProxyGrantingTicket.objects.validate_callback('http://www.example.com/', 'https://www.example.org/',
self.pgtid, self.pgtiou)
def test_validate_callback_connection_error(self):
"""
If the validation request encounters an exception, an
InvalidProxyCallback should be raised.
"""
with patch('requests.get') as mock:
mock.side_effect = requests.exceptions.ConnectionError
with self.assertRaises(InvalidProxyCallback):
ProxyGrantingTicket.objects.validate_callback('http://www.example.com/', 'https://www.example.org/',
self.pgtid, self.pgtiou)
def test_validate_callback_invalid_status(self):
"""
If the validation request returns an invalid status code, an
InvalidProxyCallback should be raised.
"""
with patch('requests.get') as mock:
mock.return_value.raise_for_status.side_effect = requests.exceptions.HTTPError
with self.assertRaises(InvalidProxyCallback):
ProxyGrantingTicket.objects.validate_callback('http://www.example.com/', 'https://www.example.org/',
self.pgtid, self.pgtiou)
def test_validate_ticket(self):
"""
Validation ought to succeed when provided with a valid ticket
string and data. The ticket should not be consumed in the
process.
"""
pgt = ProxyGrantingTicketFactory()
ticket = ProxyGrantingTicket.objects.validate_ticket(pgt.ticket, 'https://www.example.com')
self.assertEqual(ticket, pgt)
self.assertFalse(ticket.is_consumed())
def test_validate_ticket_no_ticket(self):
"""
The validation process ought to fail when no ticket string is
provided.
"""
with self.assertRaises(InvalidRequest):
ProxyGrantingTicket.objects.validate_ticket(None, 'https://www.example.com')
def test_validate_ticket_no_service(self):
"""
The validation process ought to fail when no service identifier
is provided.
"""
pgt = ProxyGrantingTicketFactory()
with self.assertRaises(InvalidRequest):
ProxyGrantingTicket.objects.validate_ticket(pgt.ticket, None)
def test_validate_ticket_invalid_ticket(self):
"""
The validation process ought to fail when an invalid ticket
string is provided.
"""
with self.assertRaises(InvalidTicket):
ProxyGrantingTicket.objects.validate_ticket('12345', 'https://www.example.com')
def test_validate_ticket_does_not_exist(self):
"""
The validation process ought to fail when a valid ticket string
cannot be found in the database.
"""
ticket = 'PGT-0000000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
with self.assertRaises(InvalidTicket):
ProxyGrantingTicket.objects.validate_ticket(ticket, 'https://www.example.com')
def test_validate_ticket_consumed_ticket(self):
"""
The validation process ought to fail when a consumed ticket
is provided.
"""
pgt = ProxyGrantingTicketFactory(consume=True)
with self.assertRaises(InvalidTicket):
ProxyGrantingTicket.objects.validate_ticket(pgt.ticket, 'https://www.example.com')
def test_validate_ticket_expired_ticket(self):
"""
The validation process ought to fail when an expired ticket
is provided.
"""
pgt = ProxyGrantingTicketFactory(expire=True)
with self.assertRaises(InvalidTicket):
ProxyGrantingTicket.objects.validate_ticket(pgt.ticket, 'https://www.example.com')
def test_validate_ticket_invalid_service(self):
"""
The validation process ought to fail when an invalid service
identifier is provided.
"""
pgt = ProxyGrantingTicketFactory()
with self.assertRaises(InvalidService):
ProxyGrantingTicket.objects.validate_ticket(pgt.ticket, 'http://www.example.org')
class ProxyGrantingTicketTests(TestCase):
"""
Test the ``ProxyGrantingTicket`` model.
"""
def test_create_proxy_granting_ticket(self):
"""
A ``ProxyGrantingTicket`` ought to be created with an
appropriate prefix.
"""
pgt = ProxyGrantingTicketFactory()
self.assertTrue(pgt.ticket.startswith(pgt.TICKET_PREFIX))
| {
"content_hash": "b7225acafbfbf108379c3ece73f5880d",
"timestamp": "",
"source": "github",
"line_count": 552,
"max_line_length": 116,
"avg_line_length": 38.68840579710145,
"alnum_prop": 0.6234313541861772,
"repo_name": "orbitvu/django-mama-cas",
"id": "3d7cb6d447adc83f5b3a853c244d869f14d85c79",
"size": "21356",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mama_cas/tests/test_models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "3971"
},
{
"name": "Python",
"bytes": "164680"
}
],
"symlink_target": ""
} |
'''
This checks if all command line args are documented.
Return value is 0 to indicate no error.
Author: @MarcoFalke
'''
from subprocess import check_output
import re
import sys
FOLDER_GREP = 'src'
FOLDER_TEST = 'src/test/'
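# REGEX_ARG matches argument accesses in the source (e.g. GetArg("-foo"));
# REGEX_DOC matches arguments registered for documentation via AddArg("-foo").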
REGEX_ARG = r'(?:ForceSet|SoftSet|Get|Is)(?:Bool)?Args?(?:Set)?\("(-[^"]+)"'
REGEX_DOC = r'AddArg\("(-[^"=]+?)(?:=|")'
CMD_ROOT_DIR = '`git rev-parse --show-toplevel`/{}'.format(FOLDER_GREP)
CMD_GREP_ARGS = r"git grep --perl-regexp '{}' -- {} ':(exclude){}'".format(REGEX_ARG, CMD_ROOT_DIR, FOLDER_TEST)
CMD_GREP_DOCS = r"git grep --perl-regexp '{}' {}".format(REGEX_DOC, CMD_ROOT_DIR)
# list unsupported, deprecated and duplicate args as they need no documentation
SET_DOC_OPTIONAL = set(['-h', '-help', '-dbcrashratio', '-forcecompactdb'])
def main():
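    # Python >= 3.6 can ask check_output to decode its output directly; older
    # versions fall back to decoding the raw bytes by hand.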
if sys.version_info >= (3, 6):
used = check_output(CMD_GREP_ARGS, shell=True, universal_newlines=True, encoding='utf8')
docd = check_output(CMD_GREP_DOCS, shell=True, universal_newlines=True, encoding='utf8')
else:
used = check_output(CMD_GREP_ARGS, shell=True).decode('utf8').strip()
docd = check_output(CMD_GREP_DOCS, shell=True).decode('utf8').strip()
args_used = set(re.findall(re.compile(REGEX_ARG), used))
args_docd = set(re.findall(re.compile(REGEX_DOC), docd)).union(SET_DOC_OPTIONAL)
args_need_doc = args_used.difference(args_docd)
args_unknown = args_docd.difference(args_used)
print("Args used : {}".format(len(args_used)))
print("Args documented : {}".format(len(args_docd)))
print("Args undocumented: {}".format(len(args_need_doc)))
print(args_need_doc)
print("Args unknown : {}".format(len(args_unknown)))
print(args_unknown)
sys.exit(len(args_need_doc))
if __name__ == "__main__":
main()
| {
"content_hash": "7cb0347823dc688cf6ab08cd09aa8d58",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 112,
"avg_line_length": 38.06382978723404,
"alnum_prop": 0.6467300167691448,
"repo_name": "droark/bitcoin",
"id": "3b05d5055cf1da072017458e0a698a487da1dafe",
"size": "2004",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "test/lint/check-doc.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28453"
},
{
"name": "C",
"bytes": "696871"
},
{
"name": "C++",
"bytes": "6310531"
},
{
"name": "HTML",
"bytes": "21860"
},
{
"name": "Java",
"bytes": "30290"
},
{
"name": "M4",
"bytes": "198257"
},
{
"name": "Makefile",
"bytes": "119862"
},
{
"name": "Objective-C",
"bytes": "123749"
},
{
"name": "Objective-C++",
"bytes": "5382"
},
{
"name": "Python",
"bytes": "1583778"
},
{
"name": "QMake",
"bytes": "756"
},
{
"name": "Shell",
"bytes": "98048"
}
],
"symlink_target": ""
} |
from utils.codegen import format_type, get_detailed_extern_callinfos
from utils.extern import extern_has_tuple_params
from compiler_common import generate_var_name
from more_itertools import unique_everseen
#[ #include "dpdk_lib.h"
#[ #include "util_debug.h"
detailed_callinfos = get_detailed_extern_callinfos(hlir)
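# Each entry of detailed_callinfos describes one extern call site; the tuples
# are unpacked below into the method name parts, parameter/argument strings
# (typed and buffer variants), return type and related helper info.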
def get_externtype(part, partypeinfolen, partype_suffix, varname):
return f'EXTERNTYPE{partypeinfolen}({part}{partype_suffix})* {varname}'
for partypeinfolen, mname_parts, partype_suffix, params, params_as_buf, ret, mname_postfix, mname_postfix_as_buf, args, args_as_buf, refvars, arginfos, parinfos in sorted(unique_everseen(detailed_callinfos, key=lambda c: c[0:3])):
if len(mname_parts) == 1:
call = f'SHORT_EXTERNCALL{partypeinfolen + len(mname_parts)-1}'
else:
call = f'EXTERNCALL{partypeinfolen + len(mname_parts)-2}'
varname = generate_var_name('extern')
externtype = get_externtype(mname_parts[0], partypeinfolen, partype_suffix, varname)
params = f'{externtype}, {params}'
args_as_buf = f'{varname}, ' + args_as_buf
    return_stmt = 'return ' if ret != 'void' else ''
#[ $ret $call(${",".join(mname_parts)}${partype_suffix})($params);
#[
for partypeinfolen, mname_parts, partype_suffix, params, params_as_buf, ret, mname_postfix, mname_postfix_as_buf, args, args_as_buf, refvars, arginfos, parinfos in sorted(unique_everseen(detailed_callinfos, key=lambda c: (c[0:2], c[4]))):
if len(mname_parts) == 1:
call = f'SHORT_EXTERNCALL{partypeinfolen + len(mname_parts)-1}'
else:
call = f'EXTERNCALL{partypeinfolen + len(mname_parts)-2}'
varname = generate_var_name('extern')
externtype = get_externtype(mname_parts[0], partypeinfolen, partype_suffix, varname)
params_as_buf = f'{externtype}, {params_as_buf}'
args_as_buf = f'{varname}, ' + args_as_buf
#[ $ret EXTERNIMPL${partypeinfolen + len(mname_parts)-1}(${",".join(mname_parts)}${mname_postfix_as_buf})(${params_as_buf});
#[
| {
"content_hash": "2318f1b7de6278a913a1b7829b0aefcc",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 238,
"avg_line_length": 47.325581395348834,
"alnum_prop": 0.6845208845208846,
"repo_name": "P4ELTE/t4p4s",
"id": "6e0c6aeb50bf5d1bedef1213804d82d4cde5ddae",
"size": "2135",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/hardware_indep/dpdkx_gen_extern.h.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "462753"
},
{
"name": "Makefile",
"bytes": "2617"
},
{
"name": "Python",
"bytes": "313481"
},
{
"name": "Shell",
"bytes": "86070"
}
],
"symlink_target": ""
} |
from __future__ import (absolute_import, print_function, division)
import copy
import os
from netlib import tcp, certutils
from .. import stateobject, utils
class ClientConnection(tcp.BaseHandler, stateobject.StateObject):
def __init__(self, client_connection, address, server):
# Eventually, this object is restored from state. We don't have a
# connection then.
if client_connection:
super(ClientConnection, self).__init__(client_connection, address, server)
else:
self.connection = None
self.server = None
self.wfile = None
self.rfile = None
self.address = None
self.clientcert = None
self.ssl_established = None
self.timestamp_start = utils.timestamp()
self.timestamp_end = None
self.timestamp_ssl_setup = None
self.protocol = None
def __nonzero__(self):
return bool(self.connection) and not self.finished
def __repr__(self):
return "<ClientConnection: {ssl}{host}:{port}>".format(
ssl="[ssl] " if self.ssl_established else "",
host=self.address.host,
port=self.address.port
)
@property
def tls_established(self):
return self.ssl_established
_stateobject_attributes = dict(
ssl_established=bool,
timestamp_start=float,
timestamp_end=float,
timestamp_ssl_setup=float
)
def get_state(self, short=False):
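        # Addresses and certificates are not directly serializable, so store
        # them as primitives (tuples/PEM strings) and rebuild them in load_state().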
d = super(ClientConnection, self).get_state(short)
d.update(
address={
"address": self.address(),
"use_ipv6": self.address.use_ipv6},
            clientcert=self.clientcert.to_pem() if self.clientcert else None)
return d
def load_state(self, state):
super(ClientConnection, self).load_state(state)
self.address = tcp.Address(
**state["address"]) if state["address"] else None
self.clientcert = certutils.SSLCert.from_pem(
state["clientcert"]) if state["clientcert"] else None
def copy(self):
return copy.copy(self)
def send(self, message):
if isinstance(message, list):
message = b''.join(message)
self.wfile.write(message)
self.wfile.flush()
@classmethod
def from_state(cls, state):
f = cls(None, tuple(), None)
f.load_state(state)
return f
def convert_to_ssl(self, *args, **kwargs):
super(ClientConnection, self).convert_to_ssl(*args, **kwargs)
self.timestamp_ssl_setup = utils.timestamp()
def finish(self):
super(ClientConnection, self).finish()
self.timestamp_end = utils.timestamp()
class ServerConnection(tcp.TCPClient, stateobject.StateObject):
def __init__(self, address):
tcp.TCPClient.__init__(self, address)
self.via = None
self.timestamp_start = None
self.timestamp_end = None
self.timestamp_tcp_setup = None
self.timestamp_ssl_setup = None
self.protocol = None
def __nonzero__(self):
return bool(self.connection) and not self.finished
def __repr__(self):
if self.ssl_established and self.sni:
ssl = "[ssl: {0}] ".format(self.sni)
elif self.ssl_established:
ssl = "[ssl] "
else:
ssl = ""
return "<ServerConnection: {ssl}{host}:{port}>".format(
ssl=ssl,
host=self.address.host,
port=self.address.port
)
@property
def tls_established(self):
return self.ssl_established
_stateobject_attributes = dict(
timestamp_start=float,
timestamp_end=float,
timestamp_tcp_setup=float,
timestamp_ssl_setup=float,
address=tcp.Address,
source_address=tcp.Address,
cert=certutils.SSLCert,
ssl_established=bool,
sni=str
)
_stateobject_long_attributes = {"cert"}
def get_state(self, short=False):
d = super(ServerConnection, self).get_state(short)
d.update(
address={"address": self.address(),
"use_ipv6": self.address.use_ipv6},
source_address=({"address": self.source_address(),
"use_ipv6": self.source_address.use_ipv6} if self.source_address else None),
cert=self.cert.to_pem() if self.cert else None
)
return d
def load_state(self, state):
super(ServerConnection, self).load_state(state)
self.address = tcp.Address(
**state["address"]) if state["address"] else None
self.source_address = tcp.Address(
**state["source_address"]) if state["source_address"] else None
self.cert = certutils.SSLCert.from_pem(
state["cert"]) if state["cert"] else None
@classmethod
def from_state(cls, state):
f = cls(tuple())
f.load_state(state)
return f
def copy(self):
return copy.copy(self)
def connect(self):
self.timestamp_start = utils.timestamp()
tcp.TCPClient.connect(self)
self.timestamp_tcp_setup = utils.timestamp()
def send(self, message):
if isinstance(message, list):
message = b''.join(message)
self.wfile.write(message)
self.wfile.flush()
def establish_ssl(self, clientcerts, sni, **kwargs):
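        # When a client-certificate directory is configured, present the PEM
        # file named after the (IDNA-encoded) upstream host, if one exists.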
clientcert = None
if clientcerts:
path = os.path.join(
clientcerts,
self.address.host.encode("idna")) + ".pem"
if os.path.exists(path):
clientcert = path
self.convert_to_ssl(cert=clientcert, sni=sni, **kwargs)
self.sni = sni
self.timestamp_ssl_setup = utils.timestamp()
def finish(self):
tcp.TCPClient.finish(self)
self.timestamp_end = utils.timestamp()
ServerConnection._stateobject_attributes["via"] = ServerConnection
| {
"content_hash": "dee8d02230b6eb97f6750d4f9cb0db30",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 105,
"avg_line_length": 30.95360824742268,
"alnum_prop": 0.5866777685262281,
"repo_name": "scriptmediala/mitmproxy",
"id": "f1e10de9f68ea3c020acd89d986293f5f6a15ba6",
"size": "6005",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "libmproxy/models/connections.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "425"
},
{
"name": "CSS",
"bytes": "188807"
},
{
"name": "HTML",
"bytes": "2824"
},
{
"name": "JavaScript",
"bytes": "1728524"
},
{
"name": "Python",
"bytes": "653717"
},
{
"name": "Shell",
"bytes": "2303"
}
],
"symlink_target": ""
} |
from urllib3 import HTTPConnectionPool, HTTPSConnectionPool
from urllib3.poolmanager import proxy_from_url
from urllib3.exceptions import (
MaxRetryError,
ProxyError,
ReadTimeoutError,
SSLError,
ProtocolError,
)
from urllib3.response import httplib
from urllib3.util.ssl_ import HAS_SNI
from urllib3.util.timeout import Timeout
from urllib3.util.retry import Retry
from urllib3._collections import HTTPHeaderDict
from dummyserver.testcase import SocketDummyServerTestCase
from dummyserver.server import (
DEFAULT_CERTS, DEFAULT_CA, get_unreachable_address)
from .. import onlyPy3, LogRecorder
from nose.plugins.skip import SkipTest
try:
from mimetools import Message as MimeToolMessage
except ImportError:
class MimeToolMessage(object):
pass
from threading import Event
import socket
import ssl
class TestCookies(SocketDummyServerTestCase):
def test_multi_setcookie(self):
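        # Repeated Set-Cookie headers must be preserved: joined in r.headers
        # and individually retrievable via getlist().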
def multicookie_response_handler(listener):
sock = listener.accept()[0]
buf = b''
while not buf.endswith(b'\r\n\r\n'):
buf += sock.recv(65536)
sock.send(b'HTTP/1.1 200 OK\r\n'
b'Set-Cookie: foo=1\r\n'
b'Set-Cookie: bar=1\r\n'
b'\r\n')
sock.close()
self._start_server(multicookie_response_handler)
pool = HTTPConnectionPool(self.host, self.port)
r = pool.request('GET', '/', retries=0)
self.assertEqual(r.headers, {'set-cookie': 'foo=1, bar=1'})
self.assertEqual(r.headers.getlist('set-cookie'), ['foo=1', 'bar=1'])
class TestSNI(SocketDummyServerTestCase):
def test_hostname_in_first_request_packet(self):
if not HAS_SNI:
raise SkipTest('SNI-support not available')
done_receiving = Event()
self.buf = b''
def socket_handler(listener):
sock = listener.accept()[0]
self.buf = sock.recv(65536) # We only accept one packet
done_receiving.set() # let the test know it can proceed
sock.close()
self._start_server(socket_handler)
pool = HTTPSConnectionPool(self.host, self.port)
try:
pool.request('GET', '/', retries=0)
except SSLError: # We are violating the protocol
pass
done_receiving.wait()
self.assertTrue(self.host.encode() in self.buf,
"missing hostname in SSL handshake")
class TestSocketClosing(SocketDummyServerTestCase):
def test_recovery_when_server_closes_connection(self):
# Does the pool work seamlessly if an open connection in the
# connection pool gets hung up on by the server, then reaches
# the front of the queue again?
done_closing = Event()
def socket_handler(listener):
for i in 0, 1:
sock = listener.accept()[0]
buf = b''
while not buf.endswith(b'\r\n\r\n'):
buf = sock.recv(65536)
body = 'Response %d' % i
sock.send(('HTTP/1.1 200 OK\r\n'
'Content-Type: text/plain\r\n'
'Content-Length: %d\r\n'
'\r\n'
'%s' % (len(body), body)).encode('utf-8'))
sock.close() # simulate a server timing out, closing socket
done_closing.set() # let the test know it can proceed
self._start_server(socket_handler)
pool = HTTPConnectionPool(self.host, self.port)
response = pool.request('GET', '/', retries=0)
self.assertEqual(response.status, 200)
self.assertEqual(response.data, b'Response 0')
done_closing.wait() # wait until the socket in our pool gets closed
response = pool.request('GET', '/', retries=0)
self.assertEqual(response.status, 200)
self.assertEqual(response.data, b'Response 1')
def test_connection_refused(self):
# Does the pool retry if there is no listener on the port?
host, port = get_unreachable_address()
http = HTTPConnectionPool(host, port, maxsize=3, block=True)
self.assertRaises(MaxRetryError, http.request, 'GET', '/', retries=0, release_conn=False)
self.assertEqual(http.pool.qsize(), http.pool.maxsize)
def test_connection_read_timeout(self):
timed_out = Event()
def socket_handler(listener):
sock = listener.accept()[0]
while not sock.recv(65536).endswith(b'\r\n\r\n'):
pass
timed_out.wait()
sock.close()
self._start_server(socket_handler)
http = HTTPConnectionPool(self.host, self.port, timeout=0.001, retries=False, maxsize=3, block=True)
try:
self.assertRaises(ReadTimeoutError, http.request, 'GET', '/', release_conn=False)
finally:
timed_out.set()
self.assertEqual(http.pool.qsize(), http.pool.maxsize)
def test_https_connection_read_timeout(self):
""" Handshake timeouts should fail with a Timeout"""
timed_out = Event()
def socket_handler(listener):
sock = listener.accept()[0]
while not sock.recv(65536):
pass
timed_out.wait()
sock.close()
self._start_server(socket_handler)
pool = HTTPSConnectionPool(self.host, self.port, timeout=0.001, retries=False)
try:
self.assertRaises(ReadTimeoutError, pool.request, 'GET', '/')
finally:
timed_out.set()
def test_timeout_errors_cause_retries(self):
def socket_handler(listener):
sock_timeout = listener.accept()[0]
# Wait for a second request before closing the first socket.
sock = listener.accept()[0]
sock_timeout.close()
# Second request.
buf = b''
while not buf.endswith(b'\r\n\r\n'):
buf += sock.recv(65536)
# Now respond immediately.
body = 'Response 2'
sock.send(('HTTP/1.1 200 OK\r\n'
'Content-Type: text/plain\r\n'
'Content-Length: %d\r\n'
'\r\n'
'%s' % (len(body), body)).encode('utf-8'))
sock.close()
# In situations where the main thread throws an exception, the server
# thread can hang on an accept() call. This ensures everything times
# out within 1 second. This should be long enough for any socket
# operations in the test suite to complete
default_timeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(1)
try:
self._start_server(socket_handler)
t = Timeout(connect=0.001, read=0.001)
pool = HTTPConnectionPool(self.host, self.port, timeout=t)
response = pool.request('GET', '/', retries=1)
self.assertEqual(response.status, 200)
self.assertEqual(response.data, b'Response 2')
finally:
socket.setdefaulttimeout(default_timeout)
def test_delayed_body_read_timeout(self):
timed_out = Event()
def socket_handler(listener):
sock = listener.accept()[0]
buf = b''
body = 'Hi'
while not buf.endswith(b'\r\n\r\n'):
buf = sock.recv(65536)
sock.send(('HTTP/1.1 200 OK\r\n'
'Content-Type: text/plain\r\n'
'Content-Length: %d\r\n'
'\r\n' % len(body)).encode('utf-8'))
timed_out.wait()
sock.send(body.encode('utf-8'))
sock.close()
self._start_server(socket_handler)
pool = HTTPConnectionPool(self.host, self.port)
response = pool.urlopen('GET', '/', retries=0, preload_content=False,
timeout=Timeout(connect=1, read=0.001))
try:
self.assertRaises(ReadTimeoutError, response.read)
finally:
timed_out.set()
def test_incomplete_response(self):
body = 'Response'
partial_body = body[:2]
def socket_handler(listener):
sock = listener.accept()[0]
# Consume request
buf = b''
while not buf.endswith(b'\r\n\r\n'):
buf = sock.recv(65536)
# Send partial response and close socket.
sock.send((
'HTTP/1.1 200 OK\r\n'
'Content-Type: text/plain\r\n'
'Content-Length: %d\r\n'
'\r\n'
'%s' % (len(body), partial_body)).encode('utf-8')
)
sock.close()
self._start_server(socket_handler)
pool = HTTPConnectionPool(self.host, self.port)
response = pool.request('GET', '/', retries=0, preload_content=False)
self.assertRaises(ProtocolError, response.read)
def test_retry_weird_http_version(self):
""" Retry class should handle httplib.BadStatusLine errors properly """
def socket_handler(listener):
sock = listener.accept()[0]
            # First request.
            # Respond with an unparseable HTTP version so the client retries.
buf = b''
while not buf.endswith(b'\r\n\r\n'):
buf += sock.recv(65536)
# send unknown http protocol
body = "bad http 0.5 response"
sock.send(('HTTP/0.5 200 OK\r\n'
'Content-Type: text/plain\r\n'
'Content-Length: %d\r\n'
'\r\n'
'%s' % (len(body), body)).encode('utf-8'))
sock.close()
# Second request.
sock = listener.accept()[0]
buf = b''
while not buf.endswith(b'\r\n\r\n'):
buf += sock.recv(65536)
# Now respond immediately.
sock.send(('HTTP/1.1 200 OK\r\n'
'Content-Type: text/plain\r\n'
'Content-Length: %d\r\n'
'\r\n'
'foo' % (len('foo'))).encode('utf-8'))
sock.close() # Close the socket.
self._start_server(socket_handler)
pool = HTTPConnectionPool(self.host, self.port)
retry = Retry(read=1)
response = pool.request('GET', '/', retries=retry)
self.assertEqual(response.status, 200)
self.assertEqual(response.data, b'foo')
def test_connection_cleanup_on_read_timeout(self):
timed_out = Event()
def socket_handler(listener):
sock = listener.accept()[0]
buf = b''
body = 'Hi'
while not buf.endswith(b'\r\n\r\n'):
buf = sock.recv(65536)
sock.send(('HTTP/1.1 200 OK\r\n'
'Content-Type: text/plain\r\n'
'Content-Length: %d\r\n'
'\r\n' % len(body)).encode('utf-8'))
timed_out.wait()
sock.close()
self._start_server(socket_handler)
with HTTPConnectionPool(self.host, self.port) as pool:
poolsize = pool.pool.qsize()
response = pool.urlopen('GET', '/', retries=0, preload_content=False,
timeout=Timeout(connect=1, read=0.001))
try:
self.assertRaises(ReadTimeoutError, response.read)
self.assertEqual(poolsize, pool.pool.qsize())
finally:
timed_out.set()
def test_connection_cleanup_on_protocol_error_during_read(self):
body = 'Response'
partial_body = body[:2]
def socket_handler(listener):
sock = listener.accept()[0]
# Consume request
buf = b''
while not buf.endswith(b'\r\n\r\n'):
buf = sock.recv(65536)
# Send partial response and close socket.
sock.send((
'HTTP/1.1 200 OK\r\n'
'Content-Type: text/plain\r\n'
'Content-Length: %d\r\n'
'\r\n'
'%s' % (len(body), partial_body)).encode('utf-8')
)
sock.close()
self._start_server(socket_handler)
with HTTPConnectionPool(self.host, self.port) as pool:
poolsize = pool.pool.qsize()
response = pool.request('GET', '/', retries=0, preload_content=False)
self.assertRaises(ProtocolError, response.read)
self.assertEqual(poolsize, pool.pool.qsize())
class TestProxyManager(SocketDummyServerTestCase):
def test_simple(self):
def echo_socket_handler(listener):
sock = listener.accept()[0]
buf = b''
while not buf.endswith(b'\r\n\r\n'):
buf += sock.recv(65536)
sock.send(('HTTP/1.1 200 OK\r\n'
'Content-Type: text/plain\r\n'
'Content-Length: %d\r\n'
'\r\n'
'%s' % (len(buf), buf.decode('utf-8'))).encode('utf-8'))
sock.close()
self._start_server(echo_socket_handler)
base_url = 'http://%s:%d' % (self.host, self.port)
proxy = proxy_from_url(base_url)
r = proxy.request('GET', 'http://google.com/')
self.assertEqual(r.status, 200)
# FIXME: The order of the headers is not predictable right now. We
# should fix that someday (maybe when we migrate to
# OrderedDict/MultiDict).
self.assertEqual(sorted(r.data.split(b'\r\n')),
sorted([
b'GET http://google.com/ HTTP/1.1',
b'Host: google.com',
b'Accept-Encoding: identity',
b'Accept: */*',
b'',
b'',
]))
def test_headers(self):
def echo_socket_handler(listener):
sock = listener.accept()[0]
buf = b''
while not buf.endswith(b'\r\n\r\n'):
buf += sock.recv(65536)
sock.send(('HTTP/1.1 200 OK\r\n'
'Content-Type: text/plain\r\n'
'Content-Length: %d\r\n'
'\r\n'
'%s' % (len(buf), buf.decode('utf-8'))).encode('utf-8'))
sock.close()
self._start_server(echo_socket_handler)
base_url = 'http://%s:%d' % (self.host, self.port)
# Define some proxy headers.
proxy_headers = HTTPHeaderDict({'For The Proxy': 'YEAH!'})
proxy = proxy_from_url(base_url, proxy_headers=proxy_headers)
conn = proxy.connection_from_url('http://www.google.com/')
r = conn.urlopen('GET', 'http://www.google.com/', assert_same_host=False)
self.assertEqual(r.status, 200)
# FIXME: The order of the headers is not predictable right now. We
# should fix that someday (maybe when we migrate to
# OrderedDict/MultiDict).
self.assertTrue(b'For The Proxy: YEAH!\r\n' in r.data)
def test_retries(self):
def echo_socket_handler(listener):
sock = listener.accept()[0]
# First request, which should fail
sock.close()
# Second request
sock = listener.accept()[0]
buf = b''
while not buf.endswith(b'\r\n\r\n'):
buf += sock.recv(65536)
sock.send(('HTTP/1.1 200 OK\r\n'
'Content-Type: text/plain\r\n'
'Content-Length: %d\r\n'
'\r\n'
'%s' % (len(buf), buf.decode('utf-8'))).encode('utf-8'))
sock.close()
self._start_server(echo_socket_handler)
base_url = 'http://%s:%d' % (self.host, self.port)
proxy = proxy_from_url(base_url)
conn = proxy.connection_from_url('http://www.google.com')
r = conn.urlopen('GET', 'http://www.google.com',
assert_same_host=False, retries=1)
self.assertEqual(r.status, 200)
self.assertRaises(ProxyError, conn.urlopen, 'GET',
'http://www.google.com',
assert_same_host=False, retries=False)
def test_connect_reconn(self):
def proxy_ssl_one(listener):
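            # Emulate an HTTP CONNECT proxy: accept a CONNECT for the expected
            # host, upgrade the tunnel to TLS, then serve a single response.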
sock = listener.accept()[0]
buf = b''
while not buf.endswith(b'\r\n\r\n'):
buf += sock.recv(65536)
s = buf.decode('utf-8')
if not s.startswith('CONNECT '):
sock.send(('HTTP/1.1 405 Method not allowed\r\n'
'Allow: CONNECT\r\n\r\n').encode('utf-8'))
sock.close()
return
if not s.startswith('CONNECT %s:443' % (self.host,)):
sock.send(('HTTP/1.1 403 Forbidden\r\n\r\n').encode('utf-8'))
sock.close()
return
sock.send(('HTTP/1.1 200 Connection Established\r\n\r\n').encode('utf-8'))
ssl_sock = ssl.wrap_socket(sock,
server_side=True,
keyfile=DEFAULT_CERTS['keyfile'],
certfile=DEFAULT_CERTS['certfile'],
ca_certs=DEFAULT_CA)
buf = b''
while not buf.endswith(b'\r\n\r\n'):
buf += ssl_sock.recv(65536)
ssl_sock.send(('HTTP/1.1 200 OK\r\n'
'Content-Type: text/plain\r\n'
'Content-Length: 2\r\n'
'Connection: close\r\n'
'\r\n'
'Hi').encode('utf-8'))
ssl_sock.close()
def echo_socket_handler(listener):
proxy_ssl_one(listener)
proxy_ssl_one(listener)
self._start_server(echo_socket_handler)
base_url = 'http://%s:%d' % (self.host, self.port)
proxy = proxy_from_url(base_url)
url = 'https://{0}'.format(self.host)
conn = proxy.connection_from_url(url)
r = conn.urlopen('GET', url, retries=0)
self.assertEqual(r.status, 200)
r = conn.urlopen('GET', url, retries=0)
self.assertEqual(r.status, 200)
class TestSSL(SocketDummyServerTestCase):
def test_ssl_failure_midway_through_conn(self):
def socket_handler(listener):
sock = listener.accept()[0]
sock2 = sock.dup()
ssl_sock = ssl.wrap_socket(sock,
server_side=True,
keyfile=DEFAULT_CERTS['keyfile'],
certfile=DEFAULT_CERTS['certfile'],
ca_certs=DEFAULT_CA)
buf = b''
while not buf.endswith(b'\r\n\r\n'):
buf += ssl_sock.recv(65536)
# Deliberately send from the non-SSL socket.
sock2.send((
'HTTP/1.1 200 OK\r\n'
'Content-Type: text/plain\r\n'
'Content-Length: 2\r\n'
'\r\n'
'Hi').encode('utf-8'))
sock2.close()
ssl_sock.close()
self._start_server(socket_handler)
pool = HTTPSConnectionPool(self.host, self.port)
self.assertRaises(SSLError, pool.request, 'GET', '/', retries=0)
def test_ssl_read_timeout(self):
timed_out = Event()
def socket_handler(listener):
sock = listener.accept()[0]
ssl_sock = ssl.wrap_socket(sock,
server_side=True,
keyfile=DEFAULT_CERTS['keyfile'],
certfile=DEFAULT_CERTS['certfile'],
ca_certs=DEFAULT_CA)
buf = b''
while not buf.endswith(b'\r\n\r\n'):
buf += ssl_sock.recv(65536)
# Send incomplete message (note Content-Length)
ssl_sock.send((
'HTTP/1.1 200 OK\r\n'
'Content-Type: text/plain\r\n'
'Content-Length: 10\r\n'
'\r\n'
'Hi-').encode('utf-8'))
timed_out.wait()
sock.close()
ssl_sock.close()
self._start_server(socket_handler)
pool = HTTPSConnectionPool(self.host, self.port)
response = pool.urlopen('GET', '/', retries=0, preload_content=False,
timeout=Timeout(connect=1, read=0.001))
try:
self.assertRaises(ReadTimeoutError, response.read)
finally:
timed_out.set()
def test_ssl_failed_fingerprint_verification(self):
def socket_handler(listener):
for i in range(2):
sock = listener.accept()[0]
ssl_sock = ssl.wrap_socket(sock,
server_side=True,
keyfile=DEFAULT_CERTS['keyfile'],
certfile=DEFAULT_CERTS['certfile'],
ca_certs=DEFAULT_CA)
ssl_sock.send(b'HTTP/1.1 200 OK\r\n'
b'Content-Type: text/plain\r\n'
b'Content-Length: 5\r\n\r\n'
b'Hello')
ssl_sock.close()
sock.close()
self._start_server(socket_handler)
# GitHub's fingerprint. Valid, but not matching.
fingerprint = ('A0:C4:A7:46:00:ED:A7:2D:C0:BE:CB'
':9A:8C:B6:07:CA:58:EE:74:5E')
        def request():
            pool = HTTPSConnectionPool(self.host, self.port,
                                       assert_fingerprint=fingerprint)
            try:
                response = pool.urlopen('GET', '/', preload_content=False,
                                        timeout=Timeout(connect=1, read=0.001))
                response.read()
            finally:
                pool.close()
self.assertRaises(SSLError, request)
# Should not hang, see https://github.com/shazow/urllib3/issues/529
self.assertRaises(SSLError, request)
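# Helpers shared by the tests below: drain a request's headers from a socket
# and build socket handlers that reply with a canned response.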
def consume_socket(sock, chunks=65536):
while not sock.recv(chunks).endswith(b'\r\n\r\n'):
pass
def create_response_handler(response, num=1):
def socket_handler(listener):
for _ in range(num):
sock = listener.accept()[0]
consume_socket(sock)
sock.send(response)
sock.close()
return socket_handler
class TestErrorWrapping(SocketDummyServerTestCase):
def test_bad_statusline(self):
handler = create_response_handler(
b'HTTP/1.1 Omg What Is This?\r\n'
b'Content-Length: 0\r\n'
b'\r\n'
)
self._start_server(handler)
pool = HTTPConnectionPool(self.host, self.port, retries=False)
self.assertRaises(ProtocolError, pool.request, 'GET', '/')
def test_unknown_protocol(self):
handler = create_response_handler(
b'HTTP/1000 200 OK\r\n'
b'Content-Length: 0\r\n'
b'\r\n'
)
self._start_server(handler)
pool = HTTPConnectionPool(self.host, self.port, retries=False)
self.assertRaises(ProtocolError, pool.request, 'GET', '/')
class TestHeaders(SocketDummyServerTestCase):
@onlyPy3
def test_httplib_headers_case_insensitive(self):
handler = create_response_handler(
b'HTTP/1.1 200 OK\r\n'
b'Content-Length: 0\r\n'
b'Content-type: text/plain\r\n'
b'\r\n'
)
self._start_server(handler)
pool = HTTPConnectionPool(self.host, self.port, retries=False)
HEADERS = {'Content-Length': '0', 'Content-type': 'text/plain'}
r = pool.request('GET', '/')
self.assertEqual(HEADERS, dict(r.headers.items())) # to preserve case sensitivity
def test_headers_are_sent_with_the_original_case(self):
headers = {'foo': 'bar', 'bAz': 'quux'}
parsed_headers = {}
def socket_handler(listener):
sock = listener.accept()[0]
buf = b''
while not buf.endswith(b'\r\n\r\n'):
buf += sock.recv(65536)
headers_list = [header for header in buf.split(b'\r\n')[1:] if header]
for header in headers_list:
(key, value) = header.split(b': ')
parsed_headers[key.decode()] = value.decode()
# Send incomplete message (note Content-Length)
sock.send((
'HTTP/1.1 204 No Content\r\n'
'Content-Length: 0\r\n'
'\r\n').encode('utf-8'))
sock.close()
self._start_server(socket_handler)
expected_headers = {'Accept-Encoding': 'identity',
'Host': '{0}:{1}'.format(self.host, self.port)}
expected_headers.update(headers)
pool = HTTPConnectionPool(self.host, self.port, retries=False)
pool.request('GET', '/', headers=HTTPHeaderDict(headers))
self.assertEqual(expected_headers, parsed_headers)
class TestBrokenHeaders(SocketDummyServerTestCase):
def setUp(self):
if issubclass(httplib.HTTPMessage, MimeToolMessage):
raise SkipTest('Header parsing errors not available')
super(TestBrokenHeaders, self).setUp()
def _test_broken_header_parsing(self, headers):
handler = create_response_handler((
b'HTTP/1.1 200 OK\r\n'
b'Content-Length: 0\r\n'
b'Content-type: text/plain\r\n'
) + b'\r\n'.join(headers) + b'\r\n'
)
self._start_server(handler)
pool = HTTPConnectionPool(self.host, self.port, retries=False)
with LogRecorder() as logs:
pool.request('GET', '/')
for record in logs:
if 'Failed to parse headers' in record.msg and \
pool._absolute_url('/') == record.args[0]:
return
self.fail('Missing log about unparsed headers')
def test_header_without_name(self):
self._test_broken_header_parsing([
b': Value\r\n',
b'Another: Header\r\n',
])
def test_header_without_name_or_value(self):
self._test_broken_header_parsing([
b':\r\n',
b'Another: Header\r\n',
])
def test_header_without_colon_or_value(self):
self._test_broken_header_parsing([
b'Broken Header',
b'Another: Header',
])
class TestHEAD(SocketDummyServerTestCase):
def test_chunked_head_response_does_not_hang(self):
handler = create_response_handler(
b'HTTP/1.1 200 OK\r\n'
b'Transfer-Encoding: chunked\r\n'
b'Content-type: text/plain\r\n'
b'\r\n'
)
self._start_server(handler)
pool = HTTPConnectionPool(self.host, self.port, retries=False)
r = pool.request('HEAD', '/', timeout=1, preload_content=False)
# stream will use the read_chunked method here.
self.assertEqual([], list(r.stream()))
def test_empty_head_response_does_not_hang(self):
handler = create_response_handler(
b'HTTP/1.1 200 OK\r\n'
b'Content-Length: 256\r\n'
b'Content-type: text/plain\r\n'
b'\r\n'
)
self._start_server(handler)
pool = HTTPConnectionPool(self.host, self.port, retries=False)
r = pool.request('HEAD', '/', timeout=1, preload_content=False)
# stream will use the read method here.
self.assertEqual([], list(r.stream()))
| {
"content_hash": "f0446a81f5cd2d794e793ee19865ecec",
"timestamp": "",
"source": "github",
"line_count": 792,
"max_line_length": 108,
"avg_line_length": 35.39646464646464,
"alnum_prop": 0.5239708924876935,
"repo_name": "luca3m/urllib3",
"id": "5af00e0bf5d3e3c89c7eab3d4f2531c1853b9922",
"size": "28156",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/with_dummyserver/test_socketlevel.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "906"
},
{
"name": "Python",
"bytes": "416214"
},
{
"name": "Shell",
"bytes": "1036"
}
],
"symlink_target": ""
} |
from rllab.baselines.linear_feature_baseline import LinearFeatureBaseline
from rllab.envs.normalized_env import normalize
from rllab.misc.instrument import stub, run_experiment_lite
from sandbox.snn4hrl.algos.trpo_snn import TRPO_snn
from sandbox.snn4hrl.bonus_evaluators.grid_bonus_evaluator import GridBonusEvaluator
from sandbox.snn4hrl.envs.mujoco.swimmer_env import SwimmerEnv
from sandbox.snn4hrl.policies.snn_mlp_policy import GaussianMLPPolicy_snn
from sandbox.snn4hrl.regressors.latent_regressor import Latent_regressor
stub(globals())
# SNN policy settings
latent_dim = 6 # dim of the latent variables in the SNN
# Bonus evaluator settings
mesh_density = 5 # for the discretization of the x-y space
snn_H_bonus = 0.05 # coef of the MI bonus
# extra arguments, not used in the paper
switch_lat_every = 0 # switch latents during the pre-training
virtual_reset = False
# Latent regressor (to avoid using the GridBonus evaluator and its discretization)
noisify_coef = 0  # noise injected into the state while fitting/predicting latents
reward_regressor_mi = 0 # bonus associated to the MI computed with the regressor
# choose your environment. For later hierarchization, choose ego_obs=True
env = normalize(SwimmerEnv(ego_obs=True))
policy = GaussianMLPPolicy_snn(
env_spec=env.spec,
latent_dim=latent_dim,
latent_name='categorical',
bilinear_integration=True, # concatenate also the outer product
hidden_sizes=(64, 64),
min_std=1e-6,
)
baseline = LinearFeatureBaseline(env_spec=env.spec)
if latent_dim:
latent_regressor = Latent_regressor(
env_spec=env.spec,
policy=policy,
predict_all=True, # use all the predictions and not only the last
obs_regressed='all', # [-3] is the x-position of the com, otherwise put 'all'
act_regressed=[], # use [] for nothing or 'all' for all.
noisify_traj_coef=noisify_coef,
regressor_args={
'hidden_sizes': (32, 32),
'name': 'latent_reg',
'use_trust_region': False,
}
)
else:
latent_regressor = None
bonus_evaluators = [GridBonusEvaluator(mesh_density=mesh_density, snn_H_bonus=snn_H_bonus,
virtual_reset=virtual_reset,
switch_lat_every=switch_lat_every,
)]
reward_coef_bonus = [1]
algo = TRPO_snn(
env=env,
policy=policy,
baseline=baseline,
self_normalize=True,
log_individual_latents=True,
log_deterministic=True,
latent_regressor=latent_regressor,
reward_regressor_mi=reward_regressor_mi,
bonus_evaluator=bonus_evaluators,
reward_coef_bonus=reward_coef_bonus,
switch_lat_every=switch_lat_every,
batch_size=50000,
whole_paths=True,
max_path_length=500,
n_itr=500,
discount=0.99,
step_size=0.01,
)
for s in range(10, 110, 10):  # seeds 10, 20, ..., 100
exp_prefix = 'egoSwimmer-snn'
exp_name = exp_prefix + '_{}MI_{}grid_{}latCat_bil_{:04d}'.format(
''.join(str(snn_H_bonus).split('.')), mesh_density,
latent_dim, s)
run_experiment_lite(
stub_method_call=algo.train(),
use_cloudpickle=False,
mode='local',
pre_commands=['pip install --upgrade pip',
'pip install --upgrade theano',
],
# Number of parallel workers for sampling
n_parallel=4,
# Only keep the snapshot parameters for the last iteration
snapshot_mode="last",
# Specifies the seed for the experiment. If this is not provided, a random seed
# will be used
seed=s,
# plot=True,
# Save to data/ec2/exp_prefix/exp_name/
exp_prefix=exp_prefix,
exp_name=exp_name,
sync_s3_pkl=True, # for sync the pkl file also during the training
sync_s3_png=True,
terminate_machine=True, # dangerous to have False!
)
| {
"content_hash": "a3253ebcbae04e8fe21244ea604e3959",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 90,
"avg_line_length": 36.18181818181818,
"alnum_prop": 0.6540201005025126,
"repo_name": "florensacc/snn4hrl",
"id": "02393c428a90678d5a11a5606e7c8b314fee3fb3",
"size": "3980",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "runs/train_snn.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "268571"
}
],
"symlink_target": ""
} |
import spinalcordtoolbox as sct
def main():
print(sct.__version__)
if __name__ == "__main__":
main()
| {
"content_hash": "a21860e4a0b776511230c21ded52ff27",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 31,
"avg_line_length": 12.555555555555555,
"alnum_prop": 0.5663716814159292,
"repo_name": "neuropoly/spinalcordtoolbox",
"id": "6eafa777b39dff7d5a6d6418a43268c86f10521f",
"size": "136",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spinalcordtoolbox/scripts/sct_version.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5931"
},
{
"name": "C++",
"bytes": "629016"
},
{
"name": "CMake",
"bytes": "7000"
},
{
"name": "CSS",
"bytes": "1237"
},
{
"name": "Dockerfile",
"bytes": "293"
},
{
"name": "HTML",
"bytes": "11480"
},
{
"name": "JavaScript",
"bytes": "3171"
},
{
"name": "MATLAB",
"bytes": "120557"
},
{
"name": "Python",
"bytes": "2052822"
},
{
"name": "Rich Text Format",
"bytes": "1619"
},
{
"name": "Shell",
"bytes": "61227"
}
],
"symlink_target": ""
} |
"""Support for MySensors covers."""
from __future__ import annotations
from enum import Enum, unique
from typing import Any
from homeassistant.components import mysensors
from homeassistant.components.cover import ATTR_POSITION, DOMAIN, CoverEntity
from homeassistant.components.mysensors.const import MYSENSORS_DISCOVERY, DiscoveryInfo
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .helpers import on_unload
@unique
class CoverState(Enum):
"""An enumeration of the standard cover states."""
OPEN = 0
OPENING = 1
CLOSING = 2
CLOSED = 3
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up this platform for a specific ConfigEntry(==Gateway)."""
async def async_discover(discovery_info: DiscoveryInfo) -> None:
"""Discover and add a MySensors cover."""
mysensors.setup_mysensors_platform(
hass,
DOMAIN,
discovery_info,
MySensorsCover,
async_add_entities=async_add_entities,
)
on_unload(
hass,
config_entry.entry_id,
async_dispatcher_connect(
hass,
MYSENSORS_DISCOVERY.format(config_entry.entry_id, DOMAIN),
async_discover,
),
)
class MySensorsCover(mysensors.device.MySensorsEntity, CoverEntity):
"""Representation of the value of a MySensors Cover child node."""
def get_cover_state(self) -> CoverState:
"""Return a CoverState enum representing the state of the cover."""
set_req = self.gateway.const.SetReq
v_up = self._values.get(set_req.V_UP) == STATE_ON
v_down = self._values.get(set_req.V_DOWN) == STATE_ON
v_stop = self._values.get(set_req.V_STOP) == STATE_ON
# If a V_DIMMER or V_PERCENTAGE is available, that is the amount
# the cover is open. Otherwise, use 0 or 100 based on the V_LIGHT
# or V_STATUS.
        if set_req.V_DIMMER in self._values:
            amount = self._values[set_req.V_DIMMER]
        else:
            amount = 100 if self._values.get(set_req.V_LIGHT) == STATE_ON else 0
if amount == 0:
return CoverState.CLOSED
if v_up and not v_down and not v_stop:
return CoverState.OPENING
if not v_up and v_down and not v_stop:
return CoverState.CLOSING
return CoverState.OPEN
@property
def is_closed(self) -> bool:
"""Return True if the cover is closed."""
return self.get_cover_state() == CoverState.CLOSED
@property
def is_closing(self) -> bool:
"""Return True if the cover is closing."""
return self.get_cover_state() == CoverState.CLOSING
@property
def is_opening(self) -> bool:
"""Return True if the cover is opening."""
return self.get_cover_state() == CoverState.OPENING
@property
def current_cover_position(self) -> int | None:
"""Return current position of cover.
None is unknown, 0 is closed, 100 is fully open.
"""
set_req = self.gateway.const.SetReq
return self._values.get(set_req.V_DIMMER)
async def async_open_cover(self, **kwargs: Any) -> None:
"""Move the cover up."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_UP, 1, ack=1
)
if self.assumed_state:
# Optimistically assume that cover has changed state.
if set_req.V_DIMMER in self._values:
self._values[set_req.V_DIMMER] = 100
else:
self._values[set_req.V_LIGHT] = STATE_ON
self.async_write_ha_state()
async def async_close_cover(self, **kwargs: Any) -> None:
"""Move the cover down."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_DOWN, 1, ack=1
)
if self.assumed_state:
# Optimistically assume that cover has changed state.
if set_req.V_DIMMER in self._values:
self._values[set_req.V_DIMMER] = 0
else:
self._values[set_req.V_LIGHT] = STATE_OFF
self.async_write_ha_state()
async def async_set_cover_position(self, **kwargs: Any) -> None:
"""Move the cover to a specific position."""
position = kwargs.get(ATTR_POSITION)
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_DIMMER, position, ack=1
)
if self.assumed_state:
# Optimistically assume that cover has changed state.
self._values[set_req.V_DIMMER] = position
self.async_write_ha_state()
async def async_stop_cover(self, **kwargs: Any) -> None:
"""Stop the device."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_STOP, 1, ack=1
)
| {
"content_hash": "b1aba3b944937141ab365156552fb72f",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 87,
"avg_line_length": 35.11764705882353,
"alnum_prop": 0.6195793783733482,
"repo_name": "lukas-hetzenecker/home-assistant",
"id": "9219097bea455b40ee4fe8fbe9f60c511082d058",
"size": "5373",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/mysensors/cover.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2443"
},
{
"name": "Python",
"bytes": "38023745"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
} |
import math
import torch
import torch.nn as nn
from fairseq import utils
class SinusoidalPositionalEmbedding(nn.Module):
"""This module produces sinusoidal positional embeddings of any length.
Padding symbols are ignored, but it is necessary to specify whether padding
is added on the left side (left_pad=True) or right side (left_pad=False).
"""
def __init__(self, embedding_dim, padding_idx, left_pad, init_size=1024):
super().__init__()
self.embedding_dim = embedding_dim
self.padding_idx = padding_idx
self.left_pad = left_pad
self.weights = SinusoidalPositionalEmbedding.get_embedding(
init_size,
embedding_dim,
padding_idx,
)
self.register_buffer('_float_tensor', torch.FloatTensor(1))
@staticmethod
def get_embedding(num_embeddings, embedding_dim, padding_idx=None):
"""Build sinusoidal embeddings.
This matches the implementation in tensor2tensor, but differs slightly
from the description in Section 3.5 of "Attention Is All You Need".
"""
half_dim = embedding_dim // 2
emb = math.log(10000) / (half_dim - 1)
emb = torch.exp(torch.arange(half_dim, dtype=torch.float) * -emb)
emb = torch.arange(num_embeddings, dtype=torch.float).unsqueeze(1) * emb.unsqueeze(0)
emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1).view(num_embeddings, -1)
if embedding_dim % 2 == 1:
# zero pad
emb = torch.cat([emb, torch.zeros(num_embeddings, 1)], dim=1)
if padding_idx is not None:
emb[padding_idx, :] = 0
return emb
def forward(self, input, incremental_state=None):
"""Input is expected to be of size [bsz x seqlen]."""
# recompute/expand embeddings if needed
bsz, seq_len = input.size()
max_pos = self.padding_idx + 1 + seq_len
if self.weights is None or max_pos > self.weights.size(0):
self.weights = SinusoidalPositionalEmbedding.get_embedding(
max_pos,
self.embedding_dim,
self.padding_idx,
)
self.weights = self.weights.type_as(self._float_tensor)
if incremental_state is not None:
# positions is the same for every token when decoding a single step
return self.weights[self.padding_idx + seq_len, :].expand(bsz, 1, -1)
positions = utils.make_positions(input.data, self.padding_idx, self.left_pad)
return self.weights.index_select(0, positions.view(-1)).view(bsz, seq_len, -1).detach()
def max_positions(self):
"""Maximum number of supported positions."""
return int(1e5) # an arbitrary large number
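# Hypothetical usage sketch (shapes illustrative, not part of this module):
#   emb = SinusoidalPositionalEmbedding(embedding_dim=512, padding_idx=1,
#                                       left_pad=False)
#   tokens = torch.zeros((2, 7), dtype=torch.long)  # [bsz x seqlen]
#   positions = emb(tokens)  # -> [2, 7, 512], detached from the graph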
| {
"content_hash": "535f630cf458b264b12597429b10ba20",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 95,
"avg_line_length": 40.01449275362319,
"alnum_prop": 0.6247736327417602,
"repo_name": "mlperf/training_results_v0.6",
"id": "4b000e892eef4a94a3faa4477a9a878b3eca8f41",
"size": "3047",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "NVIDIA/benchmarks/transformer/implementations/pytorch/fairseq/modules/sinusoidal_positional_embedding.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1731"
},
{
"name": "Batchfile",
"bytes": "13941"
},
{
"name": "C",
"bytes": "208630"
},
{
"name": "C++",
"bytes": "10999411"
},
{
"name": "CMake",
"bytes": "129712"
},
{
"name": "CSS",
"bytes": "64767"
},
{
"name": "Clojure",
"bytes": "396764"
},
{
"name": "Cuda",
"bytes": "2272433"
},
{
"name": "Dockerfile",
"bytes": "67820"
},
{
"name": "Groovy",
"bytes": "62557"
},
{
"name": "HTML",
"bytes": "19753082"
},
{
"name": "Java",
"bytes": "166294"
},
{
"name": "JavaScript",
"bytes": "71846"
},
{
"name": "Julia",
"bytes": "408765"
},
{
"name": "Jupyter Notebook",
"bytes": "2713169"
},
{
"name": "Lua",
"bytes": "4430"
},
{
"name": "MATLAB",
"bytes": "34903"
},
{
"name": "Makefile",
"bytes": "115694"
},
{
"name": "Perl",
"bytes": "1535873"
},
{
"name": "Perl 6",
"bytes": "7280"
},
{
"name": "PowerShell",
"bytes": "6150"
},
{
"name": "Python",
"bytes": "24905683"
},
{
"name": "R",
"bytes": "351865"
},
{
"name": "Roff",
"bytes": "293052"
},
{
"name": "Scala",
"bytes": "1189019"
},
{
"name": "Shell",
"bytes": "794096"
},
{
"name": "Smalltalk",
"bytes": "3497"
},
{
"name": "TypeScript",
"bytes": "361164"
}
],
"symlink_target": ""
} |
"""
http://developer.openstack.org/api-ref-identity-v3.html#credentials-v3
"""
from oslo_serialization import jsonutils as json
from tempest.lib.common import rest_client
class CredentialsClient(rest_client.RestClient):
api_version = "v3"
def create_credential(self, **kwargs):
"""Creates a credential.
Available params: see http://developer.openstack.org/
api-ref-identity-v3.html#createCredential
"""
post_body = json.dumps({'credential': kwargs})
resp, body = self.post('credentials', post_body)
self.expected_success(201, resp.status)
body = json.loads(body)
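        # The blob is itself JSON-encoded inside the response body, so it is
        # decoded a second time here (and likewise in update/show below).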
body['credential']['blob'] = json.loads(body['credential']['blob'])
return rest_client.ResponseBody(resp, body)
def update_credential(self, credential_id, **kwargs):
"""Updates a credential.
Available params: see http://developer.openstack.org/
api-ref-identity-v3.html#updateCredential
"""
post_body = json.dumps({'credential': kwargs})
resp, body = self.patch('credentials/%s' % credential_id, post_body)
self.expected_success(200, resp.status)
body = json.loads(body)
body['credential']['blob'] = json.loads(body['credential']['blob'])
return rest_client.ResponseBody(resp, body)
def show_credential(self, credential_id):
"""To GET Details of a credential."""
resp, body = self.get('credentials/%s' % credential_id)
self.expected_success(200, resp.status)
body = json.loads(body)
body['credential']['blob'] = json.loads(body['credential']['blob'])
return rest_client.ResponseBody(resp, body)
def list_credentials(self):
"""Lists out all the available credentials."""
resp, body = self.get('credentials')
self.expected_success(200, resp.status)
body = json.loads(body)
return rest_client.ResponseBody(resp, body)
def delete_credential(self, credential_id):
"""Deletes a credential."""
resp, body = self.delete('credentials/%s' % credential_id)
self.expected_success(204, resp.status)
return rest_client.ResponseBody(resp, body)
| {
"content_hash": "9315fc52fcf5f8ca3c7eadd12557c7cc",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 76,
"avg_line_length": 38.827586206896555,
"alnum_prop": 0.6309946714031972,
"repo_name": "LIS/lis-tempest",
"id": "6ab94d049596ff384fd39aa1348787f99001d028",
"size": "2888",
"binary": false,
"copies": "6",
"ref": "refs/heads/LIS",
"path": "tempest/services/identity/v3/json/credentials_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3607367"
},
{
"name": "Shell",
"bytes": "50010"
}
],
"symlink_target": ""
} |
"""
Shell utility functions which use non-blocking and eventlet / gevent friendly code.
"""
from __future__ import absolute_import
import os
import subprocess
import six
from st2common import log as logging
from st2common.util import concurrency
__all__ = ["run_command"]
TIMEOUT_EXIT_CODE = -9
LOG = logging.getLogger(__name__)
def run_command(
cmd,
stdin=None,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=False,
cwd=None,
env=None,
timeout=60,
preexec_func=None,
kill_func=None,
read_stdout_func=None,
read_stderr_func=None,
read_stdout_buffer=None,
read_stderr_buffer=None,
stdin_value=None,
bufsize=0,
):
"""
Run the provided command in a subprocess and wait until it completes.
:param cmd: Command to run.
:type cmd: ``str`` or ``list``
:param stdin: Process stdin.
:type stdin: ``object``
:param stdout: Process stdout.
:type stdout: ``object``
:param stderr: Process stderr.
:type stderr: ``object``
:param shell: True to use a shell.
:type shell ``boolean``
:param cwd: Optional working directory.
:type cwd: ``str``
:param env: Optional environment to use with the command. If not provided,
environment from the current process is inherited.
:type env: ``dict``
:param timeout: How long to wait before timing out.
:type timeout: ``float``
:param preexec_func: Optional pre-exec function.
:type preexec_func: ``callable``
:param kill_func: Optional function which will be called on timeout to kill the process.
If not provided, it defaults to `process.kill`
                      NOTE: If you are utilizing shell=True, you should always specify a custom
                      kill function which correctly kills the shell process and its children.
                      If you don't, the timeout handler won't work as expected -
                      only the shell process will be killed, not the child processes
                      spawned by the shell.
:type kill_func: ``callable``
:param read_stdout_func: Function which is responsible for reading process stdout when
using live read mode.
:type read_stdout_func: ``func``
    :param read_stderr_func: Function which is responsible for reading process stderr when
                             using live read mode.
    :type read_stderr_func: ``func``
:param bufsize: Buffer size argument to pass to subprocess.popen function.
:type bufsize: ``int``
:rtype: ``tuple`` (exit_code, stdout, stderr, timed_out)
"""
LOG.debug("Entering st2common.util.green.run_command.")
if not isinstance(cmd, (list, tuple) + six.string_types):
raise TypeError(
f"Command must be a type of list, tuple, or string, not '{type(cmd)}'."
)
if (read_stdout_func and not read_stderr_func) or (
read_stderr_func and not read_stdout_func
):
raise ValueError(
"Both read_stdout_func and read_stderr_func arguments need "
"to be provided."
)
if read_stdout_func and not (read_stdout_buffer or read_stderr_buffer):
raise ValueError(
"read_stdout_buffer and read_stderr_buffer arguments need to be provided "
"when read_stdout_func is provided"
)
if not env:
LOG.debug("env argument not provided. using process env (os.environ).")
env = os.environ.copy()
subprocess = concurrency.get_subprocess_module()
# Note: We are using eventlet / gevent friendly implementation of subprocess which uses
# GreenPipe so it doesn't block
LOG.debug("Creating subprocess.")
process = concurrency.subprocess_popen(
args=cmd,
stdin=stdin,
stdout=stdout,
stderr=stderr,
env=env,
cwd=cwd,
shell=shell,
preexec_fn=preexec_func,
bufsize=bufsize,
)
if read_stdout_func:
LOG.debug("Spawning read_stdout_func function")
read_stdout_thread = concurrency.spawn(
read_stdout_func, process.stdout, read_stdout_buffer
)
if read_stderr_func:
LOG.debug("Spawning read_stderr_func function")
read_stderr_thread = concurrency.spawn(
read_stderr_func, process.stderr, read_stderr_buffer
)
# Special attribute we use to determine if the process timed out or not
process._timed_out = False
# TODO: Now that we support Python >= 3.6 we should utilize timeout argument for the
# communicate() method and handle killing the process + read threads there.
def on_timeout_expired(timeout):
global timed_out
try:
LOG.debug("Starting process wait inside timeout handler.")
process.wait(timeout=timeout)
except subprocess.TimeoutExpired:
# Command has timed out, kill the process and propagate the error.
# Note: We explicitly set the returncode to indicate the timeout.
LOG.debug("Command execution timeout reached.")
process._timed_out = True
if kill_func:
LOG.debug("Calling kill_func.")
kill_func(process=process)
else:
LOG.debug("Killing process.")
process.kill()
process.wait()
process._timed_out = True
if read_stdout_func and read_stderr_func:
LOG.debug("Killing read_stdout_thread and read_stderr_thread")
concurrency.kill(read_stdout_thread)
concurrency.kill(read_stderr_thread)
LOG.debug("Spawning timeout handler thread.")
timeout_thread = concurrency.spawn(on_timeout_expired, timeout)
LOG.debug("Attaching to process.")
if stdin_value:
if six.PY3:
stdin_value = stdin_value.encode("utf-8")
process.stdin.write(stdin_value)
if read_stdout_func and read_stderr_func:
LOG.debug("Using real-time stdout and stderr read mode, calling process.wait()")
process.wait()
else:
LOG.debug(
"Using delayed stdout and stderr read mode, calling process.communicate()"
)
stdout, stderr = process.communicate()
concurrency.cancel(timeout_thread)
if getattr(process, "_timed_out", False):
exit_code = TIMEOUT_EXIT_CODE
else:
exit_code = process.returncode
if read_stdout_func and read_stderr_func:
# Wait on those green threads to finish reading from stdout and stderr before continuing
concurrency.wait(read_stdout_thread)
concurrency.wait(read_stderr_thread)
stdout = read_stdout_buffer.getvalue()
stderr = read_stderr_buffer.getvalue()
if exit_code == TIMEOUT_EXIT_CODE:
LOG.debug("Timeout.")
timed_out = True
else:
LOG.debug("No timeout.")
timed_out = False
LOG.debug("Returning.")
return (exit_code, stdout, stderr, timed_out)
| {
"content_hash": "5510adcd7826ca53019f861ee3c28d53",
"timestamp": "",
"source": "github",
"line_count": 225,
"max_line_length": 96,
"avg_line_length": 31.662222222222223,
"alnum_prop": 0.623385738349242,
"repo_name": "nzlosh/st2",
"id": "64830c9423c25c5e10f3ab13852b736caee2b6ef",
"size": "7752",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "st2common/st2common/util/green/shell.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "198"
},
{
"name": "JavaScript",
"bytes": "444"
},
{
"name": "Jinja",
"bytes": "174532"
},
{
"name": "Makefile",
"bytes": "75242"
},
{
"name": "PowerShell",
"bytes": "856"
},
{
"name": "Python",
"bytes": "6453910"
},
{
"name": "Shell",
"bytes": "93607"
},
{
"name": "Starlark",
"bytes": "7236"
}
],
"symlink_target": ""
} |
import six
from django.db import models, transaction
from django.db.models import F, Max, Min
from django.contrib.comments.models import Comment
from django.contrib.contenttypes.models import ContentType
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext, ugettext_lazy as _
from django_comments_xtd.conf import settings
def max_thread_level_for_content_type(content_type):
app_model = "%s.%s" % (content_type.app_label, content_type.model)
if app_model in settings.COMMENTS_XTD_MAX_THREAD_LEVEL_BY_APP_MODEL:
return settings.COMMENTS_XTD_MAX_THREAD_LEVEL_BY_APP_MODEL[app_model]
else:
return settings.COMMENTS_XTD_MAX_THREAD_LEVEL
class MaxThreadLevelExceededException(Exception):
def __init__(self, content_type=None):
self.max_by_app = max_thread_level_for_content_type(content_type)
def __str__(self):
        return ugettext("Can not post comments over the thread level %(max_thread_level)s") % {"max_thread_level": self.max_by_app}
class XtdCommentManager(models.Manager):
def for_app_models(self, *args):
"""Return XtdComments for pairs "app.model" given in args"""
content_types = []
for app_model in args:
app, model = app_model.split(".")
content_types.append(ContentType.objects.get(app_label=app,
model=model))
return self.for_content_types(content_types)
def for_content_types(self, content_types):
qs = self.get_query_set().filter(content_type__in=content_types).reverse()
return qs
class XtdComment(Comment):
thread_id = models.IntegerField(default=0, db_index=True)
parent_id = models.IntegerField(default=0)
level = models.SmallIntegerField(default=0)
order = models.IntegerField(default=1, db_index=True)
followup = models.BooleanField(help_text=_("Receive by email further comments in this conversation"), blank=True, default=False)
objects = XtdCommentManager()
class Meta:
ordering = settings.COMMENTS_XTD_LIST_ORDER
def save(self, *args, **kwargs):
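        # A new comment is written twice: the first save assigns the primary
        # key, which then seeds thread_id/parent_id (or, for replies, the
        # computed thread ordering) before the second save persists it with
        # force_insert disabled.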
        is_new = self.pk is None
super(Comment, self).save(*args, **kwargs)
if is_new:
if not self.parent_id:
self.parent_id = self.id
self.thread_id = self.id
else:
if max_thread_level_for_content_type(self.content_type):
with transaction.commit_on_success():
self._calculate_thread_data()
else:
raise MaxThreadLevelExceededException(self.content_type)
kwargs["force_insert"] = False
super(Comment, self).save(*args, **kwargs)
def _calculate_thread_data(self):
# Implements the following approach:
# http://www.sqlteam.com/article/sql-for-threaded-discussion-forums
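        # Scheme: every comment in a thread shares thread_id, `level` is the
        # nesting depth, and `order` is the comment's position in a
        # depth-first walk of the thread. A reply is slotted in right after
        # its parent's subtree by shifting every later `order` up by one, so
        # rendering a thread is a single query ordered by `order`.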
parent = XtdComment.objects.get(pk=self.parent_id)
if parent.level == max_thread_level_for_content_type(self.content_type):
raise MaxThreadLevelExceededException(self.content_type)
self.thread_id = parent.thread_id
self.level = parent.level + 1
qc_eq_thread = XtdComment.objects.filter(thread_id = parent.thread_id)
qc_ge_level = qc_eq_thread.filter(level__lte = parent.level,
order__gt = parent.order)
if qc_ge_level.count():
min_order = qc_ge_level.aggregate(Min('order'))['order__min']
XtdComment.objects.filter(thread_id = parent.thread_id,
order__gte = min_order).update(order=F('order')+1)
self.order = min_order
else:
max_order = qc_eq_thread.aggregate(Max('order'))['order__max']
self.order = max_order + 1
@models.permalink
def get_reply_url(self):
return ("comments-xtd-reply", None, {"cid": self.pk})
def allow_thread(self):
if self.level < max_thread_level_for_content_type(self.content_type):
return True
else:
return False
class DummyDefaultManager:
"""
Dummy Manager to mock django's CommentForm.check_for_duplicate method.
"""
def __getattr__(self, name):
return lambda *args, **kwargs: []
def using(self, *args, **kwargs):
return self
class TmpXtdComment(dict):
"""
Temporary XtdComment to be pickled, ziped and appended to a URL.
"""
_default_manager = DummyDefaultManager()
def __getattr__(self, key):
try:
return self[key]
except KeyError:
return None
def __setattr__(self, key, value):
self[key] = value
def save(self, *args, **kwargs):
pass
def _get_pk_val(self):
if self.xtd_comment:
return self.xtd_comment._get_pk_val()
else:
return ""
def __reduce__(self):
return (TmpXtdComment, (), None, None, six.iteritems(self))
| {
"content_hash": "c7fabb4b897bce62584d22ece97bc949",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 132,
"avg_line_length": 36.737226277372265,
"alnum_prop": 0.6171269620504669,
"repo_name": "CantemoInternal/django-comments-xtd",
"id": "02ab19bf26c879ecfc5ea41cb55765cf6598cc3f",
"size": "5033",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_comments_xtd/models.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [],
"symlink_target": ""
} |
import datetime
from keystoneclient import access
from keystoneclient.openstack.common import timeutils
from keystoneclient.tests import client_fixtures as token_data
from keystoneclient.tests.v2_0 import client_fixtures
from keystoneclient.tests.v2_0 import utils
UNSCOPED_TOKEN = client_fixtures.UNSCOPED_TOKEN
PROJECT_SCOPED_TOKEN = client_fixtures.PROJECT_SCOPED_TOKEN
DIABLO_TOKEN = token_data.TOKEN_RESPONSES[token_data.VALID_DIABLO_TOKEN]
GRIZZLY_TOKEN = token_data.TOKEN_RESPONSES[token_data.SIGNED_TOKEN_SCOPED_KEY]
class AccessInfoTest(utils.TestCase):
def test_building_unscoped_accessinfo(self):
auth_ref = access.AccessInfo.factory(body=UNSCOPED_TOKEN)
self.assertTrue(auth_ref)
self.assertIn('token', auth_ref)
self.assertIn('serviceCatalog', auth_ref)
self.assertFalse(auth_ref['serviceCatalog'])
self.assertEqual(auth_ref.auth_token,
'3e2813b7ba0b4006840c3825860b86ed')
self.assertEqual(auth_ref.username, 'exampleuser')
self.assertEqual(auth_ref.user_id, 'c4da488862bd435c9e6c0275a0d0e49a')
        self.assertIsNone(auth_ref.tenant_name)
        self.assertIsNone(auth_ref.tenant_id)
        self.assertIsNone(auth_ref.auth_url)
        self.assertIsNone(auth_ref.management_url)
self.assertFalse(auth_ref.scoped)
self.assertFalse(auth_ref.domain_scoped)
self.assertFalse(auth_ref.project_scoped)
self.assertFalse(auth_ref.trust_scoped)
self.assertIsNone(auth_ref.project_domain_id)
self.assertIsNone(auth_ref.project_domain_name)
self.assertEqual(auth_ref.user_domain_id, 'default')
self.assertEqual(auth_ref.user_domain_name, 'Default')
self.assertEqual(auth_ref.expires, timeutils.parse_isotime(
UNSCOPED_TOKEN['access']['token']['expires']))
def test_will_expire_soon(self):
expires = timeutils.utcnow() + datetime.timedelta(minutes=5)
UNSCOPED_TOKEN['access']['token']['expires'] = expires.isoformat()
auth_ref = access.AccessInfo.factory(body=UNSCOPED_TOKEN)
self.assertFalse(auth_ref.will_expire_soon(stale_duration=120))
self.assertTrue(auth_ref.will_expire_soon(stale_duration=300))
self.assertFalse(auth_ref.will_expire_soon())
def test_building_scoped_accessinfo(self):
auth_ref = access.AccessInfo.factory(body=PROJECT_SCOPED_TOKEN)
self.assertTrue(auth_ref)
self.assertIn('token', auth_ref)
self.assertIn('serviceCatalog', auth_ref)
self.assertTrue(auth_ref['serviceCatalog'])
self.assertEqual(auth_ref.auth_token,
'04c7d5ffaeef485f9dc69c06db285bdb')
self.assertEqual(auth_ref.username, 'exampleuser')
self.assertEqual(auth_ref.user_id, 'c4da488862bd435c9e6c0275a0d0e49a')
self.assertEqual(auth_ref.tenant_name, 'exampleproject')
self.assertEqual(auth_ref.tenant_id,
'225da22d3ce34b15877ea70b2a575f58')
self.assertEqual(auth_ref.tenant_name, auth_ref.project_name)
self.assertEqual(auth_ref.tenant_id, auth_ref.project_id)
self.assertEqual(auth_ref.auth_url, ('http://public.com:5000/v2.0',))
self.assertEqual(auth_ref.management_url, ('http://admin:35357/v2.0',))
self.assertEqual(auth_ref.project_domain_id, 'default')
self.assertEqual(auth_ref.project_domain_name, 'Default')
self.assertEqual(auth_ref.user_domain_id, 'default')
self.assertEqual(auth_ref.user_domain_name, 'Default')
self.assertTrue(auth_ref.scoped)
self.assertTrue(auth_ref.project_scoped)
self.assertFalse(auth_ref.domain_scoped)
def test_diablo_token(self):
auth_ref = access.AccessInfo.factory(body=DIABLO_TOKEN)
self.assertTrue(auth_ref)
self.assertEqual(auth_ref.username, 'user_name1')
self.assertEqual(auth_ref.project_id, 'tenant_id1')
self.assertEqual(auth_ref.project_name, 'tenant_id1')
self.assertEqual(auth_ref.project_domain_id, 'default')
self.assertEqual(auth_ref.project_domain_name, 'Default')
self.assertEqual(auth_ref.user_domain_id, 'default')
self.assertEqual(auth_ref.user_domain_name, 'Default')
self.assertFalse(auth_ref.scoped)
def test_grizzly_token(self):
auth_ref = access.AccessInfo.factory(body=GRIZZLY_TOKEN)
self.assertEqual(auth_ref.project_id, 'tenant_id1')
self.assertEqual(auth_ref.project_name, 'tenant_name1')
self.assertEqual(auth_ref.project_domain_id, 'default')
self.assertEqual(auth_ref.project_domain_name, 'Default')
self.assertEqual(auth_ref.user_domain_id, 'default')
self.assertEqual(auth_ref.user_domain_name, 'Default')
| {
"content_hash": "16612d4d23fa842051d428b6b62ed8b6",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 79,
"avg_line_length": 44.26605504587156,
"alnum_prop": 0.6905699481865285,
"repo_name": "ntt-sic/python-keystoneclient",
"id": "e097120e8220b1ccb5ecf52fb80c9505d90ef1de",
"size": "5443",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "keystoneclient/tests/v2_0/test_access.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16002"
},
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "661153"
},
{
"name": "Shell",
"bytes": "11342"
}
],
"symlink_target": ""
} |
"""Contains functionality to use flic buttons as a binary sensor."""
import asyncio
import logging
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.const import (
CONF_HOST, CONF_PORT, CONF_DISCOVERY, CONF_TIMEOUT,
EVENT_HOMEASSISTANT_STOP)
from homeassistant.components.binary_sensor import (
BinarySensorDevice, PLATFORM_SCHEMA)
from homeassistant.util.async import run_callback_threadsafe
REQUIREMENTS = ['https://github.com/soldag/pyflic/archive/0.4.zip#pyflic==0.4']
_LOGGER = logging.getLogger(__name__)
DEFAULT_TIMEOUT = 3
CLICK_TYPE_SINGLE = "single"
CLICK_TYPE_DOUBLE = "double"
CLICK_TYPE_HOLD = "hold"
CLICK_TYPES = [CLICK_TYPE_SINGLE, CLICK_TYPE_DOUBLE, CLICK_TYPE_HOLD]
CONF_IGNORED_CLICK_TYPES = "ignored_click_types"
EVENT_NAME = "flic_click"
EVENT_DATA_NAME = "button_name"
EVENT_DATA_ADDRESS = "button_address"
EVENT_DATA_TYPE = "click_type"
EVENT_DATA_QUEUED_TIME = "queued_time"
# Validation of the user's configuration
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_HOST, default='localhost'): cv.string,
vol.Optional(CONF_PORT, default=5551): cv.port,
vol.Optional(CONF_DISCOVERY, default=True): cv.boolean,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
vol.Optional(CONF_IGNORED_CLICK_TYPES): vol.All(cv.ensure_list,
[vol.In(CLICK_TYPES)])
})
@asyncio.coroutine
def async_setup_platform(hass, config, async_add_entities,
discovery_info=None):
"""Setup the flic platform."""
import pyflic
# Initialize flic client responsible for
# connecting to buttons and retrieving events
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
discovery = config.get(CONF_DISCOVERY)
try:
client = pyflic.FlicClient(host, port)
except ConnectionRefusedError:
_LOGGER.error("Failed to connect to flic server.")
return
def new_button_callback(address):
"""Setup newly verified button as device in home assistant."""
hass.add_job(async_setup_button(hass, config, async_add_entities,
client, address))
client.on_new_verified_button = new_button_callback
if discovery:
start_scanning(hass, config, async_add_entities, client)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP,
lambda event: client.close())
hass.loop.run_in_executor(None, client.handle_events)
# Get addresses of already verified buttons
addresses = yield from async_get_verified_addresses(client)
if addresses:
for address in addresses:
yield from async_setup_button(hass, config, async_add_entities,
client, address)
def start_scanning(hass, config, async_add_entities, client):
"""Start a new flic client for scanning & connceting to new buttons."""
import pyflic
scan_wizard = pyflic.ScanWizard()
def scan_completed_callback(scan_wizard, result, address, name):
"""Restart scan wizard to constantly check for new buttons."""
if result == pyflic.ScanWizardResult.WizardSuccess:
_LOGGER.info("Found new button (%s)", address)
elif result != pyflic.ScanWizardResult.WizardFailedTimeout:
_LOGGER.warning("Failed to connect to button (%s). Reason: %s",
address, result)
# Restart scan wizard
start_scanning(hass, config, async_add_entities, client)
scan_wizard.on_completed = scan_completed_callback
client.add_scan_wizard(scan_wizard)
@asyncio.coroutine
def async_setup_button(hass, config, async_add_entities, client, address):
"""Setup single button device."""
timeout = config.get(CONF_TIMEOUT)
ignored_click_types = config.get(CONF_IGNORED_CLICK_TYPES)
button = FlicButton(hass, client, address, timeout, ignored_click_types)
_LOGGER.info("Connected to button (%s)", address)
yield from async_add_entities([button])
@asyncio.coroutine
def async_get_verified_addresses(client):
"""Retrieve addresses of verified buttons."""
future = asyncio.Future()
loop = asyncio.get_event_loop()
def get_info_callback(items):
"""Set the addressed of connected buttons as result of the future."""
addresses = items["bd_addr_of_verified_buttons"]
run_callback_threadsafe(loop, future.set_result, addresses)
client.get_info(get_info_callback)
return future
class FlicButton(BinarySensorDevice):
"""Representation of a flic button."""
def __init__(self, hass, client, address, timeout, ignored_click_types):
"""Initialize the flic button."""
import pyflic
self._hass = hass
self._address = address
self._timeout = timeout
self._is_down = False
self._ignored_click_types = ignored_click_types or []
self._hass_click_types = {
pyflic.ClickType.ButtonClick: CLICK_TYPE_SINGLE,
pyflic.ClickType.ButtonSingleClick: CLICK_TYPE_SINGLE,
pyflic.ClickType.ButtonDoubleClick: CLICK_TYPE_DOUBLE,
pyflic.ClickType.ButtonHold: CLICK_TYPE_HOLD,
}
self._channel = self._create_channel()
client.add_connection_channel(self._channel)
def _create_channel(self):
"""Create a new connection channel to the button."""
import pyflic
channel = pyflic.ButtonConnectionChannel(self._address)
channel.on_button_up_or_down = self._on_up_down
# If all types of clicks should be ignored, skip registering callbacks
if set(self._ignored_click_types) == set(CLICK_TYPES):
return channel
if CLICK_TYPE_DOUBLE in self._ignored_click_types:
# Listen to all but double click type events
channel.on_button_click_or_hold = self._on_click
elif CLICK_TYPE_HOLD in self._ignored_click_types:
# Listen to all but hold click type events
channel.on_button_single_or_double_click = self._on_click
else:
# Listen to all click type events
channel.on_button_single_or_double_click_or_hold = self._on_click
return channel
@property
def name(self):
"""Return the name of the device."""
return "flic_%s" % self.address.replace(":", "")
@property
def address(self):
"""Return the bluetooth address of the device."""
return self._address
@property
def is_on(self):
"""Return true if sensor is on."""
return self._is_down
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def state_attributes(self):
"""Return device specific state attributes."""
attr = super(FlicButton, self).state_attributes
attr["address"] = self.address
return attr
def _queued_event_check(self, click_type, time_diff):
"""Generate a log message and returns true if timeout exceeded."""
time_string = "{:d} {}".format(
time_diff, "second" if time_diff == 1 else "seconds")
if time_diff > self._timeout:
_LOGGER.warning(
"Queued %s dropped for %s. Time in queue was %s.",
click_type, self.address, time_string)
return True
else:
_LOGGER.info(
"Queued %s allowed for %s. Time in queue was %s.",
click_type, self.address, time_string)
return False
def _on_up_down(self, channel, click_type, was_queued, time_diff):
"""Update device state, if event was not queued."""
import pyflic
if was_queued and self._queued_event_check(click_type, time_diff):
return
self._is_down = click_type == pyflic.ClickType.ButtonDown
self.schedule_update_ha_state()
def _on_click(self, channel, click_type, was_queued, time_diff):
"""Fire click event, if event was not queued."""
# Return if click event was queued beyond allowed timeout
if was_queued and self._queued_event_check(click_type, time_diff):
return
# Return if click event is in ignored click types
hass_click_type = self._hass_click_types[click_type]
if hass_click_type in self._ignored_click_types:
return
self._hass.bus.fire(EVENT_NAME, {
EVENT_DATA_NAME: self.name,
EVENT_DATA_ADDRESS: self.address,
EVENT_DATA_QUEUED_TIME: time_diff,
EVENT_DATA_TYPE: hass_click_type
})
def _connection_status_changed(self, channel,
connection_status, disconnect_reason):
"""Remove device, if button disconnects."""
import pyflic
if connection_status == pyflic.ConnectionStatus.Disconnected:
_LOGGER.info("Button (%s) disconnected. Reason: %s",
self.address, disconnect_reason)
self.remove()
| {
"content_hash": "f5f9bae456dbf42e5d49c0cdd8f06593",
"timestamp": "",
"source": "github",
"line_count": 257,
"max_line_length": 79,
"avg_line_length": 35.53307392996109,
"alnum_prop": 0.6371003066141042,
"repo_name": "ma314smith/home-assistant",
"id": "980af069f38dbb944d0478f1454d1eaa3c321181",
"size": "9132",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/binary_sensor/flic.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1436909"
},
{
"name": "Python",
"bytes": "4511947"
},
{
"name": "Ruby",
"bytes": "379"
},
{
"name": "Shell",
"bytes": "4460"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class NameValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(self, plotly_name="name", parent_name="layout.mapbox.layer", **kwargs):
super(NameValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
role=kwargs.pop("role", "style"),
**kwargs
)
| {
"content_hash": "809cf80127b89c32e551080065c29231",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 88,
"avg_line_length": 37.083333333333336,
"alnum_prop": 0.6067415730337079,
"repo_name": "plotly/python-api",
"id": "d3b3e2d971a17c8b92fb2e68bcc9e5a0c8e471ea",
"size": "445",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/layout/mapbox/layer/_name.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
} |
from morse.builder import *
# Land robot
ATRV = Robot('atrv')
Pose = Sensor('pose')
Pose.translate(x=-0.2000, z=0.9000)
ATRV.append(Pose)
Camera = Sensor('video_camera')
Camera.translate(x=0.2000, z=0.9000)
ATRV.append(Camera)
Motion_Controller = Actuator('waypoint')
ATRV.append(Motion_Controller)
# Scene configuration
Motion_Controller.configure_mw('yarp')
Pose.configure_mw('yarp')
Camera.configure_mw('yarp')
env = Environment('indoors-1/indoor-1')
env.aim_camera([1.0470, 0, 0.7854])
| {
"content_hash": "d8a727f7b9b4ed2ca294a505f9515585",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 40,
"avg_line_length": 20.708333333333332,
"alnum_prop": 0.7283702213279678,
"repo_name": "Arkapravo/morse-0.6",
"id": "ffa7a1c06ffe841d4d3813eab788492f91412c6c",
"size": "497",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/tutorials/tutorial-2-yarp.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "46148"
},
{
"name": "C++",
"bytes": "30878"
},
{
"name": "Perl",
"bytes": "1705"
},
{
"name": "Python",
"bytes": "1117700"
},
{
"name": "Shell",
"bytes": "684"
}
],
"symlink_target": ""
} |
"""
hyper/http20/bufsocket.py
~~~~~~~~~~~~~~~~~~~~~~~~~
This file implements a buffered socket wrapper.
The purpose of this is to avoid the overhead of unnecessary syscalls while
allowing small reads from the network. This represents a potentially massive
performance optimisation at the cost of burning some memory in the userspace
process.
"""
import select
from .exceptions import ConnectionResetError, LineTooLongError
# import logging
# logger = logging.getLogger()
# logger.setLevel(logging.DEBUG)
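# WriteBuffer is a rope-like accumulator: append()/insert() are cheap list
# operations and the full string is materialised only once, in get_string(),
# avoiding the quadratic cost of concatenating on every small write.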
class WriteBuffer(object):
def __init__(self, s=None):
if isinstance(s, str):
self.string_len = len(s)
self.buffer_list = [s]
else:
self.reset()
def reset(self):
self.buffer_list = []
self.string_len = 0
def __len__(self):
return self.string_len
def __add__(self, other):
self.append(other)
return self
def insert(self, s):
if isinstance(s, WriteBuffer):
self.buffer_list = s.buffer_list + self.buffer_list
self.string_len += s.string_len
elif isinstance(s, str):
self.buffer_list.insert(0, s)
self.string_len += len(s)
else:
raise Exception("WriteBuffer append not string or StringBuffer")
def append(self, s):
if isinstance(s, WriteBuffer):
self.buffer_list.extend(s.buffer_list)
self.string_len += s.string_len
elif isinstance(s, str):
self.buffer_list.append(s)
self.string_len += len(s)
else:
raise Exception("WriteBuffer append not string or StringBuffer")
def __str__(self):
return self.get_string()
def get_string(self):
return "".join(self.buffer_list)
class BufferedSocket(object):
"""
A buffered socket wrapper.
The purpose of this is to avoid the overhead of unnecessary syscalls while
allowing small reads from the network. This represents a potentially
massive performance optimisation at the cost of burning some memory in the
userspace process.
"""
def __init__(self, sck, buffer_size=1000):
"""
Create the buffered socket.
:param sck: The socket to wrap.
:param buffer_size: The size of the backing buffer in bytes. This
parameter should be set to an appropriate value for your use case.
Small values of ``buffer_size`` increase the overhead of buffer
management: large values cause more memory to be used.
"""
# The wrapped socket.
self._sck = sck
# The buffer we're using.
self._backing_buffer = bytearray(buffer_size)
self._buffer_view = memoryview(self._backing_buffer)
# The size of the buffer.
self._buffer_size = buffer_size
# The start index in the memory view.
self._index = 0
# The number of bytes in the buffer.
self._bytes_in_buffer = 0
# following is define for send buffer
# all send will be cache and send when flush called,
# combine data to reduce the api call
self.send_buffer = WriteBuffer()
def send(self, buf, flush=True):
self.send_buffer.append(buf)
if len(self.send_buffer) > 1300 or flush:
self.flush()
def flush(self):
if len(self.send_buffer):
data = self.send_buffer.get_string()
# logger.debug("buffer socket flush:%d", len(data))
self.send_buffer.reset()
data_len = len(data)
start = 0
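                # Send in <=64KiB slices; send() may also write fewer bytes
                # than asked, so advance by the number actually written.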
while start < data_len:
send_size = min(data_len - start, 65535)
                sent = self._sck.send(data[start:start+send_size])
                start += sent
@property
def _remaining_capacity(self):
"""
The maximum number of bytes the buffer could still contain.
"""
return self._buffer_size - self._index
@property
def _buffer_end(self):
"""
The index of the first free byte in the buffer.
"""
return self._index + self._bytes_in_buffer
@property
def can_read(self):
"""
Whether or not there is more data to read from the socket.
"""
if self._bytes_in_buffer:
return True
read = select.select([self._sck], [], [], 0)[0]
if read:
return True
return False
@property
def buffer(self):
"""
Get access to the buffer itself.
"""
return self._buffer_view[self._index:self._buffer_end]
def advance_buffer(self, count):
"""
Advances the buffer by the amount of data consumed outside the socket.
"""
self._index += count
self._bytes_in_buffer -= count
def new_buffer(self):
"""
This method moves all the data in the backing buffer to the start of
a new, fresh buffer. This gives the ability to read much more data.
"""
def read_all_from_buffer():
end = self._index + self._bytes_in_buffer
return self._buffer_view[self._index:end]
new_buffer = bytearray(self._buffer_size)
new_buffer_view = memoryview(new_buffer)
new_buffer_view[0:self._bytes_in_buffer] = read_all_from_buffer()
self._index = 0
self._backing_buffer = new_buffer
self._buffer_view = new_buffer_view
return
def recv(self, amt):
"""
Read some data from the socket.
:param amt: The amount of data to read.
:returns: A ``memoryview`` object containing the appropriate number of
bytes. The data *must* be copied out by the caller before the next
call to this function.
"""
# In this implementation you can never read more than the number of
# bytes in the buffer.
if amt > self._buffer_size:
amt = self._buffer_size
# If the amount of data we've been asked to read is less than the
# remaining space in the buffer, we need to clear out the buffer and
# start over.
if amt > self._remaining_capacity:
self.new_buffer()
# If there's still some room in the buffer, opportunistically attempt
# to read into it.
# If we don't actually _need_ the data (i.e. there's enough in the
# buffer to satisfy the request), use select to work out if the read
# attempt will block. If it will, don't bother reading. If we need the
# data, always do the read.
if self._bytes_in_buffer >= amt:
should_read = select.select([self._sck], [], [], 0)[0]
else:
should_read = True
        if self._remaining_capacity > self._bytes_in_buffer and should_read:
count = self._sck.recv_into(self._buffer_view[self._buffer_end:])
# The socket just got closed. We should throw an exception if we
# were asked for more data than we can return.
if not count and amt > self._bytes_in_buffer:
raise ConnectionResetError()
self._bytes_in_buffer += count
# Read out the bytes and update the index.
amt = min(amt, self._bytes_in_buffer)
data = self._buffer_view[self._index:self._index+amt]
self._index += amt
self._bytes_in_buffer -= amt
return data
def fill(self):
"""
Attempts to fill the buffer as much as possible. It will block for at
most the time required to have *one* ``recv_into`` call return.
"""
if not self._remaining_capacity:
self.new_buffer()
count = self._sck.recv_into(self._buffer_view[self._buffer_end:])
if not count:
raise ConnectionResetError()
self._bytes_in_buffer += count
return
def readline(self):
"""
Read up to a newline from the network and returns it. The implicit
maximum line length is the buffer size of the buffered socket.
Note that, unlike recv, this method absolutely *does* block until it
can read the line.
:returns: A ``memoryview`` object containing the appropriate number of
bytes. The data *must* be copied out by the caller before the next
call to this function.
"""
# First, check if there's anything in the buffer. This is one of those
# rare circumstances where this will work correctly on all platforms.
index = self._backing_buffer.find(
b'\n',
self._index,
self._index + self._bytes_in_buffer
)
if index != -1:
length = index + 1 - self._index
data = self._buffer_view[self._index:self._index+length]
self._index += length
self._bytes_in_buffer -= length
return data
# In this case, we didn't find a newline in the buffer. To fix that,
# read some data into the buffer. To do our best to satisfy the read,
# we should shunt the data down in the buffer so that it's right at
# the start. We don't bother if we're already at the start of the
# buffer.
if self._index != 0:
self.new_buffer()
while self._bytes_in_buffer < self._buffer_size:
count = self._sck.recv_into(self._buffer_view[self._buffer_end:])
if not count:
raise ConnectionResetError()
# We have some more data. Again, look for a newline in that gap.
first_new_byte = self._buffer_end
self._bytes_in_buffer += count
index = self._backing_buffer.find(
b'\n',
first_new_byte,
first_new_byte + count,
)
if index != -1:
# The length of the buffer is the index into the
# buffer at which we found the newline plus 1, minus the start
# index of the buffer, which really should be zero.
assert not self._index
length = index + 1
data = self._buffer_view[:length]
self._index += length
self._bytes_in_buffer -= length
return data
# If we got here, it means we filled the buffer without ever getting
# a newline. Time to throw an exception.
raise LineTooLongError()
def __getattr__(self, name):
return getattr(self._sck, name)
| {
"content_hash": "8f35784e2fd581bca39ddf89602dfc26",
"timestamp": "",
"source": "github",
"line_count": 316,
"max_line_length": 82,
"avg_line_length": 33.50316455696203,
"alnum_prop": 0.5800510059506943,
"repo_name": "qqzwc/XX-Net",
"id": "62c79cf58c94b9ac5237b181365867ed50f9033f",
"size": "10611",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "code/default/python27/1.0/lib/noarch/hyper/common/bufsocket.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3884"
},
{
"name": "C",
"bytes": "53301"
},
{
"name": "CSS",
"bytes": "86883"
},
{
"name": "HTML",
"bytes": "190128"
},
{
"name": "JavaScript",
"bytes": "6524"
},
{
"name": "Python",
"bytes": "15368059"
},
{
"name": "Shell",
"bytes": "7812"
},
{
"name": "Visual Basic",
"bytes": "1700"
}
],
"symlink_target": ""
} |
import unittest
from sql import For, Table
class TestFor(unittest.TestCase):
def test_for(self):
for_ = For('UPDATE', Table('t1'), Table('t2'), nowait=True)
self.assertEqual(str(for_), 'FOR UPDATE OF "t1", "t2" NOWAIT')
| {
"content_hash": "81ba9f6e2d50d336edfe6840fda871c3",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 70,
"avg_line_length": 27,
"alnum_prop": 0.6378600823045267,
"repo_name": "shnergle/ShnergleServer",
"id": "cf0b940206e6dea8a27195030978dfcf4d44b154",
"size": "1853",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "site-packages/sql/tests/test_for.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "774901"
}
],
"symlink_target": ""
} |
import os
import csv
from person import Person
import urllib2
class Members:
members = {}
def add(self, person):
self.members[person.name] = person
def parse_tsv(self, string):
lines = iter(string.splitlines())
r = csv.DictReader(lines, delimiter='\t')
for row in r:
person = Person(row)
template = os.environ.get('PHOTO_URL_TEMPLATE')
if template:
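                # PHOTO_URL_TEMPLATE is expected to contain a %NAME%
                # placeholder, replaced here with the member's photo field.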
person.photo_url = str.replace(os.environ.get('PHOTO_URL_TEMPLATE'), '%NAME%', person.photo)
self.add(person)
return self
def load(self, resource):
        if resource.startswith('http'):
response = urllib2.urlopen(resource)
tsv = response.read()
return self.parse_tsv(tsv)
else:
return self.parse_tsv(open(resource).read())
| {
"content_hash": "3a8bf8f7bf6717c4142737a1834b8868",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 108,
"avg_line_length": 29.517241379310345,
"alnum_prop": 0.5794392523364486,
"repo_name": "LandRegistry/team-dashboard",
"id": "c67c6cb06ad6d9bec3924a40493a869f38c65cc3",
"size": "856",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "team/members.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1580"
},
{
"name": "Python",
"bytes": "11682"
},
{
"name": "Shell",
"bytes": "429"
}
],
"symlink_target": ""
} |
from huxley.api.tests import (DestroyAPITestCase, ListAPITestCase,
PartialUpdateAPITestCase, RetrieveAPITestCase)
from huxley.core.models import School
from huxley.utils.test import TestSchools, TestUsers
class SchoolDetailGetTestCase(RetrieveAPITestCase):
url_name = 'api:school_detail'
def test_anonymous_user(self):
'''It should reject request from an anonymous user.'''
school = TestSchools.new_school()
response = self.get_response(school.id)
self.assertNotAuthenticated(response)
def test_self(self):
'''It should allow the get request from the user.'''
school = TestSchools.new_school()
self.client.login(username=school.advisor.username, password='test')
response = self.get_response(school.id)
self.assertEqual(response.data, {
'id': school.id,
'registered': school.registered.isoformat(),
'name': school.name,
'address': school.address,
'city': school.city,
'state': school.state,
'zip_code': school.zip_code,
'country': school.country,
'primary_name': school.primary_name,
'primary_gender': school.primary_gender,
'primary_email': school.primary_email,
'primary_phone': school.primary_phone,
'primary_type': school.primary_type,
'secondary_name': school.secondary_name,
'secondary_gender': school.secondary_gender,
'secondary_email': school.secondary_email,
'secondary_phone': school.secondary_phone,
'secondary_type': school.secondary_type,
'program_type': school.program_type,
'times_attended': school.times_attended,
'international': school.international,
'waitlist': school.waitlist,
'beginner_delegates':school.beginner_delegates,
'intermediate_delegates': school.intermediate_delegates,
'advanced_delegates': school.advanced_delegates,
'spanish_speaking_delegates': school.spanish_speaking_delegates,
'country_preferences': school.country_preference_ids,
'committeepreferences': list(school.committeepreferences.all()),
'registration_comments': school.registration_comments,
'fees_owed': float(school.fees_owed),
'fees_paid': float(school.fees_paid),
})
def test_other_user(self):
        '''It should not allow a GET request from another user.'''
school = TestSchools.new_school()
TestUsers.new_user(username='user2', password='user2')
self.client.login(username='user2', password='user2')
response = self.get_response(school.id)
self.assertPermissionDenied(response)
def test_superuser(self):
        '''It should allow a GET request from a superuser.'''
school = TestSchools.new_school()
TestUsers.new_superuser(username='user1', password='user1')
self.client.login(username='user1', password='user1')
response = self.get_response(school.id)
self.assertEqual(response.data, {
'id': school.id,
'registered': school.registered.isoformat(),
'name': school.name,
'address': school.address,
'city': school.city,
'state': school.state,
'zip_code': school.zip_code,
'country': school.country,
'primary_name': school.primary_name,
'primary_gender': school.primary_gender,
'primary_email': school.primary_email,
'primary_phone': school.primary_phone,
'primary_type': school.primary_type,
'secondary_name': school.secondary_name,
'secondary_gender': school.secondary_gender,
'secondary_email': school.secondary_email,
'secondary_phone': school.secondary_phone,
'secondary_type': school.secondary_type,
'program_type': school.program_type,
'times_attended': school.times_attended,
'international': school.international,
'waitlist': school.waitlist,
'beginner_delegates': school.beginner_delegates,
'intermediate_delegates': school.intermediate_delegates,
'advanced_delegates': school.advanced_delegates,
'spanish_speaking_delegates': school.spanish_speaking_delegates,
'country_preferences': school.country_preference_ids,
'committeepreferences': list(school.committeepreferences.all()),
'registration_comments': school.registration_comments,
'fees_owed': float(school.fees_owed),
'fees_paid': float(school.fees_paid),
})
class SchoolDetailPatchTestCase(PartialUpdateAPITestCase):
url_name = 'api:school_detail'
params = {'name': 'name', 'city': 'city'}
def setUp(self):
self.school = TestSchools.new_school()
self.user = self.school.advisor
def test_anonymous_user(self):
        '''An anonymous user should not be able to update a school.'''
response = self.get_response(self.school.id)
updated_school = School.objects.get(id=self.school.id)
self.assertNotAuthenticated(response)
self.assertEqual(updated_school.name, self.school.name)
self.assertEqual(updated_school.city, self.school.city)
def test_self(self):
        '''It should allow the school's advisor to update their own school.'''
self.client.login(username=self.user.username, password='test')
response = self.get_response(self.school.id, params=self.params)
self.school = School.objects.get(id=self.school.id)
self.assertEqual(response.data['name'], self.school.name)
self.assertEqual(response.data['city'], self.school.city)
def test_other_user(self):
        '''It should not allow another user to change a school's data.'''
TestUsers.new_user(username='user2', password='user2')
self.client.login(username='user2', password='user2')
response = self.get_response(self.school.id, params=self.params)
updated_school = School.objects.get(id=self.school.id)
self.assertPermissionDenied(response)
self.assertEqual(updated_school.name, self.school.name)
self.assertEqual(updated_school.city, self.school.city)
def test_superuser(self):
'''This should allow a superuser to change school data.'''
TestUsers.new_superuser(username='user2', password='user2')
self.client.login(username='user2', password='user2')
response = self.get_response(self.school.id, params=self.params)
self.school = School.objects.get(id=self.school.id)
self.assertEqual(response.data['name'], self.school.name)
self.assertEqual(response.data['city'], self.school.city)
class SchoolDetailDeleteTestCase(DestroyAPITestCase):
url_name = 'api:school_detail'
def setUp(self):
self.school = TestSchools.new_school()
self.user = self.school.advisor
def test_anonymous_user(self):
'''Should not be able to delete anonymously.'''
response = self.get_response(self.school.id)
self.assertNotAuthenticated(response)
self.assertTrue(School.objects.filter(id=self.school.id).exists())
def test_self(self):
        '''A user should be able to delete their own school.'''
self.client.login(username=self.user.username, password='test')
response = self.get_response(self.school.id)
self.assertEqual(response.data, None)
self.assertFalse(School.objects.filter(id=self.school.id).exists())
def test_other_user(self):
        '''A user should not be able to delete another user's school.'''
TestUsers.new_user(username='user2', password='user2')
self.client.login(username='user2', password='user2')
response = self.get_response(self.school.id)
self.assertPermissionDenied(response)
self.assertTrue(School.objects.filter(id=self.school.id).exists())
def test_superuser(self):
        '''A superuser should not be able to delete another user's school.'''
        TestUsers.new_superuser(username='user2', password='user2')
self.client.login(username='user2', password='user2')
response = self.get_response(self.school.id)
self.assertPermissionDenied(response)
self.assertTrue(School.objects.filter(id=self.school.id).exists())
class SchoolListGetTestCase(ListAPITestCase):
url_name = 'api:school_list'
def setUp(self):
self.school = TestSchools.new_school()
self.user = self.school.advisor
def test_anonymous_user(self):
'''It should reject an anonymous user.'''
response = self.get_response()
self.assertMethodNotAllowed(response, 'GET')
def test_self(self):
        '''It should reject a GET request even from an authenticated user.'''
self.client.login(username='testuser', password='test')
response = self.get_response()
self.assertMethodNotAllowed(response, 'GET')
def test_superuser(self):
'''It should reject a request from a superuser.'''
TestUsers.new_superuser(username='user', password='user')
self.client.login(username='user', password='user')
response = self.get_response()
self.assertMethodNotAllowed(response, 'GET')
| {
"content_hash": "70c50a2bb9ff4a638907a2b7f7905817",
"timestamp": "",
"source": "github",
"line_count": 226,
"max_line_length": 76,
"avg_line_length": 41.80530973451327,
"alnum_prop": 0.6436282811176969,
"repo_name": "ctmunwebmaster/huxley",
"id": "a4d3d780b0f9688df842cda599cc6feb58a73ab7",
"size": "9597",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "huxley/api/tests/school/test_school.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "20631"
},
{
"name": "HTML",
"bytes": "5628"
},
{
"name": "JavaScript",
"bytes": "87792"
},
{
"name": "Python",
"bytes": "191860"
},
{
"name": "Shell",
"bytes": "2855"
}
],
"symlink_target": ""
} |
"""Tasks related to format"""
import os
import subprocess
from artman.tasks import task_base
from artman.tasks.requirements import go_requirements
from artman.tasks.requirements import php_requirements
from artman.utils import task_utils
from artman.utils.logger import logger
# TODO: Store both intermediate and final output in all format tasks.
class JavaFormatTask(task_base.TaskBase):
def execute(self, gapic_code_dir, toolkit_path):
logger.info('Formatting files in %s.' %
os.path.abspath(gapic_code_dir))
# TODO(shinfan): Move gradle task into requirement
path = task_utils.get_gradle_task_output(
'showJavaFormatterPath', toolkit_path)
targetFiles = []
for root, dirs, files in os.walk(gapic_code_dir):
for filename in files:
if filename.endswith('.java'):
targetFile = os.path.abspath(os.path.join(root, filename))
targetFiles.append(targetFile)
self.exec_command(
['java', '-jar', path, '--replace'] + targetFiles)
def validate(self):
return []
class PythonFormatTask(task_base.TaskBase):
def execute(self, gapic_code_dir):
logger.info('Formatting files in %s.' %
os.path.abspath(gapic_code_dir))
targetFiles = []
for root, dirs, files in os.walk(gapic_code_dir):
for filename in files:
if filename.endswith('.py'):
targetFile = os.path.abspath(os.path.join(root, filename))
targetFiles.append(targetFile)
# yapf returns code 2 when it formats, so we can't use `check_call`.
exit_code = subprocess.call(['yapf', '-i'] + targetFiles)
if exit_code not in [0, 2]:
raise subprocess.CalledProcessError(exit_code, 'yapf')
# yapf is installed by tox for the entire pipeline project's virtualenv,
# so we shouldn't need a separate validation task.
def validate(self):
return []
class GoFormatTask(task_base.TaskBase):
def execute(self, gapic_code_dir):
logger.info('Formatting files in %s.' %
os.path.abspath(gapic_code_dir))
self.exec_command(['gofmt', '-w', gapic_code_dir])
def validate(self):
return [go_requirements.GoFormatRequirements]
class PhpFormatTask(task_base.TaskBase):
def execute(self, gapic_code_dir):
abs_code_dir = os.path.abspath(gapic_code_dir)
logger.info('Formatting file using php-cs-fixer in %s.' % abs_code_dir)
subprocess.call(['php-cs-fixer', 'fix', gapic_code_dir])
# We require a second call to php-cs-fixer because instances of @type
# have been converted to @var. We cannot disable this conversion in
# the first call without affecting other aspects of the formatting.
subprocess.call(['php-cs-fixer', 'fix', gapic_code_dir,
'--fixers=phpdoc_var_to_type'])
logger.info('Formatting file using phpcbf in %s.' % abs_code_dir)
subprocess.call(['phpcbf', '--standard=PSR2', '--no-patch',
gapic_code_dir])
def validate(self):
return [php_requirements.PhpFormatRequirements]
_FORMAT_TASK_DICT = {
'java': JavaFormatTask,
'python': PythonFormatTask,
'go': GoFormatTask,
'php': PhpFormatTask,
}
def get_format_task(language):
return _FORMAT_TASK_DICT.get(language, task_base.EmptyTask)
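# Minimal usage sketch (the language keys are illustrative):
#
#   task_cls = get_format_task('python')   # -> PythonFormatTask
#   task_cls = get_format_task('ruby')     # unknown language -> task_base.EmptyTask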
| {
"content_hash": "236ce617b436ac1b0059fb70415948d1",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 79,
"avg_line_length": 37.53763440860215,
"alnum_prop": 0.6299054712116872,
"repo_name": "shinfan/artman",
"id": "a4c6e5f765c7851cfba24137e51bf824ee928a2d",
"size": "4084",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "artman/tasks/format_tasks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Protocol Buffer",
"bytes": "40829"
},
{
"name": "Python",
"bytes": "316278"
}
],
"symlink_target": ""
} |
from language import pluralize
import termcolor
def colored(msg, color):
if color == 'term':
return msg
return termcolor.colored(msg, color)
class OutputWriter(object):
def __init__(self, stream, language, should_be_colored=True):
self._stream = stream
self._language = language
self._should_be_colored = should_be_colored
def _colored(self, msg, color):
if self._should_be_colored:
return colored(msg, color)
return msg
def _output_problems_info(self, problems, problem_type, color='red'):
if not problems:
return
self._stream.write(self._colored('\n %ss:\n' %
self._language[problem_type],
color))
for problem in problems:
self._stream.write(self._colored('%s\n' % problem,
color))
def _output_step_line(self, step_name, message, status, color):
self._stream.write(self._colored(' %s %s ... %s\n' % (
self._language[step_name].capitalize(),
message,
self._language[status].upper()),
color))
def output_ok_step_line(self, step_name, message, color='green'):
self._output_step_line(step_name, message, 'ok', color)
def output_pending_step_line(self, step_name, message, color='blue'):
self._output_step_line(step_name, message, 'pending', color)
def output_fail_step_line(self, step_name, message, color='red'):
self._output_step_line(step_name, message, 'fail', color)
def output_error_step_line(self, step_name, message, color='red'):
self._output_step_line(step_name, message, 'error', color)
def output_failures_info(self, problems, color='red'):
self._output_problems_info(problems, 'failure', color)
def output_errors_info(self, problems, color='red'):
self._output_problems_info(problems, 'error', color)
def output_statistics(self, number_of_scenarios,
number_of_failures,
number_of_errors,
number_of_pendings,
color='white'):
scenario_word = pluralize(self._language['scenario'],
number_of_scenarios).lower()
failure_word = pluralize(self._language['failure'],
number_of_failures).lower()
error_word = pluralize(self._language['error'],
number_of_errors).lower()
step_word = pluralize(self._language['step'],
number_of_pendings).lower()
pending_word = self._language['pending'].lower()
steps_pending = pending_word + ' ' + step_word
if self._language['pending'].lower() == 'pendente':
pending_word = pluralize('pendente',
number_of_pendings).lower()
steps_pending = step_word + ' ' + pending_word
ran = self._language['ran'].capitalize()
with_word = self._language['with'].lower()
and_word = self._language['and'].lower()
self._stream.write(self._colored('\n %s\n' % ' '.join(map(str,
[ran,
number_of_scenarios,
scenario_word,
with_word,
number_of_failures,
failure_word+',',
number_of_errors,
error_word,
and_word,
number_of_pendings,
steps_pending,])),
color))
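    # Example summary line (assuming the English language mapping; exact
    # plural forms come from the pluralize helper):
    #   Ran 3 scenarios with 1 failure, 0 errors and 2 pending steps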
| {
"content_hash": "5e29ed33650358c2149a39486266752b",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 77,
"avg_line_length": 45.795698924731184,
"alnum_prop": 0.45550598732096736,
"repo_name": "hltbra/pyhistorian",
"id": "30d38202b693e1b5d7d55618e83583c450389368",
"size": "4259",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyhistorian/output.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "79586"
}
],
"symlink_target": ""
} |
"""PT: PyTorch frontend."""
import itertools
import logging
import sys
import numpy as np
import tvm
from tvm.topi.util import get_const_tuple
from .. import analysis as _analysis
from .. import expr as _expr
from .. import op as _op
from ..ty import TupleType, TensorType, Any
from ..loops import while_loop
from .. import transform
from .common import AttrCvt, get_relay_op
from .common import infer_shape as _infer_shape
from .common import infer_value as _infer_value
from .common import try_infer_value
from .common import infer_value_simulated as _infer_value_simulated
from .common import infer_type as _infer_type
from ..prelude import Prelude, StaticTensorArrayOps
from . import qnn_torch
__all__ = ["from_pytorch"]
def _is_version_greater_than(ver):
import torch
from packaging import version
# Torch version > 1.4 changed upsampling API
return version.parse(torch.__version__) > version.parse(ver)
# List ADT utilities
def _infer_type_with_prelude(val, prelude):
body = _infer_type(val, prelude.mod)
return body.checked_type
def _convert_to_list_adt(py_lst, prelude):
elem_tys = [_infer_type_with_prelude(elem, prelude) for elem in py_lst]
msg = "List elements should have identical types"
assert all(map(lambda ty: ty == elem_tys[0], elem_tys)), msg
    # get_type returns type_name, ctor1, ..., ctorN;
    # for List the constructors are cons and nil
_, cons, nil = prelude.mod.get_type("List")
adt_lst = nil()
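    # e.g. [a, b, c] builds cons(a, cons(b, cons(c, nil)))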
for elem in reversed(py_lst):
adt_lst = cons(elem, adt_lst)
return adt_lst
def _map_tensor_array_constructor(adt_lst, prelude, shape):
static_tensor_array_ops = StaticTensorArrayOps(prelude, "float32", shape)
static_tensor_array_ops.register()
tensor_create = prelude.get_tensor_ctor_static("tensor_constructor", "float32", shape)
return prelude.map(tensor_create, adt_lst)
def _convert_to_tensor_array(adt_lst, prelude):
_, cons, nil = prelude.mod.get_type("List")
if prelude.length(adt_lst) == 0:
return nil()
checked_type = _infer_type_with_prelude(prelude.hd(adt_lst), prelude)
shape = checked_type.shape
tensor_array = _map_tensor_array_constructor(adt_lst, prelude, shape)
return tensor_array, tuple(shape)
def _should_construct_dynamic_list(list_construct_node):
# if this list is element-accessed or modified at runtime, generate List ADT
def inplace_add_to_add(op_name):
if op_name == "aten::add_":
return "aten::add"
else:
return op_name
uses = _get_uses(list_construct_node)
for loop_use in filter(lambda use: use.user.kind() == "prim::Loop", uses):
block_input_index = loop_use.offset - 1
block = list(loop_use.user.blocks())[0]
list_loop_var = list(block.inputs())[block_input_index]
uses += _get_uses(list_loop_var.node())
op_names = map(inplace_add_to_add, set(use.user.kind() for use in uses))
list_ops = set(["aten::add", "aten::__getitem__"])
intersect = list_ops.intersection(op_names)
if len(intersect) > 0 and intersect != set(["aten::add"]):
return True
# if add op outputs list, it is dynamic so we need to construct List ADT
for use in filter(lambda use: use.user.kind() in ["aten::add", "aten::add_"], uses):
output_type = _get_node_type(use.user)
if output_type == "ListType":
return True
return False
def _is_int_seq(seq):
# TODO (t-vi): handle non-int constants? (like numpy.intXX)
return len(seq) > 0 and all([isinstance(i, int) for i in seq])
def _is_quantized_tensor(data, prelude):
# If a quantized Torch module is saved and loaded back, dtype will be dropped
# Since dtypes from Torch tensors are not reliable in such cases, we use
# Relay's type inference result to decide if an input tensor is quantized
ty = _infer_type_with_prelude(data, prelude)
return ty.dtype == "uint8"
# operator implementation
def _elemwise(name):
def _impl(inputs, input_types):
data0, data1 = _pytorch_promote_types(inputs[:2], input_types[:2])
return get_relay_op(name)(data0, data1)
return _impl
def _min_max_common(name_elemwise, name_reduce):
def _impl(inputs, input_types):
if len(inputs) == 1:
data = _pytorch_promote_types(inputs[:1], input_types[:1])
return get_relay_op(name_reduce)(data[0])
elif len(inputs) >= 2 and isinstance(inputs[1], int):
data = _pytorch_promote_types(inputs[:1], input_types[:1])
dim = inputs[1]
keepdims = inputs[2] if len(inputs) > 2 else False
# also return dummy indices
return get_relay_op(name_reduce)(data[0], axis=dim, keepdims=keepdims), None
else:
data0, data1 = _pytorch_promote_types(inputs[:2], input_types[:2])
return get_relay_op(name_elemwise)(data0, data1)
return _impl
def _max():
return _min_max_common("maximum", "max")
def _min():
return _min_max_common("minimum", "min")
def _unary(name):
def _impl(inputs, input_types):
input_type = input_types[0]
# this is just to ensure tensor input
(data,) = _pytorch_promote_types(inputs[:1], input_types[:1])
return get_relay_op(name)(data)
return _impl
def _log1p():
def _impl(inputs, input_types):
        # log1p(x) = log(x + 1)
(dtype,) = input_types
one = _expr.const(1, dtype=dtype)
return _op.log(inputs[0] + one)
return _impl
def _arange():
def _impl(inputs, input_types):
def _get_value(val, dtype):
# dtype is a tvm dtype
if isinstance(val, _expr.Expr):
inp = _op.cast(val, dtype)
ret, _ = try_infer_value(inp, lambda ret: _expr.const(ret, dtype))
else:
ret = _create_typed_const(val, dtype)
return ret
def _get_type(val, inp_type):
if isinstance(val, _expr.Expr):
dtype = str(_infer_type(val).checked_type)
return dtype
return inp_type
# PyTorch arange uses the following type semantics:
# - if a dtype is given, start, stop, step are converted to that dtype
# - if no dtype is given and all args are integral, dtype is int64
# - if no dtype is given and there is a float arg, dtype is float32
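        # e.g. torch.arange(5) yields int64, torch.arange(0.0, 5.0) yields
        # float32, and an explicit dtype argument always wins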
if len(inputs) == 5:
dtype0 = _get_type(inputs[0], input_types[0])
if inputs[1] is not None:
dtype = _convert_dtype_value(inputs[1])
elif dtype0.startswith("float"):
dtype = "float32"
else:
dtype = "int64"
start = _expr.const(0, dtype)
stop = _get_value(inputs[0], dtype)
step = _expr.const(1, dtype)
elif len(inputs) == 7:
types = [_get_type(inputs[i], input_types[i]) for i in range(3)]
if inputs[3] is not None:
dtype = _convert_dtype_value(inputs[3])
elif any([t.startswith("float") for t in types]):
dtype = "float32"
else:
dtype = "int64"
start = _get_value(inputs[0], dtype)
stop = _get_value(inputs[1], dtype)
step = _get_value(inputs[2], dtype)
else:
msg = "Unknown number of arguments (%d) to parse." % (len(inputs))
raise AssertionError(msg)
return _op.transform.arange(start=start, stop=stop, step=step, dtype=dtype)
return _impl
def _squeeze():
def _impl(inputs, input_types):
data = inputs[0]
if len(inputs) == 1:
axis = None
else:
# TODO (t-vi): why is the cast to int needed? similarly elsewhere
axis = [int(inputs[1])]
return _op.transform.squeeze(data, axis)
return _impl
def _unsqueeze():
def _impl(inputs, input_types):
data = inputs[0]
axis = inputs[1]
return _op.transform.expand_dims(data, int(axis), 1)
return _impl
def _concatenate(prelude):
def tensor_array_concat(lst, axis):
assert axis == 0, "Tensor array concat supported only for axis 0"
tensor_array, shape = _convert_to_tensor_array(lst, prelude)
concat_shape = (Any(),) + shape[1:]
concat = prelude.get_global_var_static("tensor_array_concat", "float32", shape)
concatenated = concat(tensor_array)
static_tensor_array_ops = StaticTensorArrayOps(prelude, "float32", concat_shape)
static_tensor_array_ops.register()
get_tensor = prelude.get_global_var_static("tensor_get_data", "float32", concat_shape)
return get_tensor(concatenated)
def _impl(inputs, input_types):
data = inputs[0]
axis = inputs[1]
if not isinstance(data, list):
return tensor_array_concat(data, axis)
if isinstance(data, _expr.Expr):
data = [data]
return _op.tensor.concatenate(data, int(axis))
return _impl
def _slice():
def _impl(inputs, input_types):
axis_dtype = "int64"
index_size_limit = 2 ** 63 - 1
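        # TorchScript encodes open-ended slices like x[i:] with an end index of
        # 2**63 - 1 (max int64); such ends are clamped to the dim size below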
data = inputs[0]
dshape = _infer_shape(data)
ndim = len(dshape)
end = []
for dim in dshape:
if isinstance(dim, tvm.tir.Any):
end = _op.shape_of(data)
break
end.append(int(dim))
begin = [0] * ndim
dim = int(inputs[1])
stride = int(inputs[4])
if isinstance(inputs[2], _expr.Call):
begin[dim], _ = try_infer_value(inputs[2], lambda ret: np.asscalar(ret.astype(np.int)))
else:
begin[dim] = int(inputs[2])
# Process begin
if not isinstance(begin[dim], int):
tmp = []
for b in begin:
if isinstance(b, int):
tmp.append(_op.expand_dims(_expr.const(b, axis_dtype), axis=0))
else:
tmp.append(_op.cast(_op.expand_dims(b, axis=0), axis_dtype))
begin = _op.concatenate(tmp, axis=0)
btype = _infer_type(begin).checked_type.dtype
if str(btype) != axis_dtype:
begin = _op.cast(begin, axis_dtype)
if isinstance(inputs[3], str) and inputs[3].isdigit():
target_end = int(inputs[3])
else:
if isinstance(inputs[3], _expr.Expr):
target_end, _ = try_infer_value(
inputs[3], lambda ret: np.asscalar(ret.astype(np.int))
)
else:
target_end = inputs[3]
if isinstance(target_end, int) and target_end >= index_size_limit:
# Quick path for original data.
if (
isinstance(begin, _expr.Constant)
and begin.data.asnumpy().tolist()[dim] == 0
and stride == 1
):
return data
target_end = dshape[dim]
# Process end
if isinstance(target_end, int):
if isinstance(end, list):
end[dim] = target_end
else:
all_static = True
for i, shape_dim in enumerate(dshape):
if i != dim and isinstance(shape_dim, tvm.tir.Any):
all_static = False
if all_static:
end = list(get_const_tuple(dshape))
end[dim] = target_end
else:
target_end = _expr.const(target_end)
end = _op.scatter(
end,
_op.expand_dims(_expr.const(dim), axis=0),
_op.expand_dims(target_end, axis=0),
axis=0,
)
else:
end = _op.cast(_op.shape_of(data), axis_dtype)
if not isinstance(target_end, tvm.tir.Any):
ttype = _infer_type(target_end).checked_type.dtype
if str(ttype) != axis_dtype:
target_end = _op.cast(target_end, axis_dtype)
end = _op.scatter(
end,
_op.expand_dims(_expr.const(dim), axis=0),
_op.expand_dims(target_end, axis=0),
axis=0,
)
if not isinstance(end, list):
etype = _infer_type(end).checked_type.dtype
if str(etype) != axis_dtype:
end = _op.cast(end, axis_dtype)
strides = [1] * ndim
strides[dim] = int(inputs[4])
return _op.transform.strided_slice(
data, begin=begin, end=end, strides=strides, slice_mode="end"
)
return _impl
def _split():
def _impl(inputs, input_types):
data = inputs[0]
split_size = int(inputs[1])
dim = int(inputs[2])
split_index = split_size
indices = []
while split_index < _infer_shape(data)[dim]:
indices.append(split_index)
split_index += split_size
return _op.split(data, indices, dim)
return _impl
def _split_with_sizes():
def _impl(inputs, input_types):
data = inputs[0]
sections = inputs[1]
dim = int(inputs[2])
if len(sections) == 1:
# a special case used in torchvision detection models
return _expr.TupleWrapper(_expr.Tuple([data]), 1)
split_index = 0
indices = []
for i in range(len(sections) - 1):
index, _ = try_infer_value(sections[i], lambda ret: int(ret))
split_index += index
indices.append(split_index)
return _op.split(data, indices, dim)
return _impl
def _select():
def _impl(inputs, input_types):
data = inputs[0]
dim = int(inputs[1])
index = _wrap_const(inputs[2])
return _op.transform.take(data, index, axis=dim)
return _impl
def _take():
def _impl(inputs, input_types):
data = inputs[0]
indices = _op.cast(inputs[1], "int32")
return _op.transform.take(data, indices=indices)
return _impl
def _topk():
def _impl(inputs, input_types):
data = inputs[0]
axis = int(inputs[2])
is_ascend = not bool(inputs[3])
sort = bool(inputs[4])
if isinstance(inputs[1], _expr.Expr):
k, _ = try_infer_value(inputs[1], lambda ret: ret.tolist())
else:
k = inputs[1]
if not sort:
msg = "Currently supports only sorted output for topk operator."
raise AssertionError(msg)
outs = _op.topk(data, k=k, axis=axis, is_ascend=is_ascend, ret_type="both", dtype="int64")
return outs[0], outs[1]
return _impl
def _reciprocal():
def _impl(inputs, input_types):
data = inputs[0]
return _expr.const(1.0, dtype=input_types[0]) / data
return _impl
def _repeat():
def _impl(inputs, input_types):
data = inputs[0]
reps = inputs[1]
return _op.transform.tile(data, reps=reps)
return _impl
def _repeat_interleave():
def _impl(inputs, input_types):
data = inputs[0]
if isinstance(inputs[1], int):
repeats = inputs[1]
axis = inputs[2]
else:
msg = "Only repeat with one value as repeat is currently supported."
raise AssertionError(msg)
if axis is None: # Flatten the data if no axis is given from torch
data = _op.transform.reshape(data, [-1])
axis = 0
return _op.transform.repeat(data, repeats=repeats, axis=axis)
return _impl
def _addcdiv():
def _impl(inputs, input_types):
data, t1, t2, c = _pytorch_promote_types(inputs[:4], input_types[:4])
return data + (c * (t1 / t2))
return _impl
def _addcmul():
def _impl(inputs, input_types):
data, t1, t2, c = _pytorch_promote_types(inputs[:4], input_types[:4])
return data + (c * (t1 * t2))
return _impl
def _where():
def _impl(inputs, input_types):
if len(inputs) == 1:
return _nonzero(False)([inputs[0], True], input_types)
cond = inputs[0]
x, y = _pytorch_promote_types(inputs[1:3], input_types[1:3])
return _op.where(cond, x, y)
return _impl
def _full_impl(data, fill_value, dtype):
size = []
need_reshape = False
new_shape = []
for dim in data:
if isinstance(dim, _expr.Expr):
if isinstance(dim, _expr.Constant):
dim = int(dim.data.asnumpy())
if isinstance(size, list):
size.append(dim)
new_shape.append(dim)
else:
dim, success = try_infer_value(dim, lambda ret: int(ret), lambda: 0)
new_shape.append(dim)
if success:
if isinstance(size, list):
size.append(dim)
else:
size = None
need_reshape = True
else:
if isinstance(size, list):
size.append(dim)
new_shape.append(dim)
if size is None:
tmp = []
for dim in data:
tmp.append(_op.cast(_op.expand_dims(dim, axis=0), "int64"))
size = _op.concatenate(tmp, axis=0)
out = _op.full(_expr.const(fill_value), size, dtype=dtype)
if need_reshape:
out = _op.reshape(out, new_shape)
return out
def _ones(default_dtype):
def _impl(inputs, input_types):
data = inputs[0]
import torch
if not isinstance(data, (_expr.Expr, list, torch.Tensor, np.ndarray)):
msg = "Data type %s could not be parsed in ones op" % (type(data))
raise AssertionError(msg)
if inputs[1] is not None:
dtype = _convert_dtype_value(inputs[1])
else:
dtype = default_dtype
return _full_impl(data, 1, dtype)
return _impl
def _ones_like(default_dtype):
def _impl(inputs, input_types):
data = inputs[0]
out = _op.ones_like(data)
# If the input and the output datatype is different, do a cast
if inputs[1] is not None:
dtype = _convert_dtype_value(inputs[1])
else:
dtype = default_dtype
if input_types[0] != dtype:
out = _op.cast(out, dtype)
return out
return _impl
def _zeros(default_dtype):
def _impl(inputs, input_types):
data = inputs[0]
import torch
if not isinstance(data, (_expr.Expr, list, torch.Tensor, np.ndarray)):
msg = "Data type %s could not be parsed in zeros op" % (type(data))
raise AssertionError(msg)
if inputs[1] is not None:
dtype = _convert_dtype_value(inputs[1])
else:
dtype = default_dtype
return _full_impl(data, 0, dtype)
return _impl
def _zeros_like(default_dtype):
def _impl(inputs, input_types):
data = inputs[0]
out = _op.zeros_like(data)
# If the input and the output datatype is different, do a cast
if inputs[1] is not None:
dtype = _convert_dtype_value(inputs[1])
else:
dtype = default_dtype
        if input_types[0] != dtype:
out = _op.cast(out, dtype)
return out
return _impl
def _full(default_dtype):
def _impl(inputs, input_types):
data = inputs[0]
fill_value = inputs[1]
import torch
if not isinstance(data, (_expr.Expr, list, torch.Tensor, np.ndarray)):
msg = "Data type %s could not be parsed in full op" % (type(data))
raise AssertionError(msg)
if inputs[2] is not None: # dtype given
dtype = _convert_dtype_value(inputs[2])
else:
# if dtype is None, torch uses a global default set by torch.set_default_tensor_type()
dtype = default_dtype
return _full_impl(data, fill_value, dtype)
return _impl
def _full_like(default_dtype):
def _impl(inputs, input_types):
data = inputs[0]
fill_value = inputs[1]
out = _op.full_like(data, _expr.const(fill_value))
# If the input and the output datatype is different, do a cast
if inputs[2] is not None: # dtype given
dtype = _convert_dtype_value(inputs[2])
else:
# if dtype is None, torch uses a global default set by torch.set_default_tensor_type()
dtype = default_dtype
        if input_types[0] != dtype:
out = _op.cast(out, dtype)
return out
return _impl
def _linspace():
def _impl(inputs, input_types):
start = inputs[0]
stop = inputs[1]
step = inputs[2]
        # inputs[2] is the number of points; derive the spacing between values
if step != 1:
step = (stop - start) / (step - 1)
stop = stop + step
else:
stop = start + step
dtype = "float32" if inputs[3] is not None else _convert_dtype_value(inputs[3])
start = _create_typed_const(start, dtype)
stop = _create_typed_const(stop, dtype)
step = _create_typed_const(step, dtype)
return _op.transform.arange(start=start, stop=stop, step=step, dtype=dtype)
return _impl
def _relu(prelude):
def _impl(inputs, input_types):
data = inputs[0]
if _is_quantized_tensor(data, prelude):
assert len(inputs) == 3, "Input quant param not found in op inputs"
input_zero_point = _expr.const(inputs[2], dtype="int32")
return qnn_torch.quantized_relu(data, input_zero_point)
return _op.nn.relu(data)
return _impl
def _prelu():
def _impl(inputs, input_types):
data = inputs[0]
alpha = inputs[1]
return _op.nn.prelu(data, alpha)
return _impl
def _leaky_relu():
def _impl(inputs, input_types):
data = inputs[0]
alpha = float(inputs[1])
return _op.nn.leaky_relu(data, alpha)
return _impl
def _elu():
def _impl(inputs, input_types):
data = inputs[0]
dtype = input_types[0]
        # elu(x) = x if x > 0 else alpha * (exp(x) - 1), so the relu-based
        # form below needs the negated alpha
        alpha = _expr.const(-float(inputs[1]), dtype=dtype)
        return alpha * _op.nn.relu(_expr.const(1, dtype=dtype) - _op.exp(data)) + _op.nn.relu(data)
return _impl
def _celu():
def _impl(inputs, input_types):
data = inputs[0]
dtype = input_types[0]
        alpha = _expr.const(float(inputs[1]), dtype=dtype)
        # celu(x) = max(0, x) + min(0, alpha * (exp(x / alpha) - 1)),
        # i.e. -alpha * relu(1 - exp(x / alpha)) + relu(x)
        return -alpha * _op.nn.relu(
            _expr.const(1, dtype=dtype) - _op.exp(data / alpha)
        ) + _op.nn.relu(data)
return _impl
def _gelu():
def _impl(inputs, input_types):
data = inputs[0]
dtype = input_types[0]
# gelu is data * normcdf(data)
# normcdf expressed as erf because we don't currently have that intrinsic
# note that there is also a fastgelu variant approximating normcdf
# with tanh and third order polynomials, but this is "true" gelu
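        # equivalently: gelu(x) = x * 0.5 * (1 + erf(x / sqrt(2)))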
return data * (
_expr.const(0.5, dtype=dtype)
+ _op.erf(data * _expr.const(0.5 ** 0.5, dtype=dtype)) * _expr.const(0.5, dtype=dtype)
)
return _impl
def _selu():
def _impl(inputs, input_types):
data = inputs[0]
# https://pytorch.org/docs/stable/nn.html#selu
dtype = input_types[0]
alpha = _expr.const(-1.6732632423543772848170429916717, dtype=dtype)
gamma = _expr.const(1.0507009873554804934193349852946, dtype=dtype)
return gamma * (
alpha * _op.nn.relu(_expr.const(1.0, dtype=dtype) - _op.exp(data)) + _op.nn.relu(data)
)
return _impl
def _log_sigmoid():
def _impl(inputs, input_types):
data = inputs[0]
return _op.log(_op.tensor.sigmoid(data))
return _impl
def _adaptive_avg_pool_2d(prelude):
def _impl(inputs, input_types):
data = inputs[0]
output_size = inputs[1]
def func(x):
return _op.nn.adaptive_avg_pool2d(x, output_size=output_size)
if _is_quantized_tensor(data, prelude):
return qnn_torch.apply_with_upcast(data, func)
return func(data)
return _impl
def _adaptive_max_pool_2d():
def _impl(inputs, input_types):
data = inputs[0]
output_size = inputs[1]
# returns dummy indices too
return _op.nn.adaptive_max_pool2d(data, output_size=output_size), None
return _impl
def _adaptive_max_pool_3d():
def _impl(inputs, input_types):
data = inputs[0]
output_size = inputs[1]
# returns dummy indices too
return _op.nn.adaptive_max_pool3d(data, output_size=output_size), None
return _impl
def _adaptive_avg_pool_3d():
def _impl(inputs, input_types):
data = inputs[0]
output_size = inputs[1]
return _op.nn.adaptive_avg_pool3d(data, output_size=output_size)
return _impl
def _maxpool_2d():
def _impl(inputs, input_types):
data = inputs[0]
pool_size = inputs[1]
strides = inputs[2] if inputs[2] else pool_size
padding = inputs[3]
dilation = inputs[4]
ceil_mode = int(inputs[5])
if dilation != [1, 1]:
msg = "MaxPool2d with dilation %s is not implemented" % (str(dilation))
raise NotImplementedError(msg)
return _op.nn.max_pool2d(data, pool_size, strides, padding, "NCHW", ceil_mode)
return _impl
def _maxpool_2d_with_indices():
def _impl(inputs, input_types):
# returns dummy indices too
return _maxpool_2d()(inputs, input_types), None
return _impl
def _maxpool_1d():
def _impl(inputs, input_types):
data = inputs[0]
pool_size = inputs[1]
strides = inputs[2] if inputs[2] else pool_size
padding = inputs[3]
dilation = inputs[4]
ceil_mode = int(inputs[5])
if dilation != [1]:
msg = "MaxPool1d with dilation %s is not implemented" % (str(dilation))
raise NotImplementedError(msg)
return _op.nn.max_pool1d(data, pool_size, strides, padding, "NCW", ceil_mode)
return _impl
def _maxpool_3d():
def _impl(inputs, input_types):
data = inputs[0]
pool_size = inputs[1]
strides = inputs[2] if inputs[2] else pool_size
padding = inputs[3]
dilation = inputs[4]
ceil_mode = int(inputs[5])
if dilation != [1, 1, 1]:
msg = "MaxPool3d with dilation %s is not implemented" % (str(dilation))
raise NotImplementedError(msg)
return _op.nn.max_pool3d(
data, pool_size=pool_size, strides=strides, padding=padding, ceil_mode=ceil_mode
)
return _impl
def _hardtanh():
def _impl(inputs, input_types):
a = inputs[0]
tanh_min = float(inputs[1])
tanh_max = float(inputs[2])
return _op.tensor.clip(a, tanh_min, tanh_max)
return _impl
def _convolution():
def _impl(inputs, input_types):
# Use transpose or normal
        use_transpose = inputs[6] == 1
data = inputs[0]
weight = inputs[1]
bias = inputs[2]
strides = tuple(inputs[3])
padding = tuple(inputs[4])
dilation = tuple(inputs[5])
if isinstance(weight, _expr.Expr):
inferred_shape = _infer_shape(weight)
weight_shape = []
for infer in inferred_shape:
weight_shape.append(infer)
else:
msg = "Data type %s could not be parsed in conv op" % (type(weight))
raise AssertionError(msg)
# Transposed convolutions have IOHW layout.
if use_transpose:
weight_shape[0], weight_shape[1] = weight_shape[1], weight_shape[0]
channels = weight_shape[0]
groups = int(inputs[8])
# Check if this is depth wise convolution
# We need to reshape weight so that Relay could recognize this is depth wise
# weight_shape[1] is always in_channels // groups
# For depthwise, in_channels == groups, so weight_shape[1] == 1
# If groups > 1 but weight_shape[1] != 1, this is group convolution
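        # e.g. a (64, 1, 3, 3) weight with groups=32 is reshaped to (32, 2, 3, 3)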
if groups > 1 and weight_shape[1] == 1:
channel_multiplier = channels // groups
new_weight_shape = (groups, channel_multiplier) + tuple(weight_shape[2:])
weight = _op.transform.reshape(weight, new_weight_shape)
kernel_size = weight_shape[2:]
use_bias = isinstance(bias, _expr.Expr)
if len(kernel_size) == 1:
strides = (1,) + strides
padding = (0,) + padding
dilation = (1,) + dilation
if use_transpose:
if len(kernel_size) == 3:
conv_op = _op.nn.conv3d_transpose
else:
conv_op = _op.nn.conv2d_transpose
else:
if len(kernel_size) == 3:
conv_op = _op.nn.conv3d
else:
conv_op = _op.nn.conv2d
if len(kernel_size) == 3:
data_layout = "NCDHW"
kernel_layout = "OIDHW"
else:
data_layout = "NCHW"
kernel_layout = "OIHW"
if len(kernel_size) == 1:
data = _op.expand_dims(data, axis=2)
weight = _op.expand_dims(weight, axis=2)
conv_out = conv_op(
data,
weight,
strides=strides,
padding=padding,
dilation=dilation,
groups=groups,
channels=channels,
kernel_size=[1] + kernel_size if len(kernel_size) == 1 else kernel_size,
data_layout=data_layout,
kernel_layout=kernel_layout,
out_layout="",
out_dtype="",
)
if use_bias:
res = _op.nn.bias_add(conv_out, bias)
else:
res = conv_out
if len(kernel_size) == 1:
res = _op.squeeze(res, axis=[2])
return res
return _impl
def _softmax():
def _impl(inputs, input_types):
data = inputs[0]
axis = inputs[1]
if isinstance(axis, str):
axis = int(axis)
return _op.nn.softmax(data, axis=axis)
return _impl
def _threshold():
def _impl(inputs, input_types):
data = inputs[0]
return _op.nn.relu(data)
return _impl
def _contiguous():
def _impl(inputs, input_types):
data = inputs[0]
return _op.tensor.copy(data)
return _impl
def _batch_norm():
def _impl(inputs, input_types):
data = inputs[0]
data_type = input_types[0]
channels = _infer_shape(data)
if isinstance(inputs[1], _expr.Expr) and isinstance(inputs[2], _expr.Expr):
scale = center = True
weight = inputs[1]
beta = inputs[2]
gamma = weight
else:
scale = center = False
if not scale:
gamma = _create_typed_const(np.ones([int(channels[1])]), data_type)
if not center:
beta = _create_typed_const(np.zeros([int(channels[1])]), data_type)
moving_mean = inputs[3]
moving_var = inputs[4]
epsilon = float(inputs[7])
return _op.nn.batch_norm(
data,
gamma,
beta,
moving_mean,
moving_var,
axis=1,
epsilon=epsilon,
center=center,
scale=scale,
)[0]
return _impl
def _instance_norm():
def _impl(inputs, input_types):
data = inputs[0]
data_type = input_types[0]
channels = _infer_shape(data)
if isinstance(inputs[1], _expr.Expr) and isinstance(inputs[2], _expr.Expr):
scale = center = True
weight = inputs[1]
beta = inputs[2]
gamma = weight
else:
scale = center = False
if not scale:
gamma = _create_typed_const(np.ones([int(channels[1])]), data_type)
if not center:
beta = _create_typed_const(np.zeros([int(channels[1])]), data_type)
epsilon = float(inputs[7])
return _op.nn.instance_norm(
data, gamma, beta, axis=1, epsilon=epsilon, center=center, scale=scale
)
return _impl
def _get_dims(data):
import torch
if isinstance(data, _expr.Expr):
dims = _infer_shape(data)
elif isinstance(data, list):
dims = data
elif isinstance(data, (torch.Tensor, np.ndarray)):
dims = data.shape
else:
msg = "Data type %s could not be parsed" % type(data)
raise AssertionError(msg)
return dims
def _layer_norm():
def _impl(inputs, input_types):
data = inputs[0]
ndims = len(_get_dims(inputs[1]))
assert ndims == 1, "Support only normalization over last one dimension."
return _op.nn.layer_norm(
data,
gamma=inputs[2],
beta=inputs[3],
axis=-1,
epsilon=float(inputs[4]),
center=True,
scale=True,
)
return _impl
def _group_norm():
def _impl(inputs, input_types):
data = inputs[0]
gamma = inputs[2]
beta = inputs[3]
num_groups = inputs[1]
epsilon = float(inputs[4])
return _op.nn.group_norm(
data,
gamma=gamma,
beta=beta,
num_groups=num_groups,
axis=1,
epsilon=epsilon,
center=True,
scale=True,
)
return _impl
def _transpose(prelude):
def _impl(inputs, input_types):
data = inputs[0]
import torch
if isinstance(data, _expr.Expr):
ndims = len(_infer_shape(data, prelude.mod))
        elif isinstance(data, list):
            ndims = len(data)
        elif isinstance(data, (torch.Tensor, np.ndarray)):
            ndims = len(data.shape)
else:
msg = "Data type %s could not be parsed in transpose op" % (type(data))
raise AssertionError(msg)
if isinstance(data, tvm.runtime.NDArray):
ndims = len(data.shape)
axes = list(range(ndims))
num_inputs = len(inputs)
if num_inputs == 1:
if ndims >= 2:
axes[-1] = ndims - 2
axes[-2] = ndims - 1
if not isinstance(data, _expr.Expr):
data = _expr.const(data)
elif num_inputs == 3:
parse = lambda i: ndims * (i < 0) + i
src, dst = [parse(int(inputs[i])) for i in [1, 2]]
axes[src] = dst
axes[dst] = src
else:
axes = inputs[1]
return _op.transform.transpose(data, axes)
return _impl
def _flatten():
def _impl(inputs, input_types):
data = inputs[0]
start = int(inputs[1])
end = int(inputs[2])
dshape = get_const_tuple(_infer_shape(data))
ndim = len(dshape)
if end < 0:
end += ndim
new_shape = [0] * start
new_shape.append(-1)
squeeze_axes = []
for i in range(start + 1, end + 1):
new_shape.append(1)
squeeze_axes.append(i)
for _ in range(end + 1, ndim):
new_shape.append(0)
out = _op.reshape(data, new_shape)
if squeeze_axes:
out = _op.squeeze(out, axis=squeeze_axes)
return out
return _impl
def _addmm():
def _impl(inputs, input_types):
input_mat = inputs[0]
mat1 = inputs[1]
data_type = input_types[1]
mat2 = inputs[2]
beta = inputs[3]
alpha = inputs[4]
if not isinstance(alpha, _expr.Expr) and alpha != 1:
alpha = _create_typed_const(alpha, data_type)
mat1 *= alpha
if not isinstance(beta, _expr.Expr) and beta != 1:
beta = _create_typed_const(beta, data_type)
mat2 *= beta
transposed_mat2 = _op.transform.transpose(mat2, axes=[1, 0])
units = _infer_shape(transposed_mat2)[0]
dense_out = _op.nn.dense(mat1, transposed_mat2, units=units)
return dense_out + input_mat
return _impl
def _size(prelude):
def _impl_dynamic(inp, axis):
shape_dynamic = _op.shape_of(inp, dtype="int32")
if axis is not None:
return _op.take(shape_dynamic, _expr.const(axis), 0)
return shape_dynamic
def _impl(inputs, input_types):
shape = _infer_shape(inputs[0], prelude.mod)
axis = None
if len(inputs) > 1:
axis = int(inputs[1])
if any(map(lambda s: isinstance(s, tvm.tir.expr.Any), shape)):
if axis is None or isinstance(shape[axis], tvm.tir.expr.Any):
return _impl_dynamic(inputs[0], axis)
if axis is not None:
return _expr.const(shape[axis])
return _expr.const(shape)
return _impl
def _numtotensor():
def _impl(inputs, input_types):
val = inputs[0]
dtype = input_types[0]
if isinstance(val, _expr.Expr):
return val
if isinstance(val, tvm.tir.IntImm):
val = val.__int__()
dtype = int
arr = val * np.ones([]).astype(dtype)
return arr
return _impl
def _tensortonum():
def _impl(inputs, input_types):
return inputs[0]
return _impl
def _view():
def _impl(inputs, input_types):
data = inputs[0]
if len(inputs) == 3:
shape_inp = [inputs[1], _infer_shape(inputs[2])[0]]
else:
if isinstance(inputs[1], list):
shape_inp = inputs[1]
else:
shape_inp = _infer_shape(inputs[1])
new_shape = shape_inp
for i, shape in enumerate(shape_inp):
if isinstance(shape, _expr.Expr):
val = _infer_value_simulated(shape, {})
new_shape[i] = np.asscalar(val.asnumpy())
return _op.transform.reshape(data, new_shape)
return _impl
def _reshape():
def _impl(inputs, input_types):
data = inputs[0]
new_shape = inputs[1]
tmp_shape = []
is_dyn = False
for s in new_shape:
if isinstance(s, _expr.Constant):
tmp_shape.append(int(s.data.asnumpy()))
elif isinstance(s, _expr.Expr):
dim, success = try_infer_value(s, lambda ret: int(ret))
tmp_shape.append(dim)
if not success:
is_dyn = True
else:
tmp_shape.append(s)
if is_dyn:
new_shape = []
for i, s in enumerate(tmp_shape):
if not isinstance(s, _expr.Expr):
s = _expr.const(s, "int64")
else:
s = _op.cast(s, "int64")
new_shape.append(_op.expand_dims(s, axis=0))
new_shape = _op.concatenate(new_shape, axis=0)
else:
new_shape = tmp_shape
return _op.transform.reshape(data, new_shape)
return _impl
def _pixel_shuffle(prelude):
def _impl(inputs, input_types):
data = inputs[0]
upscale_factor = inputs[1]
upscale_squared = upscale_factor * upscale_factor
b, c, h, w = _infer_shape(data)
assert (
c % upscale_squared == 0
), "input channel should be divisible by square of upscale_factor"
oc = c // upscale_squared
oh = h * upscale_factor
ow = w * upscale_factor
new_shape = [b, oc, upscale_factor, upscale_factor, h, w]
out_shape = [b, oc, oh, ow]
data = _op.transform.reshape(data, new_shape)
# The data will be transposed to
# [b, oc, h, upscale_factor, w, upscale_factor]
# for further reshape
axes = [0, 1, 4, 2, 5, 3]
data = _op.transform.transpose(data, axes)
return _op.transform.reshape(data, out_shape)
return _impl
def _clone():
def _impl(inputs, input_types):
data = inputs[0]
return _op.tensor.copy(data)
return _impl
def _log_softmax():
def _impl(inputs, input_types):
data = inputs[0]
axis = int(inputs[1])
return _op.nn.log_softmax(data, axis)
return _impl
def _sigmoid():
def _impl(inputs, input_types):
data = inputs[0]
return _op.tensor.sigmoid(data)
return _impl
def _softplus():
def _impl(inputs, input_types):
data = inputs[0]
dtype = input_types[0]
beta = _expr.const(float(inputs[1]), dtype=dtype)
return _op.log(_op.exp(inputs[0] * beta) + _expr.const(1.0, dtype=dtype)) / beta
return _impl
def _avg_pool2d(prelude):
def _impl(inputs, input_types):
data = inputs[0]
pool_size = inputs[1]
strides = inputs[2] if inputs[2] else pool_size
padding = inputs[3]
ceil_mode = int(inputs[4])
count_include_pad = int(inputs[5])
def func(x):
return _op.nn.avg_pool2d(
x,
pool_size=pool_size,
strides=strides,
padding=padding,
ceil_mode=ceil_mode,
count_include_pad=count_include_pad,
)
if _is_quantized_tensor(data, prelude):
return qnn_torch.apply_with_upcast(data, func)
return func(data)
return _impl
def _avg_pool3d():
def _impl(inputs, input_types):
data = inputs[0]
pool_size = inputs[1]
strides = inputs[2] if inputs[2] else pool_size
padding = inputs[3]
ceil_mode = int(inputs[4])
count_include_pad = int(inputs[5])
return _op.nn.avg_pool3d(
data,
pool_size=pool_size,
strides=strides,
padding=padding,
ceil_mode=ceil_mode,
count_include_pad=count_include_pad,
)
return _impl
def _dropout():
def _impl(inputs, input_types):
data = inputs[0]
rate = float(inputs[1])
return _op.nn.dropout(data, rate)
return _impl
def _reduce(name):
def _impl(inputs, input_types):
data = inputs[0]
axis = None
keepdims = False
if len(inputs) > 2: # default, torch have only data, axis=None, keepdims=False
if isinstance(inputs[1], int):
axis = int(inputs[1])
elif _is_int_seq(inputs[1]):
axis = inputs[1]
else:
axis = list(_infer_shape(inputs[1]))
keepdims = bool(inputs[2])
return get_relay_op(name)(data, axis=axis, keepdims=keepdims)
return _impl
def _norm():
def _impl(inputs, input_types):
data = inputs[0]
dtype = input_types[0]
axis = None
keepdims = False
if len(inputs) > 3:
axis = inputs[2]
keepdims = bool(inputs[3])
order = inputs[1]
if order == np.inf:
return _op.reduce.max(_op.abs(data), axis=axis, keepdims=keepdims)
elif order == np.NINF:
return _op.reduce.min(_op.abs(data), axis=axis, keepdims=keepdims)
else:
reci_order = _expr.const(1.0 / order, dtype=dtype)
order = _expr.const(order)
return _op.power(
_op.reduce.sum(_op.power(_op.abs(data), order), axis=axis, keepdims=keepdims),
reci_order,
)
return _impl
def _frobenius_norm():
def _impl(inputs, input_types):
data = inputs[0]
axis = None
keepdims = False
if len(inputs) > 2:
axis = inputs[1]
keepdims = bool(inputs[2])
return _op.sqrt(_op.reduce.sum((data * data), axis=axis, keepdims=keepdims))
return _impl
def _std():
def _impl(inputs, input_types):
data = inputs[0]
if len(inputs) == 2:
axis = None
keepdims = False
unbiased = bool(inputs[1])
else:
axis = inputs[1]
keepdims = bool(inputs[3])
unbiased = bool(inputs[2])
return _op.reduce.std(data, axis=axis, keepdims=keepdims, unbiased=unbiased)
return _impl
def _variance():
def _impl(inputs, input_types):
data = inputs[0]
if len(inputs) == 2:
axis = None
keepdims = False
unbiased = bool(inputs[1])
else:
axis = inputs[1]
keepdims = bool(inputs[3])
unbiased = bool(inputs[2])
return _op.reduce.variance(data, axis=axis, keepdims=keepdims, unbiased=unbiased)
return _impl
def _mean(prelude):
def _impl(inputs, input_types):
data = inputs[0]
if inputs[1]:
axis = inputs[1]
else:
axis = None
if len(inputs) > 2 and inputs[2]:
keepdims = int(inputs[2])
else:
keepdims = False
if len(inputs) > 3 and inputs[3]:
exclude = int(inputs[3])
else:
exclude = False
def func(x):
return _op.mean(x, axis, keepdims, exclude)
if _is_quantized_tensor(data, prelude):
assert len(inputs) == 6, "Input quant param not found in op inputs"
input_scale = _expr.const(inputs[4])
input_zero_point = _expr.const(inputs[5])
return qnn_torch.quantized_mean(data, input_scale, input_zero_point, func)
return func(data)
return _impl
def _chunk(prelude):
def _impl(inputs, input_types):
data = inputs[0]
num_chunks = int(inputs[1])
axis = int(inputs[2])
if isinstance(data, _expr.Expr):
inferred_shape = _infer_shape(data, prelude.mod)
shape = []
for infer in inferred_shape:
shape.append(infer)
dim = int(shape[axis])
        # torch.chunk uses ceil(dim / num_chunks) as the uniform chunk size;
        # the final chunk is smaller when dim is not evenly divisible
        unif_size = (dim + num_chunks - 1) // num_chunks
        chunks = []
        for i in range(0, dim, unif_size):
            begin = [0] * len(shape)
            end = shape[:]
            begin[axis] = i
            end[axis] = min(i + unif_size, dim)
            stride = [1] * len(shape)
            chunk_out = _op.transform.strided_slice(data, begin=begin, end=end, strides=stride)
            chunks.append(chunk_out)
return chunks
return _impl
def _matmul(prelude):
def _impl(inputs, input_types):
inputs_0 = inputs[0]
inputs_1 = inputs[1]
# Need to check input shape as batch matmul must be supported.
a_shape = _infer_shape(inputs_0, prelude.mod)
b_shape = _infer_shape(inputs_1, prelude.mod)
# When performing a batch matmul, we need to properly handle N-dim shapes.
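        # e.g. matmul of (2, 3, 4, 5) and (5, 6) yields (2, 3, 4, 6)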
if len(a_shape) > 2 or len(b_shape) > 2:
# Convert a and b into 3 dimensional tensors.
a = _op.reshape(inputs_0, [-1, a_shape[-2], a_shape[-1]])
b = _op.reshape(inputs_1, [-1, b_shape[-2], b_shape[-1]])
# Broadcast b to match batch size of a
new_b_shape = list(_infer_shape(b, prelude.mod))
new_a_shape = _infer_shape(a, prelude.mod)
if new_a_shape[0] > new_b_shape[0]:
new_b_shape[0] = new_a_shape[0]
b = _op.broadcast_to(b, new_b_shape)
# Transpose matrix dimensions of b.
b = _op.transpose(b, [0, 2, 1])
# Perform a batch matmul.
output = _op.nn.batch_matmul(a, b)
# Reshape output to original dimensions.
return _op.reshape(output, [*a_shape[:-2], a_shape[-2], b_shape[-1]])
# Otherwise a simple dense op will get the job done.
if len(b_shape) == 1:
input_1 = _op.expand_dims(inputs_1, 0, 1)
else:
input_1 = _op.transpose(inputs_1, axes=(1, 0))
out = _op.nn.dense(inputs_0, input_1)
if len(b_shape) == 1:
out = _op.squeeze(out, axis=[-1])
return out
return _impl
def _expand():
def _impl(inputs, input_types):
data_in = inputs[0]
shape = list(_infer_shape(data_in))
ndims = len(shape)
sizes = inputs[1]
out = data_in
out_dims = len(sizes)
if ndims < out_dims:
num_newaxis = out_dims - ndims
out = _op.expand_dims(out, axis=0, num_newaxis=num_newaxis)
shape = [1] * num_newaxis + shape
for i in range(out_dims):
if sizes[i] != -1 and shape[i] == 1:
if not isinstance(sizes[i], int):
sizes[i] = int(_infer_value(sizes[i], {}).asnumpy())
out = _op.repeat(out, sizes[i], axis=i)
return out
return _impl
def _int():
def _impl(inputs, input_types):
if isinstance(inputs[0], _expr.Expr):
return inputs[0]
return int(inputs[0])
return _impl
def _identity():
def _impl(inputs, input_types):
return inputs[0]
return _impl
def _none():
def _impl(inputs, input_types):
return None
return _impl
def _pad(mode):
def _impl(inputs, input_types):
data = inputs[0]
if isinstance(inputs[1], list):
pad_list = inputs[1]
else:
pad_list = list(_infer_shape(inputs[1]))
# initialize paddings based on input len
pad_len = len(_infer_shape(data)) * 2
paddings = [0] * pad_len
if len(pad_list) >= 2:
paddings[-1] = pad_list[1]
paddings[-2] = pad_list[0]
if len(pad_list) >= 4:
paddings[-3] = pad_list[3]
paddings[-4] = pad_list[2]
if len(pad_list) >= 6:
paddings[-5] = pad_list[5]
paddings[-6] = pad_list[4]
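        # e.g. for a 4-D NCHW input, pad_list = [left, right, top, bottom]
        # pads W by (left, right) and H by (top, bottom): innermost dim first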
# group into tuple of 2 ints
paddings = [paddings[i : i + 2] for i in range(0, len(paddings), 2)]
const_paddings = []
for pad in paddings:
const_paddings.append([])
for p in pad:
if not isinstance(p, int):
p = int(_infer_value(p, {}).asnumpy())
const_paddings[-1].append(p)
if mode == "constant":
return _op.nn.pad(data, const_paddings, pad_value=inputs[2], pad_mode=mode)
else:
return _op.nn.pad(data, const_paddings, pad_mode=mode)
return _impl
def _clamp():
def _impl(inputs, input_types):
data = inputs[0]
        amin = inputs[1] if inputs[1] is not None else np.finfo(np.float32).min
        amax = inputs[2] if inputs[2] is not None else np.finfo(np.float32).max
return _op.clip(data, amin, amax)
return _impl
def _to():
def _impl(inputs, input_types):
data = inputs[0]
dtype = inputs[1] if inputs[1] is not None and not isinstance(inputs[1], str) else inputs[2]
# special handling for aten::to(data, 6, _, _, _) case
# 6 means dtype = float
# this happens when converting upsampling with scale factor
cast_map = {
5: "float16",
6: "float32",
7: "float64",
3: "int32",
4: "int64",
}
cast_func = {5: float, 6: float, 7: float, 3: int, 4: int}
ret = data
if isinstance(data, _expr.Expr):
actual_dtype = str(_infer_type(data).checked_type.dtype)
if dtype in cast_map and cast_map[dtype] != actual_dtype:
ret = _op.cast(data, cast_map[dtype])
elif dtype in cast_map:
ret = cast_func[dtype](data)
return ret
return _impl
def _upsample(method, prelude):
def _impl(inputs, input_types):
out_size = []
for size in inputs[1]:
if not isinstance(size, int):
out_size.append(int(_infer_value(size, {}).asnumpy()))
else:
out_size.append(size)
data = inputs[0]
if len(inputs) > 2:
align_corners = inputs[2]
else:
align_corners = False
if method == "nearest_neighbor":
coord_trans = "asymmetric"
elif align_corners:
coord_trans = "align_corners"
else:
coord_trans = "half_pixel"
def func(x):
return _op.image.resize(x, out_size, "NCHW", method, coord_trans)
if _is_quantized_tensor(data, prelude):
# Torch version > 1.4 changed upsampling API
if _is_version_greater_than("1.4.0"):
num_inputs = 7
else:
num_inputs = 5
assert len(inputs) == num_inputs, "Input quant param not found in op inputs"
input_scale = _expr.const(inputs[-2])
input_zero_point = _expr.const(inputs[-1])
return qnn_torch.quantized_upsample(data, input_scale, input_zero_point, func)
return func(data)
return _impl
def _upsample3d(method):
def _impl(inputs, input_types):
if isinstance(inputs[1], _expr.Var):
out_size = _infer_shape(inputs[1])
elif _is_int_seq(inputs[1]):
out_size = inputs[1]
elif isinstance(inputs[1], list):
infer_res = [_infer_value(size, {}) for size in inputs[1]]
out_size = [np.asscalar(res.asnumpy().astype(np.int)) for res in infer_res]
data = inputs[0]
if len(inputs) > 2:
align_corners = inputs[2]
else:
align_corners = False
if method == "nearest_neighbor":
coord_trans = "asymmetric"
elif align_corners:
coord_trans = "align_corners"
else:
coord_trans = "half_pixel"
return _op.image.resize3d(data, out_size, "NCDHW", method, coord_trans)
return _impl
def _expand_as():
def _impl(inputs, input_types):
target = inputs[1]
t0 = _infer_type(inputs[0]).checked_type.dtype
t1 = _infer_type(inputs[1]).checked_type.dtype
if str(t0) != str(t1):
target = _op.cast(target, t0)
return _op.broadcast_to_like(inputs[0], target)
return _impl
def _Bool():
def _impl(inputs, input_types):
assert len(inputs) == 1
return inputs[0]
return _impl
def _Float():
def _impl(inputs, input_types):
assert len(inputs) == 1
return _op.cast(inputs[0], "float32")
return _impl
def _mm():
def _impl(inputs, input_types):
return _op.nn.dense(inputs[0], inputs[1])
return _impl
def _bitwise_not():
def _impl(inputs, input_types):
data = inputs[0]
# The input tensor must be of integral or Boolean types.
# For bool tensors, it computes the logical NOT
if input_types[0] == "bool":
out = _op.logical_not(_op.cast(data, "bool"))
else:
out = _op.bitwise_not(_op.cast(data, "int"))
return out
return _impl
def _bitwise_xor():
def _impl(inputs, input_types):
lhs = inputs[0]
rhs = inputs[1]
lhs = _op.cast(lhs, "bool") if input_types[0] == "bool" else _op.cast(lhs, "int")
rhs = _op.cast(rhs, "bool") if input_types[1] == "bool" else _op.cast(rhs, "int")
return _op.bitwise_xor(lhs, rhs)
return _impl
def _logical_not():
def _impl(inputs, input_types):
data = inputs[0]
return _op.logical_not(_op.cast(data, "bool"))
return _impl
def _logical_xor():
def _impl(inputs, input_types):
lhs = _op.cast(inputs[0], "bool")
rhs = _op.cast(inputs[1], "bool")
return _op.logical_xor(lhs, rhs)
return _impl
def _list_getitem(prelude):
def _impl(inputs, input_types):
return prelude.nth(inputs[0], _wrap_const(inputs[1]))
return _impl
def _list_len(prelude):
def _impl(inputs, input_types):
return prelude.length(inputs[0])
return _impl
def _type_as():
def _impl(inputs, input_types):
assert len(inputs) == 2
assert len(input_types) == 2
return _op.cast(inputs[0], input_types[1])
return _impl
def _gather():
def _impl(inputs, input_types):
data = inputs[0]
axis = inputs[1]
indices = inputs[2]
return _op.gather(data, axis, indices)
return _impl
def _add(prelude):
# add_ is overloaded for tensor add and list concat
def _impl(inputs, input_types):
if input_types[0] == "ListType":
return prelude.concat(inputs[0], inputs[1])
return _elemwise("add")(inputs, input_types)
return _impl
def _tensor_array_stack(prelude):
def _impl(inputs, input_types):
dim = inputs[1]
        assert dim == 0, "stacking on a dynamic tensor list is only supported on the first axis"
tensor_array, shape = _convert_to_tensor_array(inputs[0], prelude)
stacked_shape = (Any(),) + shape
stack = prelude.get_global_var_static("tensor_array_stack", "float32", shape)
stacked = stack(tensor_array)
static_tensor_array_ops = StaticTensorArrayOps(prelude, "float32", stacked_shape)
static_tensor_array_ops.register()
get_tensor = prelude.get_global_var_static("tensor_get_data", "float32", stacked_shape)
return get_tensor(stacked)
return _impl
def _stack(prelude):
def _impl(inputs, input_types):
if isinstance(inputs[0], list):
# a static python list of tensors
dim = inputs[1]
return _op.stack(inputs[0], dim)
else:
# List ADT case
assert isinstance(inputs[0], _expr.Expr)
ty = _infer_type_with_prelude(inputs[0], prelude)
list_ty = prelude.mod.get_global_type_var("List")
msg = "The input list is expected to be List ADT"
assert isinstance(ty, tvm.ir.TypeCall) and ty.func == list_ty, msg
return _tensor_array_stack(prelude)(inputs, input_types)
return _impl
def _rsub():
def _impl(inputs, input_types):
data0, data1 = _pytorch_promote_types(inputs[:2], input_types[:2])
# TODO (t-vi): should this also be part of the type promotion?
alpha = _expr.const(float(inputs[2]))
# note: rsub means data0 and data1 swap places
return get_relay_op("subtract")(data1, alpha * data0)
return _impl
def _embedding():
def _impl(inputs, input_types):
weight = inputs[0]
indices = inputs[1]
return _op.take(weight, indices.astype("int32"), axis=0)
return _impl
def _one_hot():
def _impl(inputs, input_types):
indices = inputs[0].astype("int32")
num_classes = inputs[1]
if num_classes == -1:
msg = "Inferring the number of classes is not yet supported."
raise NotImplementedError(msg)
dtype = "int32"
on_value = tvm.relay.const(1.0, dtype)
off_value = tvm.relay.const(0.0, dtype)
return _op.one_hot(indices, on_value, off_value, num_classes, -1, dtype)
return _impl
def _index():
def _impl(inputs, input_types):
data = inputs[0]
indices = inputs[1]
return _op.adv_index([data] + indices)
return _impl
def _meshgrid():
def _impl(inputs, input_types):
data = inputs[0]
return _op.meshgrid(data, indexing="ij")
return _impl
def _nms(prelude):
def _impl(inputs, input_types):
boxes = inputs[0]
scores = inputs[1]
iou_threshold = inputs[2]
# Generate data with shape (1, num_anchors, 5)
scores = AttrCvt(op_name="expand_dims", extras={"axis": -1, "num_newaxis": 1})([scores], {})
# Prepare input data for get_valid_counts
data = _op.concatenate([scores, boxes], -1)
data = _op.expand_dims(data, 0, 1)
# Leverage get_valid_counts to sort the data and clear invalid boxes
ct, data, indices = get_relay_op("get_valid_counts")(
data, score_threshold=-1.0, id_index=-1, score_index=0
)
# Perform Non-Maximum Suppression,
        # PyTorch NMS doesn't have the parameters top_k and max_output_size
score_index = 0
top_k = max_out_size = -1
nms_ret = get_relay_op("non_max_suppression")(
data=data,
valid_count=ct,
indices=indices,
max_output_size=max_out_size,
iou_threshold=iou_threshold,
force_suppress=True,
top_k=top_k,
coord_start=1,
score_index=score_index,
id_index=-1,
return_indices=True,
invalid_to_bottom=False,
)
# squeeze the two outputs of nms for strided_slice
size = get_relay_op("squeeze")(nms_ret[1], axis=[1])
data_slice = get_relay_op("squeeze")(nms_ret[0], axis=[0])
# strided slice to get the dynamic result
ret = get_relay_op("strided_slice")(
data_slice, begin=_expr.const([0]), end=size, slice_mode="size"
)
# in torchvision, indices from nms are int64
return _op.cast(ret, "int64")
return _impl
def _logsumexp():
def _impl(inputs, input_types):
data = _pytorch_promote_types(inputs[:1], input_types[:1])
dim_list = inputs[1]
keepdim = inputs[2] if len(inputs) > 2 else False
# dim is output of prim::ListConstruct, even if it is int in python code
assert isinstance(dim_list, list), "dim is expected to be a list"
return _op.logsumexp(data[0], axis=dim_list, keepdims=keepdim)
return _impl
def _roi_align(prelude):
def _impl(inputs, input_types):
data = inputs[0]
boxes = inputs[1]
output_size = (inputs[3], inputs[4])
spatial_scale = inputs[2]
sample_ratio = inputs[5]
aligned = False if len(inputs) < 7 else inputs[6]
if aligned:
boxes -= _expr.const(0.5 / spatial_scale)
return _op.vision.roi_align(data, boxes, output_size, spatial_scale, sample_ratio)
return _impl
def _unbind():
def _impl(inputs, input_types):
data = inputs[0]
dim = int(inputs[1])
ishapes = _infer_shape(data)
if dim >= len(ishapes):
msg = "Please check input dim, it shouldn't" "be greater than or equal to rank."
raise AttributeError(msg)
selections = ishapes[dim]
res_split = _op.split(data, selections, dim)
# squeeze each split piece to get same shape as aten::unbind
# TODO (yongwww): add new op to avoid the squeeze overhead
ret = []
for i in range(selections):
ret.append(_op.transform.squeeze(res_split[i], axis=[dim]))
ret = _expr.TupleWrapper(_expr.Tuple(ret), selections)
return ret
return _impl
def _shape_as_tensor(prelude):
def _impl(inputs, input_types):
is_symbolic_shape = False
input_shape = _infer_shape(inputs[0], prelude.mod)
for axis in input_shape:
if not isinstance(axis, (int, tvm.tir.IntImm)):
is_symbolic_shape = True
break
if is_symbolic_shape:
ret = _op.shape_of(inputs[0], dtype="int64")
else:
ret = _expr.const(np.array(input_shape), dtype="int64")
return ret
return _impl
def _logical_and():
def _impl(inputs, input_types):
lhs = _op.cast(inputs[0], "bool")
rhs = _op.cast(inputs[1], "bool")
return _op.logical_and(lhs, rhs)
return _impl
def _nonzero(is_numpy_style):
def _impl(inputs, input_types):
data = inputs[0]
ret = _op.transform.argwhere(data)
if is_numpy_style or (len(inputs) > 1 and inputs[1]):
return _unbind()([ret, 1], None)
return ret
return _impl
def _scatter():
def _impl(inputs, input_types):
data = inputs[0]
axis = int(inputs[1])
index = inputs[2]
src = inputs[3]
return _op.transform.scatter(data, index, src, axis)
return _impl
def _scalar_tensor():
def _impl(inputs, input_types):
data = inputs[0]
cast_map = {
6: "float32",
7: "float64",
3: "int32",
4: "int64",
}
type_key = inputs[1]
if isinstance(data, _expr.Constant):
data = data.data.asnumpy().tolist()
return _expr.const(data, cast_map[type_key])
return _impl
def _interpolate():
def _impl(inputs, input_types):
if isinstance(inputs[1], _expr.Expr):
out_size = inputs[1]
elif isinstance(inputs[1], list):
out_size = []
for i in [0, 1]:
size, _ = try_infer_value(
inputs[1][i],
lambda ret: ret.astype(np.int),
lambda: _op.expand_dims(inputs[1][i], axis=0),
)
out_size.append(size)
out_size = _op.concatenate(out_size, axis=0)
data = inputs[0]
align_corners = inputs[4]
method = inputs[3]
if method.startswith("nearest"):
method = "nearest_neighbor"
if method == "nearest_neighbor":
coord_trans = "asymmetric"
elif align_corners:
coord_trans = "align_corners"
else:
coord_trans = "half_pixel"
return _op.image.resize(data, out_size, "NCHW", method, coord_trans)
return _impl
def _numel():
def _impl(inputs, input_types):
return _op.ndarray_size(inputs[0])
return _impl
def _empty():
def _impl(inputs, input_types):
shape = inputs[0]
return _op.zeros(shape, _convert_dtype_value(inputs[1]))
return _impl
def _pytorch_result_type(dtypes, non_tensor_inputs):
"""This promotes TVM dtypes like PyTorch would"""
import torch
dtype_map = {
"float64": torch.float64,
"float32": torch.float32,
"float16": torch.float16,
"bfloat16": torch.bfloat16,
"int64": torch.int64,
"int32": torch.int32,
"int16": torch.int16,
"int8": torch.int8,
"uint8": torch.uint8,
"bool": torch.bool,
}
if len(dtypes) > 0:
result_type = dtypes[0]
for dt in dtypes[1:]:
            if dt != result_type:  # no promotion needed for identical dtypes;
                # quantized dtypes (which cannot be promoted) are not handled here
result_type = _convert_data_type(
str(
torch.result_type(
torch.zeros((), dtype=dtype_map[result_type]),
torch.zeros((), dtype=dtype_map[dt]),
)
)
)
else:
result_type = "bool" # this is the smallest type...
for inp in non_tensor_inputs:
result_type = _convert_data_type(
str(torch.result_type(torch.zeros((), dtype=dtype_map[result_type]), inp))
)
return result_type
def _pytorch_promote_types(inputs, dtypes):
"""This promotes TVM inputs with TVM dtypes passed like PyTorch would"""
actual_dtypes = []
for i, inp in enumerate(inputs):
if isinstance(inp, _expr.Expr):
idt = _infer_type(inp).checked_type.dtype
actual_dtypes.append(idt)
else:
actual_dtypes.append(dtypes[i])
dtypes = actual_dtypes
tensor_dtypes = [dt for inp, dt in zip(inputs, dtypes) if not np.isscalar(inp)]
non_tensor_inputs = [inp for inp in inputs if np.isscalar(inp)]
result_type = _pytorch_result_type(tensor_dtypes, non_tensor_inputs)
results = []
for inp, dt in zip(inputs, dtypes):
if np.isscalar(inp):
results.append(_expr.const(inp, dtype=result_type))
elif dt == result_type:
results.append(inp)
else:
results.append(_op.cast(inp, result_type))
return results
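# A hedged, self-contained sketch (not part of the converter): `_promote_example`
# is an illustrative name showing how the two promotion helpers above combine.
def _promote_example():
    """Promote a float32 Relay constant together with a Python int scalar."""
    lhs = _expr.const(np.ones((2,), dtype="float32"))
    promoted = _pytorch_promote_types([lhs, 2], ["float32", "int32"])
    # Both results are float32 expressions: the int scalar was wrapped as a
    # float32 constant, mirroring PyTorch's scalar promotion rules.
    return promoted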
# Helper functions for operator implementation
def _convert_dtype_value(val):
"""converts a PyTorch the PyTorch numeric type id to a torch scalar type."""
convert_torch_dtype_map = {
7: "torch.float64",
6: "torch.float32",
5: "torch.float16",
4: "torch.int64",
3: "torch.int32",
2: "torch.int16",
1: "torch.int8",
0: "torch.unit8",
None: "torch.int64",
} # Default is torch.int64
if val in convert_torch_dtype_map:
return _convert_data_type(convert_torch_dtype_map[val])
else:
msg = "Torch data type value %d is not handled yet." % (val)
raise NotImplementedError(msg)
def _convert_data_type(input_type, default_dtype=None):
"""converts the PyTorch scalar type input_type to a TVM dtype.
optionally, default_dtype can be a TVM dtype that is used
if input_type is None (but not when it is unknown)"""
if input_type is None and default_dtype is not None:
return default_dtype
input_type = input_type.lower()
if input_type in ["double", "float64", "torch.float64"]:
return "float64"
elif input_type in ["float", "float32", "torch.float32"]:
return "float32"
elif input_type in ["half", "float16", "torch.float16"]:
return "float16"
elif input_type in ["long", "int64", "torch.int64"]:
return "int64"
elif input_type in ["int", "int32", "torch.int32"]:
return "int32"
elif input_type in ["short", "int16", "torch.int16"]:
return "int16"
elif input_type in ["char", "int8", "torch.int8"]:
return "int8"
elif input_type in ["byte", "uint8", "torch.uint8"]:
return "uint8"
elif input_type in ["quint8", "torch.quint8"]:
return "quint8"
elif input_type in ["qint8", "torch.qint8"]:
return "qint8"
elif input_type in ["qint32", "torch.qint32"]:
return "qint32"
elif input_type in ["bool", "torch.bool"]:
return "bool"
elif input_type in ["str"]:
return "str"
else:
raise NotImplementedError("input_type {} is not handled yet".format(input_type))
return "float32" # Never reached
def _create_typed_const(data, dtype):
"""create a (scalar) constant of given value and dtype.
dtype should be a TVM dtype"""
if dtype == "float64":
typed_data = _expr.const(np.float64(data), dtype=dtype)
elif dtype == "float32":
typed_data = _expr.const(np.float32(data), dtype=dtype)
elif dtype == "float16":
typed_data = _expr.const(np.float16(data), dtype=dtype)
elif dtype == "int64":
typed_data = _expr.const(np.int64(data), dtype=dtype)
elif dtype == "int32":
typed_data = _expr.const(np.int32(data), dtype=dtype)
elif dtype == "int16":
typed_data = _expr.const(np.int16(data), dtype=dtype)
elif dtype == "int8":
typed_data = _expr.const(np.int8(data), dtype=dtype)
elif dtype == "uint8":
typed_data = _expr.const(np.uint8(data), dtype=dtype)
else:
raise NotImplementedError("input_type {} is not handled yet".format(dtype))
return typed_data
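# e.g. (illustrative): _create_typed_const(1, "int64") produces a Relay constant
# carrying np.int64(1); _wrap_const below, by contrast, leaves Relay expressions
# and Python lists untouched.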
def _wrap_const(c):
if not isinstance(c, (_expr.Expr, list, tvm.tir.expr.Any)):
return _expr.const(c)
return c
# Operator mappings
def _get_convert_map(prelude, default_dtype):
convert_map = {
"aten::pixel_shuffle": _pixel_shuffle(prelude),
"aten::device": _none(),
"prim::device": _none(),
"aten::sub": _elemwise("subtract"),
"aten::sub_": _elemwise("subtract"),
"aten::max": _max(),
"aten::min": _min(),
"aten::mul": _elemwise("multiply"),
"aten::mul_": _elemwise("multiply"),
"aten::pow": _elemwise("power"),
"aten::arange": _arange(),
"aten::meshgrid": _meshgrid(),
"aten::div": _elemwise("divide"),
"aten::div_": _elemwise("divide"),
"aten::floor_divide": _elemwise("floor_divide"),
"aten::true_divide": _elemwise("divide"),
"aten::addcdiv": _addcdiv(),
"aten::addcmul": _addcmul(),
"aten::ones": _ones(default_dtype),
"aten::ones_like": _ones_like(default_dtype),
"aten::zeros": _zeros(default_dtype),
"aten::zeros_like": _zeros_like(default_dtype),
"aten::full": _full(default_dtype),
"aten::full_like": _full_like(default_dtype),
"aten::linspace": _linspace(),
"aten::reciprocal": _reciprocal(),
"aten::repeat": _repeat(),
"aten::repeat_interleave": _repeat_interleave(),
"aten::to": _to(),
"aten::squeeze": _squeeze(),
"aten::unsqueeze": _unsqueeze(),
"aten::cat": _concatenate(prelude),
"aten::slice": _slice(),
"aten::split": _split(),
"aten::split_with_sizes": _split_with_sizes(),
"aten::select": _select(),
"aten::take": _take(),
"aten::where": _where(),
"aten::topk": _topk(),
"aten::relu": _relu(prelude),
"aten::relu_": _relu(prelude),
"aten::prelu": _prelu(),
"aten::leaky_relu": _leaky_relu(),
"aten::leaky_relu_": _leaky_relu(),
"aten::elu": _elu(),
"aten::elu_": _elu(),
"aten::celu": _celu(),
"aten::gelu": _gelu(),
"aten::selu": _selu(),
"aten::log_sigmoid": _log_sigmoid(),
"aten::adaptive_avg_pool2d": _adaptive_avg_pool_2d(prelude),
"aten::adaptive_max_pool2d": _adaptive_max_pool_2d(),
"aten::max_pool2d": _maxpool_2d(),
"aten::max_pool2d_with_indices": _maxpool_2d_with_indices(),
"aten::max_pool1d": _maxpool_1d(),
"aten::max_pool3d": _maxpool_3d(),
"aten::hardtanh": _hardtanh(),
"aten::hardtanh_": _hardtanh(),
"aten::_convolution": _convolution(),
"aten::softmax": _softmax(),
"aten::threshold": _threshold(),
"aten::threshold_": _threshold(),
"aten::contiguous": _contiguous(),
"aten::batch_norm": _batch_norm(),
"aten::instance_norm": _instance_norm(),
"aten::layer_norm": _layer_norm(),
"aten::group_norm": _group_norm(),
"aten::transpose": _transpose(prelude),
"aten::transpose_": _transpose(prelude),
"aten::t": _transpose(prelude),
"aten::flatten": _flatten(),
"aten::addmm": _addmm(),
"aten::size": _size(prelude),
"aten::view": _view(),
"aten::reshape": _reshape(),
"aten::clone": _clone(),
"aten::log_softmax": _log_softmax(),
"aten::sigmoid": _sigmoid(),
"aten::softplus": _softplus(),
"aten::avg_pool2d": _avg_pool2d(prelude),
"aten::avg_pool3d": _avg_pool3d(),
"aten::dropout": _dropout(),
"aten::dropout_": _dropout(),
"aten::feature_dropout": _dropout(),
"aten::alpha_dropout": _dropout(),
"aten::mean": _mean(prelude),
"aten::chunk": _chunk(prelude),
"aten::matmul": _matmul(prelude),
"aten::bmm": _matmul(prelude),
"aten::expand": _expand(),
"aten::Int": _int(),
"prim::NumToTensor": _numtotensor(),
"prim::ImplicitTensorToNum": _tensortonum(),
"aten::ScalarImplicit": _tensortonum(),
"aten::constant_pad_nd": _pad("constant"),
"aten::reflection_pad1d": _pad("reflect"),
"aten::reflection_pad2d": _pad("reflect"),
"aten::replication_pad1d": _pad("edge"),
"aten::replication_pad2d": _pad("edge"),
"aten::replication_pad3d": _pad("edge"),
"aten::permute": _transpose(prelude),
"aten::sum": _reduce("sum"),
"aten::prod": _reduce("prod"),
"aten::argmin": _reduce("argmin"),
"aten::argmax": _reduce("argmax"),
"aten::norm": _norm(),
"aten::frobenius_norm": _frobenius_norm(),
"aten::std": _std(),
"aten::var": _variance(),
"aten::abs": _unary("abs"),
"aten::neg": _unary("negative"),
"aten::cos": _unary("cos"),
"aten::cosh": _unary("cosh"),
"aten::sin": _unary("sin"),
"aten::sinh": _unary("sinh"),
"aten::tan": _unary("tan"),
"aten::tanh": _unary("tanh"),
"aten::acos": _unary("acos"),
"aten::asin": _unary("asin"),
"aten::atan": _unary("atan"),
"aten::log": _unary("log"),
"aten::log2": _unary("log2"),
"aten::log10": _unary("log10"),
"aten::log1p": _log1p(),
"aten::exp": _unary("exp"),
"aten::erf": _unary("erf"),
"aten::trunc": _unary("trunc"),
"aten::sign": _unary("sign"),
"aten::sqrt": _unary("sqrt"),
"aten::rsqrt": _unary("rsqrt"),
"aten::ceil": _unary("ceil"),
"aten::floor": _unary("floor"),
"aten::round": _unary("round"),
"aten::isfinite": _unary("isfinite"),
"aten::isinf": _unary("isinf"),
"aten::isnan": _unary("isnan"),
"aten::clamp": _clamp(),
"aten::clamp_": _clamp(),
"aten::detach": _identity(),
"aten::upsample_bilinear2d": _upsample("bilinear", prelude),
"aten::upsample_nearest2d": _upsample("nearest_neighbor", prelude),
"aten::upsample_trilinear3d": _upsample3d("trilinear"),
"aten::upsample_nearest3d": _upsample3d("nearest_neighbor"),
"aten::expand_as": _expand_as(),
"aten::lt": _elemwise("less"),
"aten::gt": _elemwise("greater"),
"aten::le": _elemwise("less_equal"),
"aten::ge": _elemwise("greater_equal"),
"aten::ne": _elemwise("not_equal"),
"aten::eq": _elemwise("equal"),
"aten::logical_not": _logical_not(),
"aten::logical_xor": _logical_xor(),
"aten::bitwise_not": _bitwise_not(),
"aten::bitwise_xor": _bitwise_xor(),
"aten::Bool": _Bool(),
"aten::Float": _Float(),
"aten::adaptive_avg_pool3d": _adaptive_avg_pool_3d(),
"aten::adaptive_max_pool3d": _adaptive_max_pool_3d(),
"aten::rsub": _rsub(),
"aten::embedding": _embedding(),
"aten::one_hot": _one_hot(),
"aten::mm": _matmul(prelude),
"aten::add": _add(prelude),
"aten::add_": _add(prelude),
"aten::stack": _stack(prelude),
"aten::__getitem__": _list_getitem(prelude),
"aten::len": _list_len(prelude),
"aten::type_as": _type_as(),
"aten::gather": _gather(),
"aten::index_select": _select(),
"aten::index": _index(),
"torchvision::nms": _nms(prelude),
"aten::logsumexp": _logsumexp(),
"torchvision::roi_align": _roi_align(prelude),
"aten::unbind": _unbind(),
"aten::__and__": _logical_and(),
"aten::_shape_as_tensor": _shape_as_tensor(prelude),
"aten::nonzero": _nonzero(False),
"aten::nonzero_numpy": _nonzero(True),
"aten::scatter": _scatter(),
"aten::scalar_tensor": _scalar_tensor(),
"aten::__interpolate": _interpolate(),
"aten::IntImplicit": _identity(),
"aten::tensor": _identity(), # used for example in tensor(1.0)
"aten::numel": _numel(),
"aten::empty": _empty(),
}
return convert_map
def _run_jit_passes(graph):
""" The inline pass is necessary to unwrap prim::CallMethod """
import torch
if _is_version_greater_than("1.5.0"):
# This is required for torchvision detection models from 1.6 above
# It is the same as _jit_pass_inline, except that it has some special
# case behaviors for some ops such as aten::__interpolate()
torch._C._jit_pass_onnx_function_substitution(graph)
else:
torch._C._jit_pass_inline(graph)
def _get_tensor_and_var(torch_tensor, name):
tensor = tvm.nd.array(torch_tensor.cpu().numpy())
var = _expr.var(name, shape=tensor.shape, dtype=tensor.dtype)
return tensor, var
def _get_output_name(node):
assert node.outputsSize() == 1
return node.output().debugName()
def _get_output_names(node):
return [output.debugName() for output in node.outputs()]
def _get_input_names(node_or_graph):
return [inp.debugName() for inp in node_or_graph.inputs()]
def _get_op_inputs(op_node, outputs):
return [outputs[name] for name in _get_input_names(op_node)]
def _get_node_type(node):
assert node.outputsSize() == 1
return node.output().type().kind()
def _get_uses(node):
uses = []
for output in node.outputs():
uses += output.uses()
return uses
def _get_users(node):
return [use.user for use in _get_uses(node)]
def _report_missing_conversion(op_names, convert_map):
""" Check if all ops in an input graph are supported by TVM """
known_ops = [
"prim::Constant",
"prim::GetAttr",
"prim::ListConstruct",
"prim::ListUnpack",
"prim::TupleConstruct",
"prim::TupleUnpack",
"prim::If",
"prim::Loop",
]
known_ops += list(convert_map.keys())
known_ops += list(qnn_torch.convert_map.keys())
missing = [op_name for op_name in op_names if op_name not in known_ops]
if missing:
msg = "The following operators are not implemented: {}".format(missing)
raise NotImplementedError(msg)
def _getattr_attr_name(node):
attribute_names = node.attributeNames()
assert len(attribute_names) == 1
attr_name = node.s(attribute_names[0])
return attr_name
def _getattr_full_name(getattrs):
return ".".join([_getattr_attr_name(node) for node in getattrs])
def _get_pytorch_value_type(typ, default_dtype="float32"):
kind = typ.kind()
if kind == "TensorType":
if typ.scalarType() is None:
# Tensor's type can be unknown if we use torch.jit.script(...)
# Defaults can be passed in, if not it is float32
logging.warning("Untyped Tensor found, assume it is %s", default_dtype)
return default_dtype
else:
return _convert_data_type(typ.scalarType())
elif kind == "ListType":
return "ListType"
elif kind in ["IntType", "FloatType", "BoolType", "StringType", "OptionalType"]:
pt_dtype = str(typ).lower()
        dtype = pt_dtype if kind == "OptionalType" else _convert_data_type(pt_dtype)
return dtype
else:
return "UnsupportedType"
def _get_input_types(op_node, outputs, default_dtype="float32"):
"""Returns a TVM dtype for each input nodes derived from the torch type"""
in_types = []
for inp in op_node.inputs():
if inp.node().kind() == "prim::GetAttr":
# GetAttr nodes always return None when we call scalarType() on it
name = inp.debugName()
assert name in outputs
if isinstance(outputs[name], _expr.Var):
in_types.append(outputs[name].type_annotation.dtype)
else:
# For quantized modules with parameters, here we would get
# "prim::GetAttr[name="_packed_params"]". Since the dtype corresponding to
# _packed_params is not needed by quantized ops, we return an arbitrary type.
in_types.append(default_dtype)
else:
in_types.append(_get_pytorch_value_type(inp.type(), default_dtype=default_dtype))
return in_types
def _get_constant(node):
""" Retrieve a constant associated with this prim::Constant node """
attribute_names = node.attributeNames()
num_attributes = len(attribute_names)
if num_attributes == 1:
attr_name = attribute_names[0]
ty = node.output().type().kind()
if ty == "IntType":
return node.i(attr_name)
elif ty == "BoolType":
return bool(node.i(attr_name))
elif ty in ["FloatType", "LongType"]:
return node.f(attr_name)
elif ty in ["TensorType", "CompleteTensorType"]:
tensor = node.t(attr_name)
if tensor.is_cuda:
tensor = tensor.cpu()
if len(tensor.shape) == 0: # tensor(0.1)
# TODO(t-vi): When is this needed?
return tensor.item()
return _wrap_const(tensor.numpy())
elif ty in ["DeviceObjType", "StringType"]:
return node.s(attr_name)
elif ty == "FunctionType":
return None
else:
raise NotImplementedError("Unsupported type: %s" % ty)
else:
assert num_attributes == 0
return None
def _get_operator_nodes(nodes):
""" Returns torch IR nodes that need conversion to Relay """
ops = []
# Traverse nodes and add to graph
for node in nodes:
if node.outputsSize() > 1:
node_name = "_".join(_get_output_names(node))
else:
node_name = _get_output_name(node)
if node.kind() != "prim::GetAttr":
ops.append((node_name, node))
return ops
def _get_relay_input_vars(graph, input_infos, prelude, is_module=True, default_dtype="float32"):
"""
Return Relay vars from input shapes and create entries based on
expected graph inputs - to allow translation
"""
graph_inputs = list(graph.inputs())
if is_module:
# a module has "self" as first input, which we do not need/want
graph_inputs = graph_inputs[1:]
if not isinstance(input_infos, list):
msg = "Graph inputs input_infos should be a list"
raise RuntimeError(msg)
if len(graph_inputs) != len(input_infos):
msg = "PyTorch has {} inputs and input_infos lists {}.".format(
len(graph_inputs), len(input_infos)
)
raise RuntimeError(msg)
def get_relay_ty(ishape, itype, pt_type):
if pt_type.kind() == "TensorType":
if not (_is_int_seq(ishape) or len(ishape) == 0):
msg = "Shape for Tensors must be lists of ints"
raise RuntimeError(msg)
if (pt_type.dim() is not None and pt_type.dim() != len(ishape)) or (
pt_type.sizes() is not None
and any([s1 != s2 for s1, s2 in zip(pt_type.sizes(), ishape)])
):
msg = "Shapes of input list and information in the graph do not match"
raise RuntimeError(msg)
pt_dtype = pt_type.scalarType()
if not pt_dtype and itype:
pt_dtype = itype
dtype = _convert_data_type(pt_dtype, default_dtype=default_dtype)
return TensorType(ishape, dtype)
elif pt_type.kind() == "TupleType":
if not isinstance(ishape, tuple):
msg = "Shapes for tuples must be tuples"
raise RuntimeError(msg)
return TupleType(
[get_relay_ty(elem, itype, pt_t) for elem, pt_t in zip(ishape, pt_type.elements())]
)
elif pt_type.kind() == "ListType":
if not isinstance(ishape, list):
msg = "Shapes for lists must be lists"
raise RuntimeError(msg)
pt_elemtype = pt_type.getElementType()
elem_tys = [get_relay_ty(s, itype, pt_elemtype) for s in ishape]
if len(elem_tys) > 0 and not all(map(lambda ty: ty == elem_tys[0], elem_tys)):
msg = "List elements need have identical types"
raise RuntimeError(msg)
rlist, _, _ = prelude.mod.get_type("List")
return rlist(elem_tys[0])
elif pt_type.kind() == "OptionalType":
# we do not support None yet, so we fill in the type
return get_relay_ty(ishape, itype, pt_type.getElementType())
# TODO: scalar inputs
raise NotImplementedError("unsupported input type")
input_vars = {}
new_input_infos = []
for num, inp in enumerate(input_infos):
if not isinstance(inp, tuple):
msg = "Graph input {} is not a tuple".format(num)
raise RuntimeError(msg)
if len(inp) != 2 or not isinstance(inp[0], str):
msg = (
"Graph input {} is not valid,"
" expected ('name', shape) or ('name', (shape, dtype))".format(inp)
)
raise RuntimeError(msg)
if not isinstance(inp[1], tuple) or len(inp[1]) == 0 or not isinstance(inp[1][-1], str):
new_input_infos.append((inp[0], (inp[1], default_dtype)))
else:
new_input_infos.append(inp)
input_types = [
(name, get_relay_ty(info[0], info[1], gi.type()))
for (name, info), gi in zip(new_input_infos, graph_inputs)
]
ir_inputs = [i.debugName() for i in graph_inputs]
for ir_input, (name, itype) in zip(ir_inputs, input_types):
inp = _expr.var(name, type_annotation=itype)
# Translate from graph input to user input name
input_vars[ir_input] = inp
return input_vars
def _unpack_tuple(tup):
def unpack(tup, num_fields):
return [_expr.TupleGetItem(tup, i) for i in range(num_fields)]
if isinstance(tup, _expr.Tuple):
return unpack(tup, len(tup.fields))
elif isinstance(tup.type_annotation, TupleType):
return unpack(tup, len(tup.type_annotation.fields))
# shouldn't happen
assert False
def _get_free_vars_from_block(block):
block_inp_names = _get_input_names(block)
bound_names = block_inp_names
free_vars = set()
for node in block.nodes():
inp_names = _get_input_names(node)
list_diff = [name for name in inp_names if name not in bound_names]
free_vars.update(list_diff)
bound_names += _get_output_names(node)
return free_vars
def get_use_chains(root_node, terminate=lambda _: False):
"""
Track a chain of users of this node forward, returning a list of chains
See get_attr_chains below for its usage
"""
def concat_lists(lists):
return itertools.chain.from_iterable(lists)
def inner(current, accum):
users = _get_users(current)
if not users or terminate(users):
return [accum]
return concat_lists([inner(nxt, accum + [nxt]) for nxt in users])
return inner(root_node, [root_node])
def get_attr_chains(root_getattr_node):
"""Returns chains of attribute access starting from root_getattr_node
For example, given attribute "block", as in "self.block" when "self" points
to the top level torch.nn.Module, it returns lists of attribute "chains",
e.g. ['block', '2'], ['block', '1'], ['block', '0', '_packed_params']
These sets of attributes form full attribute accessors. For example,
"self.block.1", "self.block.2" will return the second and third submodule,
and "self.block.0._packed_params" will return the parameters of the first
submodule.
"""
def terminate(users):
next_attrs = [user for user in users if user.kind() == "prim::GetAttr"]
return len(next_attrs) == 0
return get_use_chains(root_getattr_node, terminate)
def convert_params(graph, state_dict):
"""
Return Relay vars and TVM NDArrays for input parameters
A chain of prim::GetAttr nodes is processed one at a time
"""
getattr_nodes = graph.findAllNodes("prim::GetAttr", recurse=True)
params = {}
param_tensors = {}
packed_param_map = {}
vars_by_name = {}
seen = set()
for node in getattr_nodes:
if _get_output_name(node) in seen:
continue
for getattrs in get_attr_chains(node):
seen.update(map(_get_output_name, getattrs))
full_attr = _getattr_full_name(getattrs)
full_attr_node_name = _get_output_name(getattrs[-1])
if full_attr.endswith("_packed_params"): # for quantized models
err_msg = "parameter %s not found in state dict" % full_attr
assert full_attr in state_dict, err_msg
packed_param_map[full_attr_node_name] = full_attr
elif full_attr in state_dict:
if full_attr in vars_by_name:
var = vars_by_name[full_attr]
else:
torch_tensor = state_dict[full_attr]
tensor, var = _get_tensor_and_var(torch_tensor, full_attr)
param_tensors[full_attr] = tensor
vars_by_name[full_attr] = var
params[full_attr_node_name] = var
return params, param_tensors, packed_param_map
def convert_block(block, outputs, convert_map, prelude, default_dtype="float32"):
""" Translate Torch "Block", used for prim::If and prim::Loop """
ops = _get_operator_nodes(block.nodes())
ret_names = _get_input_names(block.returnNode())
return convert_operators(
ops, outputs, ret_names, convert_map, prelude, default_dtype=default_dtype
)
def convert_if(if_node, outputs, convert_map, prelude, default_dtype="float32"):
""" Translate Torch prim::If to Relay If """
cond = outputs[if_node.inputsAt(0).debugName()]
blocks = list(if_node.blocks())
true_branch = convert_block(
blocks[0], outputs, convert_map, prelude, default_dtype=default_dtype
)
false_branch = convert_block(
blocks[1], outputs, convert_map, prelude, default_dtype=default_dtype
)
assert len(true_branch) == 1 and len(false_branch) == 1
return _expr.If(cond, true_branch[0], false_branch[0])
def convert_loop(loop_node, outputs, convert_map, prelude):
""" Translate Torch prim::Loop to Relay while_loop """
def get_input(index):
ivalue = loop_node.inputsAt(index)
inode = ivalue.node()
if inode.kind() == "prim::Constant":
return _expr.const(_get_constant(inode))
var_name = ivalue.debugName()
assert var_name in outputs
return _wrap_const(outputs[var_name])
# Refer to the spec for prim::Loop below
# https://github.com/pytorch/pytorch/blob/master/torch/csrc/jit/OVERVIEW.md#loops
# The first input: %max_trip_count
# The second input: %initial_condition
# The rest of input: loop variables
max_loop_count = get_input(0)
init_cond = get_input(1)
num_loop_var = len(list(loop_node.inputs())) - 2
init_vals = [get_input(i + 2) for i in range(num_loop_var)]
    # a while loop always has max_loop_count set to int64 max
    # max_loop_count.data (tvm.runtime.NDArray) is -1, so we call _get_constant again
is_while_loop = (
isinstance(max_loop_count, _expr.Constant)
and _get_constant(loop_node.inputsAt(0).node()) == sys.maxsize
)
if is_while_loop:
loop_iter_dtype = "bool"
        # a while loop with an input-independent condition, such as `while i < 10`:
        # init_cond is an int and needs a cast to bool to type check
if isinstance(init_cond, _expr.Constant):
init_cond = _op.cast(init_cond, "bool")
init_loop_iter_val = init_cond
else:
loop_iter_dtype = "int32"
# always count from 0
init_loop_iter_val = _expr.const(0, dtype="int32")
body_block = list(loop_node.blocks())[0]
block_input_names = _get_input_names(body_block)
num_block_inputs = len(block_input_names)
name_val_pairs = list(zip(block_input_names, [init_loop_iter_val] + init_vals))
outputs.update(name_val_pairs)
def get_var(name, val):
if val:
checked_type = _infer_type_with_prelude(val, prelude)
if hasattr(checked_type, "shape"):
shape = get_const_tuple(checked_type.shape)
actual_shape = []
for dim in shape:
if isinstance(dim, int) and dim == 0:
actual_shape.append(Any())
else:
actual_shape.append(dim)
return _expr.var(name, shape=actual_shape, dtype=checked_type.dtype)
else:
return _expr.var(name, type_annotation=checked_type)
return _expr.var(name)
loop_iter_var = _expr.var(block_input_names[0], shape=(), dtype=loop_iter_dtype)
loop_vars = [get_var(name, val) for name, val in name_val_pairs[1:]]
# Add non constant free variables to loop variables to prevent code blow up
# Without this, if there are two for loops in a row, which often happens
# if the outer loop is unrolled, the computation corresponding to the first for loop
# is inlined inside loop body, turning O(N) + O(N) computation into O(N^2).
# This issue was found when converting from Stacked LSTM test. Torch does not add the output
    # of the earlier loop into loop variables of the next loop.
# So the variable corresponding to the first loop output appears free in the second loop body.
free_vars = [
var
for var in _get_free_vars_from_block(body_block)
if var in outputs
and not isinstance(outputs[var], (_expr.Constant, int, float, str))
and outputs[var]
]
prev_outputs = {}
for name in free_vars:
prev_output = outputs[name]
new_loop_var = get_var(name, prev_output)
prev_outputs[name] = prev_output
outputs[name] = new_loop_var
loop_vars.append(new_loop_var)
init_vals.append(prev_output)
def cond(*current_vals):
i = current_vals[0]
if is_while_loop:
return _op.equal(i, _expr.const(True, "bool"))
return _op.less(i, max_loop_count)
def body(*current_vals):
# Update loop variables using the prev iteration outputs
assert len(current_vals) == num_block_inputs + len(free_vars)
for (i, val) in enumerate(current_vals):
if i < num_block_inputs:
outputs[block_input_names[i]] = val
else:
outputs[free_vars[i - num_block_inputs]] = val
block_outputs = convert_block(body_block, outputs, convert_map, prelude)
block_outputs += [outputs[name] for name in free_vars]
if not is_while_loop:
# iter var increment implicit in torch, so do it manually
# for while loop, block_outputs[0] is already a boolean,
# the result of termination check
incr = _expr.const(1, dtype="int32")
block_outputs[0] = current_vals[0] + incr
return block_outputs
loop = while_loop(cond, [loop_iter_var] + loop_vars, body)
loop_val = loop(init_loop_iter_val, *init_vals)
# restore original output values for free vars
outputs.update(prev_outputs)
# The first element is a loop counter or boolean condition, ignore it
return [_expr.TupleGetItem(loop_val, i + 1) for i in range(num_loop_var)]
def convert_operators(operators, outputs, ret_names, convert_map, prelude, default_dtype="float32"):
""" Convert each Torch IR operators to Relay equivalent """
for node_name, op_node in operators:
operator = op_node.kind()
inputs = _get_op_inputs(op_node, outputs)
if operator == "prim::Constant":
outputs[node_name] = _get_constant(op_node)
elif operator == "prim::ListConstruct" and _should_construct_dynamic_list(op_node):
outputs[node_name] = _convert_to_list_adt(inputs, prelude)
elif operator == "prim::ListConstruct":
# This assumes that no more elements will be appended to this list
# In this case, we keep the Python list
outputs[node_name] = inputs
elif operator == "prim::TupleConstruct":
outputs[node_name] = _expr.Tuple(inputs)
elif operator in ["prim::ListUnpack", "prim::TupleUnpack"]:
assert len(inputs) == 1
if isinstance(inputs[0], (list, _expr.TupleWrapper)):
unpacked = inputs[0]
else:
unpacked = _unpack_tuple(inputs[0])
outputs.update(zip(_get_output_names(op_node), unpacked))
elif operator == "prim::If":
if_out = convert_if(op_node, outputs, convert_map, prelude, default_dtype=default_dtype)
outputs[node_name] = if_out
elif operator == "prim::Loop":
loop_out = convert_loop(op_node, outputs, convert_map, prelude)
unpacked_names = _get_output_names(op_node)
assert len(loop_out) == len(unpacked_names)
outputs.update(zip(unpacked_names, loop_out))
else:
relay_op = convert_map[operator]
relay_out = relay_op(
inputs, _get_input_types(op_node, outputs, default_dtype=default_dtype)
)
if isinstance(relay_out, tuple):
# This is for torch operators that return multiple outputs
# See _adaptive_max_2d above for example
out_names = _get_output_names(op_node)
outputs.update(zip(out_names, relay_out))
else:
assert op_node.outputsSize() == 1
outputs[node_name] = relay_out
return [_wrap_const(outputs[ret_name]) for ret_name in ret_names]
def get_all_op_names(graph):
""" Return all operator names in the input graph """
nodes = list(graph.nodes())
prim_with_blocks = ["prim::If", "prim::Loop"]
for prim in prim_with_blocks:
prim_nodes = graph.findAllNodes(prim, recurse=True)
for prim_node in prim_nodes:
for block in prim_node.blocks():
nodes += block.nodes()
return set(node.kind() for node in nodes)
def from_pytorch(script_module, input_infos, custom_convert_map=None, default_dtype="float32"):
"""Load PyTorch model in the form of a scripted PyTorch model and convert into relay.
The companion parameters will be handled automatically.
Parameters
----------
script_module : TopLevelTracedModule object
TorchScripted PyTorch graph
        Note: We currently only support traces (i.e. torch.jit.trace(model, input))
input_infos: List of tuples of (input name, input shape)
or (input name, (input shape, input types))
Graph level input shape and type list
The same input names need to be used for deployment, so choose easy to
remember names (such as: input0, input1)
e.g.
[('input0', (1, 2)), ('input1', (3, 4))]
or
[('input0', ((1, 2), 'int')), ('input1', ((3, 4), 'float'))]
custom_convert_map: Dictionary of str to Relay op
A custom op conversion map in the same format as _convert_map above
Returns
-------
mod : tvm.relay.Module
The module that optimizations will be performed on.
params : dict of str to tvm.runtime.NDArray
Dict of converted parameters stored in tvm.runtime.ndarray format
"""
import torch
mod = tvm.IRModule()
prelude = Prelude(mod)
convert_map = _get_convert_map(prelude, default_dtype)
graph = script_module.graph.copy()
_run_jit_passes(graph)
if custom_convert_map:
convert_map.update(custom_convert_map)
op_names = get_all_op_names(graph)
_report_missing_conversion(op_names, convert_map)
is_module = isinstance(script_module, torch.jit.ScriptModule)
params = script_module.state_dict() if is_module else {}
outputs = _get_relay_input_vars(
graph, input_infos, prelude, default_dtype=default_dtype, is_module=is_module
)
param_vars, tensors, packed_param_map = convert_params(graph, params)
tvm_params = {k: tvm.nd.array(v) for k, v in tensors.items()}
outputs.update(param_vars)
ret_name = _get_input_names(graph.return_node())
# For quantized models
if "aten::quantize_per_tensor" in op_names:
weight_quant_params = qnn_torch.get_weight_quant_params(script_module)
qnn_torch.add_input_quant_params_to_op_inputs(graph)
qnn_torch.add_quant_params_to_outputs(outputs, packed_param_map, weight_quant_params)
qnn_torch.add_quant_params(tvm_params, weight_quant_params)
convert_map.update(qnn_torch.convert_map)
ret = convert_operators(
_get_operator_nodes(graph.nodes()),
outputs,
ret_name,
convert_map,
prelude,
default_dtype=default_dtype,
)
mod["main"] = tvm.relay.Function(_analysis.free_vars(ret[0]), ret[0])
return transform.RemoveUnusedFunctions()(mod), tvm_params
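# Hedged usage sketch (illustrative only; the model, input name and shape are
# assumptions, not part of this module):
def _from_pytorch_example():
    """Trace a small torch.nn.Module and convert it to Relay."""
    import torch
    model = torch.nn.Sequential(torch.nn.Linear(4, 2), torch.nn.ReLU()).eval()
    scripted = torch.jit.trace(model, torch.randn(1, 4))
    # The input name chosen here must also be used at deployment time.
    mod, params = from_pytorch(scripted, [("input0", (1, 4))])
    return mod, params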
| {
"content_hash": "9950fb45c27ab5c3ceefe271cac16072",
"timestamp": "",
"source": "github",
"line_count": 3360,
"max_line_length": 100,
"avg_line_length": 31.205952380952382,
"alnum_prop": 0.5607522984778545,
"repo_name": "sxjscience/tvm",
"id": "8e626f52d528202550b08d5209f9461bc65c3421",
"size": "105978",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/tvm/relay/frontend/pytorch.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "6056"
},
{
"name": "C",
"bytes": "95567"
},
{
"name": "C++",
"bytes": "5565032"
},
{
"name": "CMake",
"bytes": "67305"
},
{
"name": "Go",
"bytes": "112376"
},
{
"name": "HTML",
"bytes": "8625"
},
{
"name": "Java",
"bytes": "173219"
},
{
"name": "JavaScript",
"bytes": "49801"
},
{
"name": "Makefile",
"bytes": "50818"
},
{
"name": "Objective-C",
"bytes": "15264"
},
{
"name": "Objective-C++",
"bytes": "46673"
},
{
"name": "Python",
"bytes": "6763729"
},
{
"name": "Rust",
"bytes": "182027"
},
{
"name": "Scala",
"bytes": "184105"
},
{
"name": "Shell",
"bytes": "96967"
},
{
"name": "Tcl",
"bytes": "53645"
},
{
"name": "Verilog",
"bytes": "30605"
}
],
"symlink_target": ""
} |
import datetime
import dateutil.tz
from dateutil.parser import parse as parse_date
class AssetState:
"""AssetState is used instead of primitive state values (like ``2``, ``"a"``
or ``True``) when publishing data with a custom timestamp, i.e. not the
current timestamp at the time of publishing."""
def __init__(self, value, at=None):
"""Initializes the asset state.
:param value: Any JSON-serializable value applicable to the given :class:`~allthingstalk.Asset`.
:param datetime.datetime at: Optional timestamp
"""
self.value = value
if at is None:
self.at = datetime.datetime.utcnow()
elif isinstance(at, str):
self.at = parse_date(at)
elif isinstance(at, datetime.datetime):
self.at = at
else:
raise ValueError('Invalid timestamp in at: %s' % at)
if self.at.tzinfo is None:
self.at = self.at.replace(tzinfo=dateutil.tz.tzutc())
def __repr__(self):
return 'AssetState(value=%s, at=%s)' % (self.value, self.at)
def __str__(self):
return str(self.value)
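# Hedged usage sketch (illustrative): all three forms below are accepted.
#   AssetState(21.5)                                    # stamped with utcnow()
#   AssetState(21.5, at='2017-01-01T12:00:00Z')         # ISO string is parsed
#   AssetState(21.5, at=datetime.datetime(2017, 1, 1))  # naive dt gets tzutc()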
| {
"content_hash": "e22e2d5f886298557e5f11af379b90cc",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 104,
"avg_line_length": 32.6,
"alnum_prop": 0.6108676599474145,
"repo_name": "allthingstalk/python-sdk",
"id": "5f686e143a32bae57f369806ca8d5856ee7c8707",
"size": "2040",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "allthingstalk/asset_state.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "790"
},
{
"name": "Makefile",
"bytes": "619"
},
{
"name": "Python",
"bytes": "32083"
},
{
"name": "Shell",
"bytes": "50"
}
],
"symlink_target": ""
} |
"""Utilities for Backward differentiation formula (BDF) solver."""
import collections
import numpy as np
import tensorflow.compat.v1 as tf1
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import prefer_static as ps
from tensorflow_probability.python.internal import tensorshape_util
MAX_ORDER = 5
ORDERS = np.arange(0, MAX_ORDER + 1)
RECIPROCAL_SUMS = np.concatenate([[np.nan], np.cumsum(1. / ORDERS[1:])])
def error_ratio(backward_difference, error_coefficient, tol):
"""Computes the ratio of the error in the computed state to the tolerance."""
tol_cast = tf.cast(tol, backward_difference.dtype)
error_ratio_ = tf.norm(error_coefficient * backward_difference / tol_cast)
return tf.cast(error_ratio_, tf.abs(backward_difference).dtype)
def first_step_size(
atol,
first_order_error_coefficient,
initial_state_vec,
initial_time,
ode_fn_vec,
rtol,
safety_factor,
epsilon=1e-12,
max_step_size=1.,
min_step_size=1e-12,
):
"""Selects the first step size to use."""
next_time = initial_time + epsilon
first_derivative = ode_fn_vec(initial_time, initial_state_vec)
state_dtype = initial_state_vec.dtype
next_state_vec = initial_state_vec + first_derivative * epsilon
second_derivative = (ode_fn_vec(next_time, next_state_vec) -
first_derivative) / epsilon
tol = tf.cast(atol + rtol * tf.abs(initial_state_vec), state_dtype)
# Local truncation error of an order one step is
# `err(step_size) = first_order_error_coefficient * second_derivative *
# * step_size**2`.
# Choose the largest `step_size` such that `norm(err(step_size) / tol) <= 1`.
norm = tf.norm(first_order_error_coefficient * second_derivative / tol)
step_size = tf.cast(tf.math.rsqrt(norm), tf.abs(initial_state_vec).dtype)
return tf.clip_by_value(safety_factor * step_size, min_step_size,
max_step_size)
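# A hedged, self-contained sketch (not called by the solver): choose a first
# step for the scalar ODE dy/dt = -y with y(0) = 1.
def _first_step_size_example():
  def ode_fn_vec(t, y):
    return -y
  y0 = tf.constant([1.], dtype=tf.float64)
  return first_step_size(
      atol=1e-6,
      first_order_error_coefficient=0.5,
      initial_state_vec=y0,
      initial_time=tf.constant(0., dtype=tf.float64),
      ode_fn_vec=ode_fn_vec,
      rtol=1e-3,
      safety_factor=0.9)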
def interpolate_backward_differences(backward_differences, order,
step_size_ratio):
"""Updates backward differences when a change in the step size occurs."""
state_dtype = backward_differences.dtype
interpolation_matrix_ = interpolation_matrix(state_dtype, order,
step_size_ratio)
interpolation_matrix_unit_step_size_ratio = interpolation_matrix(
state_dtype, order, 1.)
interpolated_backward_differences_orders_one_to_five = tf.matmul(
interpolation_matrix_unit_step_size_ratio,
tf.matmul(interpolation_matrix_, backward_differences[1:MAX_ORDER + 1]))
interpolated_backward_differences = tf.concat([
tf.gather(backward_differences, [0]),
interpolated_backward_differences_orders_one_to_five,
ps.zeros(
ps.stack([2, ps.shape(backward_differences)[1]]), dtype=state_dtype),
], 0)
return interpolated_backward_differences
def interpolation_matrix(dtype, order, step_size_ratio):
"""Creates the matrix used to interpolate backward differences."""
orders = tf.cast(tf.range(1, MAX_ORDER + 1), dtype=dtype)
i = orders[:, tf.newaxis]
j = orders[tf.newaxis, :]
# Matrix whose (i, j)-th entry (`1 <= i, j <= order`) is
# `1/j! (0 - i * step_size_ratio) * ... * ((j-1) - i * step_size_ratio)`.
step_size_ratio_cast = tf.cast(step_size_ratio, dtype)
full_interpolation_matrix = tf.math.cumprod(
((j - 1) - i * step_size_ratio_cast) / j, axis=1)
zeros_matrix = tf.zeros_like(full_interpolation_matrix)
interpolation_matrix_ = tf1.where(
tf.range(1, MAX_ORDER + 1) <= order,
tf.transpose(
tf1.where(
tf.range(1, MAX_ORDER + 1) <= order,
tf.transpose(full_interpolation_matrix), zeros_matrix)),
zeros_matrix)
return interpolation_matrix_
def newton(backward_differences, max_num_iters, newton_coefficient, ode_fn_vec,
order, step_size, time, tol, unitary, upper):
"""Runs Newton's method to solve the BDF equation."""
initial_guess = tf.reduce_sum(
tf1.where(
tf.range(MAX_ORDER + 1) <= order,
backward_differences[:MAX_ORDER + 1],
tf.zeros_like(backward_differences)[:MAX_ORDER + 1]),
axis=0)
  np_dtype = dtype_util.as_numpy_dtype(backward_differences.dtype)
rhs_constant_term = newton_coefficient * tf.reduce_sum(
tf1.where(
tf.range(1, MAX_ORDER + 1) <= order,
RECIPROCAL_SUMS[1:, np.newaxis].astype(np_dtype) *
backward_differences[1:MAX_ORDER + 1],
tf.zeros_like(backward_differences)[1:MAX_ORDER + 1]),
axis=0)
next_time = time + step_size
step_size_cast = tf.cast(step_size, backward_differences.dtype)
real_dtype = tf.abs(backward_differences).dtype
def newton_body(iterand):
"""Performs one iteration of Newton's method."""
next_backward_difference = iterand.next_backward_difference
next_state_vec = iterand.next_state_vec
rhs = newton_coefficient * step_size_cast * ode_fn_vec(
next_time,
next_state_vec) - rhs_constant_term - next_backward_difference
delta = tf.squeeze(
tf.linalg.triangular_solve(
upper,
tf.matmul(tf.transpose(unitary), rhs[:, tf.newaxis]),
lower=False))
num_iters = iterand.num_iters + 1
next_backward_difference += delta
next_state_vec += delta
delta_norm = tf.cast(tf.norm(delta), real_dtype)
lipschitz_const = delta_norm / iterand.prev_delta_norm
# Stop if method has converged.
approx_dist_to_sol = lipschitz_const / (1. - lipschitz_const) * delta_norm
close_to_sol = approx_dist_to_sol < tol
delta_norm_is_zero = tf.equal(delta_norm, tf.constant(0., dtype=real_dtype))
converged = close_to_sol | delta_norm_is_zero
finished = converged
# Stop if any of the following conditions are met:
# (A) We have hit the maximum number of iterations.
# (B) The method is converging too slowly.
# (C) The method is not expected to converge.
too_slow = lipschitz_const > 1.
finished = finished | too_slow
if max_num_iters is not None:
too_many_iters = tf.equal(num_iters, max_num_iters)
num_iters_left = max_num_iters - num_iters
num_iters_left_cast = tf.cast(num_iters_left, real_dtype)
wont_converge = (
approx_dist_to_sol * lipschitz_const**num_iters_left_cast > tol)
finished = finished | too_many_iters | wont_converge
return [
_NewtonIterand(
converged=converged,
finished=finished,
next_backward_difference=next_backward_difference,
next_state_vec=next_state_vec,
num_iters=num_iters,
prev_delta_norm=delta_norm)
]
iterand = _NewtonIterand(
converged=False,
finished=False,
next_backward_difference=tf.zeros_like(initial_guess),
next_state_vec=tf.identity(initial_guess),
num_iters=0,
prev_delta_norm=tf.constant(np.array(-0.), dtype=real_dtype))
[iterand] = tf.while_loop(lambda iterand: tf.logical_not(iterand.finished),
newton_body, [iterand])
return (iterand.converged, iterand.next_backward_difference,
iterand.next_state_vec, iterand.num_iters)
_NewtonIterand = collections.namedtuple('NewtonIterand', [
'converged',
'finished',
'next_backward_difference',
'next_state_vec',
'num_iters',
'prev_delta_norm',
])
def newton_qr(jacobian_mat, newton_coefficient, step_size):
"""QR factorizes the matrix used in each iteration of Newton's method."""
identity = tf.eye(ps.shape(jacobian_mat)[0], dtype=jacobian_mat.dtype)
step_size_cast = tf.cast(step_size, jacobian_mat.dtype)
newton_matrix = (
identity - step_size_cast * newton_coefficient * jacobian_mat)
factorization = tf.linalg.qr(newton_matrix)
return factorization.q, factorization.r
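# Each iteration of `newton` above then solves
# (I - step_size * newton_coefficient * jacobian) @ delta = rhs
# as the triangular system r @ delta = q^T @ rhs, reusing this factorization
# across all iterations within a step.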
def update_backward_differences(backward_differences, next_backward_difference,
next_state_vec, order):
"""Returns the backward differences for the next time."""
backward_differences_array = tf.TensorArray(
backward_differences.dtype,
size=MAX_ORDER + 3,
clear_after_read=False,
element_shape=next_backward_difference.shape).unstack(
backward_differences)
new_backward_differences_array = tf.TensorArray(
backward_differences.dtype,
size=MAX_ORDER + 3,
clear_after_read=False,
element_shape=next_backward_difference.shape)
new_backward_differences_array = new_backward_differences_array.write(
order + 2,
next_backward_difference - backward_differences_array.read(order + 1))
new_backward_differences_array = new_backward_differences_array.write(
order + 1, next_backward_difference)
def body(k, new_backward_differences_array_):
new_backward_differences_array_k = (
new_backward_differences_array_.read(k + 1) +
backward_differences_array.read(k))
new_backward_differences_array_ = new_backward_differences_array_.write(
k, new_backward_differences_array_k)
return k - 1, new_backward_differences_array_
_, new_backward_differences_array = tf.while_loop(
lambda k, new_backward_differences_array: k > 0, body,
[order, new_backward_differences_array])
new_backward_differences_array = new_backward_differences_array.write(
0, next_state_vec)
new_backward_differences = new_backward_differences_array.stack()
tensorshape_util.set_shape(new_backward_differences,
tf.TensorShape([MAX_ORDER + 3, None]))
return new_backward_differences
| {
"content_hash": "c6c4a13e4615f204d971e3c89bd0179e",
"timestamp": "",
"source": "github",
"line_count": 239,
"max_line_length": 80,
"avg_line_length": 40.55648535564853,
"alnum_prop": 0.6673888373052719,
"repo_name": "tensorflow/probability",
"id": "349456b2bba4e2d931682e759fc2dc90ae7800cd",
"size": "10371",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tensorflow_probability/python/math/ode/bdf_util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "55552121"
},
{
"name": "Python",
"bytes": "17339674"
},
{
"name": "Shell",
"bytes": "24852"
},
{
"name": "Starlark",
"bytes": "663851"
}
],
"symlink_target": ""
} |
import time
from collections import defaultdict
import operator
# Below are two ways to read the file; we will go with the non-list-comprehension version
def readtext(filename):
with open(filename) as f:
txtlines = [[str(s) for s in line.rstrip("\n").split(" ")] for line in f]
return txtlines
def readtext2(filename):
data = open(filename, "r")
txtlines = list()
for line in data:
line = line.rstrip("\n")
lst = [str(s) for s in line.split(" ")]
# print(lst)
txtlines.append(lst)
return txtlines
def getdictoffreqwords(listoflines):
fullwordlist = defaultdict(int)
for line in listoflines:
for word in line:
fullwordlist[word] +=1
return fullwordlist
def getreducedwordlist(worddict,minsup):
return {k:v for k,v in worddict.items() if v >= minsup}
def getpatternsgivenline(line):
linelen = len(line)
# print(linelen)
patterns = set()
for i in range(1,linelen):
for j in range(0,linelen-i+1):
patterns.add(" ".join(line[j:j+i]))
# print(patterns)
# print(len(patterns))
    return patterns
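# Illustrative example (hedged): for line = ["a", "b", "c"] the returned set is
# {"a", "b", "c", "a b", "b c"} -- every contiguous phrase strictly shorter than
# the full line, because range(1, linelen) stops before length linelen.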
def getpatternsforeachline(alltext):
listoflinesets = []
i = 0
for line in alltext:
listoflinesets.append(getpatternsgivenline(line))
# print(i)
i += 1
return listoflinesets
def getphrasefreq(listoflinesets):
# print(listoflinesets)
phrasedict = defaultdict(int)
for lineset in listoflinesets:
# print(lineset)
if lineset is not None:
# print("inside")
for element in lineset:
phrasedict[element] +=1
return phrasedict
def filterbyfrequency(phrasefrequencydict,minsup):
return {k:v for k,v in phrasefrequencydict.items() if v >= minsup}
def filterbywordlength(phrasefrequencydict,minlength):
return {k: v for k, v in phrasefrequencydict.items() if len(k.split(" ")) >= minlength}
def printreturnfile(inputdict,outputfile):
# inputlist.sort(key=lambda x: -x[1])
inputlist = [(k,v) for k,v in inputdict.items()]
# print(inputlist)
inputlist.sort(key=operator.itemgetter(1),reverse=True)
with open(outputfile, 'w') as the_file:
for element in inputlist:
the_file.write(str(element[1]) + ":" + element[0].replace(" ",";") + '\n')
if __name__ == "__main__":
#testing time for reading
times = time.time()
txtlines = readtext2("rawdata/yelp_reviews.txt")
# print("timetaken by longer code = ",time.time() - times)
# time taken by the list comprehension is 0.18secs
# times = time.time()
# txtlines = readtext("rawdata/yelp_reviews.txt")
# print("timetaken by shorter code = ", time.time() - times)
# time taken by normal loop is 0.15secs
# going with normal code
# print(txtlines)
worddict = getdictoffreqwords(txtlines)
# print("worddict is ",worddict )
# print("len of worddict is ", len(worddict))
# worddict = getreducedwordlist(worddict,100)
# print("reduced worddict is ", worddict)
# print("len of reduced worddict is ", len(worddict))
# Test whether single line comprehension works
# getpatternsgivenline(txtlines[0])
# Get list of sets for each line
# times = time.time()
listoflinesets = getpatternsforeachline(txtlines)
# print("Got list of line phrases in ", time.time() - times, "seconds")
# Get list of all phrases
# times = time.time()
phrasesfreq = getphrasefreq(listoflinesets)
print("number of all phrases checked:",len(phrasesfreq))
frequentphrases = filterbyfrequency(phrasesfreq,100)
# print(frequentphrases)
# print(len(frequentphrases))
frequentphrases = filterbywordlength(frequentphrases, 2)
# print(frequentphrases)
# print(len(frequentphrases))
print("Ran Algo for yelp in ", time.time() - times, "seconds")
printreturnfile(frequentphrases, "output/yelpcontiguouspatterns.txt")
| {
"content_hash": "6cd26ac6850215abb1d9d0bc756b45ec",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 96,
"avg_line_length": 32.467213114754095,
"alnum_prop": 0.6574097450138854,
"repo_name": "p10rahulm/Dmage",
"id": "28bb2d41d9b97784363d8074fcbf87069b823271",
"size": "3961",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contiguous_patterns.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7184"
},
{
"name": "R",
"bytes": "4642"
}
],
"symlink_target": ""
} |
'''Trains a stacked what-where autoencoder built on residual blocks on the
MNIST dataset. It exemplifies two influential methods that have been developed
in the past few years.
The first is the idea of properly "unpooling." During any max pool, the
exact location (the "where") of the maximal value in a pooled receptive field
is lost, however it can be very useful in the overall reconstruction of an
input image. Therefore, if the "where" is handed from the encoder
to the corresponding decoder layer, features being decoded can be "placed" in
the right location, allowing for reconstructions of much higher fidelity.
References:
[1]
"Visualizing and Understanding Convolutional Networks"
Matthew D Zeiler, Rob Fergus
https://arxiv.org/abs/1311.2901v3
[2]
"Stacked What-Where Auto-encoders"
Junbo Zhao, Michael Mathieu, Ross Goroshin, Yann LeCun
https://arxiv.org/abs/1506.02351v8
The second idea exploited here is that of residual learning. Residual blocks
ease the training process by allowing skip connections that give the network
the ability to be as linear (or non-linear) as the data sees fit. This allows
for much deep networks to be easily trained. The residual element seems to
be advantageous in the context of this example as it allows a nice symmetry
between the encoder and decoder. Normally, in the decoder, the final
projection to the space where the image is reconstructed is linear, however
this does not have to be the case for a residual block as the degree to which
its output is linear or non-linear is determined by the data it is fed.
However, in order to cap the reconstruction in this example, a hard sigmoid
is applied as the final activation, because we know the MNIST digits are mapped to [0, 1].
References:
[3]
"Deep Residual Learning for Image Recognition"
Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun
https://arxiv.org/abs/1512.03385v1
[4]
"Identity Mappings in Deep Residual Networks"
Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun
https://arxiv.org/abs/1603.05027v3
'''
from __future__ import print_function
import numpy as np
np.random.seed(1337) # for reproducibility
from keras.datasets import mnist
from keras.models import Model
from keras.layers import Activation, merge
from keras.layers import UpSampling2D, Convolution2D, MaxPooling2D
from keras.layers import Input, BatchNormalization
import matplotlib.pyplot as plt
import keras.backend as K
def convresblock(x, nfeats=8, ksize=3, nskipped=2):
''' The proposed residual block from [4]'''
y0 = Convolution2D(nfeats, ksize, ksize, border_mode='same')(x)
y = y0
for i in range(nskipped):
y = BatchNormalization(mode=0, axis=1)(y)
y = Activation('relu')(y)
y = Convolution2D(nfeats, ksize, ksize, border_mode='same')(y)
return merge([y0, y], mode='sum')
def getwhere(x):
''' Calculate the "where" mask that contains switches indicating which
index contained the max value when MaxPool2D was applied. Using the
gradient of the sum is a nice trick to keep everything high level.'''
y_prepool, y_postpool = x
return K.gradients(K.sum(y_postpool), y_prepool)
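# Illustrative sketch of the mask (not executed code): for one 2x2 receptive
# field [[1, 3], [2, 0]], max pooling keeps 3, and the gradient of
# K.sum(y_postpool) w.r.t. y_prepool is the one-hot switch [[0, 1], [0, 0]];
# the decoder multiplies upsampled features by this mask to "place" them
# back at the original argmax locations.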
# input image dimensions
img_rows, img_cols = 28, 28
# the data, shuffled and split between train and test sets
(X_train, _), (X_test, _) = mnist.load_data()
X_train = X_train.reshape(X_train.shape[0], 1, img_rows, img_cols)
X_test = X_test.reshape(X_test.shape[0], 1, img_rows, img_cols)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train /= 255
X_test /= 255
print('X_train shape:', X_train.shape)
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')
# The size of the kernel used for the MaxPooling2D
pool_size = 2
# The total number of feature maps at each layer
nfeats = [8, 16, 32, 64, 128]
# The sizes of the pooling kernel at each layer
pool_sizes = np.array([1, 1, 1, 1, 1]) * pool_size
# The convolution kernel size
ksize = 3
# Number of epochs to train for
nb_epoch = 5
# Batch size during training
batch_size = 128
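# Shape sanity note (sketch): five pool_size=2 stages downsample by
# 2 ** 5 == 32, so the 28x28 MNIST images are zero-padded to 32x32 below;
# with pool_size=3, a 3-layer net needs 27x27 (3 ** 3 == 27), hence the
# one-pixel crop in that branch instead.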
if pool_size == 2:
# if using a 5 layer net of pool_size = 2
X_train = np.pad(X_train, [[0, 0], [0, 0], [2, 2], [2, 2]],
mode='constant')
X_test = np.pad(X_test, [[0, 0], [0, 0], [2, 2], [2, 2]], mode='constant')
nlayers = 5
elif pool_size == 3:
# if using a 3 layer net of pool_size = 3
X_train = X_train[:, :, :-1, :-1]
X_test = X_test[:, :, :-1, :-1]
nlayers = 3
else:
import sys
sys.exit("Script supports pool_size of 2 and 3.")
# Shape of input to train on (note, however, that the model is fully convolutional)
input_shape = X_train.shape[1:]
# The final list of the size of axis=1 for all layers, including input
nfeats_all = [input_shape[0]] + nfeats
# First build the encoder, all the while keeping track of the "where" masks
img_input = Input(shape=input_shape)
# We push the "where" masks to the following list
wheres = [None] * nlayers
y = img_input
for i in range(nlayers):
y_prepool = convresblock(y, nfeats=nfeats_all[i + 1], ksize=ksize)
y = MaxPooling2D(pool_size=(pool_sizes[i], pool_sizes[i]))(y_prepool)
wheres[i] = merge([y_prepool, y], mode=getwhere,
output_shape=lambda x: x[0])
# Now build the decoder, and use the stored "where" masks to place the features
for i in range(nlayers):
ind = nlayers - 1 - i
y = UpSampling2D(size=(pool_sizes[ind], pool_sizes[ind]))(y)
y = merge([y, wheres[ind]], mode='mul')
y = convresblock(y, nfeats=nfeats_all[ind], ksize=ksize)
# Use hard_sigmoid to clip the range of the reconstruction
y = Activation('hard_sigmoid')(y)
# Define the model and its mean squared error loss, then compile it with Adam
model = Model(img_input, y)
model.compile('adam', 'mse')
# Fit the model
model.fit(X_train, X_train, validation_data=(X_test, X_test),
batch_size=batch_size, nb_epoch=nb_epoch)
# Plot
X_recon = model.predict(X_test[:25])
X_plot = np.concatenate((X_test[:25], X_recon), axis=1)
X_plot = X_plot.reshape((5, 10, input_shape[-2], input_shape[-1]))
X_plot = np.vstack([np.hstack(x) for x in X_plot])
plt.figure()
plt.axis('off')
plt.title('Test Samples: Originals/Reconstructions')
plt.imshow(X_plot, interpolation='none', cmap='gray')
plt.savefig('reconstructions.png')
| {
"content_hash": "6b1e1f16b1f7a7ccb32a01f29b62d3ad",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 79,
"avg_line_length": 38.4251497005988,
"alnum_prop": 0.6909770920991117,
"repo_name": "dolaameng/keras",
"id": "56919072c9801ef8a82f19fa778833dcad5e821e",
"size": "6417",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "examples/mnist_swwae.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "697"
},
{
"name": "Python",
"bytes": "1135820"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function
from collections import namedtuple
from itertools import starmap
from timeit import default_timer
from time import sleep
from multiprocessing import Process, Pipe, current_process
from ..callbacks import Callback
from ..utils import import_required
# Stores execution data for each task
TaskData = namedtuple('TaskData', ('key', 'task', 'start_time',
'end_time', 'worker_id'))
class Profiler(Callback):
"""A profiler for dask execution at the task level.
Records the following information for each task:
1. Key
2. Task
3. Start time in seconds since the epoch
4. Finish time in seconds since the epoch
5. Worker id
Examples
--------
>>> from operator import add, mul
>>> from dask.threaded import get
>>> dsk = {'x': 1, 'y': (add, 'x', 10), 'z': (mul, 'y', 2)}
>>> with Profiler() as prof:
... get(dsk, 'z')
22
>>> prof.results # doctest: +SKIP
[('y', (add, 'x', 10), 1435352238.48039, 1435352238.480655, 140285575100160),
('z', (mul, 'y', 2), 1435352238.480657, 1435352238.480803, 140285566707456)]
These results can be visualized in a bokeh plot using the ``visualize``
method. Note that this requires bokeh to be installed.
>>> prof.visualize() # doctest: +SKIP
You can activate the profiler globally
>>> prof.register() # doctest: +SKIP
If you use the profiler globally you will need to clear out old results
manually.
>>> prof.clear()
"""
def __init__(self):
self._results = {}
self.results = []
self._dsk = {}
def __enter__(self):
self.clear()
return super(Profiler, self).__enter__()
def _start(self, dsk):
self._dsk.update(dsk)
def _pretask(self, key, dsk, state):
start = default_timer()
self._results[key] = (key, dsk[key], start)
def _posttask(self, key, value, dsk, state, id):
end = default_timer()
self._results[key] += (end, id)
def _finish(self, dsk, state, failed):
results = dict((k, v) for k, v in self._results.items() if len(v) == 5)
self.results += list(starmap(TaskData, results.values()))
self._results.clear()
def _plot(self, **kwargs):
from .profile_visualize import plot_tasks
return plot_tasks(self.results, self._dsk, **kwargs)
def visualize(self, **kwargs):
"""Visualize the profiling run in a bokeh plot.
See also
--------
dask.diagnostics.profile_visualize.visualize
"""
from .profile_visualize import visualize
return visualize(self, **kwargs)
def clear(self):
"""Clear out old results from profiler"""
self._results.clear()
del self.results[:]
self._dsk = {}
ResourceData = namedtuple('ResourceData', ('time', 'mem', 'cpu'))
class ResourceProfiler(Callback):
"""A profiler for resource use.
    Records the following at each timestep:
1. Time in seconds since the epoch
2. Memory usage in MB
3. % CPU usage
Examples
--------
>>> from operator import add, mul
>>> from dask.threaded import get
>>> dsk = {'x': 1, 'y': (add, 'x', 10), 'z': (mul, 'y', 2)}
>>> with ResourceProfiler() as prof: # doctest: +SKIP
... get(dsk, 'z')
22
These results can be visualized in a bokeh plot using the ``visualize``
method. Note that this requires bokeh to be installed.
>>> prof.visualize() # doctest: +SKIP
You can activate the profiler globally
>>> prof.register() # doctest: +SKIP
If you use the profiler globally you will need to clear out old results
manually.
>>> prof.clear() # doctest: +SKIP
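    The sampling interval is controlled by the ``dt`` keyword, the number of
    seconds between samples (e.g. ``ResourceProfiler(dt=0.5)``).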
"""
def __init__(self, dt=1):
self._tracker = _Tracker(dt)
self._tracker.start()
self.results = []
self._entered = False
def _start_collect(self):
assert self._tracker.is_alive(), "Resource tracker is shutdown"
self._tracker.parent_conn.send('collect')
def _stop_collect(self):
if self._tracker.is_alive():
self._tracker.parent_conn.send('send_data')
self.results.extend(starmap(ResourceData, self._tracker.parent_conn.recv()))
def __enter__(self):
self.clear()
self._entered = True
self._start_collect()
return super(ResourceProfiler, self).__enter__()
def __exit__(self, *args):
self._entered = False
self._stop_collect()
super(ResourceProfiler, self).__exit__(*args)
def _start(self, dsk):
self._start_collect()
def _finish(self, dsk, state, failed):
if not self._entered:
self._stop_collect()
def close(self):
"""Shutdown the resource tracker process"""
self._tracker.shutdown()
__del__ = close
def clear(self):
self.results = []
def _plot(self, **kwargs):
from .profile_visualize import plot_resources
return plot_resources(self.results, **kwargs)
def visualize(self, **kwargs):
"""Visualize the profiling run in a bokeh plot.
See also
--------
dask.diagnostics.profile_visualize.visualize
"""
from .profile_visualize import visualize
return visualize(self, **kwargs)
class _Tracker(Process):
"""Background process for tracking resource usage"""
def __init__(self, dt=1):
psutil = import_required("psutil", "Tracking resource usage requires "
"`psutil` to be installed")
Process.__init__(self)
self.daemon = True
self.dt = dt
self.parent = psutil.Process(current_process().pid)
self.parent_conn, self.child_conn = Pipe()
def shutdown(self):
if not self.parent_conn.closed:
self.parent_conn.send('shutdown')
self.parent_conn.close()
self.join()
def _update_pids(self, pid):
return [self.parent] + [p for p in self.parent.children()
if p.pid != pid and p.status() != 'zombie']
    def run(self):
        pid = current_process().pid
        ps = self._update_pids(pid)
data = []
while True:
try:
msg = self.child_conn.recv()
except KeyboardInterrupt:
continue
if msg == 'shutdown':
break
elif msg == 'collect':
ps = self._update_pids(pid)
while not self.child_conn.poll():
tic = default_timer()
mem = sum(p.memory_info().rss for p in ps) / 1e6
cpu = sum(p.cpu_percent() for p in ps)
data.append((tic, mem, cpu))
sleep(self.dt)
elif msg == 'send_data':
self.child_conn.send(data)
data = []
self.child_conn.close()
CacheData = namedtuple('CacheData', ('key', 'task', 'metric', 'cache_time',
'free_time'))
class CacheProfiler(Callback):
"""A profiler for dask execution at the scheduler cache level.
Records the following information for each task:
1. Key
2. Task
3. Size metric
4. Cache entry time in seconds since the epoch
5. Cache exit time in seconds since the epoch
Examples
--------
>>> from operator import add, mul
>>> from dask.threaded import get
>>> dsk = {'x': 1, 'y': (add, 'x', 10), 'z': (mul, 'y', 2)}
>>> with CacheProfiler() as prof:
... get(dsk, 'z')
22
>>> prof.results # doctest: +SKIP
[CacheData('y', (add, 'x', 10), 1, 1435352238.48039, 1435352238.480655),
CacheData('z', (mul, 'y', 2), 1, 1435352238.480657, 1435352238.480803)]
    The default is to count each task (``metric`` is 1 for all tasks). Other
    functions may be used as a metric instead through the ``metric`` keyword. For
example, the ``nbytes`` function found in ``cachey`` can be used to measure
the number of bytes in the cache.
>>> from cachey import nbytes # doctest: +SKIP
>>> with CacheProfiler(metric=nbytes) as prof: # doctest: +SKIP
... get(dsk, 'z')
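    Any one-argument callable can serve as the metric; for instance, a sketch
    that sizes list-valued results could pass ``CacheProfiler(metric=len,
    metric_name='length')`` (assuming ``len`` is valid for every task result).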
The profiling results can be visualized in a bokeh plot using the
``visualize`` method. Note that this requires bokeh to be installed.
>>> prof.visualize() # doctest: +SKIP
You can activate the profiler globally
>>> prof.register() # doctest: +SKIP
If you use the profiler globally you will need to clear out old results
manually.
>>> prof.clear()
"""
def __init__(self, metric=None, metric_name=None):
self._metric = metric if metric else lambda value: 1
if metric_name:
self._metric_name = metric_name
elif metric:
self._metric_name = metric.__name__
else:
self._metric_name = 'count'
def __enter__(self):
self.clear()
return super(CacheProfiler, self).__enter__()
def _start(self, dsk):
self._dsk.update(dsk)
if not self._start_time:
self._start_time = default_timer()
def _posttask(self, key, value, dsk, state, id):
t = default_timer()
self._cache[key] = (self._metric(value), t)
for k in state['released'].intersection(self._cache):
metric, start = self._cache.pop(k)
self.results.append(CacheData(k, dsk[k], metric, start, t))
def _finish(self, dsk, state, failed):
t = default_timer()
for k, (metric, start) in self._cache.items():
self.results.append(CacheData(k, dsk[k], metric, start, t))
self._cache.clear()
def _plot(self, **kwargs):
from .profile_visualize import plot_cache
return plot_cache(self.results, self._dsk, self._start_time,
self._metric_name, **kwargs)
def visualize(self, **kwargs):
"""Visualize the profiling run in a bokeh plot.
See also
--------
dask.diagnostics.profile_visualize.visualize
"""
from .profile_visualize import visualize
return visualize(self, **kwargs)
def clear(self):
"""Clear out old results from profiler"""
self.results = []
self._cache = {}
self._dsk = {}
self._start_time = None
| {
"content_hash": "d5f2da82b0f2adf5da4e6bddd1ccf958",
"timestamp": "",
"source": "github",
"line_count": 345,
"max_line_length": 88,
"avg_line_length": 30.469565217391306,
"alnum_prop": 0.5716324200913242,
"repo_name": "chrisbarber/dask",
"id": "8690defb7b0b76dee618feb831d55ffe0d8e5ff1",
"size": "10512",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "dask/diagnostics/profile.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4934"
},
{
"name": "Python",
"bytes": "1737672"
}
],
"symlink_target": ""
} |
from django.test.testcases import TestCase
from django_core.forms.widgets import CommaSeparatedListWidget
from django_core.forms.widgets import MultipleDecimalInputWidget
class CommaSeparatedListWidgetTestCase(TestCase):
def test_widget_value_from_datadict(self):
field_name = 'some_field'
data = {field_name: '1,4, 5 , hello'}
widget = CommaSeparatedListWidget()
value = widget.value_from_datadict(data=data, files={},
name=field_name)
self.assertEqual(value, ['1', '4', '5', 'hello'])
def test_widget_value_from_datadict_ints(self):
field_name = 'some_field'
data = {field_name: '1,4,5'}
widget = CommaSeparatedListWidget()
value = widget.value_from_datadict(data=data, files={},
name=field_name)
self.assertEqual(value, ['1', '4', '5'])
class MultipleDecimalInputWidgetTestCase(TestCase):
"""Test case for the multiple decimal input widget."""
def test_widget_value_from_data_dict(self):
field_name = 'some_field'
data = {
'{0}_0'.format(field_name): '3',
'{0}_1'.format(field_name): '2',
'{0}_2'.format(field_name): '1'
}
widget = MultipleDecimalInputWidget(num_inputs=3)
value = widget.value_from_datadict(data=data, files={},
name=field_name)
self.assertEqual(value, ['3', '2', '1'])
def test_widget_decompress(self):
val_1 = '5'
val_2 = '4'
val_3 = '1'
val_4 = '0'
widget = MultipleDecimalInputWidget()
value = '{0} {1} {2} {3}'.format(val_1, val_2, val_3, val_4)
decompressed_value = widget.decompress(value)
self.assertEqual(decompressed_value[0], val_1)
self.assertEqual(decompressed_value[1], val_2)
self.assertEqual(decompressed_value[2], val_3)
self.assertEqual(decompressed_value[3], val_4)
| {
"content_hash": "b7cbe8a82afdd2b5c570d310cb7124c2",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 68,
"avg_line_length": 38.75,
"alnum_prop": 0.5816377171215881,
"repo_name": "InfoAgeTech/django-core",
"id": "0f2b5e85d495834a061cd7a68a9aa366e207830c",
"size": "2015",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_form_widgets.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "452"
},
{
"name": "Python",
"bytes": "180676"
}
],
"symlink_target": ""
} |
"""
The POX packet library for packet parsing and creation.
This is based heavily on NOX's packet library, though it has undergone
some significant change, particularly with regard to making packet
assembly easier.
Could still use more work.
"""
# None of this is probably that big, and almost all of it gets loaded
# under most circumstances anyway. Let's just load all of it.
import arp as ARP
import dhcp as DHCP
import dns as DNS
import eap as EAP
import eapol as EAPOL
import ethernet as ETHERNET
import ipv4 as IPV4
import ipv6 as IPV6
import icmp as ICMP
import icmpv6 as ICMPV6
import lldp as LLDP
import tcp as TCP
import udp as UDP
import vlan as VLAN
import mpls as MPLS
from arp import *
from dhcp import *
from dns import *
from eap import *
from eapol import *
from ethernet import *
from ipv4 import *
from ipv6 import *
from icmp import *
from icmpv6 import *
from lldp import *
from tcp import *
from udp import *
from vlan import *
from mpls import *
__all__ = [
'arp',
'dhcp',
'dns',
'eap',
'eapol',
'ethernet',
'ipv4',
'ipv6',
'icmp',
'icmpv6',
'lldp',
'tcp',
'tcp_opt',
'udp',
'vlan',
'mpls',
'ARP',
'DHCP',
'DNS',
'EAP',
'EAPOL',
'ETHERNET',
'IPV4',
'IPV6',
'ICMP',
'ICMPV6',
'LLDP',
'TCP',
'UDP',
'VLAN',
'MPLS',
]
| {
"content_hash": "804db61026c17b57eb15c070909f42a4",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 70,
"avg_line_length": 16.743589743589745,
"alnum_prop": 0.6761102603369066,
"repo_name": "kavitshah8/SDNDeveloper",
"id": "4dffc7042402b670997e71fba7dfb0806040a25a",
"size": "1891",
"binary": false,
"copies": "3",
"ref": "refs/heads/SDN_Developer",
"path": "pox/lib/packet/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "15160"
},
{
"name": "JavaScript",
"bytes": "9048"
},
{
"name": "Python",
"bytes": "1113186"
},
{
"name": "Shell",
"bytes": "447"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, print_function
import sys
import setuptools
import pip.req
from setuptools.command.test import test as TestCommand
import stackquery
install_reqs = pip.req.parse_requirements('requirements.txt')
class Tox(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
        # import must be here, because outside this method the eggs aren't loaded
import tox
errcode = tox.cmdline(self.test_args)
sys.exit(errcode)
setuptools.setup(
name='stackquery',
version=stackquery.__version__,
author='Arx Cruz',
author_email='acruz@redhat.com',
url='https://github.com/arxcruz/stackquery',
packages=['stackquery'],
license='Apache License, Version 2.0',
scripts=['bin/stackquery'],
description='Get Launchpad statistics and create various CSV or HTML'
' tables from them.',
# long_description=long_description,
include_package_data=True,
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Utilities',
],
install_requires=[str(x.req) for x in install_reqs],
tests_require=['tox>=1.6'], # tox will take care of the other reqs
cmdclass={'test': Tox},
)
| {
"content_hash": "df6a0a38189c147b72aa269796732e02",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 73,
"avg_line_length": 32.67272727272727,
"alnum_prop": 0.6377295492487479,
"repo_name": "arxcruz/stackquery-cmd",
"id": "d526ba7d1b7f05d53846c30843d5e8c7e032080d",
"size": "2408",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "36924"
}
],
"symlink_target": ""
} |
css = u"""
body
{
font-family:"YaHei Consolas Hybrid";
}
#test1
{
font-family:"hello";
}
#test2
{
font-family:"Times New Roman";
}
#test3
{
font-family:"";
}
#test4
{
font-family:"YaHei Consolas Hybrid";
}
"""
html = u"""
<html>
<head></head>
<body>
<p id='test1'>hello</p> <!-- does not fallback to default font specified -->
<p id='test2'>hello</p> <!-- Works !! -->
<p id='test3'>hello d d d的的的的</p> <!-- Works !! fallbacks to default font -->
<p id='test4'>hello 的的的</p>
<p >hello 的的的</p> <!-- Works !! fallbacks to default font -->
</body>
</html>
"""
from PySide import QtCore, QtGui,QtWebKit
import base64
class MainWindow(QtGui.QMainWindow):
def __init__(self):
super(MainWindow, self).__init__()
self.webview = QtWebKit.QWebView(self)
self.webview.settings().setAttribute(
QtWebKit.QWebSettings.WebAttribute.DeveloperExtrasEnabled, True)
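        # Inject the CSS above as a user stylesheet via a base64 data: URI,
        # which QWebSettings.setUserStyleSheetUrl accepts.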
        encodeStr = base64.b64encode(css.encode('utf-8'))
        self.webview.settings().setUserStyleSheetUrl("data:text/css;charset=utf-8;base64,%s" % encodeStr)
self.inspector = QtWebKit.QWebInspector()
self.webview.setHtml(html)
#self.webview.load(QtCore.QUrl("http://localhost:8999/ptwebos/ide/?path=D:\Dhole\Workspace/netcafe"))
self.inspector.setPage(self.webview.page())
self.inspector.show()
self.setCentralWidget(self.webview)
self.createActions()
self.createMenus()
self.setWindowTitle("Dock Widgets")
def createActions(self):
self.fontAct = QtGui.QAction("&Font", self,
statusTip="Set Font",
triggered=self.font)
def createMenus(self):
self.menuBar().addSeparator()
self.helpMenu = self.menuBar().addMenu("&Font")
self.helpMenu.addAction(self.fontAct)
def font(self):
font, ok = QtGui.QFontDialog.getFont()
if ok:
self.webview.setFont(font)
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
mainWin = MainWindow()
mainWin.show()
sys.exit(app.exec_())
| {
"content_hash": "9ea766ed69d7bfb13297103fe5edc4a3",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 109,
"avg_line_length": 24.892857142857142,
"alnum_prop": 0.6183644189383071,
"repo_name": "ptphp/PyLib",
"id": "231167bfdf710d5df4ef620238c6a8fdaa74d357",
"size": "2157",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/pyside/font/wekitfont.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1523"
},
{
"name": "C++",
"bytes": "7541"
},
{
"name": "CSS",
"bytes": "625731"
},
{
"name": "JavaScript",
"bytes": "4811257"
},
{
"name": "PHP",
"bytes": "34868"
},
{
"name": "Python",
"bytes": "3824172"
},
{
"name": "Ruby",
"bytes": "322"
},
{
"name": "SQL",
"bytes": "685656"
},
{
"name": "Shell",
"bytes": "4143"
}
],
"symlink_target": ""
} |
import datetime
import unittest
from freezegun import freeze_time
from airflow import settings
from airflow.models import DagRun, TaskInstance
from airflow.models.dag import DAG
from airflow.operators.dummy import DummyOperator
from airflow.operators.latest_only import LatestOnlyOperator
from airflow.utils import timezone
from airflow.utils.session import create_session
from airflow.utils.state import State
from airflow.utils.trigger_rule import TriggerRule
from airflow.utils.types import DagRunType
DEFAULT_DATE = timezone.datetime(2016, 1, 1)
END_DATE = timezone.datetime(2016, 1, 2)
INTERVAL = datetime.timedelta(hours=12)
FROZEN_NOW = timezone.datetime(2016, 1, 2, 12, 1, 1)
def get_task_instances(task_id):
session = settings.Session()
return (
session.query(TaskInstance)
.join(TaskInstance.dag_run)
.filter(TaskInstance.task_id == task_id)
.order_by(DagRun.execution_date)
.all()
)
class TestLatestOnlyOperator(unittest.TestCase):
def setUp(self):
super().setUp()
self.dag = DAG(
'test_dag',
default_args={'owner': 'airflow', 'start_date': DEFAULT_DATE},
schedule_interval=INTERVAL,
)
with create_session() as session:
session.query(DagRun).delete()
session.query(TaskInstance).delete()
freezer = freeze_time(FROZEN_NOW)
freezer.start()
self.addCleanup(freezer.stop)
def test_run(self):
task = LatestOnlyOperator(task_id='latest', dag=self.dag)
task.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
def test_skipping_non_latest(self):
latest_task = LatestOnlyOperator(task_id='latest', dag=self.dag)
downstream_task = DummyOperator(task_id='downstream', dag=self.dag)
downstream_task2 = DummyOperator(task_id='downstream_2', dag=self.dag)
downstream_task3 = DummyOperator(
task_id='downstream_3', trigger_rule=TriggerRule.NONE_FAILED, dag=self.dag
)
downstream_task.set_upstream(latest_task)
downstream_task2.set_upstream(downstream_task)
downstream_task3.set_upstream(downstream_task)
self.dag.create_dagrun(
run_type=DagRunType.SCHEDULED,
start_date=timezone.utcnow(),
execution_date=DEFAULT_DATE,
state=State.RUNNING,
)
self.dag.create_dagrun(
run_type=DagRunType.SCHEDULED,
start_date=timezone.utcnow(),
execution_date=timezone.datetime(2016, 1, 1, 12),
state=State.RUNNING,
)
self.dag.create_dagrun(
run_type=DagRunType.SCHEDULED,
start_date=timezone.utcnow(),
execution_date=END_DATE,
state=State.RUNNING,
)
latest_task.run(start_date=DEFAULT_DATE, end_date=END_DATE)
downstream_task.run(start_date=DEFAULT_DATE, end_date=END_DATE)
downstream_task2.run(start_date=DEFAULT_DATE, end_date=END_DATE)
downstream_task3.run(start_date=DEFAULT_DATE, end_date=END_DATE)
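        # Expected behaviour: only the latest run (2016-01-02) executes the
        # downstream tasks; earlier runs are skipped, except for
        # downstream_3, whose NONE_FAILED trigger rule tolerates skipped
        # upstream tasks.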
latest_instances = get_task_instances('latest')
exec_date_to_latest_state = {ti.execution_date: ti.state for ti in latest_instances}
assert {
timezone.datetime(2016, 1, 1): 'success',
timezone.datetime(2016, 1, 1, 12): 'success',
timezone.datetime(2016, 1, 2): 'success',
} == exec_date_to_latest_state
downstream_instances = get_task_instances('downstream')
exec_date_to_downstream_state = {ti.execution_date: ti.state for ti in downstream_instances}
assert {
timezone.datetime(2016, 1, 1): 'skipped',
timezone.datetime(2016, 1, 1, 12): 'skipped',
timezone.datetime(2016, 1, 2): 'success',
} == exec_date_to_downstream_state
downstream_instances = get_task_instances('downstream_2')
exec_date_to_downstream_state = {ti.execution_date: ti.state for ti in downstream_instances}
assert {
timezone.datetime(2016, 1, 1): None,
timezone.datetime(2016, 1, 1, 12): None,
timezone.datetime(2016, 1, 2): 'success',
} == exec_date_to_downstream_state
downstream_instances = get_task_instances('downstream_3')
exec_date_to_downstream_state = {ti.execution_date: ti.state for ti in downstream_instances}
assert {
timezone.datetime(2016, 1, 1): 'success',
timezone.datetime(2016, 1, 1, 12): 'success',
timezone.datetime(2016, 1, 2): 'success',
} == exec_date_to_downstream_state
def test_not_skipping_external(self):
latest_task = LatestOnlyOperator(task_id='latest', dag=self.dag)
downstream_task = DummyOperator(task_id='downstream', dag=self.dag)
downstream_task2 = DummyOperator(task_id='downstream_2', dag=self.dag)
downstream_task.set_upstream(latest_task)
downstream_task2.set_upstream(downstream_task)
self.dag.create_dagrun(
run_type=DagRunType.MANUAL,
start_date=timezone.utcnow(),
execution_date=DEFAULT_DATE,
state=State.RUNNING,
external_trigger=True,
)
self.dag.create_dagrun(
run_type=DagRunType.MANUAL,
start_date=timezone.utcnow(),
execution_date=timezone.datetime(2016, 1, 1, 12),
state=State.RUNNING,
external_trigger=True,
)
self.dag.create_dagrun(
run_type=DagRunType.MANUAL,
start_date=timezone.utcnow(),
execution_date=END_DATE,
state=State.RUNNING,
external_trigger=True,
)
latest_task.run(start_date=DEFAULT_DATE, end_date=END_DATE)
downstream_task.run(start_date=DEFAULT_DATE, end_date=END_DATE)
downstream_task2.run(start_date=DEFAULT_DATE, end_date=END_DATE)
latest_instances = get_task_instances('latest')
exec_date_to_latest_state = {ti.execution_date: ti.state for ti in latest_instances}
assert {
timezone.datetime(2016, 1, 1): 'success',
timezone.datetime(2016, 1, 1, 12): 'success',
timezone.datetime(2016, 1, 2): 'success',
} == exec_date_to_latest_state
downstream_instances = get_task_instances('downstream')
exec_date_to_downstream_state = {ti.execution_date: ti.state for ti in downstream_instances}
assert {
timezone.datetime(2016, 1, 1): 'success',
timezone.datetime(2016, 1, 1, 12): 'success',
timezone.datetime(2016, 1, 2): 'success',
} == exec_date_to_downstream_state
downstream_instances = get_task_instances('downstream_2')
exec_date_to_downstream_state = {ti.execution_date: ti.state for ti in downstream_instances}
assert {
timezone.datetime(2016, 1, 1): 'success',
timezone.datetime(2016, 1, 1, 12): 'success',
timezone.datetime(2016, 1, 2): 'success',
} == exec_date_to_downstream_state
| {
"content_hash": "7711710f967efbef657e017fa55cf8a5",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 100,
"avg_line_length": 39.24309392265194,
"alnum_prop": 0.6283260594115163,
"repo_name": "apache/incubator-airflow",
"id": "90714afb71fb30a3a525cd0771b372f0d35ef9b4",
"size": "7891",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "tests/operators/test_latest_only_operator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "69070"
},
{
"name": "Dockerfile",
"bytes": "2001"
},
{
"name": "HTML",
"bytes": "283783"
},
{
"name": "JavaScript",
"bytes": "1387552"
},
{
"name": "Mako",
"bytes": "1284"
},
{
"name": "Python",
"bytes": "5482822"
},
{
"name": "Shell",
"bytes": "40957"
}
],
"symlink_target": ""
} |
from webob import exc
from nova.api.openstack import common
from nova.api.openstack.compute.schemas import server_migrations
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api import validation
from nova import compute
from nova import exception
from nova.i18n import _
from nova.policies import servers_migrations as sm_policies
def output(migration):
"""Returns the desired output of the API from an object.
    From a Migration object this method returns the primitive
    object with only the necessary and expected fields.
"""
return {
"created_at": migration.created_at,
"dest_compute": migration.dest_compute,
"dest_host": migration.dest_host,
"dest_node": migration.dest_node,
"disk_processed_bytes": migration.disk_processed,
"disk_remaining_bytes": migration.disk_remaining,
"disk_total_bytes": migration.disk_total,
"id": migration.id,
"memory_processed_bytes": migration.memory_processed,
"memory_remaining_bytes": migration.memory_remaining,
"memory_total_bytes": migration.memory_total,
"server_uuid": migration.instance_uuid,
"source_compute": migration.source_compute,
"source_node": migration.source_node,
"status": migration.status,
"updated_at": migration.updated_at
}
class ServerMigrationsController(wsgi.Controller):
"""The server migrations API controller for the OpenStack API."""
def __init__(self):
self.compute_api = compute.API()
super(ServerMigrationsController, self).__init__()
@wsgi.Controller.api_version("2.22")
@wsgi.response(202)
@extensions.expected_errors((400, 403, 404, 409))
@wsgi.action('force_complete')
@validation.schema(server_migrations.force_complete)
def _force_complete(self, req, id, server_id, body):
context = req.environ['nova.context']
context.can(sm_policies.POLICY_ROOT % 'force_complete')
instance = common.get_instance(self.compute_api, context, server_id)
try:
self.compute_api.live_migrate_force_complete(context, instance, id)
except exception.InstanceNotFound as e:
raise exc.HTTPNotFound(explanation=e.format_message())
except (exception.MigrationNotFoundByStatus,
exception.InvalidMigrationState,
exception.MigrationNotFoundForInstance) as e:
raise exc.HTTPBadRequest(explanation=e.format_message())
except exception.InstanceIsLocked as e:
raise exc.HTTPConflict(explanation=e.format_message())
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(
state_error, 'force_complete', server_id)
@wsgi.Controller.api_version("2.23")
@extensions.expected_errors(404)
def index(self, req, server_id):
"""Return all migrations of an instance in progress."""
context = req.environ['nova.context']
context.can(sm_policies.POLICY_ROOT % 'index')
# NOTE(Shaohe Feng) just check the instance is available. To keep
# consistency with other API, check it before get migrations.
common.get_instance(self.compute_api, context, server_id)
migrations = self.compute_api.get_migrations_in_progress_by_instance(
context, server_id, 'live-migration')
return {'migrations': [output(migration) for migration in migrations]}
@wsgi.Controller.api_version("2.23")
@extensions.expected_errors(404)
def show(self, req, server_id, id):
"""Return the migration of an instance in progress by id."""
context = req.environ['nova.context']
context.can(sm_policies.POLICY_ROOT % 'show')
# NOTE(Shaohe Feng) just check the instance is available. To keep
# consistency with other API, check it before get migrations.
common.get_instance(self.compute_api, context, server_id)
try:
migration = self.compute_api.get_migration_by_id_and_instance(
context, id, server_id)
except exception.MigrationNotFoundForInstance:
msg = _("In-progress live migration %(id)s is not found for"
" server %(uuid)s.") % {"id": id, "uuid": server_id}
raise exc.HTTPNotFound(explanation=msg)
if migration.get("migration_type") != "live-migration":
msg = _("Migration %(id)s for server %(uuid)s is not"
" live-migration.") % {"id": id, "uuid": server_id}
raise exc.HTTPNotFound(explanation=msg)
# TODO(Shaohe Feng) we should share the in-progress list.
in_progress = ['queued', 'preparing', 'running', 'post-migrating']
if migration.get("status") not in in_progress:
msg = _("Live migration %(id)s for server %(uuid)s is not in"
" progress.") % {"id": id, "uuid": server_id}
raise exc.HTTPNotFound(explanation=msg)
return {'migration': output(migration)}
@wsgi.Controller.api_version("2.24")
@wsgi.response(202)
@extensions.expected_errors((400, 404, 409))
def delete(self, req, server_id, id):
"""Abort an in progress migration of an instance."""
context = req.environ['nova.context']
context.can(sm_policies.POLICY_ROOT % 'delete')
instance = common.get_instance(self.compute_api, context, server_id)
try:
self.compute_api.live_migrate_abort(context, instance, id)
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(
state_error, "abort live migration", server_id)
except exception.MigrationNotFoundForInstance as e:
raise exc.HTTPNotFound(explanation=e.format_message())
except exception.InvalidMigrationState as e:
raise exc.HTTPBadRequest(explanation=e.format_message())
| {
"content_hash": "9634ffa4d64cad47b13ae0b00b2cba8f",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 79,
"avg_line_length": 44.08029197080292,
"alnum_prop": 0.6560688855770823,
"repo_name": "Juniper/nova",
"id": "c0fd4c38b5ae11f47b90424a1c2ccf5aa3d31a7d",
"size": "6675",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/api/openstack/compute/server_migrations.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "601"
},
{
"name": "PHP",
"bytes": "23962"
},
{
"name": "Python",
"bytes": "19816434"
},
{
"name": "Shell",
"bytes": "27717"
},
{
"name": "Smarty",
"bytes": "339635"
}
],
"symlink_target": ""
} |
from cx_Freeze import setup, Executable
# Dependencies are automatically detected, but it might need
# fine tuning.
buildOptions = dict(packages = [], excludes = [])
executables = [
Executable('push.py', 'Console', targetName = 'ldpush')
]
setup(name='ldpush',
version = '1.0',
description = 'A cross-vendor network configuration distribution tool. This is useful for pushing ACLs or other pieces of configuration to network elements. It can also be used to send commands to a list of devices and gather the results.',
options = dict(build_exe = buildOptions),
executables = executables)
| {
"content_hash": "ac7764aeb39e35dce6d10e4f6e473163",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 246,
"avg_line_length": 41.266666666666666,
"alnum_prop": 0.7205169628432956,
"repo_name": "google/ldpush",
"id": "fe3c3ff1fc03400d6ef39bc60a330972d964cf27",
"size": "619",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "181808"
}
],
"symlink_target": ""
} |
import json
from os import path
import warnings
import numpy
import six
from chainer import reporter
from chainer import serializer as serializer_module
from chainer.training import extension
from chainer.training import trigger as trigger_module
try:
import matplotlib # NOQA
_available = True
except (ImportError, TypeError):
_available = False
def _check_available():
if not _available:
warnings.warn('matplotlib is not installed on your environment, '
'so nothing will be plotted at this time. '
'Please install matplotlib to plot figures.\n\n'
' $ pip install matplotlib\n')
class PlotReport(extension.Extension):
"""Trainer extension to output plots.
This extension accumulates the observations of the trainer to
:class:`~chainer.DictSummary` at a regular interval specified by a supplied
trigger, and plot a graph with using them.
There are two triggers to handle this extension. One is the trigger to
invoke this extension, which is used to handle the timing of accumulating
the results. It is set to ``1, 'iteration'`` by default. The other is the
trigger to determine when to emit the result. When this trigger returns
True, this extension appends the summary of accumulated values to the list
of past summaries, and writes the list to the log file. Then, this
extension makes a new fresh summary object which is used until the next
time that the trigger fires.
It also adds ``'epoch'`` and ``'iteration'`` entries to each result
dictionary, which are the epoch and iteration counts at the output.
.. warning::
If your environment needs to specify a backend of matplotlib
explicitly, please call ``matplotlib.use`` before calling
``trainer.run``. For example:
.. code-block:: python
import matplotlib
matplotlib.use('Agg')
trainer.extend(
extensions.PlotReport(['main/loss', 'validation/main/loss'],
'epoch', file_name='loss.png'))
trainer.run()
Then, once one of instances of this extension is called,
``matplotlib.use`` will have no effect.
For the details, please see here:
https://matplotlib.org/faq/usage_faq.html#what-is-a-backend
Args:
y_keys (iterable of strs): Keys of values regarded as y. If this is
None, nothing is output to the graph.
x_key (str): Keys of values regarded as x. The default value is
'iteration'.
trigger: Trigger that decides when to aggregate the result and output
the values. This is distinct from the trigger of this extension
itself. If it is a tuple in the form ``<int>, 'epoch'`` or ``<int>,
'iteration'``, it is passed to :class:`IntervalTrigger`.
postprocess: Callback to postprocess the result dictionaries. Figure
object, Axes object, and all plot data are passed to this callback
in this order. This callback can modify the figure.
file_name (str): Name of the figure file under the output directory.
It can be a format string.
marker (str): The marker used to plot the graph. Default is ``'x'``. If
``None`` is given, it draws with no markers.
grid (bool): Set the axis grid on if True. Default is True.
"""
def __init__(self, y_keys, x_key='iteration', trigger=(1, 'epoch'),
postprocess=None, file_name='plot.png', marker='x',
grid=True):
_check_available()
self._x_key = x_key
if isinstance(y_keys, str):
y_keys = (y_keys,)
self._y_keys = y_keys
self._trigger = trigger_module.get_trigger(trigger)
self._file_name = file_name
self._marker = marker
self._grid = grid
self._postprocess = postprocess
self._init_summary()
self._data = {k: [] for k in y_keys}
@staticmethod
def available():
_check_available()
return _available
def __call__(self, trainer):
if _available:
# Dynamically import pyplot to call matplotlib.use()
# after importing chainer.training.extensions
import matplotlib.pyplot as plt
else:
return
keys = self._y_keys
observation = trainer.observation
summary = self._summary
if keys is None:
summary.add(observation)
else:
summary.add({k: observation[k] for k in keys if k in observation})
if self._trigger(trainer):
stats = self._summary.compute_mean()
stats_cpu = {}
for name, value in six.iteritems(stats):
stats_cpu[name] = float(value) # copy to CPU
updater = trainer.updater
stats_cpu['epoch'] = updater.epoch
stats_cpu['iteration'] = updater.iteration
x = stats_cpu[self._x_key]
data = self._data
for k in keys:
if k in stats_cpu:
data[k].append((x, stats_cpu[k]))
f = plt.figure()
a = f.add_subplot(111)
a.set_xlabel(self._x_key)
if self._grid:
a.grid()
for k in keys:
xy = data[k]
if len(xy) == 0:
continue
xy = numpy.array(xy)
a.plot(xy[:, 0], xy[:, 1], marker=self._marker, label=k)
if a.has_data():
if self._postprocess is not None:
self._postprocess(f, a, summary)
l = a.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
f.savefig(path.join(trainer.out, self._file_name),
bbox_extra_artists=(l,), bbox_inches='tight')
plt.close()
self._init_summary()
def serialize(self, serializer):
if isinstance(serializer, serializer_module.Serializer):
serializer('_plot_{}'.format(self._file_name),
json.dumps(self._data))
else:
self._data = json.loads(
serializer('_plot_{}'.format(self._file_name), ''))
def _init_summary(self):
self._summary = reporter.DictSummary()
| {
"content_hash": "ec609db5795d22b39e3881ae9f506f4d",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 79,
"avg_line_length": 34.89673913043478,
"alnum_prop": 0.5854228313346831,
"repo_name": "anaruse/chainer",
"id": "4877dc4f8ec7bac858ad21afa01d82faefe83cc1",
"size": "6421",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "chainer/training/extensions/plot_report.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3368"
},
{
"name": "PowerShell",
"bytes": "7197"
},
{
"name": "Python",
"bytes": "3723858"
}
],
"symlink_target": ""
} |
"""Testing utilities."""
# Copyright (c) 2011, 2012
# Authors: Pietro Berkes,
# Andreas Muller
# Mathieu Blondel
# Olivier Grisel
# Arnaud Joly
# Denis Engemann
# License: BSD 3 clause
import os
import inspect
import pkgutil
import warnings
import sys
import re
import platform
import scipy as sp
import scipy.io
from functools import wraps
try:
# Python 2
from urllib2 import urlopen
from urllib2 import HTTPError
except ImportError:
# Python 3+
from urllib.request import urlopen
from urllib.error import HTTPError
import sklearn
from sklearn.base import BaseEstimator
# Conveniently import all assertions in one place.
from nose.tools import assert_equal
from nose.tools import assert_not_equal
from nose.tools import assert_true
from nose.tools import assert_false
from nose.tools import assert_raises
from nose.tools import raises
from nose import SkipTest
from nose import with_setup
from numpy.testing import assert_almost_equal
from numpy.testing import assert_array_equal
from numpy.testing import assert_array_almost_equal
from numpy.testing import assert_array_less
import numpy as np
from sklearn.base import (ClassifierMixin, RegressorMixin, TransformerMixin,
ClusterMixin)
__all__ = ["assert_equal", "assert_not_equal", "assert_raises",
"assert_raises_regexp", "raises", "with_setup", "assert_true",
"assert_false", "assert_almost_equal", "assert_array_equal",
"assert_array_almost_equal", "assert_array_less",
"assert_less", "assert_less_equal",
"assert_greater", "assert_greater_equal"]
try:
from nose.tools import assert_in, assert_not_in
except ImportError:
# Nose < 1.0.0
def assert_in(x, container):
assert_true(x in container, msg="%r in %r" % (x, container))
def assert_not_in(x, container):
assert_false(x in container, msg="%r in %r" % (x, container))
try:
from nose.tools import assert_raises_regex
except ImportError:
# for Py 2.6
def assert_raises_regex(expected_exception, expected_regexp,
callable_obj=None, *args, **kwargs):
"""Helper function to check for message patterns in exceptions"""
not_raised = False
try:
callable_obj(*args, **kwargs)
not_raised = True
except Exception as e:
error_message = str(e)
if not re.compile(expected_regexp).search(error_message):
raise AssertionError("Error message should match pattern "
"%r. %r does not." %
(expected_regexp, error_message))
if not_raised:
raise AssertionError("Should have raised %r" %
expected_exception(expected_regexp))
# assert_raises_regexp is deprecated in Python 3.4 in favor of
# assert_raises_regex, but let's keep the backward compat in scikit-learn with
# the old name for now
assert_raises_regexp = assert_raises_regex
def _assert_less(a, b, msg=None):
message = "%r is not lower than %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a < b, message
def _assert_greater(a, b, msg=None):
message = "%r is not greater than %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a > b, message
def assert_less_equal(a, b, msg=None):
message = "%r is not lower than or equal to %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a <= b, message
def assert_greater_equal(a, b, msg=None):
message = "%r is not greater than or equal to %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a >= b, message
def assert_warns(warning_class, func, *args, **kw):
"""Test that a certain warning occurs.
Parameters
----------
warning_class : the warning class
The class to test for, e.g. UserWarning.
func : callable
        Callable object to trigger warnings.
*args : the positional arguments to `func`.
**kw : the keyword arguments to `func`
Returns
-------
result : the return value of `func`
"""
# very important to avoid uncontrolled state propagation
clean_warning_registry()
with warnings.catch_warnings(record=True) as w:
# Cause all warnings to always be triggered.
warnings.simplefilter("always")
# Trigger a warning.
result = func(*args, **kw)
if hasattr(np, 'VisibleDeprecationWarning'):
# Filter out numpy-specific warnings in numpy >= 1.9
w = [e for e in w
if not e.category is np.VisibleDeprecationWarning]
# Verify some things
if not len(w) > 0:
raise AssertionError("No warning raised when calling %s"
% func.__name__)
found = any(warning.category is warning_class for warning in w)
if not found:
raise AssertionError("%s did not give warning: %s( is %s)"
% (func.__name__, warning_class, w))
return result
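# Usage sketch: assert_warns(UserWarning, warnings.warn, "boo") passes and
# returns the callable's return value (None here); it raises AssertionError
# when no warning of the given class is emitted.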
def assert_warns_message(warning_class, message, func, *args, **kw):
# very important to avoid uncontrolled state propagation
"""Test that a certain warning occurs and with a certain message.
Parameters
----------
warning_class : the warning class
The class to test for, e.g. UserWarning.
message : str | callable
The entire message or a substring to test for. If callable,
it takes a string as argument and will trigger an assertion error
if it returns `False`.
func : callable
        Callable object to trigger warnings.
*args : the positional arguments to `func`.
**kw : the keyword arguments to `func`.
Returns
-------
result : the return value of `func`
"""
clean_warning_registry()
with warnings.catch_warnings(record=True) as w:
# Cause all warnings to always be triggered.
warnings.simplefilter("always")
if hasattr(np, 'VisibleDeprecationWarning'):
# Let's not catch the numpy internal DeprecationWarnings
warnings.simplefilter('ignore', np.VisibleDeprecationWarning)
# Trigger a warning.
result = func(*args, **kw)
# Verify some things
if not len(w) > 0:
raise AssertionError("No warning raised when calling %s"
% func.__name__)
if not w[0].category is warning_class:
raise AssertionError("First warning for %s is not a "
"%s( is %s)"
% (func.__name__, warning_class, w[0]))
# substring will match, the entire message with typo won't
msg = w[0].message # For Python 3 compatibility
msg = str(msg.args[0] if hasattr(msg, 'args') else msg)
if callable(message): # add support for certain tests
check_in_message = message
else:
check_in_message = lambda msg: message in msg
if not check_in_message(msg):
raise AssertionError("The message received ('%s') for <%s> is "
"not the one you expected ('%s')"
% (msg, func.__name__, message
))
return result
# To remove when we support numpy 1.7
def assert_no_warnings(func, *args, **kw):
    # XXX: once we can depend on python >= 2.6, this can be replaced by the
# warnings module context manager.
# very important to avoid uncontrolled state propagation
clean_warning_registry()
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
result = func(*args, **kw)
if hasattr(np, 'VisibleDeprecationWarning'):
# Filter out numpy-specific warnings in numpy >= 1.9
w = [e for e in w
if not e.category is np.VisibleDeprecationWarning]
if len(w) > 0:
raise AssertionError("Got warnings when calling %s: %s"
% (func.__name__, w))
return result
def ignore_warnings(obj=None):
""" Context manager and decorator to ignore warnings
Note. Using this (in both variants) will clear all warnings
from all python modules loaded. In case you need to test
cross-module-warning-logging this is not your tool of choice.
Examples
--------
>>> with ignore_warnings():
... warnings.warn('buhuhuhu')
>>> def nasty_warn():
... warnings.warn('buhuhuhu')
... print(42)
>>> ignore_warnings(nasty_warn)()
42
"""
if callable(obj):
return _ignore_warnings(obj)
else:
return _IgnoreWarnings()
def _ignore_warnings(fn):
"""Decorator to catch and hide warnings without visual nesting"""
@wraps(fn)
def wrapper(*args, **kwargs):
# very important to avoid uncontrolled state propagation
clean_warning_registry()
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
return fn(*args, **kwargs)
w[:] = []
return wrapper
class _IgnoreWarnings(object):
"""Improved and simplified Python warnings context manager
Copied from Python 2.7.5 and modified as required.
"""
def __init__(self):
"""
Parameters
==========
category : warning class
The category to filter. Defaults to Warning. If None,
all categories will be muted.
"""
self._record = True
self._module = sys.modules['warnings']
self._entered = False
self.log = []
def __repr__(self):
args = []
if self._record:
args.append("record=True")
if self._module is not sys.modules['warnings']:
args.append("module=%r" % self._module)
name = type(self).__name__
return "%s(%s)" % (name, ", ".join(args))
def __enter__(self):
clean_warning_registry() # be safe and not propagate state + chaos
warnings.simplefilter('always')
if self._entered:
raise RuntimeError("Cannot enter %r twice" % self)
self._entered = True
self._filters = self._module.filters
self._module.filters = self._filters[:]
self._showwarning = self._module.showwarning
if self._record:
self.log = []
def showwarning(*args, **kwargs):
self.log.append(warnings.WarningMessage(*args, **kwargs))
self._module.showwarning = showwarning
return self.log
else:
return None
def __exit__(self, *exc_info):
if not self._entered:
raise RuntimeError("Cannot exit %r without entering first" % self)
self._module.filters = self._filters
self._module.showwarning = self._showwarning
self.log[:] = []
clean_warning_registry() # be safe and not propagate state + chaos
try:
from nose.tools import assert_less
except ImportError:
assert_less = _assert_less
try:
from nose.tools import assert_greater
except ImportError:
assert_greater = _assert_greater
def _assert_allclose(actual, desired, rtol=1e-7, atol=0,
err_msg='', verbose=True):
actual, desired = np.asanyarray(actual), np.asanyarray(desired)
if np.allclose(actual, desired, rtol=rtol, atol=atol):
return
msg = ('Array not equal to tolerance rtol=%g, atol=%g: '
'actual %s, desired %s') % (rtol, atol, actual, desired)
raise AssertionError(msg)
if hasattr(np.testing, 'assert_allclose'):
assert_allclose = np.testing.assert_allclose
else:
assert_allclose = _assert_allclose
def assert_raise_message(exception, message, function, *args, **kwargs):
"""Helper function to test error messages in exceptions"""
try:
function(*args, **kwargs)
raise AssertionError("Should have raised %r" % exception(message))
except exception as e:
error_message = str(e)
assert_in(message, error_message)
def fake_mldata(columns_dict, dataname, matfile, ordering=None):
"""Create a fake mldata data set.
Parameters
----------
columns_dict: contains data as
columns_dict[column_name] = array of data
dataname: name of data set
matfile: file-like object or file name
ordering: list of column_names, determines the ordering in the data set
Note: this function transposes all arrays, while fetch_mldata only
    transposes 'data'; take that into account in the tests.
"""
datasets = dict(columns_dict)
# transpose all variables
for name in datasets:
datasets[name] = datasets[name].T
if ordering is None:
ordering = sorted(list(datasets.keys()))
# NOTE: setting up this array is tricky, because of the way Matlab
# re-packages 1D arrays
datasets['mldata_descr_ordering'] = sp.empty((1, len(ordering)),
dtype='object')
for i, name in enumerate(ordering):
datasets['mldata_descr_ordering'][0, i] = name
scipy.io.savemat(matfile, datasets, oned_as='column')
class mock_mldata_urlopen(object):
def __init__(self, mock_datasets):
"""Object that mocks the urlopen function to fake requests to mldata.
`mock_datasets` is a dictionary of {dataset_name: data_dict}, or
        {dataset_name: (data_dict, ordering)}.
`data_dict` itself is a dictionary of {column_name: data_array},
and `ordering` is a list of column_names to determine the ordering
in the data set (see `fake_mldata` for details).
When requesting a dataset with a name that is in mock_datasets,
this object creates a fake dataset in a StringIO object and
returns it. Otherwise, it raises an HTTPError.
"""
self.mock_datasets = mock_datasets
def __call__(self, urlname):
dataset_name = urlname.split('/')[-1]
if dataset_name in self.mock_datasets:
resource_name = '_' + dataset_name
from io import BytesIO
matfile = BytesIO()
dataset = self.mock_datasets[dataset_name]
ordering = None
if isinstance(dataset, tuple):
dataset, ordering = dataset
fake_mldata(dataset, resource_name, matfile, ordering)
matfile.seek(0)
return matfile
else:
raise HTTPError(urlname, 404, dataset_name + " is not available",
[], None)
def install_mldata_mock(mock_datasets):
# Lazy import to avoid mutually recursive imports
from sklearn import datasets
datasets.mldata.urlopen = mock_mldata_urlopen(mock_datasets)
def uninstall_mldata_mock():
# Lazy import to avoid mutually recursive imports
from sklearn import datasets
datasets.mldata.urlopen = urlopen
# Meta estimators need another estimator to be instantiated.
META_ESTIMATORS = ["OneVsOneClassifier",
"OutputCodeClassifier", "OneVsRestClassifier", "RFE",
"RFECV", "BaseEnsemble"]
# estimators that there is no way to default-construct sensibly
OTHER = ["Pipeline", "FeatureUnion", "GridSearchCV", "RandomizedSearchCV"]
# some strange ones
DONT_TEST = ['SparseCoder', 'EllipticEnvelope', 'DictVectorizer',
'LabelBinarizer', 'LabelEncoder', 'MultiLabelBinarizer',
'TfidfTransformer', 'IsotonicRegression', 'OneHotEncoder',
'RandomTreesEmbedding', 'FeatureHasher', 'DummyClassifier',
'DummyRegressor', 'TruncatedSVD', 'PolynomialFeatures']
def all_estimators(include_meta_estimators=False, include_other=False,
type_filter=None, include_dont_test=False):
"""Get a list of all estimators from sklearn.
This function crawls the module and gets all classes that inherit
from BaseEstimator. Classes that are defined in test-modules are not
included.
By default meta_estimators such as GridSearchCV are also not included.
Parameters
----------
include_meta_estimators : boolean, default=False
Whether to include meta-estimators that can be constructed using
an estimator as their first argument. These are currently
BaseEnsemble, OneVsOneClassifier, OutputCodeClassifier,
OneVsRestClassifier, RFE, RFECV.
include_other : boolean, default=False
        Whether to include meta-estimators that are somehow special and can
not be default-constructed sensibly. These are currently
Pipeline, FeatureUnion and GridSearchCV
include_dont_test : boolean, default=False
Whether to include "special" label estimator or test processors.
type_filter : string or None, default=None
Which kind of estimators should be returned. If None, no filter is
applied and all estimators are returned. Possible values are
'classifier', 'regressor', 'cluster' and 'transformer' to get
estimators only of these specific types.
Returns
-------
estimators : list of tuples
List of (name, class), where ``name`` is the class name as string
        and ``class`` is the actual type of the class.
"""
def is_abstract(c):
if not(hasattr(c, '__abstractmethods__')):
return False
if not len(c.__abstractmethods__):
return False
return True
all_classes = []
# get parent folder
path = sklearn.__path__
for importer, modname, ispkg in pkgutil.walk_packages(
path=path, prefix='sklearn.', onerror=lambda x: None):
if ".tests." in modname:
continue
module = __import__(modname, fromlist="dummy")
classes = inspect.getmembers(module, inspect.isclass)
all_classes.extend(classes)
all_classes = set(all_classes)
estimators = [c for c in all_classes
if (issubclass(c[1], BaseEstimator)
and c[0] != 'BaseEstimator')]
# get rid of abstract base classes
estimators = [c for c in estimators if not is_abstract(c[1])]
if not include_dont_test:
estimators = [c for c in estimators if not c[0] in DONT_TEST]
if not include_other:
estimators = [c for c in estimators if not c[0] in OTHER]
# possibly get rid of meta estimators
if not include_meta_estimators:
estimators = [c for c in estimators if not c[0] in META_ESTIMATORS]
if type_filter == 'classifier':
estimators = [est for est in estimators
if issubclass(est[1], ClassifierMixin)]
elif type_filter == 'regressor':
estimators = [est for est in estimators
if issubclass(est[1], RegressorMixin)]
elif type_filter == 'transformer':
estimators = [est for est in estimators
if issubclass(est[1], TransformerMixin)]
elif type_filter == 'cluster':
estimators = [est for est in estimators
if issubclass(est[1], ClusterMixin)]
elif type_filter is not None:
raise ValueError("Parameter type_filter must be 'classifier', "
"'regressor', 'transformer', 'cluster' or None, got"
" %s." % repr(type_filter))
# We sort in order to have reproducible test failures
return sorted(estimators)
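# Illustrative sketch (added; not part of scikit-learn): iterate over one
# estimator type and default-construct whatever allows it, skipping
# estimators that require constructor arguments.
def _example_default_construct_classifiers():
    constructed = []
    for name, Estimator in all_estimators(type_filter='classifier'):
        try:
            est = Estimator()
        except TypeError:
            continue  # needs constructor arguments; out of scope here
        set_random_state(est)
        constructed.append((name, est))
    return constructed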
def set_random_state(estimator, random_state=0):
if "random_state" in estimator.get_params().keys():
estimator.set_params(random_state=random_state)
def if_matplotlib(func):
"""Test decorator that skips test if matplotlib not installed. """
@wraps(func)
def run_test(*args, **kwargs):
try:
import matplotlib
matplotlib.use('Agg', warn=False)
# this fails if no $DISPLAY specified
matplotlib.pylab.figure()
        except Exception:
raise SkipTest('Matplotlib not available.')
else:
return func(*args, **kwargs)
return run_test
def if_not_mac_os(versions=('10.7', '10.8', '10.9'),
message='Multi-process bug in Mac OS X >= 10.7 '
'(see issue #636)'):
"""Test decorator that skips test if OS is Mac OS X and its
major version is one of ``versions``.
"""
mac_version, _, _ = platform.mac_ver()
skip = '.'.join(mac_version.split('.')[:2]) in versions
def decorator(func):
if skip:
@wraps(func)
def func(*args, **kwargs):
raise SkipTest(message)
return func
return decorator
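# Hedged sketch (added): ``if_not_mac_os`` is a decorator factory; the test
# name below is a hypothetical placeholder.
def _example_mac_os_guard():
    @if_not_mac_os()
    def test_uses_multiprocessing():
        return True  # would exercise multiprocessing-heavy code here
    return test_uses_multiprocessing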
def clean_warning_registry():
"""Safe way to reset warnings """
warnings.resetwarnings()
reg = "__warningregistry__"
for mod_name, mod in list(sys.modules.items()):
if 'six.moves' in mod_name:
continue
if hasattr(mod, reg):
getattr(mod, reg).clear()
def check_skip_network():
if int(os.environ.get('SKLEARN_SKIP_NETWORK_TESTS', 0)):
raise SkipTest("Text tutorial requires large dataset download")
def check_skip_travis():
"""Skip test if being run on Travis."""
if os.environ.get('TRAVIS') == "true":
raise SkipTest("This test needs to be skipped on Travis")
with_network = with_setup(check_skip_network)
with_travis = with_setup(check_skip_travis)
| {
"content_hash": "c917c2c8cd809ff78ff32d0d7755cff3",
"timestamp": "",
"source": "github",
"line_count": 643,
"max_line_length": 78,
"avg_line_length": 33.50077760497667,
"alnum_prop": 0.6133884220788264,
"repo_name": "evgchz/scikit-learn",
"id": "ee0cc4b65666b502d8dd390867d28074ff576854",
"size": "21541",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "sklearn/utils/testing.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "18469430"
},
{
"name": "C++",
"bytes": "1808975"
},
{
"name": "JavaScript",
"bytes": "22298"
},
{
"name": "Makefile",
"bytes": "4901"
},
{
"name": "PowerShell",
"bytes": "13427"
},
{
"name": "Python",
"bytes": "5642425"
},
{
"name": "Shell",
"bytes": "8730"
}
],
"symlink_target": ""
} |
"""Provides functionality to interact with fans."""
from datetime import timedelta
import functools as ft
import logging
from typing import List, Optional
import voluptuous as vol
from homeassistant.const import (
SERVICE_TOGGLE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_ON,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
)
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.loader import bind_hass
from homeassistant.util.percentage import (
ordered_list_item_to_percentage,
percentage_to_ordered_list_item,
)
_LOGGER = logging.getLogger(__name__)
DOMAIN = "fan"
SCAN_INTERVAL = timedelta(seconds=30)
ENTITY_ID_FORMAT = DOMAIN + ".{}"
# Bitfield of features supported by the fan entity
SUPPORT_SET_SPEED = 1
SUPPORT_OSCILLATE = 2
SUPPORT_DIRECTION = 4
SUPPORT_PRESET_MODE = 8
SERVICE_SET_SPEED = "set_speed"
SERVICE_OSCILLATE = "oscillate"
SERVICE_SET_DIRECTION = "set_direction"
SERVICE_SET_PERCENTAGE = "set_percentage"
SERVICE_SET_PRESET_MODE = "set_preset_mode"
SPEED_OFF = "off"
SPEED_LOW = "low"
SPEED_MEDIUM = "medium"
SPEED_HIGH = "high"
DIRECTION_FORWARD = "forward"
DIRECTION_REVERSE = "reverse"
ATTR_SPEED = "speed"
ATTR_PERCENTAGE = "percentage"
ATTR_SPEED_LIST = "speed_list"
ATTR_OSCILLATING = "oscillating"
ATTR_DIRECTION = "direction"
ATTR_PRESET_MODE = "preset_mode"
ATTR_PRESET_MODES = "preset_modes"
# Invalid speeds do not conform to the entity model, but have crept
# into core integrations at some point so we are temporarily
# accommodating them in the transition to percentages.
_NOT_SPEED_OFF = "off"
_NOT_SPEED_ON = "on"
_NOT_SPEED_AUTO = "auto"
_NOT_SPEED_SMART = "smart"
_NOT_SPEED_INTERVAL = "interval"
_NOT_SPEED_IDLE = "idle"
_NOT_SPEED_FAVORITE = "favorite"
_NOT_SPEED_SLEEP = "sleep"
_NOT_SPEEDS_FILTER = {
_NOT_SPEED_OFF,
_NOT_SPEED_ON,
_NOT_SPEED_AUTO,
_NOT_SPEED_SMART,
_NOT_SPEED_INTERVAL,
_NOT_SPEED_IDLE,
_NOT_SPEED_SLEEP,
_NOT_SPEED_FAVORITE,
}
_FAN_NATIVE = "_fan_native"
OFF_SPEED_VALUES = [SPEED_OFF, None]
LEGACY_SPEED_LIST = [SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]
class NoValidSpeedsError(ValueError):
"""Exception class when there are no valid speeds."""
class NotValidSpeedError(ValueError):
"""Exception class when the speed in not in the speed list."""
class NotValidPresetModeError(ValueError):
"""Exception class when the preset_mode in not in the preset_modes list."""
@bind_hass
def is_on(hass, entity_id: str) -> bool:
"""Return if the fans are on based on the statemachine."""
state = hass.states.get(entity_id)
if ATTR_SPEED in state.attributes:
return state.attributes[ATTR_SPEED] not in OFF_SPEED_VALUES
return state.state == STATE_ON
async def async_setup(hass, config: dict):
"""Expose fan control via statemachine and services."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
await component.async_setup(config)
# After the transition to percentage and preset_modes concludes,
# switch this back to async_turn_on and remove async_turn_on_compat
component.async_register_entity_service(
SERVICE_TURN_ON,
{
vol.Optional(ATTR_SPEED): cv.string,
vol.Optional(ATTR_PERCENTAGE): vol.All(
vol.Coerce(int), vol.Range(min=0, max=100)
),
vol.Optional(ATTR_PRESET_MODE): cv.string,
},
"async_turn_on_compat",
)
component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off")
component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle")
# After the transition to percentage and preset_modes concludes,
# remove this service
component.async_register_entity_service(
SERVICE_SET_SPEED,
{vol.Required(ATTR_SPEED): cv.string},
"async_set_speed_deprecated",
[SUPPORT_SET_SPEED],
)
component.async_register_entity_service(
SERVICE_OSCILLATE,
{vol.Required(ATTR_OSCILLATING): cv.boolean},
"async_oscillate",
[SUPPORT_OSCILLATE],
)
component.async_register_entity_service(
SERVICE_SET_DIRECTION,
{vol.Optional(ATTR_DIRECTION): cv.string},
"async_set_direction",
[SUPPORT_DIRECTION],
)
component.async_register_entity_service(
SERVICE_SET_PERCENTAGE,
{
vol.Required(ATTR_PERCENTAGE): vol.All(
vol.Coerce(int), vol.Range(min=0, max=100)
)
},
"async_set_percentage",
[SUPPORT_SET_SPEED],
)
component.async_register_entity_service(
SERVICE_SET_PRESET_MODE,
{vol.Required(ATTR_PRESET_MODE): cv.string},
"async_set_preset_mode",
[SUPPORT_SET_SPEED, SUPPORT_PRESET_MODE],
)
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
def _fan_native(method):
"""Native fan method not overridden."""
setattr(method, _FAN_NATIVE, True)
return method
class FanEntity(ToggleEntity):
"""Representation of a fan."""
@_fan_native
def set_speed(self, speed: str) -> None:
"""Set the speed of the fan."""
raise NotImplementedError()
async def async_set_speed_deprecated(self, speed: str):
"""Set the speed of the fan."""
_LOGGER.warning(
"fan.set_speed is deprecated, use fan.set_percentage or fan.set_preset_mode instead."
)
await self.async_set_speed(speed)
@_fan_native
async def async_set_speed(self, speed: str):
"""Set the speed of the fan."""
if speed == SPEED_OFF:
await self.async_turn_off()
return
if speed in self.preset_modes:
if not hasattr(self.async_set_preset_mode, _FAN_NATIVE):
await self.async_set_preset_mode(speed)
return
if not hasattr(self.set_preset_mode, _FAN_NATIVE):
await self.hass.async_add_executor_job(self.set_preset_mode, speed)
return
else:
if not hasattr(self.async_set_percentage, _FAN_NATIVE):
await self.async_set_percentage(self.speed_to_percentage(speed))
return
if not hasattr(self.set_percentage, _FAN_NATIVE):
await self.hass.async_add_executor_job(
self.set_percentage, self.speed_to_percentage(speed)
)
return
await self.hass.async_add_executor_job(self.set_speed, speed)
@_fan_native
def set_percentage(self, percentage: int) -> None:
"""Set the speed of the fan, as a percentage."""
raise NotImplementedError()
@_fan_native
async def async_set_percentage(self, percentage: int) -> None:
"""Set the speed of the fan, as a percentage."""
if percentage == 0:
await self.async_turn_off()
elif not hasattr(self.set_percentage, _FAN_NATIVE):
await self.hass.async_add_executor_job(self.set_percentage, percentage)
else:
await self.async_set_speed(self.percentage_to_speed(percentage))
@_fan_native
def set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
self._valid_preset_mode_or_raise(preset_mode)
self.set_speed(preset_mode)
@_fan_native
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
if not hasattr(self.set_preset_mode, _FAN_NATIVE):
await self.hass.async_add_executor_job(self.set_preset_mode, preset_mode)
return
self._valid_preset_mode_or_raise(preset_mode)
await self.async_set_speed(preset_mode)
def _valid_preset_mode_or_raise(self, preset_mode):
"""Raise NotValidPresetModeError on invalid preset_mode."""
preset_modes = self.preset_modes
if preset_mode not in preset_modes:
raise NotValidPresetModeError(
f"The preset_mode {preset_mode} is not a valid preset_mode: {preset_modes}"
)
def set_direction(self, direction: str) -> None:
"""Set the direction of the fan."""
raise NotImplementedError()
async def async_set_direction(self, direction: str):
"""Set the direction of the fan."""
await self.hass.async_add_executor_job(self.set_direction, direction)
# pylint: disable=arguments-differ
def turn_on(
self,
speed: Optional[str] = None,
percentage: Optional[int] = None,
preset_mode: Optional[str] = None,
**kwargs,
) -> None:
"""Turn on the fan."""
raise NotImplementedError()
# pylint: disable=arguments-differ
async def async_turn_on_compat(
self,
speed: Optional[str] = None,
percentage: Optional[int] = None,
preset_mode: Optional[str] = None,
**kwargs,
) -> None:
"""Turn on the fan.
This _compat version wraps async_turn_on with
backwards and forward compatibility.
After the transition to percentage and preset_modes concludes, it
should be removed.
"""
if preset_mode is not None:
self._valid_preset_mode_or_raise(preset_mode)
speed = preset_mode
percentage = None
elif speed is not None:
_LOGGER.warning(
"Calling fan.turn_on with the speed argument is deprecated, use percentage or preset_mode instead."
)
if speed in self.preset_modes:
preset_mode = speed
percentage = None
else:
percentage = self.speed_to_percentage(speed)
elif percentage is not None:
speed = self.percentage_to_speed(percentage)
await self.async_turn_on(
speed=speed,
percentage=percentage,
preset_mode=preset_mode,
**kwargs,
)
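    # Hedged worked example of the compat mapping above (added): with the
    # legacy three-speed list, turn_on(speed="low") is forwarded with
    # percentage 33 (one third of the ordered list), while
    # turn_on(speed="auto") on a fan whose preset_modes contain "auto" is
    # forwarded as preset_mode="auto" with percentage=None.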
# pylint: disable=arguments-differ
async def async_turn_on(
self,
speed: Optional[str] = None,
percentage: Optional[int] = None,
preset_mode: Optional[str] = None,
**kwargs,
) -> None:
"""Turn on the fan."""
if speed == SPEED_OFF:
await self.async_turn_off()
else:
await self.hass.async_add_executor_job(
ft.partial(
self.turn_on,
speed=speed,
percentage=percentage,
preset_mode=preset_mode,
**kwargs,
)
)
def oscillate(self, oscillating: bool) -> None:
"""Oscillate the fan."""
raise NotImplementedError()
async def async_oscillate(self, oscillating: bool):
"""Oscillate the fan."""
await self.hass.async_add_executor_job(self.oscillate, oscillating)
@property
def is_on(self):
"""Return true if the entity is on."""
        return self.speed not in OFF_SPEED_VALUES
@property
def _implemented_percentage(self):
"""Return true if percentage has been implemented."""
return not hasattr(self.set_percentage, _FAN_NATIVE) or not hasattr(
self.async_set_percentage, _FAN_NATIVE
)
@property
def _implemented_preset_mode(self):
"""Return true if preset_mode has been implemented."""
return not hasattr(self.set_preset_mode, _FAN_NATIVE) or not hasattr(
self.async_set_preset_mode, _FAN_NATIVE
)
@property
def _implemented_speed(self):
"""Return true if speed has been implemented."""
return not hasattr(self.set_speed, _FAN_NATIVE) or not hasattr(
self.async_set_speed, _FAN_NATIVE
)
@property
def speed(self) -> Optional[str]:
"""Return the current speed."""
if self._implemented_preset_mode:
preset_mode = self.preset_mode
if preset_mode:
return preset_mode
if self._implemented_percentage:
percentage = self.percentage
if percentage is None:
return None
return self.percentage_to_speed(percentage)
return None
@property
def percentage(self) -> Optional[int]:
"""Return the current speed as a percentage."""
if not self._implemented_preset_mode:
if self.speed in self.preset_modes:
return None
if not self._implemented_percentage:
return self.speed_to_percentage(self.speed)
return 0
@property
def speed_list(self) -> list:
"""Get the list of available speeds."""
speeds = []
if self._implemented_percentage:
speeds += [SPEED_OFF, *LEGACY_SPEED_LIST]
if self._implemented_preset_mode:
speeds += self.preset_modes
return speeds
@property
def current_direction(self) -> Optional[str]:
"""Return the current direction of the fan."""
return None
@property
def oscillating(self):
"""Return whether or not the fan is currently oscillating."""
return None
@property
def capability_attributes(self):
"""Return capability attributes."""
attrs = {}
if self.supported_features & SUPPORT_SET_SPEED:
attrs[ATTR_SPEED_LIST] = self.speed_list
if (
self.supported_features & SUPPORT_SET_SPEED
or self.supported_features & SUPPORT_PRESET_MODE
):
attrs[ATTR_PRESET_MODES] = self.preset_modes
return attrs
@property
def _speed_list_without_preset_modes(self) -> list:
"""Return the speed list without preset modes.
This property provides forward and backwards
compatibility for conversion to percentage speeds.
"""
if not self._implemented_speed:
return LEGACY_SPEED_LIST
return speed_list_without_preset_modes(self.speed_list)
def speed_to_percentage(self, speed: str) -> int:
"""
Map a speed to a percentage.
Officially this should only have to deal with the 4 pre-defined speeds:
return {
SPEED_OFF: 0,
SPEED_LOW: 33,
SPEED_MEDIUM: 66,
SPEED_HIGH: 100,
}[speed]
Unfortunately lots of fans make up their own speeds. So the default
mapping is more dynamic.
"""
if speed in OFF_SPEED_VALUES:
return 0
speed_list = self._speed_list_without_preset_modes
if speed_list and speed not in speed_list:
raise NotValidSpeedError(f"The speed {speed} is not a valid speed.")
try:
return ordered_list_item_to_percentage(speed_list, speed)
except ValueError as ex:
raise NoValidSpeedsError(
f"The speed_list {speed_list} does not contain any valid speeds."
) from ex
def percentage_to_speed(self, percentage: int) -> str:
"""
Map a percentage onto self.speed_list.
Officially, this should only have to deal with 4 pre-defined speeds.
if value == 0:
return SPEED_OFF
elif value <= 33:
return SPEED_LOW
elif value <= 66:
return SPEED_MEDIUM
else:
return SPEED_HIGH
        Unfortunately there is currently a high degree of non-conformance.
        Until fans have been corrected, a more complicated and dynamic
        mapping is used.
"""
if percentage == 0:
return SPEED_OFF
speed_list = self._speed_list_without_preset_modes
try:
return percentage_to_ordered_list_item(speed_list, percentage)
except ValueError as ex:
raise NoValidSpeedsError(
f"The speed_list {speed_list} does not contain any valid speeds."
) from ex
@property
def state_attributes(self) -> dict:
"""Return optional state attributes."""
data = {}
supported_features = self.supported_features
if supported_features & SUPPORT_DIRECTION:
data[ATTR_DIRECTION] = self.current_direction
if supported_features & SUPPORT_OSCILLATE:
data[ATTR_OSCILLATING] = self.oscillating
if supported_features & SUPPORT_SET_SPEED:
data[ATTR_SPEED] = self.speed
data[ATTR_PERCENTAGE] = self.percentage
if (
supported_features & SUPPORT_PRESET_MODE
or supported_features & SUPPORT_SET_SPEED
):
data[ATTR_PRESET_MODE] = self.preset_mode
return data
@property
def supported_features(self) -> int:
"""Flag supported features."""
return 0
@property
def preset_mode(self) -> Optional[str]:
"""Return the current preset mode, e.g., auto, smart, interval, favorite.
Requires SUPPORT_SET_SPEED.
"""
speed = self.speed
if speed in self.preset_modes:
return speed
return None
@property
def preset_modes(self) -> Optional[List[str]]:
"""Return a list of available preset modes.
Requires SUPPORT_SET_SPEED.
"""
return preset_modes_from_speed_list(self.speed_list)
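# Hedged illustration (added; ``_DemoPercentageFan`` is hypothetical, not a
# Home Assistant class): overriding ``set_percentage`` removes the
# ``_FAN_NATIVE`` marker applied by ``@_fan_native``, which is exactly what
# ``_implemented_percentage`` checks for.
class _DemoPercentageFan(FanEntity):
    """Minimal sketch of a percentage-native fan."""

    def __init__(self):
        self._percentage = 0

    @property
    def supported_features(self) -> int:
        return SUPPORT_SET_SPEED

    @property
    def percentage(self) -> Optional[int]:
        return self._percentage

    def set_percentage(self, percentage: int) -> None:
        # No _FAN_NATIVE attribute here, so _implemented_percentage is True.
        self._percentage = percentage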
def speed_list_without_preset_modes(speed_list: List):
"""Filter out non-speeds from the speed list.
The goal is to get the speeds in a list from lowest to
highest by removing speeds that are not valid or out of order
so we can map them to percentages.
Examples:
input: ["off", "low", "low-medium", "medium", "medium-high", "high", "auto"]
output: ["low", "low-medium", "medium", "medium-high", "high"]
input: ["off", "auto", "low", "medium", "high"]
output: ["low", "medium", "high"]
input: ["off", "1", "2", "3", "4", "5", "6", "7", "smart"]
output: ["1", "2", "3", "4", "5", "6", "7"]
input: ["Auto", "Silent", "Favorite", "Idle", "Medium", "High", "Strong"]
output: ["Silent", "Medium", "High", "Strong"]
"""
return [speed for speed in speed_list if speed.lower() not in _NOT_SPEEDS_FILTER]
def preset_modes_from_speed_list(speed_list: List):
"""Filter out non-preset modes from the speed list.
The goal is to return only preset modes.
Examples:
input: ["off", "low", "low-medium", "medium", "medium-high", "high", "auto"]
output: ["auto"]
input: ["off", "auto", "low", "medium", "high"]
output: ["auto"]
input: ["off", "1", "2", "3", "4", "5", "6", "7", "smart"]
output: ["smart"]
input: ["Auto", "Silent", "Favorite", "Idle", "Medium", "High", "Strong"]
output: ["Auto", "Favorite", "Idle"]
"""
return [
speed
for speed in speed_list
if speed.lower() in _NOT_SPEEDS_FILTER and speed.lower() != SPEED_OFF
]
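# Hedged usage sketch (added): the two helpers above partition one legacy
# speed list; the speeds feed the percentage mapping and the remainder
# become preset modes.
def _example_partition_speed_list():
    legacy = ["off", "auto", "low", "medium", "high"]
    speeds = speed_list_without_preset_modes(legacy)  # ["low", "medium", "high"]
    presets = preset_modes_from_speed_list(legacy)  # ["auto"]
    return speeds, presets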
| {
"content_hash": "2aa324f96bf155718ba3f743e22deab6",
"timestamp": "",
"source": "github",
"line_count": 615,
"max_line_length": 115,
"avg_line_length": 31.53008130081301,
"alnum_prop": 0.6069826208034655,
"repo_name": "turbokongen/home-assistant",
"id": "8d6fcbea2c965805bf540df36b10c7553aa5625a",
"size": "19391",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/fan/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "30405146"
},
{
"name": "Shell",
"bytes": "4832"
}
],
"symlink_target": ""
} |
class CudaDriverError(Exception):
pass
class CudaRuntimeError(Exception):
pass
class CudaSupportError(ImportError):
pass
class NvvmError(Exception):
def __str__(self):
return '\n'.join(map(str, self.args))
class NvvmSupportError(ImportError):
pass
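# Hedged illustration (added; not part of numba): NvvmError joins its args
# with newlines, so multi-part NVVM diagnostics print one per line.
def _example_nvvm_error_rendering():
    err = NvvmError('compilation failed', 'line 3: unknown intrinsic')
    assert str(err) == 'compilation failed\nline 3: unknown intrinsic'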
| {
"content_hash": "ff4ba802b87ec93fa04897712e6c185d",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 45,
"avg_line_length": 14.947368421052632,
"alnum_prop": 0.6936619718309859,
"repo_name": "gmarkall/numba",
"id": "c82cebd545364a8bc95900d0ffb2dbee6c625f82",
"size": "284",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "numba/cuda/cudadrv/error.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "6761"
},
{
"name": "C",
"bytes": "625527"
},
{
"name": "C++",
"bytes": "85627"
},
{
"name": "Cuda",
"bytes": "214"
},
{
"name": "GDB",
"bytes": "101"
},
{
"name": "HTML",
"bytes": "3464"
},
{
"name": "Python",
"bytes": "8467098"
},
{
"name": "Shell",
"bytes": "8286"
}
],
"symlink_target": ""
} |
"""Simple textbox editing widget with Emacs-like keybindings."""
import curses, ascii
def rectangle(win, uly, ulx, lry, lrx):
"""Draw a rectangle with corners at the provided upper-left
and lower-right coordinates.
"""
win.vline(uly+1, ulx, curses.ACS_VLINE, lry - uly - 1)
win.hline(uly, ulx+1, curses.ACS_HLINE, lrx - ulx - 1)
win.hline(lry, ulx+1, curses.ACS_HLINE, lrx - ulx - 1)
win.vline(uly+1, lrx, curses.ACS_VLINE, lry - uly - 1)
win.addch(uly, ulx, curses.ACS_ULCORNER)
win.addch(uly, lrx, curses.ACS_URCORNER)
win.addch(lry, lrx, curses.ACS_LRCORNER)
win.addch(lry, ulx, curses.ACS_LLCORNER)
class Textbox:
"""Editing widget using the interior of a window object.
Supports the following Emacs-like key bindings:
Ctrl-A Go to left edge of window.
Ctrl-B Cursor left, wrapping to previous line if appropriate.
Ctrl-D Delete character under cursor.
Ctrl-E Go to right edge (stripspaces off) or end of line (stripspaces on).
Ctrl-F Cursor right, wrapping to next line when appropriate.
Ctrl-G Terminate, returning the window contents.
Ctrl-H Delete character backward.
Ctrl-J Terminate if the window is 1 line, otherwise insert newline.
Ctrl-K If line is blank, delete it, otherwise clear to end of line.
Ctrl-L Refresh screen.
Ctrl-N Cursor down; move down one line.
Ctrl-O Insert a blank line at cursor location.
Ctrl-P Cursor up; move up one line.
Move operations do nothing if the cursor is at an edge where the movement
is not possible. The following synonyms are supported where possible:
KEY_LEFT = Ctrl-B, KEY_RIGHT = Ctrl-F, KEY_UP = Ctrl-P, KEY_DOWN = Ctrl-N
    KEY_BACKSPACE = Ctrl-H
"""
def __init__(self, win):
self.win = win
(self.maxy, self.maxx) = win.getmaxyx()
self.maxy = self.maxy - 1
self.maxx = self.maxx - 1
self.stripspaces = 1
self.lastcmd = None
win.keypad(1)
def _end_of_line(self, y):
"Go to the location of the first blank on the given line."
last = self.maxx
while 1:
if ascii.ascii(self.win.inch(y, last)) != ascii.SP:
                last = min(self.maxx, last+1)  # don't step past the window edge
break
elif last == 0:
break
last = last - 1
return last
def do_command(self, ch):
"Process a single editing command."
(y, x) = self.win.getyx()
self.lastcmd = ch
if ascii.isprint(ch):
if y < self.maxy or x < self.maxx:
                # The try-except ignores the error we trigger from some curses
# versions by trying to write into the lowest-rightmost spot
# in the window.
try:
self.win.addch(ch)
except curses.error:
pass
elif ch == ascii.SOH: # ^a
self.win.move(y, 0)
        elif ch in (ascii.STX, curses.KEY_LEFT, ascii.BS, curses.KEY_BACKSPACE):
if x > 0:
self.win.move(y, x-1)
elif y == 0:
pass
elif self.stripspaces:
self.win.move(y-1, self._end_of_line(y-1))
else:
self.win.move(y-1, self.maxx)
if ch in (ascii.BS, curses.KEY_BACKSPACE):
self.win.delch()
elif ch == ascii.EOT: # ^d
self.win.delch()
elif ch == ascii.ENQ: # ^e
if self.stripspaces:
self.win.move(y, self._end_of_line(y))
else:
self.win.move(y, self.maxx)
elif ch in (ascii.ACK, curses.KEY_RIGHT): # ^f
if x < self.maxx:
self.win.move(y, x+1)
elif y == self.maxy:
pass
else:
self.win.move(y+1, 0)
elif ch == ascii.BEL: # ^g
return 0
elif ch == ascii.NL: # ^j
if self.maxy == 0:
return 0
elif y < self.maxy:
self.win.move(y+1, 0)
elif ch == ascii.VT: # ^k
if x == 0 and self._end_of_line(y) == 0:
self.win.deleteln()
else:
# first undo the effect of self._end_of_line
self.win.move(y, x)
self.win.clrtoeol()
elif ch == ascii.FF: # ^l
self.win.refresh()
elif ch in (ascii.SO, curses.KEY_DOWN): # ^n
if y < self.maxy:
self.win.move(y+1, x)
if x > self._end_of_line(y+1):
self.win.move(y+1, self._end_of_line(y+1))
elif ch == ascii.SI: # ^o
self.win.insertln()
elif ch in (ascii.DLE, curses.KEY_UP): # ^p
if y > 0:
self.win.move(y-1, x)
if x > self._end_of_line(y-1):
self.win.move(y-1, self._end_of_line(y-1))
return 1
def gather(self):
"Collect and return the contents of the window."
result = ""
for y in range(self.maxy+1):
self.win.move(y, 0)
stop = self._end_of_line(y)
if stop == 0 and self.stripspaces:
continue
for x in range(self.maxx+1):
if self.stripspaces and x == stop:
break
result = result + chr(ascii.ascii(self.win.inch(y, x)))
if self.maxy > 0:
result = result + "\n"
return result
def edit(self, validate=None):
"Edit in the widget window and collect the results."
while 1:
ch = self.win.getch()
if validate:
ch = validate(ch)
if not ch:
continue
if not self.do_command(ch):
break
self.win.refresh()
return self.gather()
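def _example_validator(ch):
    # Hedged sketch (added; not part of the stdlib module): a validate
    # callback passed to Textbox.edit() can remap keys before do_command
    # sees them, e.g. making Enter terminate a one-line box:
    #   contents = Textbox(win).edit(_example_validator)
    if ch in (ascii.NL, ascii.CR):
        return ascii.BEL  # Ctrl-G terminates and returns the contents
    return ch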
if __name__ == '__main__':
def test_editbox(stdscr):
ncols, nlines = 9, 4
uly, ulx = 15, 20
stdscr.addstr(uly-2, ulx, "Use Ctrl-G to end editing.")
win = curses.newwin(nlines, ncols, uly, ulx)
rectangle(stdscr, uly-1, ulx-1, uly + nlines, ulx + ncols)
stdscr.refresh()
return Textbox(win).edit()
str = curses.wrapper(test_editbox)
print 'Contents of text box:', repr(str)
| {
"content_hash": "4e1db8ad18018077697c103dd26c50ec",
"timestamp": "",
"source": "github",
"line_count": 173,
"max_line_length": 83,
"avg_line_length": 38.225433526011564,
"alnum_prop": 0.5024950854377741,
"repo_name": "MalloyPower/parsing-python",
"id": "28d78dd5cd382cae86f8b0b94b6820555a800681",
"size": "6613",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "front-end/testsuite-python-lib/Python-2.4/Lib/curses/textpad.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1963"
},
{
"name": "Lex",
"bytes": "238458"
},
{
"name": "Makefile",
"bytes": "4513"
},
{
"name": "OCaml",
"bytes": "412695"
},
{
"name": "Python",
"bytes": "17319"
},
{
"name": "Rascal",
"bytes": "523063"
},
{
"name": "Yacc",
"bytes": "429659"
}
],
"symlink_target": ""
} |
from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from ..._serialization import Serializer
from .._vendor import _convert_request, _format_url_section
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_data_policy_manifests_get_by_policy_mode_request(policy_mode: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-09-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/providers/Microsoft.Authorization/dataPolicyManifests/{policyMode}")
path_format_arguments = {
"policyMode": _SERIALIZER.url("policy_mode", policy_mode, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_data_policy_manifests_list_request(*, filter: Optional[str] = None, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-09-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/providers/Microsoft.Authorization/dataPolicyManifests")
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str", skip_quote=True)
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_assignments_delete_request(scope: str, policy_assignment_name: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}"
)
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, "str", skip_quote=True),
"policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_assignments_create_request(scope: str, policy_assignment_name: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}"
)
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, "str", skip_quote=True),
"policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_assignments_get_request(scope: str, policy_assignment_name: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}"
)
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, "str", skip_quote=True),
"policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_assignments_update_request(scope: str, policy_assignment_name: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}"
)
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, "str", skip_quote=True),
"policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_assignments_list_for_resource_group_request(
resource_group_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
top: Optional[int] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/policyAssignments",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str", skip_quote=True)
if top is not None:
_params["$top"] = _SERIALIZER.query("top", top, "int", maximum=1000, minimum=1)
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_assignments_list_for_resource_request(
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
top: Optional[int] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/policyAssignments",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
"parentResourcePath": _SERIALIZER.url("parent_resource_path", parent_resource_path, "str", skip_quote=True),
"resourceType": _SERIALIZER.url("resource_type", resource_type, "str", skip_quote=True),
"resourceName": _SERIALIZER.url("resource_name", resource_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str", skip_quote=True)
if top is not None:
_params["$top"] = _SERIALIZER.query("top", top, "int", maximum=1000, minimum=1)
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_assignments_list_for_management_group_request(
management_group_id: str, *, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policyAssignments",
) # pylint: disable=line-too-long
path_format_arguments = {
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str", skip_quote=True)
if top is not None:
_params["$top"] = _SERIALIZER.query("top", top, "int", maximum=1000, minimum=1)
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_assignments_list_request(
subscription_id: str, *, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyAssignments"
)
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str", skip_quote=True)
if top is not None:
_params["$top"] = _SERIALIZER.query("top", top, "int", maximum=1000, minimum=1)
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_assignments_delete_by_id_request(policy_assignment_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/{policyAssignmentId}")
path_format_arguments = {
"policyAssignmentId": _SERIALIZER.url("policy_assignment_id", policy_assignment_id, "str", skip_quote=True),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_assignments_create_by_id_request(policy_assignment_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/{policyAssignmentId}")
path_format_arguments = {
"policyAssignmentId": _SERIALIZER.url("policy_assignment_id", policy_assignment_id, "str", skip_quote=True),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_assignments_get_by_id_request(policy_assignment_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/{policyAssignmentId}")
path_format_arguments = {
"policyAssignmentId": _SERIALIZER.url("policy_assignment_id", policy_assignment_id, "str", skip_quote=True),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_assignments_update_by_id_request(policy_assignment_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/{policyAssignmentId}")
path_format_arguments = {
"policyAssignmentId": _SERIALIZER.url("policy_assignment_id", policy_assignment_id, "str", skip_quote=True),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_definitions_create_or_update_request(
policy_definition_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyDefinitions/{policyDefinitionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"policyDefinitionName": _SERIALIZER.url("policy_definition_name", policy_definition_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_definitions_delete_request(
policy_definition_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyDefinitions/{policyDefinitionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"policyDefinitionName": _SERIALIZER.url("policy_definition_name", policy_definition_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_definitions_get_request(
policy_definition_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyDefinitions/{policyDefinitionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"policyDefinitionName": _SERIALIZER.url("policy_definition_name", policy_definition_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_definitions_get_built_in_request(policy_definition_name: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/providers/Microsoft.Authorization/policyDefinitions/{policyDefinitionName}")
path_format_arguments = {
"policyDefinitionName": _SERIALIZER.url("policy_definition_name", policy_definition_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_definitions_create_or_update_at_management_group_request(
policy_definition_name: str, management_group_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policyDefinitions/{policyDefinitionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"policyDefinitionName": _SERIALIZER.url("policy_definition_name", policy_definition_name, "str"),
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_definitions_delete_at_management_group_request(
policy_definition_name: str, management_group_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policyDefinitions/{policyDefinitionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"policyDefinitionName": _SERIALIZER.url("policy_definition_name", policy_definition_name, "str"),
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_definitions_get_at_management_group_request(
policy_definition_name: str, management_group_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policyDefinitions/{policyDefinitionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"policyDefinitionName": _SERIALIZER.url("policy_definition_name", policy_definition_name, "str"),
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_definitions_list_request(
subscription_id: str, *, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyDefinitions"
)
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str", skip_quote=True)
if top is not None:
_params["$top"] = _SERIALIZER.query("top", top, "int", maximum=1000, minimum=1)
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_definitions_list_built_in_request(
*, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/providers/Microsoft.Authorization/policyDefinitions")
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str", skip_quote=True)
if top is not None:
_params["$top"] = _SERIALIZER.query("top", top, "int", maximum=1000, minimum=1)
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_definitions_list_by_management_group_request(
management_group_id: str, *, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policyDefinitions",
) # pylint: disable=line-too-long
path_format_arguments = {
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str", skip_quote=True)
if top is not None:
_params["$top"] = _SERIALIZER.query("top", top, "int", maximum=1000, minimum=1)
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_set_definitions_create_or_update_request(
policy_set_definition_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policySetDefinitions/{policySetDefinitionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"policySetDefinitionName": _SERIALIZER.url("policy_set_definition_name", policy_set_definition_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_set_definitions_delete_request(
policy_set_definition_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policySetDefinitions/{policySetDefinitionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"policySetDefinitionName": _SERIALIZER.url("policy_set_definition_name", policy_set_definition_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_set_definitions_get_request(
policy_set_definition_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policySetDefinitions/{policySetDefinitionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"policySetDefinitionName": _SERIALIZER.url("policy_set_definition_name", policy_set_definition_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_set_definitions_get_built_in_request(policy_set_definition_name: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/providers/Microsoft.Authorization/policySetDefinitions/{policySetDefinitionName}"
)
path_format_arguments = {
"policySetDefinitionName": _SERIALIZER.url("policy_set_definition_name", policy_set_definition_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_set_definitions_list_request(
subscription_id: str, *, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policySetDefinitions"
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str", skip_quote=True)
if top is not None:
_params["$top"] = _SERIALIZER.query("top", top, "int", maximum=1000, minimum=1)
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_set_definitions_list_built_in_request(
*, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/providers/Microsoft.Authorization/policySetDefinitions")
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str", skip_quote=True)
if top is not None:
_params["$top"] = _SERIALIZER.query("top", top, "int", maximum=1000, minimum=1)
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_set_definitions_create_or_update_at_management_group_request(
policy_set_definition_name: str, management_group_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policySetDefinitions/{policySetDefinitionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"policySetDefinitionName": _SERIALIZER.url("policy_set_definition_name", policy_set_definition_name, "str"),
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_set_definitions_delete_at_management_group_request(
policy_set_definition_name: str, management_group_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policySetDefinitions/{policySetDefinitionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"policySetDefinitionName": _SERIALIZER.url("policy_set_definition_name", policy_set_definition_name, "str"),
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_set_definitions_get_at_management_group_request(
policy_set_definition_name: str, management_group_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policySetDefinitions/{policySetDefinitionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"policySetDefinitionName": _SERIALIZER.url("policy_set_definition_name", policy_set_definition_name, "str"),
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_set_definitions_list_by_management_group_request(
management_group_id: str, *, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policySetDefinitions",
) # pylint: disable=line-too-long
path_format_arguments = {
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str", skip_quote=True)
if top is not None:
_params["$top"] = _SERIALIZER.query("top", top, "int", maximum=1000, minimum=1)
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_exemptions_delete_request(scope: str, policy_exemption_name: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-07-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/{scope}/providers/Microsoft.Authorization/policyExemptions/{policyExemptionName}"
)
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, "str", skip_quote=True),
"policyExemptionName": _SERIALIZER.url("policy_exemption_name", policy_exemption_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_exemptions_create_or_update_request(
scope: str, policy_exemption_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-07-01-preview")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/{scope}/providers/Microsoft.Authorization/policyExemptions/{policyExemptionName}"
)
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, "str", skip_quote=True),
"policyExemptionName": _SERIALIZER.url("policy_exemption_name", policy_exemption_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_exemptions_get_request(scope: str, policy_exemption_name: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-07-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/{scope}/providers/Microsoft.Authorization/policyExemptions/{policyExemptionName}"
)
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, "str", skip_quote=True),
"policyExemptionName": _SERIALIZER.url("policy_exemption_name", policy_exemption_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_exemptions_list_request(
subscription_id: str, *, filter: Optional[str] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-07-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyExemptions"
)
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str", skip_quote=True)
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_exemptions_list_for_resource_group_request(
resource_group_name: str, subscription_id: str, *, filter: Optional[str] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-07-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/policyExemptions",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str", skip_quote=True)
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_exemptions_list_for_resource_request(
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-07-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/policyExemptions",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
"parentResourcePath": _SERIALIZER.url("parent_resource_path", parent_resource_path, "str", skip_quote=True),
"resourceType": _SERIALIZER.url("resource_type", resource_type, "str", skip_quote=True),
"resourceName": _SERIALIZER.url("resource_name", resource_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str", skip_quote=True)
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_policy_exemptions_list_for_management_group_request(
management_group_id: str, *, filter: Optional[str] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-07-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policyExemptions",
) # pylint: disable=line-too-long
path_format_arguments = {
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str", skip_quote=True)
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_variables_delete_request(variable_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/variables/{variableName}"
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"variableName": _SERIALIZER.url("variable_name", variable_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_variables_create_or_update_request(variable_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/variables/{variableName}"
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"variableName": _SERIALIZER.url("variable_name", variable_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_variables_get_request(variable_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/variables/{variableName}"
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"variableName": _SERIALIZER.url("variable_name", variable_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_variables_delete_at_management_group_request(
management_group_id: str, variable_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/variables/{variableName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
"variableName": _SERIALIZER.url("variable_name", variable_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_variables_create_or_update_at_management_group_request(
management_group_id: str, variable_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/variables/{variableName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
"variableName": _SERIALIZER.url("variable_name", variable_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_variables_get_at_management_group_request(
management_group_id: str, variable_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/variables/{variableName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
"variableName": _SERIALIZER.url("variable_name", variable_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_variables_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/variables")
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_variables_list_for_management_group_request(management_group_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/variables",
) # pylint: disable=line-too-long
path_format_arguments = {
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_variable_values_delete_request(
variable_name: str, variable_value_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/variables/{variableName}/values/{variableValueName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"variableName": _SERIALIZER.url("variable_name", variable_name, "str"),
"variableValueName": _SERIALIZER.url("variable_value_name", variable_value_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_variable_values_create_or_update_request(
variable_name: str, variable_value_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/variables/{variableName}/values/{variableValueName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"variableName": _SERIALIZER.url("variable_name", variable_name, "str"),
"variableValueName": _SERIALIZER.url("variable_value_name", variable_value_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_variable_values_get_request(
variable_name: str, variable_value_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/variables/{variableName}/values/{variableValueName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"variableName": _SERIALIZER.url("variable_name", variable_name, "str"),
"variableValueName": _SERIALIZER.url("variable_value_name", variable_value_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_variable_values_list_request(variable_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/variables/{variableName}/values",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"variableName": _SERIALIZER.url("variable_name", variable_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_variable_values_list_for_management_group_request(
management_group_id: str, variable_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/variables/{variableName}/values",
) # pylint: disable=line-too-long
path_format_arguments = {
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
"variableName": _SERIALIZER.url("variable_name", variable_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_variable_values_delete_at_management_group_request(
management_group_id: str, variable_name: str, variable_value_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/variables/{variableName}/values/{variableValueName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
"variableName": _SERIALIZER.url("variable_name", variable_name, "str"),
"variableValueName": _SERIALIZER.url("variable_value_name", variable_value_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_variable_values_create_or_update_at_management_group_request(
management_group_id: str, variable_name: str, variable_value_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/variables/{variableName}/values/{variableValueName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
"variableName": _SERIALIZER.url("variable_name", variable_name, "str"),
"variableValueName": _SERIALIZER.url("variable_value_name", variable_value_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_variable_values_get_at_management_group_request(
management_group_id: str, variable_name: str, variable_value_name: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/variables/{variableName}/values/{variableValueName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"managementGroupId": _SERIALIZER.url("management_group_id", management_group_id, "str"),
"variableName": _SERIALIZER.url("variable_name", variable_name, "str"),
"variableValueName": _SERIALIZER.url("variable_value_name", variable_value_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
class DataPolicyManifestsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.resource.policy.v2021_06_01.PolicyClient`'s
:attr:`data_policy_manifests` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def get_by_policy_mode(self, policy_mode: str, **kwargs: Any) -> _models.DataPolicyManifest:
"""Retrieves a data policy manifest.
This operation retrieves the data policy manifest with the given policy mode.
:param policy_mode: The policy mode of the data policy manifest to get. Required.
:type policy_mode: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DataPolicyManifest or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.DataPolicyManifest
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-09-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.DataPolicyManifest]
request = build_data_policy_manifests_get_by_policy_mode_request(
policy_mode=policy_mode,
api_version=api_version,
template_url=self.get_by_policy_mode.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("DataPolicyManifest", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_by_policy_mode.metadata = {"url": "/providers/Microsoft.Authorization/dataPolicyManifests/{policyMode}"} # type: ignore
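    # Hedged end-to-end sketch (assumes azure-identity is installed; the
    # policy mode value is illustrative):
    #
    #   from azure.identity import DefaultAzureCredential
    #   from azure.mgmt.resource.policy.v2021_06_01 import PolicyClient
    #
    #   client = PolicyClient(DefaultAzureCredential(), "<subscription-id>")
    #   manifest = client.data_policy_manifests.get_by_policy_mode("Microsoft.Kubernetes.Data")
    #   print(manifest.policy_mode)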
@distributed_trace
def list(self, filter: Optional[str] = None, **kwargs: Any) -> Iterable["_models.DataPolicyManifest"]:
"""Retrieves data policy manifests.
This operation retrieves a list of all the data policy manifests that match the optional given
$filter. Valid values for $filter are: "$filter=namespace eq '{0}'". If $filter is not
provided, the unfiltered list includes all data policy manifests for data resource types. If
$filter=namespace is provided, the returned list only includes all data policy manifests that
have a namespace matching the provided value.
:param filter: The filter to apply on the operation. Valid values for $filter are: "namespace
eq '{value}'". If $filter is not provided, no filtering is performed. If $filter=namespace eq
'{value}' is provided, the returned list only includes all data policy manifests that have a
namespace matching the provided value. Default value is None.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DataPolicyManifest or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.DataPolicyManifest]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-09-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.DataPolicyManifestListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_data_policy_manifests_list_request(
filter=filter,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("DataPolicyManifestListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {"url": "/providers/Microsoft.Authorization/dataPolicyManifests"} # type: ignore
class PolicyAssignmentsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.resource.policy.v2021_06_01.PolicyClient`'s
:attr:`policy_assignments` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def delete(self, scope: str, policy_assignment_name: str, **kwargs: Any) -> Optional[_models.PolicyAssignment]:
"""Deletes a policy assignment.
This operation deletes a policy assignment, given its name and the scope it was created in. The
scope of a policy assignment is the part of its ID preceding
'/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'.
:param scope: The scope of the policy assignment. Valid scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}'), or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
Required.
:type scope: str
:param policy_assignment_name: The name of the policy assignment to delete. Required.
:type policy_assignment_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment or None or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.PolicyAssignment]]
request = build_policy_assignments_delete_request(
scope=scope,
policy_assignment_name=policy_assignment_name,
api_version=api_version,
template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("PolicyAssignment", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete.metadata = {"url": "/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}"} # type: ignore
@overload
def create(
self,
scope: str,
policy_assignment_name: str,
parameters: _models.PolicyAssignment,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PolicyAssignment:
"""Creates or updates a policy assignment.
This operation creates or updates a policy assignment with the given scope and name. Policy
assignments apply to all resources contained within their scope. For example, when you assign a
policy at resource group scope, that policy applies to all resources in the group.
:param scope: The scope of the policy assignment. Valid scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}'), or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
Required.
:type scope: str
:param policy_assignment_name: The name of the policy assignment. Required.
:type policy_assignment_name: str
:param parameters: Parameters for the policy assignment. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def create(
self,
scope: str,
policy_assignment_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PolicyAssignment:
"""Creates or updates a policy assignment.
This operation creates or updates a policy assignment with the given scope and name. Policy
assignments apply to all resources contained within their scope. For example, when you assign a
policy at resource group scope, that policy applies to all resources in the group.
:param scope: The scope of the policy assignment. Valid scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}'), or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
Required.
:type scope: str
:param policy_assignment_name: The name of the policy assignment. Required.
:type policy_assignment_name: str
:param parameters: Parameters for the policy assignment. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def create(
self, scope: str, policy_assignment_name: str, parameters: Union[_models.PolicyAssignment, IO], **kwargs: Any
) -> _models.PolicyAssignment:
"""Creates or updates a policy assignment.
This operation creates or updates a policy assignment with the given scope and name. Policy
assignments apply to all resources contained within their scope. For example, when you assign a
policy at resource group scope, that policy applies to all resources in the group.
:param scope: The scope of the policy assignment. Valid scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}'), or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
Required.
:type scope: str
:param policy_assignment_name: The name of the policy assignment. Required.
:type policy_assignment_name: str
        :param parameters: Parameters for the policy assignment. Is either a PolicyAssignment model type or an IO type.
Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyAssignment]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PolicyAssignment")
request = build_policy_assignments_create_request(
scope=scope,
policy_assignment_name=policy_assignment_name,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PolicyAssignment", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create.metadata = {"url": "/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}"} # type: ignore
@distributed_trace
def get(self, scope: str, policy_assignment_name: str, **kwargs: Any) -> _models.PolicyAssignment:
"""Retrieves a policy assignment.
This operation retrieves a single policy assignment, given its name and the scope it was
created at.
:param scope: The scope of the policy assignment. Valid scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}'), or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
Required.
:type scope: str
:param policy_assignment_name: The name of the policy assignment to get. Required.
:type policy_assignment_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment
:raises ~azure.core.exceptions.HttpResponseError:
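
Example (a minimal sketch; assumes an authenticated ``PolicyClient`` named
``client`` as in the ``create`` example, with illustrative values):

.. code-block:: python

    assignment = client.policy_assignments.get(
        scope="/subscriptions/<subscription-id>",
        policy_assignment_name="audit-vms",
    )
    print(assignment.display_name)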
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyAssignment]
request = build_policy_assignments_get_request(
scope=scope,
policy_assignment_name=policy_assignment_name,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PolicyAssignment", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}"} # type: ignore
@overload
def update(
self,
scope: str,
policy_assignment_name: str,
parameters: _models.PolicyAssignmentUpdate,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PolicyAssignment:
"""Updates a policy assignment.
This operation updates a policy assignment with the given scope and name. Policy assignments
apply to all resources contained within their scope. For example, when you assign a policy at
resource group scope, that policy applies to all resources in the group.
:param scope: The scope of the policy assignment. Valid scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}', or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
Required.
:type scope: str
:param policy_assignment_name: The name of the policy assignment. Required.
:type policy_assignment_name: str
:param parameters: Parameters for policy assignment patch request. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignmentUpdate
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def update(
self,
scope: str,
policy_assignment_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PolicyAssignment:
"""Updates a policy assignment.
This operation updates a policy assignment with the given scope and name. Policy assignments
apply to all resources contained within their scope. For example, when you assign a policy at
resource group scope, that policy applies to all resources in the group.
:param scope: The scope of the policy assignment. Valid scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}', or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
Required.
:type scope: str
:param policy_assignment_name: The name of the policy assignment. Required.
:type policy_assignment_name: str
:param parameters: Parameters for policy assignment patch request. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def update(
self,
scope: str,
policy_assignment_name: str,
parameters: Union[_models.PolicyAssignmentUpdate, IO],
**kwargs: Any
) -> _models.PolicyAssignment:
"""Updates a policy assignment.
This operation updates a policy assignment with the given scope and name. Policy assignments
apply to all resources contained within their scope. For example, when you assign a policy at
resource group scope, that policy applies to all resources in the group.
:param scope: The scope of the policy assignment. Valid scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}', or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
Required.
:type scope: str
:param policy_assignment_name: The name of the policy assignment. Required.
:type policy_assignment_name: str
:param parameters: Parameters for policy assignment patch request. Is either a model type
or an IO type. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignmentUpdate or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment
:raises ~azure.core.exceptions.HttpResponseError:
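
Example (a minimal sketch; assumes an authenticated ``PolicyClient`` named
``client``; in this API version the patch model carries fields such as
``location`` and ``identity``, and the values here are illustrative):

.. code-block:: python

    from azure.mgmt.resource.policy.v2021_06_01.models import PolicyAssignmentUpdate

    updated = client.policy_assignments.update(
        scope="/subscriptions/<subscription-id>",
        policy_assignment_name="audit-vms",
        parameters=PolicyAssignmentUpdate(location="westus2"),
    )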
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyAssignment]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PolicyAssignmentUpdate")
request = build_policy_assignments_update_request(
scope=scope,
policy_assignment_name=policy_assignment_name,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.update.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PolicyAssignment", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {"url": "/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}"} # type: ignore
@distributed_trace
def list_for_resource_group(
self, resource_group_name: str, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> Iterable["_models.PolicyAssignment"]:
"""Retrieves all policy assignments that apply to a resource group.
This operation retrieves the list of all policy assignments associated with the given resource
group in the given subscription that match the optionally provided $filter. Valid values for $filter
are: 'atScope()', 'atExactScope()' or 'policyDefinitionId eq '{value}''. If $filter is not
provided, the unfiltered list includes all policy assignments associated with the resource
group, including those that apply directly or apply from containing scopes, as well as any
applied to resources contained within the resource group. If $filter=atScope() is provided, the
returned list includes all policy assignments that apply to the resource group, which is
everything in the unfiltered list except those applied to resources contained within the
resource group. If $filter=atExactScope() is provided, the returned list only includes all
policy assignments that are assigned at the resource group. If $filter=policyDefinitionId eq '{value}' is
provided, the returned list includes all policy assignments of the policy definition whose id
is {value} that apply to the resource group.
:param resource_group_name: The name of the resource group that contains policy assignments.
Required.
:type resource_group_name: str
:param filter: The filter to apply on the operation. Valid values for $filter are: 'atScope()',
'atExactScope()' or 'policyDefinitionId eq '{value}''. If $filter is not provided, no filtering
is performed. If $filter=atScope() is provided, the returned list only includes all policy
assignments that apply to the scope, which is everything in the unfiltered list except those
applied to sub scopes contained within the given scope. If $filter=atExactScope() is provided,
the returned list only includes all policy assignments that are assigned at the given scope. If
$filter=policyDefinitionId eq '{value}' is provided, the returned list includes all policy
assignments of the policy definition whose id is {value}. Default value is None.
:type filter: str
:param top: Maximum number of records to return. When the $top filter is not provided, it will
return 500 records. Default value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either PolicyAssignment or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment]
:raises ~azure.core.exceptions.HttpResponseError:
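
Example (a minimal sketch; assumes an authenticated ``PolicyClient`` named
``client`` and an existing resource group, names illustrative):

.. code-block:: python

    for assignment in client.policy_assignments.list_for_resource_group(
        resource_group_name="my-rg", filter="atScope()"
    ):
        print(assignment.name)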
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyAssignmentListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_policy_assignments_list_for_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
filter=filter,
top=top,
api_version=api_version,
template_url=self.list_for_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_for_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/policyAssignments"} # type: ignore
@distributed_trace
def list_for_resource(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
filter: Optional[str] = None,
top: Optional[int] = None,
**kwargs: Any
) -> Iterable["_models.PolicyAssignment"]:
"""Retrieves all policy assignments that apply to a resource.
This operation retrieves the list of all policy assignments associated with the specified
resource in the given resource group and subscription that match the optionally provided $filter.
Valid values for $filter are: 'atScope()', 'atExactScope()' or 'policyDefinitionId eq
'{value}''. If $filter is not provided, the unfiltered list includes all policy assignments
associated with the resource, including those that apply directly or from all containing
scopes, as well as any applied to resources contained within the resource. If $filter=atScope()
is provided, the returned list includes all policy assignments that apply to the resource,
which is everything in the unfiltered list except those applied to resources contained within
the resource. If $filter=atExactScope() is provided, the returned list only includes all policy
assignments that are assigned at the resource level. If $filter=policyDefinitionId eq '{value}' is provided,
the returned list includes all policy assignments of the policy definition whose id is {value}
that apply to the resource. Three parameters plus the resource name are used to identify a
specific resource. If the resource is not part of a parent resource (the more common case), the
parent resource path should not be provided (or provided as ''). For example, a web app could be
specified as ({resourceProviderNamespace} == 'Microsoft.Web', {parentResourcePath} == '',
{resourceType} == 'sites', {resourceName} == 'MyWebApp'). If the resource is part of a parent
resource, then all parameters should be provided. For example, a virtual machine DNS name could
be specified as ({resourceProviderNamespace} == 'Microsoft.Compute', {parentResourcePath} ==
'virtualMachines/MyVirtualMachine', {resourceType} == 'domainNames', {resourceName} ==
'MyComputerName'). A convenient alternative to providing the namespace and type name separately
is to provide both in the {resourceType} parameter, format: ({resourceProviderNamespace} == '',
{parentResourcePath} == '', {resourceType} == 'Microsoft.Web/sites', {resourceName} ==
'MyWebApp').
:param resource_group_name: The name of the resource group containing the resource. Required.
:type resource_group_name: str
:param resource_provider_namespace: The namespace of the resource provider. For example, the
namespace of a virtual machine is Microsoft.Compute (from Microsoft.Compute/virtualMachines).
Required.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource path. Use empty string if there is none.
Required.
:type parent_resource_path: str
:param resource_type: The resource type name. For example the type name of a web app is 'sites'
(from Microsoft.Web/sites). Required.
:type resource_type: str
:param resource_name: The name of the resource. Required.
:type resource_name: str
:param filter: The filter to apply on the operation. Valid values for $filter are: 'atScope()',
'atExactScope()' or 'policyDefinitionId eq '{value}''. If $filter is not provided, no filtering
is performed. If $filter=atScope() is provided, the returned list only includes all policy
assignments that apply to the scope, which is everything in the unfiltered list except those
applied to sub scopes contained within the given scope. If $filter=atExactScope() is provided,
the returned list only includes all policy assignments that are assigned at the given scope. If
$filter=policyDefinitionId eq '{value}' is provided, the returned list includes all policy
assignments of the policy definition whose id is {value}. Default value is None.
:type filter: str
:param top: Maximum number of records to return. When the $top filter is not provided, it will
return 500 records. Default value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either PolicyAssignment or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment]
:raises ~azure.core.exceptions.HttpResponseError:
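
Example (a minimal sketch; assumes an authenticated ``PolicyClient`` named
``client``; the web-app identifiers mirror the docstring's example and are
illustrative):

.. code-block:: python

    assignments = client.policy_assignments.list_for_resource(
        resource_group_name="my-rg",
        resource_provider_namespace="Microsoft.Web",
        parent_resource_path="",
        resource_type="sites",
        resource_name="MyWebApp",
    )
    for assignment in assignments:
        print(assignment.name)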
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyAssignmentListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_policy_assignments_list_for_resource_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
filter=filter,
top=top,
api_version=api_version,
template_url=self.list_for_resource.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_for_resource.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/policyAssignments"} # type: ignore
@distributed_trace
def list_for_management_group(
self, management_group_id: str, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> Iterable["_models.PolicyAssignment"]:
"""Retrieves all policy assignments that apply to a management group.
This operation retrieves the list of all policy assignments applicable to the management group
that match the given $filter. Valid values for $filter are: 'atScope()', 'atExactScope()' or
'policyDefinitionId eq '{value}''. If $filter=atScope() is provided, the returned list includes
all policy assignments that are assigned to the management group or the management group's
ancestors. If $filter=atExactScope() is provided, the returned list only includes all policy
assignments that are assigned at the management group. If $filter=policyDefinitionId eq '{value}' is
provided, the returned list includes all policy assignments of the policy definition whose id
is {value} that apply to the management group.
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param filter: The filter to apply on the operation. Valid values for $filter are: 'atScope()',
'atExactScope()' or 'policyDefinitionId eq '{value}''. If $filter is not provided, no filtering
is performed. If $filter=atScope() is provided, the returned list only includes all policy
assignments that apply to the scope, which is everything in the unfiltered list except those
applied to sub scopes contained within the given scope. If $filter=atExactScope() is provided,
the returned list only includes all policy assignments that are assigned at the given scope. If
$filter=policyDefinitionId eq '{value}' is provided, the returned list includes all policy
assignments of the policy definition whose id is {value}. Default value is None.
:type filter: str
:param top: Maximum number of records to return. When the $top filter is not provided, it will
return 500 records. Default value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either PolicyAssignment or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment]
:raises ~azure.core.exceptions.HttpResponseError:
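
Example (a minimal sketch; assumes an authenticated ``PolicyClient`` named
``client`` and an existing management group, names illustrative):

.. code-block:: python

    for assignment in client.policy_assignments.list_for_management_group(
        management_group_id="my-management-group",
        filter="atExactScope()",
    ):
        print(assignment.name)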
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyAssignmentListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_policy_assignments_list_for_management_group_request(
management_group_id=management_group_id,
filter=filter,
top=top,
api_version=api_version,
template_url=self.list_for_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_for_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policyAssignments"} # type: ignore
@distributed_trace
def list(
self, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> Iterable["_models.PolicyAssignment"]:
"""Retrieves all policy assignments that apply to a subscription.
This operation retrieves the list of all policy assignments associated with the given
subscription that match the optionally provided $filter. Valid values for $filter are: 'atScope()',
'atExactScope()' or 'policyDefinitionId eq '{value}''. If $filter is not provided, the
unfiltered list includes all policy assignments associated with the subscription, including
those that apply directly or from management groups that contain the given subscription, as
well as any applied to objects contained within the subscription. If $filter=atScope() is
provided, the returned list includes all policy assignments that apply to the subscription,
which is everything in the unfiltered list except those applied to objects contained within the
subscription. If $filter=atExactScope() is provided, the returned list only includes all policy
assignments that are assigned at the subscription. If $filter=policyDefinitionId eq '{value}' is provided,
the returned list includes all policy assignments of the policy definition whose id is {value}.
:param filter: The filter to apply on the operation. Valid values for $filter are: 'atScope()',
'atExactScope()' or 'policyDefinitionId eq '{value}''. If $filter is not provided, no filtering
is performed. If $filter=atScope() is provided, the returned list only includes all policy
assignments that apply to the scope, which is everything in the unfiltered list except those
applied to sub scopes contained within the given scope. If $filter=atExactScope() is provided,
the returned list only includes all policy assignments that are assigned at the given scope. If
$filter=policyDefinitionId eq '{value}' is provided, the returned list includes all policy
assignments of the policy definition whose id is {value}. Default value is None.
:type filter: str
:param top: Maximum number of records to return. When the $top filter is not provided, it will
return 500 records. Default value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either PolicyAssignment or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment]
:raises ~azure.core.exceptions.HttpResponseError:
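
Example (a minimal sketch; assumes an authenticated ``PolicyClient`` named
``client`` scoped to the target subscription):

.. code-block:: python

    for assignment in client.policy_assignments.list(top=10):
        print(assignment.name)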
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyAssignmentListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_policy_assignments_list_request(
subscription_id=self._config.subscription_id,
filter=filter,
top=top,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyAssignments"} # type: ignore
@distributed_trace
def delete_by_id(self, policy_assignment_id: str, **kwargs: Any) -> Optional[_models.PolicyAssignment]:
"""Deletes a policy assignment.
This operation deletes the policy assignment with the given ID. Policy assignment IDs have this format:
'{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'. Valid
formats for {scope} are: '/providers/Microsoft.Management/managementGroups/{managementGroup}'
(management group), '/subscriptions/{subscriptionId}' (subscription),
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' (resource group), or
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'
(resource).
:param policy_assignment_id: The ID of the policy assignment to delete. Use the format
'{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'. Required.
:type policy_assignment_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment or None or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment or None
:raises ~azure.core.exceptions.HttpResponseError:
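
Example (a minimal sketch; assumes an authenticated ``PolicyClient`` named
``client``; the assignment ID is illustrative):

.. code-block:: python

    deleted = client.policy_assignments.delete_by_id(
        "/subscriptions/<subscription-id>/providers"
        "/Microsoft.Authorization/policyAssignments/audit-vms"
    )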
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.PolicyAssignment]]
request = build_policy_assignments_delete_by_id_request(
policy_assignment_id=policy_assignment_id,
api_version=api_version,
template_url=self.delete_by_id.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("PolicyAssignment", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete_by_id.metadata = {"url": "/{policyAssignmentId}"} # type: ignore
@overload
def create_by_id(
self,
policy_assignment_id: str,
parameters: _models.PolicyAssignment,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PolicyAssignment:
"""Creates or updates a policy assignment.
This operation creates or updates the policy assignment with the given ID. Policy assignments
made on a scope apply to all resources contained in that scope. For example, when you assign a
policy to a resource group, that policy applies to all resources in the group. Policy assignment
IDs have this format:
'{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'. Valid
scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}', or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
:param policy_assignment_id: The ID of the policy assignment to create. Use the format
'{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'. Required.
:type policy_assignment_id: str
:param parameters: Parameters for policy assignment. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def create_by_id(
self, policy_assignment_id: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.PolicyAssignment:
"""Creates or updates a policy assignment.
This operation creates or updates the policy assignment with the given ID. Policy assignments
made on a scope apply to all resources contained in that scope. For example, when you assign a
policy to a resource group, that policy applies to all resources in the group. Policy assignment
IDs have this format:
'{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'. Valid
scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}', or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
:param policy_assignment_id: The ID of the policy assignment to create. Use the format
'{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'. Required.
:type policy_assignment_id: str
:param parameters: Parameters for policy assignment. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def create_by_id(
self, policy_assignment_id: str, parameters: Union[_models.PolicyAssignment, IO], **kwargs: Any
) -> _models.PolicyAssignment:
"""Creates or updates a policy assignment.
This operation creates or updates the policy assignment with the given ID. Policy assignments
made on a scope apply to all resources contained in that scope. For example, when you assign a
policy to a resource group, that policy applies to all resources in the group. Policy assignment
IDs have this format:
'{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'. Valid
scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}', or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
:param policy_assignment_id: The ID of the policy assignment to create. Use the format
'{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'. Required.
:type policy_assignment_id: str
:param parameters: Parameters for policy assignment. Is either a model type or an IO type.
Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment
:raises ~azure.core.exceptions.HttpResponseError:
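
Example (a minimal sketch; assumes an authenticated ``PolicyClient`` named
``client``; all identifier values are illustrative):

.. code-block:: python

    from azure.mgmt.resource.policy.v2021_06_01.models import PolicyAssignment

    assignment = client.policy_assignments.create_by_id(
        policy_assignment_id=(
            "/subscriptions/<subscription-id>/providers"
            "/Microsoft.Authorization/policyAssignments/audit-vms"
        ),
        parameters=PolicyAssignment(policy_definition_id="<policy-definition-id>"),
    )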
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyAssignment]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PolicyAssignment")
request = build_policy_assignments_create_by_id_request(
policy_assignment_id=policy_assignment_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create_by_id.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PolicyAssignment", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_by_id.metadata = {"url": "/{policyAssignmentId}"} # type: ignore
@distributed_trace
def get_by_id(self, policy_assignment_id: str, **kwargs: Any) -> _models.PolicyAssignment:
"""Retrieves the policy assignment with the given ID.
The operation retrieves the policy assignment with the given ID. Policy assignment IDs have
this format:
'{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'. Valid
scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}', or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
:param policy_assignment_id: The ID of the policy assignment to get. Use the format
'{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'. Required.
:type policy_assignment_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment
:raises ~azure.core.exceptions.HttpResponseError:
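
Example (a minimal sketch; assumes an authenticated ``PolicyClient`` named
``client``; the assignment ID is illustrative):

.. code-block:: python

    assignment = client.policy_assignments.get_by_id(
        "/subscriptions/<subscription-id>/providers"
        "/Microsoft.Authorization/policyAssignments/audit-vms"
    )
    print(assignment.display_name)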
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyAssignment]
request = build_policy_assignments_get_by_id_request(
policy_assignment_id=policy_assignment_id,
api_version=api_version,
template_url=self.get_by_id.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PolicyAssignment", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_by_id.metadata = {"url": "/{policyAssignmentId}"} # type: ignore
@overload
def update_by_id(
self,
policy_assignment_id: str,
parameters: _models.PolicyAssignmentUpdate,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PolicyAssignment:
"""Updates a policy assignment.
This operation updates the policy assignment with the given ID. Policy assignments made on a
scope apply to all resources contained in that scope. For example, when you assign a policy to
a resource group, that policy applies to all resources in the group. Policy assignment IDs have
this format:
'{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'. Valid
scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}', or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
:param policy_assignment_id: The ID of the policy assignment to update. Use the format
'{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'. Required.
:type policy_assignment_id: str
:param parameters: Parameters for policy assignment patch request. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignmentUpdate
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def update_by_id(
self, policy_assignment_id: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.PolicyAssignment:
"""Updates a policy assignment.
This operation updates the policy assignment with the given ID. Policy assignments made on a
scope apply to all resources contained in that scope. For example, when you assign a policy to
a resource group, that policy applies to all resources in the group. Policy assignment IDs have
this format:
'{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'. Valid
scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}', or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
:param policy_assignment_id: The ID of the policy assignment to update. Use the format
'{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'. Required.
:type policy_assignment_id: str
:param parameters: Parameters for policy assignment patch request. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def update_by_id(
self, policy_assignment_id: str, parameters: Union[_models.PolicyAssignmentUpdate, IO], **kwargs: Any
) -> _models.PolicyAssignment:
"""Updates a policy assignment.
This operation updates the policy assignment with the given ID. Policy assignments made on a
scope apply to all resources contained in that scope. For example, when you assign a policy to
a resource group, that policy applies to all resources in the group. Policy assignment IDs have
this format:
'{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'. Valid
scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}', or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
:param policy_assignment_id: The ID of the policy assignment to update. Use the format
'{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'. Required.
:type policy_assignment_id: str
:param parameters: Parameters for policy assignment patch request. Is either a model type
or an IO type. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignmentUpdate or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyAssignment
:raises ~azure.core.exceptions.HttpResponseError:
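
Example (a minimal sketch; assumes an authenticated ``PolicyClient`` named
``client``; as with ``update``, the patch model and values are illustrative):

.. code-block:: python

    from azure.mgmt.resource.policy.v2021_06_01.models import PolicyAssignmentUpdate

    updated = client.policy_assignments.update_by_id(
        policy_assignment_id=(
            "/subscriptions/<subscription-id>/providers"
            "/Microsoft.Authorization/policyAssignments/audit-vms"
        ),
        parameters=PolicyAssignmentUpdate(location="westus2"),
    )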
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyAssignment]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PolicyAssignmentUpdate")
request = build_policy_assignments_update_by_id_request(
policy_assignment_id=policy_assignment_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.update_by_id.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PolicyAssignment", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_by_id.metadata = {"url": "/{policyAssignmentId}"} # type: ignore
class PolicyDefinitionsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.resource.policy.v2021_06_01.PolicyClient`'s
:attr:`policy_definitions` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@overload
def create_or_update(
self,
policy_definition_name: str,
parameters: _models.PolicyDefinition,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PolicyDefinition:
"""Creates or updates a policy definition in a subscription.
This operation creates or updates a policy definition in the given subscription with the given
name.
:param policy_definition_name: The name of the policy definition to create. Required.
:type policy_definition_name: str
:param parameters: The policy definition properties. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyDefinition
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyDefinition
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def create_or_update(
self, policy_definition_name: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.PolicyDefinition:
"""Creates or updates a policy definition in a subscription.
This operation creates or updates a policy definition in the given subscription with the given
name.
:param policy_definition_name: The name of the policy definition to create. Required.
:type policy_definition_name: str
:param parameters: The policy definition properties. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyDefinition
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def create_or_update(
self, policy_definition_name: str, parameters: Union[_models.PolicyDefinition, IO], **kwargs: Any
) -> _models.PolicyDefinition:
"""Creates or updates a policy definition in a subscription.
This operation creates or updates a policy definition in the given subscription with the given
name.
:param policy_definition_name: The name of the policy definition to create. Required.
:type policy_definition_name: str
:param parameters: The policy definition properties. Is either a model type or an IO type.
Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyDefinition or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyDefinition
:raises ~azure.core.exceptions.HttpResponseError:
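
Example (a minimal sketch; assumes an authenticated ``PolicyClient`` named
``client``; the policy rule shown is an illustrative placeholder, not a
recommended policy):

.. code-block:: python

    from azure.mgmt.resource.policy.v2021_06_01.models import PolicyDefinition

    definition = client.policy_definitions.create_or_update(
        policy_definition_name="audit-storage-accounts",
        parameters=PolicyDefinition(
            mode="All",
            display_name="Audit storage accounts",
            policy_rule={
                "if": {"field": "type", "equals": "Microsoft.Storage/storageAccounts"},
                "then": {"effect": "audit"},
            },
        ),
    )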
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyDefinition]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PolicyDefinition")
request = build_policy_definitions_create_or_update_request(
policy_definition_name=policy_definition_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PolicyDefinition", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyDefinitions/{policyDefinitionName}"} # type: ignore
@distributed_trace
def delete( # pylint: disable=inconsistent-return-statements
self, policy_definition_name: str, **kwargs: Any
) -> None:
"""Deletes a policy definition in a subscription.
This operation deletes the policy definition in the given subscription with the given name.
:param policy_definition_name: The name of the policy definition to delete. Required.
:type policy_definition_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
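
Example (a minimal sketch; assumes an authenticated ``PolicyClient`` named
``client`` and an existing custom definition, name illustrative):

.. code-block:: python

    client.policy_definitions.delete(policy_definition_name="audit-storage-accounts")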
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_policy_definitions_delete_request(
policy_definition_name=policy_definition_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyDefinitions/{policyDefinitionName}"} # type: ignore
@distributed_trace
def get(self, policy_definition_name: str, **kwargs: Any) -> _models.PolicyDefinition:
"""Retrieves a policy definition in a subscription.
This operation retrieves the policy definition in the given subscription with the given name.
:param policy_definition_name: The name of the policy definition to get. Required.
:type policy_definition_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyDefinition
:raises ~azure.core.exceptions.HttpResponseError:
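
Example (a minimal sketch; assumes an authenticated ``PolicyClient`` named
``client``, name illustrative):

.. code-block:: python

    definition = client.policy_definitions.get(
        policy_definition_name="audit-storage-accounts"
    )
    print(definition.display_name)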
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyDefinition]
request = build_policy_definitions_get_request(
policy_definition_name=policy_definition_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PolicyDefinition", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyDefinitions/{policyDefinitionName}"} # type: ignore
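    # Illustrative sketch of a get/delete round trip (the definition name is an
    # assumption carried over from the sketch above). Per the error_map in these
    # methods, a 404 surfaces as ResourceNotFoundError:
    #
    #     from azure.core.exceptions import ResourceNotFoundError
    #
    #     try:
    #         definition = client.policy_definitions.get("audit-vms-sample")
    #         client.policy_definitions.delete("audit-vms-sample")
    #     except ResourceNotFoundError:
    #         pass  # nothing to clean up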
@distributed_trace
def get_built_in(self, policy_definition_name: str, **kwargs: Any) -> _models.PolicyDefinition:
"""Retrieves a built-in policy definition.
This operation retrieves the built-in policy definition with the given name.
:param policy_definition_name: The name of the built-in policy definition to get. Required.
:type policy_definition_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyDefinition
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyDefinition]
request = build_policy_definitions_get_built_in_request(
policy_definition_name=policy_definition_name,
api_version=api_version,
template_url=self.get_built_in.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PolicyDefinition", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_built_in.metadata = {"url": "/providers/Microsoft.Authorization/policyDefinitions/{policyDefinitionName}"} # type: ignore
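    # Illustrative sketch: built-in policy definitions are addressed by their GUID
    # name; the GUID below is a placeholder assumption, not a real built-in:
    #
    #     built_in = client.policy_definitions.get_built_in(
    #         "00000000-0000-0000-0000-000000000000"
    #     )
    #     print(built_in.display_name, built_in.policy_type)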
@overload
def create_or_update_at_management_group(
self,
policy_definition_name: str,
management_group_id: str,
parameters: _models.PolicyDefinition,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PolicyDefinition:
"""Creates or updates a policy definition in a management group.
This operation creates or updates a policy definition in the given management group with the
given name.
:param policy_definition_name: The name of the policy definition to create. Required.
:type policy_definition_name: str
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param parameters: The policy definition properties. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyDefinition
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyDefinition
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def create_or_update_at_management_group(
self,
policy_definition_name: str,
management_group_id: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PolicyDefinition:
"""Creates or updates a policy definition in a management group.
This operation creates or updates a policy definition in the given management group with the
given name.
:param policy_definition_name: The name of the policy definition to create. Required.
:type policy_definition_name: str
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param parameters: The policy definition properties. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyDefinition
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def create_or_update_at_management_group(
self,
policy_definition_name: str,
management_group_id: str,
parameters: Union[_models.PolicyDefinition, IO],
**kwargs: Any
) -> _models.PolicyDefinition:
"""Creates or updates a policy definition in a management group.
This operation creates or updates a policy definition in the given management group with the
given name.
:param policy_definition_name: The name of the policy definition to create. Required.
:type policy_definition_name: str
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
        :param parameters: The policy definition properties. Is either a model type or an IO type.
Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyDefinition or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyDefinition
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyDefinition]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PolicyDefinition")
request = build_policy_definitions_create_or_update_at_management_group_request(
policy_definition_name=policy_definition_name,
management_group_id=management_group_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create_or_update_at_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PolicyDefinition", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_at_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policyDefinitions/{policyDefinitionName}"} # type: ignore
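    # Illustrative sketch of the IO overload (the file name and management group ID
    # are assumptions): a raw JSON body can be passed instead of a model, with
    # content_type selecting the "application/json" branch handled above:
    #
    #     with open("policy_definition.json", "rb") as body:
    #         client.policy_definitions.create_or_update_at_management_group(
    #             "deny-public-ip", "my-management-group", body,
    #             content_type="application/json",
    #         )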
@distributed_trace
def delete_at_management_group( # pylint: disable=inconsistent-return-statements
self, policy_definition_name: str, management_group_id: str, **kwargs: Any
) -> None:
"""Deletes a policy definition in a management group.
This operation deletes the policy definition in the given management group with the given name.
:param policy_definition_name: The name of the policy definition to delete. Required.
:type policy_definition_name: str
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_policy_definitions_delete_at_management_group_request(
policy_definition_name=policy_definition_name,
management_group_id=management_group_id,
api_version=api_version,
template_url=self.delete_at_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_at_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policyDefinitions/{policyDefinitionName}"} # type: ignore
@distributed_trace
def get_at_management_group(
self, policy_definition_name: str, management_group_id: str, **kwargs: Any
) -> _models.PolicyDefinition:
"""Retrieve a policy definition in a management group.
This operation retrieves the policy definition in the given management group with the given
name.
:param policy_definition_name: The name of the policy definition to get. Required.
:type policy_definition_name: str
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyDefinition
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyDefinition]
request = build_policy_definitions_get_at_management_group_request(
policy_definition_name=policy_definition_name,
management_group_id=management_group_id,
api_version=api_version,
template_url=self.get_at_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PolicyDefinition", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_at_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policyDefinitions/{policyDefinitionName}"} # type: ignore
@distributed_trace
def list(
self, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> Iterable["_models.PolicyDefinition"]:
"""Retrieves policy definitions in a subscription.
        This operation retrieves a list of all the policy definitions in a given subscription that
        match the optional given $filter. Valid values for $filter are: 'atExactScope()', 'policyType
        -eq {value}' or 'category eq '{value}''. If $filter is not provided, the unfiltered list
        includes all policy definitions associated with the subscription, including those that apply
        directly or from management groups that contain the given subscription. If
        $filter=atExactScope() is provided, the returned list only includes all policy definitions
        that are at the given subscription. If $filter='policyType -eq {value}' is provided, the
        returned list only includes all policy definitions whose type matches the {value}. Possible
        policyType values are NotSpecified, BuiltIn, Custom, and Static. If $filter='category -eq
        {value}' is provided, the returned list only includes all policy definitions whose category
        matches the {value}.
        :param filter: The filter to apply on the operation. Valid values for $filter are:
         'atExactScope()', 'policyType -eq {value}' or 'category eq '{value}''. If $filter is not
         provided, no filtering is performed. If $filter=atExactScope() is provided, the returned list
         only includes all policy definitions that are at the given scope. If $filter='policyType -eq
         {value}' is provided, the returned list only includes all policy definitions whose type
         matches the {value}. Possible policyType values are NotSpecified, BuiltIn, Custom, and
         Static. If $filter='category -eq {value}' is provided, the returned list only includes all
         policy definitions whose category matches the {value}. Default value is None.
:type filter: str
:param top: Maximum number of records to return. When the $top filter is not provided, it will
return 500 records. Default value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyDefinition or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.PolicyDefinition]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyDefinitionListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_policy_definitions_list_request(
subscription_id=self._config.subscription_id,
filter=filter,
top=top,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyDefinitionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyDefinitions"} # type: ignore
@distributed_trace
def list_built_in(
self, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> Iterable["_models.PolicyDefinition"]:
"""Retrieve built-in policy definitions.
This operation retrieves a list of all the built-in policy definitions that match the optional
given $filter. If $filter='policyType -eq {value}' is provided, the returned list only includes
all built-in policy definitions whose type match the {value}. Possible policyType values are
NotSpecified, BuiltIn, Custom, and Static. If $filter='category -eq {value}' is provided, the
returned list only includes all built-in policy definitions whose category match the {value}.
:param filter: The filter to apply on the operation. Valid values for $filter are:
'atExactScope()', 'policyType -eq {value}' or 'category eq '{value}''. If $filter is not
provided, no filtering is performed. If $filter=atExactScope() is provided, the returned list
only includes all policy definitions that at the given scope. If $filter='policyType -eq
{value}' is provided, the returned list only includes all policy definitions whose type match
the {value}. Possible policyType values are NotSpecified, BuiltIn, Custom, and Static. If
$filter='category -eq {value}' is provided, the returned list only includes all policy
definitions whose category match the {value}. Default value is None.
:type filter: str
:param top: Maximum number of records to return. When the $top filter is not provided, it will
return 500 records. Default value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyDefinition or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.PolicyDefinition]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyDefinitionListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_policy_definitions_list_built_in_request(
filter=filter,
top=top,
api_version=api_version,
template_url=self.list_built_in.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyDefinitionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_built_in.metadata = {"url": "/providers/Microsoft.Authorization/policyDefinitions"} # type: ignore
@distributed_trace
def list_by_management_group(
self, management_group_id: str, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> Iterable["_models.PolicyDefinition"]:
"""Retrieve policy definitions in a management group.
This operation retrieves a list of all the policy definitions in a given management group that
match the optional given $filter. Valid values for $filter are: 'atExactScope()', 'policyType
-eq {value}' or 'category eq '{value}''. If $filter is not provided, the unfiltered list
includes all policy definitions associated with the management group, including those that
apply directly or from management groups that contain the given management group. If
$filter=atExactScope() is provided, the returned list only includes all policy definitions that
at the given management group. If $filter='policyType -eq {value}' is provided, the returned
list only includes all policy definitions whose type match the {value}. Possible policyType
values are NotSpecified, BuiltIn, Custom, and Static. If $filter='category -eq {value}' is
provided, the returned list only includes all policy definitions whose category match the
{value}.
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param filter: The filter to apply on the operation. Valid values for $filter are:
'atExactScope()', 'policyType -eq {value}' or 'category eq '{value}''. If $filter is not
provided, no filtering is performed. If $filter=atExactScope() is provided, the returned list
only includes all policy definitions that at the given scope. If $filter='policyType -eq
{value}' is provided, the returned list only includes all policy definitions whose type match
the {value}. Possible policyType values are NotSpecified, BuiltIn, Custom, and Static. If
$filter='category -eq {value}' is provided, the returned list only includes all policy
definitions whose category match the {value}. Default value is None.
:type filter: str
:param top: Maximum number of records to return. When the $top filter is not provided, it will
return 500 records. Default value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyDefinition or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.PolicyDefinition]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyDefinitionListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_policy_definitions_list_by_management_group_request(
management_group_id=management_group_id,
filter=filter,
top=top,
api_version=api_version,
template_url=self.list_by_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyDefinitionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_by_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policyDefinitions"} # type: ignore
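    # Illustrative sketch (the management group ID is an assumption); paging works
    # the same way as the subscription-scoped list above:
    #
    #     for definition in client.policy_definitions.list_by_management_group(
    #         "my-management-group", top=20
    #     ):
    #         print(definition.id)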
class PolicySetDefinitionsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.resource.policy.v2021_06_01.PolicyClient`'s
:attr:`policy_set_definitions` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@overload
def create_or_update(
self,
policy_set_definition_name: str,
parameters: _models.PolicySetDefinition,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PolicySetDefinition:
"""Creates or updates a policy set definition.
This operation creates or updates a policy set definition in the given subscription with the
given name.
:param policy_set_definition_name: The name of the policy set definition to create. Required.
:type policy_set_definition_name: str
:param parameters: The policy set definition properties. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicySetDefinition
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicySetDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicySetDefinition
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def create_or_update(
self, policy_set_definition_name: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.PolicySetDefinition:
"""Creates or updates a policy set definition.
This operation creates or updates a policy set definition in the given subscription with the
given name.
:param policy_set_definition_name: The name of the policy set definition to create. Required.
:type policy_set_definition_name: str
:param parameters: The policy set definition properties. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicySetDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicySetDefinition
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def create_or_update(
self, policy_set_definition_name: str, parameters: Union[_models.PolicySetDefinition, IO], **kwargs: Any
) -> _models.PolicySetDefinition:
"""Creates or updates a policy set definition.
This operation creates or updates a policy set definition in the given subscription with the
given name.
:param policy_set_definition_name: The name of the policy set definition to create. Required.
:type policy_set_definition_name: str
        :param parameters: The policy set definition properties. Is either a model type or an IO type.
Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicySetDefinition or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicySetDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicySetDefinition
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicySetDefinition]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PolicySetDefinition")
request = build_policy_set_definitions_create_or_update_request(
policy_set_definition_name=policy_set_definition_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("PolicySetDefinition", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("PolicySetDefinition", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policySetDefinitions/{policySetDefinitionName}"} # type: ignore
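    # Illustrative sketch of building an initiative (the names are assumptions, and
    # `created` refers to the hypothetical definition from the earlier sketch);
    # PolicyDefinitionReference points at an existing definition by resource ID:
    #
    #     from azure.mgmt.resource.policy.v2021_06_01.models import (
    #         PolicyDefinitionReference,
    #         PolicySetDefinition,
    #     )
    #
    #     initiative = PolicySetDefinition(
    #         display_name="Baseline controls (sample)",
    #         policy_definitions=[
    #             PolicyDefinitionReference(policy_definition_id=created.id),
    #         ],
    #     )
    #     client.policy_set_definitions.create_or_update("baseline-sample", initiative)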
@distributed_trace
def delete( # pylint: disable=inconsistent-return-statements
self, policy_set_definition_name: str, **kwargs: Any
) -> None:
"""Deletes a policy set definition.
This operation deletes the policy set definition in the given subscription with the given name.
:param policy_set_definition_name: The name of the policy set definition to delete. Required.
:type policy_set_definition_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_policy_set_definitions_delete_request(
policy_set_definition_name=policy_set_definition_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policySetDefinitions/{policySetDefinitionName}"} # type: ignore
@distributed_trace
def get(self, policy_set_definition_name: str, **kwargs: Any) -> _models.PolicySetDefinition:
"""Retrieves a policy set definition.
This operation retrieves the policy set definition in the given subscription with the given
name.
:param policy_set_definition_name: The name of the policy set definition to get. Required.
:type policy_set_definition_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicySetDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicySetDefinition
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicySetDefinition]
request = build_policy_set_definitions_get_request(
policy_set_definition_name=policy_set_definition_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PolicySetDefinition", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policySetDefinitions/{policySetDefinitionName}"} # type: ignore
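    # Illustrative sketch of the cls hook, which applies to every operation here:
    # cls receives the pipeline response, the deserialized model, and the response
    # headers dict, and its return value replaces the normal return value (the
    # definition name below is an assumption):
    #
    #     status_and_model = client.policy_set_definitions.get(
    #         "baseline-sample",
    #         cls=lambda pipeline_response, deserialized, headers: (
    #             pipeline_response.http_response.status_code,
    #             deserialized,
    #         ),
    #     )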
@distributed_trace
def get_built_in(self, policy_set_definition_name: str, **kwargs: Any) -> _models.PolicySetDefinition:
"""Retrieves a built in policy set definition.
This operation retrieves the built-in policy set definition with the given name.
:param policy_set_definition_name: The name of the policy set definition to get. Required.
:type policy_set_definition_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicySetDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicySetDefinition
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicySetDefinition]
request = build_policy_set_definitions_get_built_in_request(
policy_set_definition_name=policy_set_definition_name,
api_version=api_version,
template_url=self.get_built_in.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PolicySetDefinition", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_built_in.metadata = {"url": "/providers/Microsoft.Authorization/policySetDefinitions/{policySetDefinitionName}"} # type: ignore
@distributed_trace
def list(
self, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> Iterable["_models.PolicySetDefinition"]:
"""Retrieves the policy set definitions for a subscription.
        This operation retrieves a list of all the policy set definitions in a given subscription that
        match the optional given $filter. Valid values for $filter are: 'atExactScope()', 'policyType
        -eq {value}' or 'category eq '{value}''. If $filter is not provided, the unfiltered list
        includes all policy set definitions associated with the subscription, including those that
        apply directly or from management groups that contain the given subscription. If
        $filter=atExactScope() is provided, the returned list only includes all policy set definitions
        that are at the given subscription. If $filter='policyType -eq {value}' is provided, the
        returned list only includes all policy set definitions whose type matches the {value}.
        Possible policyType values are NotSpecified, BuiltIn and Custom. If $filter='category -eq
        {value}' is provided, the returned list only includes all policy set definitions whose
        category matches the {value}.
        :param filter: The filter to apply on the operation. Valid values for $filter are:
         'atExactScope()', 'policyType -eq {value}' or 'category eq '{value}''. If $filter is not
         provided, no filtering is performed. If $filter=atExactScope() is provided, the returned list
         only includes all policy set definitions that are at the given scope. If $filter='policyType
         -eq {value}' is provided, the returned list only includes all policy set definitions whose
         type matches the {value}. Possible policyType values are NotSpecified, BuiltIn, Custom, and
         Static. If $filter='category -eq {value}' is provided, the returned list only includes all
         policy set definitions whose category matches the {value}. Default value is None.
:type filter: str
:param top: Maximum number of records to return. When the $top filter is not provided, it will
return 500 records. Default value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicySetDefinition or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.PolicySetDefinition]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicySetDefinitionListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_policy_set_definitions_list_request(
subscription_id=self._config.subscription_id,
filter=filter,
top=top,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicySetDefinitionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policySetDefinitions"} # type: ignore
@distributed_trace
def list_built_in(
self, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> Iterable["_models.PolicySetDefinition"]:
"""Retrieves built-in policy set definitions.
        This operation retrieves a list of all the built-in policy set definitions that match the
        optional given $filter. If $filter='category -eq {value}' is provided, the returned list only
        includes all built-in policy set definitions whose category matches the {value}.
        :param filter: The filter to apply on the operation. Valid values for $filter are:
         'atExactScope()', 'policyType -eq {value}' or 'category eq '{value}''. If $filter is not
         provided, no filtering is performed. If $filter=atExactScope() is provided, the returned list
         only includes all policy set definitions that are at the given scope. If $filter='policyType
         -eq {value}' is provided, the returned list only includes all policy set definitions whose
         type matches the {value}. Possible policyType values are NotSpecified, BuiltIn, Custom, and
         Static. If $filter='category -eq {value}' is provided, the returned list only includes all
         policy set definitions whose category matches the {value}. Default value is None.
:type filter: str
:param top: Maximum number of records to return. When the $top filter is not provided, it will
return 500 records. Default value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicySetDefinition or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.PolicySetDefinition]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicySetDefinitionListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_policy_set_definitions_list_built_in_request(
filter=filter,
top=top,
api_version=api_version,
template_url=self.list_built_in.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicySetDefinitionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_built_in.metadata = {"url": "/providers/Microsoft.Authorization/policySetDefinitions"} # type: ignore
@overload
def create_or_update_at_management_group(
self,
policy_set_definition_name: str,
management_group_id: str,
parameters: _models.PolicySetDefinition,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PolicySetDefinition:
"""Creates or updates a policy set definition.
This operation creates or updates a policy set definition in the given management group with
the given name.
:param policy_set_definition_name: The name of the policy set definition to create. Required.
:type policy_set_definition_name: str
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param parameters: The policy set definition properties. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicySetDefinition
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicySetDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicySetDefinition
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def create_or_update_at_management_group(
self,
policy_set_definition_name: str,
management_group_id: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PolicySetDefinition:
"""Creates or updates a policy set definition.
This operation creates or updates a policy set definition in the given management group with
the given name.
:param policy_set_definition_name: The name of the policy set definition to create. Required.
:type policy_set_definition_name: str
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param parameters: The policy set definition properties. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicySetDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicySetDefinition
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def create_or_update_at_management_group(
self,
policy_set_definition_name: str,
management_group_id: str,
parameters: Union[_models.PolicySetDefinition, IO],
**kwargs: Any
) -> _models.PolicySetDefinition:
"""Creates or updates a policy set definition.
This operation creates or updates a policy set definition in the given management group with
the given name.
:param policy_set_definition_name: The name of the policy set definition to create. Required.
:type policy_set_definition_name: str
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
        :param parameters: The policy set definition properties. Is either a model type or an IO type.
Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicySetDefinition or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicySetDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicySetDefinition
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicySetDefinition]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PolicySetDefinition")
request = build_policy_set_definitions_create_or_update_at_management_group_request(
policy_set_definition_name=policy_set_definition_name,
management_group_id=management_group_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create_or_update_at_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("PolicySetDefinition", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("PolicySetDefinition", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_at_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policySetDefinitions/{policySetDefinitionName}"} # type: ignore
@distributed_trace
def delete_at_management_group( # pylint: disable=inconsistent-return-statements
self, policy_set_definition_name: str, management_group_id: str, **kwargs: Any
) -> None:
"""Deletes a policy set definition.
This operation deletes the policy set definition in the given management group with the given
name.
:param policy_set_definition_name: The name of the policy set definition to delete. Required.
:type policy_set_definition_name: str
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_policy_set_definitions_delete_at_management_group_request(
policy_set_definition_name=policy_set_definition_name,
management_group_id=management_group_id,
api_version=api_version,
template_url=self.delete_at_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_at_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policySetDefinitions/{policySetDefinitionName}"} # type: ignore
@distributed_trace
def get_at_management_group(
self, policy_set_definition_name: str, management_group_id: str, **kwargs: Any
) -> _models.PolicySetDefinition:
"""Retrieves a policy set definition.
This operation retrieves the policy set definition in the given management group with the given
name.
:param policy_set_definition_name: The name of the policy set definition to get. Required.
:type policy_set_definition_name: str
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicySetDefinition or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicySetDefinition
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicySetDefinition]
request = build_policy_set_definitions_get_at_management_group_request(
policy_set_definition_name=policy_set_definition_name,
management_group_id=management_group_id,
api_version=api_version,
template_url=self.get_at_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PolicySetDefinition", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_at_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policySetDefinitions/{policySetDefinitionName}"} # type: ignore
@distributed_trace
def list_by_management_group(
self, management_group_id: str, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> Iterable["_models.PolicySetDefinition"]:
"""Retrieves all policy set definitions in management group.
        This operation retrieves a list of all the policy set definitions in a given management group
        that match the optional given $filter. Valid values for $filter are: 'atExactScope()',
        'policyType -eq {value}' or 'category eq '{value}''. If $filter is not provided, the
        unfiltered list includes all policy set definitions associated with the management group,
        including those that apply directly or from management groups that contain the given
        management group. If $filter=atExactScope() is provided, the returned list only includes all
        policy set definitions that are at the given management group. If $filter='policyType -eq
        {value}' is provided, the returned list only includes all policy set definitions whose type
        matches the {value}. Possible policyType values are NotSpecified, BuiltIn and Custom. If
        $filter='category eq '{value}'' is provided, the returned list only includes all policy set
        definitions whose category matches the {value}.
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
        :param filter: The filter to apply on the operation. Valid values for $filter are:
         'atExactScope()', 'policyType -eq {value}' or 'category eq '{value}''. If $filter is not
         provided, no filtering is performed. If $filter=atExactScope() is provided, the returned
         list only includes all policy set definitions that are at the given scope. If
         $filter='policyType -eq {value}' is provided, the returned list only includes all policy
         set definitions whose type matches the {value}. Possible policyType values are
         NotSpecified, BuiltIn, Custom, and Static. If $filter='category eq '{value}'' is provided,
         the returned list only includes all policy set definitions whose category matches the
         {value}. Default value is None.
:type filter: str
:param top: Maximum number of records to return. When the $top filter is not provided, it will
return 500 records. Default value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicySetDefinition or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.PolicySetDefinition]
:raises ~azure.core.exceptions.HttpResponseError:
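
        .. admonition:: Example:

            A minimal sketch, assuming ``client`` is a ``PolicyClient``; the filter value and
            management group ID are illustrative only.

            .. code-block:: python

                # Page lazily through custom policy set definitions in a management group.
                pager = client.policy_set_definitions.list_by_management_group(
                    management_group_id="myManagementGroup",  # hypothetical ID
                    filter="policyType eq 'Custom'",
                    top=100,
                )
                for definition in pager:
                    print(definition.name)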
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicySetDefinitionListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_policy_set_definitions_list_by_management_group_request(
management_group_id=management_group_id,
filter=filter,
top=top,
api_version=api_version,
template_url=self.list_by_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicySetDefinitionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_by_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policySetDefinitions"} # type: ignore
class PolicyExemptionsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.resource.policy.v2021_06_01.PolicyClient`'s
:attr:`policy_exemptions` attribute.
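
    A minimal construction sketch (the subscription ID below is a placeholder; the credential
    requires the ``azure-identity`` package):

    .. code-block:: python

        from azure.identity import DefaultAzureCredential
        from azure.mgmt.resource import PolicyClient

        client = PolicyClient(
            credential=DefaultAzureCredential(),
            subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
        )
        exemptions = client.policy_exemptions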
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def delete( # pylint: disable=inconsistent-return-statements
self, scope: str, policy_exemption_name: str, **kwargs: Any
) -> None:
"""Deletes a policy exemption.
This operation deletes a policy exemption, given its name and the scope it was created in. The
scope of a policy exemption is the part of its ID preceding
'/providers/Microsoft.Authorization/policyExemptions/{policyExemptionName}'.
:param scope: The scope of the policy exemption. Valid scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}', or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
Required.
:type scope: str
:param policy_exemption_name: The name of the policy exemption to delete. Required.
:type policy_exemption_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
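
        .. admonition:: Example:

            A minimal sketch; ``client`` is assumed to be a ``PolicyClient``, and the scope and
            exemption name are placeholders.

            .. code-block:: python

                # Delete an exemption that was created at subscription scope.
                client.policy_exemptions.delete(
                    scope="/subscriptions/00000000-0000-0000-0000-000000000000",  # placeholder
                    policy_exemption_name="myExemption",  # hypothetical name
                )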
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-07-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_policy_exemptions_delete_request(
scope=scope,
policy_exemption_name=policy_exemption_name,
api_version=api_version,
template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {"url": "/{scope}/providers/Microsoft.Authorization/policyExemptions/{policyExemptionName}"} # type: ignore
@overload
def create_or_update(
self,
scope: str,
policy_exemption_name: str,
parameters: _models.PolicyExemption,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PolicyExemption:
"""Creates or updates a policy exemption.
This operation creates or updates a policy exemption with the given scope and name. Policy
exemptions apply to all resources contained within their scope. For example, when you create a
policy exemption at resource group scope for a policy assignment at the same or above level,
        the exemption exempts all applicable resources in the resource group.
:param scope: The scope of the policy exemption. Valid scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}', or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
Required.
:type scope: str
        :param policy_exemption_name: The name of the policy exemption to create or update. Required.
:type policy_exemption_name: str
:param parameters: Parameters for the policy exemption. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyExemption
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyExemption or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyExemption
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def create_or_update(
self,
scope: str,
policy_exemption_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PolicyExemption:
"""Creates or updates a policy exemption.
This operation creates or updates a policy exemption with the given scope and name. Policy
exemptions apply to all resources contained within their scope. For example, when you create a
policy exemption at resource group scope for a policy assignment at the same or above level,
        the exemption exempts all applicable resources in the resource group.
:param scope: The scope of the policy exemption. Valid scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}', or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
Required.
:type scope: str
        :param policy_exemption_name: The name of the policy exemption to create or update. Required.
:type policy_exemption_name: str
:param parameters: Parameters for the policy exemption. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyExemption or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyExemption
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def create_or_update(
self, scope: str, policy_exemption_name: str, parameters: Union[_models.PolicyExemption, IO], **kwargs: Any
) -> _models.PolicyExemption:
"""Creates or updates a policy exemption.
This operation creates or updates a policy exemption with the given scope and name. Policy
exemptions apply to all resources contained within their scope. For example, when you create a
policy exemption at resource group scope for a policy assignment at the same or above level,
        the exemption exempts all applicable resources in the resource group.
:param scope: The scope of the policy exemption. Valid scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}', or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
Required.
:type scope: str
        :param policy_exemption_name: The name of the policy exemption to create or update. Required.
:type policy_exemption_name: str
        :param parameters: Parameters for the policy exemption. Is either a model type or an IO type.
Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyExemption or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyExemption or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyExemption
:raises ~azure.core.exceptions.HttpResponseError:
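
        .. admonition:: Example:

            A minimal sketch; ``client`` is assumed to be a ``PolicyClient``, and every
            identifier below is a placeholder. ``policy_assignment_id`` and
            ``exemption_category`` are the required fields of the ``PolicyExemption`` model in
            this API version.

            .. code-block:: python

                from azure.mgmt.resource.policy.v2021_06_01 import models

                exemption = client.policy_exemptions.create_or_update(
                    scope="/subscriptions/00000000-0000-0000-0000-000000000000",  # placeholder
                    policy_exemption_name="myExemption",  # hypothetical name
                    parameters=models.PolicyExemption(
                        policy_assignment_id=(
                            "/subscriptions/00000000-0000-0000-0000-000000000000"
                            "/providers/Microsoft.Authorization/policyAssignments/myAssignment"
                        ),
                        exemption_category="Waiver",  # or "Mitigated"
                        display_name="Temporary waiver",
                    ),
                )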
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-07-01-preview")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyExemption]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PolicyExemption")
request = build_policy_exemptions_create_or_update_request(
scope=scope,
policy_exemption_name=policy_exemption_name,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # 200 and 201 carry the same response body shape, so one deserialization covers both.
        deserialized = self._deserialize("PolicyExemption", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {"url": "/{scope}/providers/Microsoft.Authorization/policyExemptions/{policyExemptionName}"} # type: ignore
@distributed_trace
def get(self, scope: str, policy_exemption_name: str, **kwargs: Any) -> _models.PolicyExemption:
"""Retrieves a policy exemption.
        This operation retrieves a single policy exemption, given its name and the scope it was
        created in.
:param scope: The scope of the policy exemption. Valid scopes are: management group (format:
'/providers/Microsoft.Management/managementGroups/{managementGroup}'), subscription (format:
'/subscriptions/{subscriptionId}'), resource group (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}', or resource (format:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/[{parentResourcePath}/]{resourceType}/{resourceName}'.
Required.
:type scope: str
        :param policy_exemption_name: The name of the policy exemption to get. Required.
:type policy_exemption_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyExemption or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.PolicyExemption
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-07-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyExemption]
request = build_policy_exemptions_get_request(
scope=scope,
policy_exemption_name=policy_exemption_name,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PolicyExemption", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/{scope}/providers/Microsoft.Authorization/policyExemptions/{policyExemptionName}"} # type: ignore
@distributed_trace
def list(self, filter: Optional[str] = None, **kwargs: Any) -> Iterable["_models.PolicyExemption"]:
"""Retrieves all policy exemptions that apply to a subscription.
This operation retrieves the list of all policy exemptions associated with the given
subscription that match the optional given $filter. Valid values for $filter are: 'atScope()',
'atExactScope()', 'excludeExpired()' or 'policyAssignmentId eq '{value}''. If $filter is not
provided, the unfiltered list includes all policy exemptions associated with the subscription,
including those that apply directly or from management groups that contain the given
subscription, as well as any applied to objects contained within the subscription.
:param filter: The filter to apply on the operation. Valid values for $filter are: 'atScope()',
'atExactScope()', 'excludeExpired()' or 'policyAssignmentId eq '{value}''. If $filter is not
provided, no filtering is performed. If $filter is not provided, the unfiltered list includes
all policy exemptions associated with the scope, including those that apply directly or apply
from containing scopes. If $filter=atScope() is provided, the returned list only includes all
policy exemptions that apply to the scope, which is everything in the unfiltered list except
         those applied to sub scopes contained within the given scope. If $filter=atExactScope() is
         provided, the returned list only includes all policy exemptions that are at the given scope.
         If $filter=excludeExpired() is provided, the returned list only includes all policy
         exemptions that either haven't expired or don't have an expiration date set. If
         $filter=policyAssignmentId eq '{value}' is provided, the returned list only includes all
         policy exemptions that are associated with the given policyAssignmentId. Default value is
         None.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyExemption or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.PolicyExemption]
:raises ~azure.core.exceptions.HttpResponseError:
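
        .. admonition:: Example:

            A minimal sketch, assuming ``client`` is a ``PolicyClient``; the filter shown is one
            of the values documented above.

            .. code-block:: python

                # List only exemptions that are still active for this subscription.
                for exemption in client.policy_exemptions.list(filter="excludeExpired()"):
                    print(exemption.name, exemption.exemption_category)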
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-07-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyExemptionListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_policy_exemptions_list_request(
subscription_id=self._config.subscription_id,
filter=filter,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyExemptionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyExemptions"} # type: ignore
@distributed_trace
def list_for_resource_group(
self, resource_group_name: str, filter: Optional[str] = None, **kwargs: Any
) -> Iterable["_models.PolicyExemption"]:
"""Retrieves all policy exemptions that apply to a resource group.
This operation retrieves the list of all policy exemptions associated with the given resource
group in the given subscription that match the optional given $filter. Valid values for $filter
are: 'atScope()', 'atExactScope()', 'excludeExpired()' or 'policyAssignmentId eq '{value}''. If
$filter is not provided, the unfiltered list includes all policy exemptions associated with the
resource group, including those that apply directly or apply from containing scopes, as well as
any applied to resources contained within the resource group.
:param resource_group_name: The name of the resource group containing the resource. Required.
:type resource_group_name: str
:param filter: The filter to apply on the operation. Valid values for $filter are: 'atScope()',
'atExactScope()', 'excludeExpired()' or 'policyAssignmentId eq '{value}''. If $filter is not
provided, no filtering is performed. If $filter is not provided, the unfiltered list includes
all policy exemptions associated with the scope, including those that apply directly or apply
from containing scopes. If $filter=atScope() is provided, the returned list only includes all
policy exemptions that apply to the scope, which is everything in the unfiltered list except
         those applied to sub scopes contained within the given scope. If $filter=atExactScope() is
         provided, the returned list only includes all policy exemptions that are at the given scope.
         If $filter=excludeExpired() is provided, the returned list only includes all policy
         exemptions that either haven't expired or don't have an expiration date set. If
         $filter=policyAssignmentId eq '{value}' is provided, the returned list only includes all
         policy exemptions that are associated with the given policyAssignmentId. Default value is
         None.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyExemption or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.PolicyExemption]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-07-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyExemptionListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_policy_exemptions_list_for_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
filter=filter,
api_version=api_version,
template_url=self.list_for_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyExemptionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_for_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/policyExemptions"} # type: ignore
@distributed_trace
def list_for_resource(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
filter: Optional[str] = None,
**kwargs: Any
) -> Iterable["_models.PolicyExemption"]:
"""Retrieves all policy exemptions that apply to a resource.
This operation retrieves the list of all policy exemptions associated with the specified
resource in the given resource group and subscription that match the optional given $filter.
Valid values for $filter are: 'atScope()', 'atExactScope()', 'excludeExpired()' or
'policyAssignmentId eq '{value}''. If $filter is not provided, the unfiltered list includes all
policy exemptions associated with the resource, including those that apply directly or from all
containing scopes, as well as any applied to resources contained within the resource. Three
parameters plus the resource name are used to identify a specific resource. If the resource is
not part of a parent resource (the more common case), the parent resource path should not be
        provided (or provided as ''). For example, a web app could be specified as
({resourceProviderNamespace} == 'Microsoft.Web', {parentResourcePath} == '', {resourceType} ==
'sites', {resourceName} == 'MyWebApp'). If the resource is part of a parent resource, then all
        parameters should be provided. For example, a virtual machine DNS name could be specified as
({resourceProviderNamespace} == 'Microsoft.Compute', {parentResourcePath} ==
'virtualMachines/MyVirtualMachine', {resourceType} == 'domainNames', {resourceName} ==
'MyComputerName'). A convenient alternative to providing the namespace and type name separately
is to provide both in the {resourceType} parameter, format: ({resourceProviderNamespace} == '',
{parentResourcePath} == '', {resourceType} == 'Microsoft.Web/sites', {resourceName} ==
'MyWebApp').
:param resource_group_name: The name of the resource group containing the resource. Required.
:type resource_group_name: str
:param resource_provider_namespace: The namespace of the resource provider. For example, the
namespace of a virtual machine is Microsoft.Compute (from Microsoft.Compute/virtualMachines).
Required.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource path. Use empty string if there is none.
Required.
:type parent_resource_path: str
:param resource_type: The resource type name. For example the type name of a web app is 'sites'
(from Microsoft.Web/sites). Required.
:type resource_type: str
:param resource_name: The name of the resource. Required.
:type resource_name: str
:param filter: The filter to apply on the operation. Valid values for $filter are: 'atScope()',
'atExactScope()', 'excludeExpired()' or 'policyAssignmentId eq '{value}''. If $filter is not
provided, no filtering is performed. If $filter is not provided, the unfiltered list includes
all policy exemptions associated with the scope, including those that apply directly or apply
from containing scopes. If $filter=atScope() is provided, the returned list only includes all
policy exemptions that apply to the scope, which is everything in the unfiltered list except
         those applied to sub scopes contained within the given scope. If $filter=atExactScope() is
         provided, the returned list only includes all policy exemptions that are at the given scope.
         If $filter=excludeExpired() is provided, the returned list only includes all policy
         exemptions that either haven't expired or don't have an expiration date set. If
         $filter=policyAssignmentId eq '{value}' is provided, the returned list only includes all
         policy exemptions that are associated with the given policyAssignmentId. Default value is
         None.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyExemption or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.PolicyExemption]
:raises ~azure.core.exceptions.HttpResponseError:
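
        .. admonition:: Example:

            A minimal sketch of the resource-identification parameters, reusing the web app
            example from the description above; ``client`` is assumed to be a ``PolicyClient``
            and all names are placeholders.

            .. code-block:: python

                # A web app is not nested under a parent resource, so
                # parent_resource_path is passed as an empty string.
                pager = client.policy_exemptions.list_for_resource(
                    resource_group_name="myResourceGroup",  # hypothetical name
                    resource_provider_namespace="Microsoft.Web",
                    parent_resource_path="",
                    resource_type="sites",
                    resource_name="MyWebApp",
                )
                for exemption in pager:
                    print(exemption.name)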
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-07-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyExemptionListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_policy_exemptions_list_for_resource_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
filter=filter,
api_version=api_version,
template_url=self.list_for_resource.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyExemptionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_for_resource.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/policyExemptions"} # type: ignore
@distributed_trace
def list_for_management_group(
self, management_group_id: str, filter: Optional[str] = None, **kwargs: Any
) -> Iterable["_models.PolicyExemption"]:
"""Retrieves all policy exemptions that apply to a management group.
This operation retrieves the list of all policy exemptions applicable to the management group
that match the given $filter. Valid values for $filter are: 'atScope()', 'atExactScope()',
'excludeExpired()' or 'policyAssignmentId eq '{value}''. If $filter=atScope() is provided, the
returned list includes all policy exemptions that are assigned to the management group or the
management group's ancestors.
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param filter: The filter to apply on the operation. Valid values for $filter are: 'atScope()',
'atExactScope()', 'excludeExpired()' or 'policyAssignmentId eq '{value}''. If $filter is not
provided, no filtering is performed. If $filter is not provided, the unfiltered list includes
all policy exemptions associated with the scope, including those that apply directly or apply
from containing scopes. If $filter=atScope() is provided, the returned list only includes all
policy exemptions that apply to the scope, which is everything in the unfiltered list except
         those applied to sub scopes contained within the given scope. If $filter=atExactScope() is
         provided, the returned list only includes all policy exemptions that are at the given scope.
         If $filter=excludeExpired() is provided, the returned list only includes all policy
         exemptions that either haven't expired or don't have an expiration date set. If
         $filter=policyAssignmentId eq '{value}' is provided, the returned list only includes all
         policy exemptions that are associated with the given policyAssignmentId. Default value is
         None.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyExemption or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.PolicyExemption]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-07-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PolicyExemptionListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_policy_exemptions_list_for_management_group_request(
management_group_id=management_group_id,
filter=filter,
api_version=api_version,
template_url=self.list_for_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyExemptionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_for_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/policyExemptions"} # type: ignore
class VariablesOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.resource.policy.v2021_06_01.PolicyClient`'s
:attr:`variables` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def delete(self, variable_name: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements
"""Deletes a variable.
This operation deletes a variable, given its name and the subscription it was created in. The
scope of a variable is the part of its ID preceding
'/providers/Microsoft.Authorization/variables/{variableName}'.
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_variables_delete_request(
variable_name=variable_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/variables/{variableName}"} # type: ignore
@overload
def create_or_update(
self, variable_name: str, parameters: _models.Variable, *, content_type: str = "application/json", **kwargs: Any
) -> _models.Variable:
"""Creates or updates a variable.
This operation creates or updates a variable with the given subscription and name. Policy
        variables can only be used by a policy definition at or below the scope at which they are
        created.
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:param parameters: Parameters for the variable. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.Variable
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Variable or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.Variable
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def create_or_update(
self, variable_name: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.Variable:
"""Creates or updates a variable.
This operation creates or updates a variable with the given subscription and name. Policy
        variables can only be used by a policy definition at or below the scope at which they are
        created.
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:param parameters: Parameters for the variable. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Variable or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.Variable
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def create_or_update(
self, variable_name: str, parameters: Union[_models.Variable, IO], **kwargs: Any
) -> _models.Variable:
"""Creates or updates a variable.
This operation creates or updates a variable with the given subscription and name. Policy
        variables can only be used by a policy definition at or below the scope at which they are
        created.
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
        :param parameters: Parameters for the variable. Is either a model type or an IO type. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.Variable or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Variable or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.Variable
:raises ~azure.core.exceptions.HttpResponseError:
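
        .. admonition:: Example:

            A minimal sketch; ``client`` is assumed to be a ``PolicyClient``, the variable name
            is a placeholder, and the ``columns``/``PolicyVariableColumn`` shape is an
            assumption based on this API version's ``Variable`` model.

            .. code-block:: python

                from azure.mgmt.resource.policy.v2021_06_01 import models

                variable = client.variables.create_or_update(
                    variable_name="allowedLocations",  # hypothetical name
                    parameters=models.Variable(
                        columns=[models.PolicyVariableColumn(column_name="region")],
                    ),
                )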
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.Variable]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "Variable")
request = build_variables_create_or_update_request(
variable_name=variable_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # 200 and 201 carry the same response body shape, so one deserialization covers both.
        deserialized = self._deserialize("Variable", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/variables/{variableName}"} # type: ignore
@distributed_trace
def get(self, variable_name: str, **kwargs: Any) -> _models.Variable:
"""Retrieves a variable.
        This operation retrieves a single variable, given its name and the subscription it was
        created in.
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Variable or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.Variable
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.Variable]
request = build_variables_get_request(
variable_name=variable_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("Variable", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/variables/{variableName}"} # type: ignore
@distributed_trace
def delete_at_management_group( # pylint: disable=inconsistent-return-statements
self, management_group_id: str, variable_name: str, **kwargs: Any
) -> None:
"""Deletes a variable.
This operation deletes a variable, given its name and the management group it was created in.
The scope of a variable is the part of its ID preceding
'/providers/Microsoft.Authorization/variables/{variableName}'.
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_variables_delete_at_management_group_request(
management_group_id=management_group_id,
variable_name=variable_name,
api_version=api_version,
template_url=self.delete_at_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_at_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/variables/{variableName}"} # type: ignore
@overload
def create_or_update_at_management_group(
self,
management_group_id: str,
variable_name: str,
parameters: _models.Variable,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.Variable:
"""Creates or updates a variable.
This operation creates or updates a variable with the given management group and name. Policy
        variables can only be used by a policy definition at or below the scope at which they are
        created.
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:param parameters: Parameters for the variable. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.Variable
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Variable or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.Variable
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def create_or_update_at_management_group(
self,
management_group_id: str,
variable_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.Variable:
"""Creates or updates a variable.
This operation creates or updates a variable with the given management group and name. Policy
        variables can only be used by a policy definition at or below the scope at which they are
        created.
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:param parameters: Parameters for the variable. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Variable or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.Variable
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def create_or_update_at_management_group(
self, management_group_id: str, variable_name: str, parameters: Union[_models.Variable, IO], **kwargs: Any
) -> _models.Variable:
"""Creates or updates a variable.
This operation creates or updates a variable with the given management group and name. Policy
        variables can only be used by a policy definition at or below the scope at which they are
        created.
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
        :param parameters: Parameters for the variable. Is either a model type or an IO type. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.Variable or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Variable or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.Variable
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.Variable]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "Variable")
request = build_variables_create_or_update_at_management_group_request(
management_group_id=management_group_id,
variable_name=variable_name,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create_or_update_at_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # 200 and 201 carry the same response body shape, so one deserialization covers both.
        deserialized = self._deserialize("Variable", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_at_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/variables/{variableName}"} # type: ignore
@distributed_trace
def get_at_management_group(self, management_group_id: str, variable_name: str, **kwargs: Any) -> _models.Variable:
"""Retrieves a variable.
        This operation retrieves a single variable, given its name and the management group it was
        created in.
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Variable or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.Variable
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.Variable]
request = build_variables_get_at_management_group_request(
management_group_id=management_group_id,
variable_name=variable_name,
api_version=api_version,
template_url=self.get_at_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("Variable", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_at_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/variables/{variableName}"} # type: ignore
@distributed_trace
def list(self, **kwargs: Any) -> Iterable["_models.Variable"]:
"""Retrieves all variables that are at this subscription level.
This operation retrieves the list of all variables associated with the given subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Variable or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.Variable]
:raises ~azure.core.exceptions.HttpResponseError:
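
        .. admonition:: Example:

            A minimal sketch, assuming ``client`` is a ``PolicyClient``.

            .. code-block:: python

                # Enumerate every variable defined at subscription scope; paging is
                # handled transparently by the returned ItemPaged iterator.
                for variable in client.variables.list():
                    print(variable.name)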
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.VariableListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_variables_list_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("VariableListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/variables"} # type: ignore
@distributed_trace
def list_for_management_group(self, management_group_id: str, **kwargs: Any) -> Iterable["_models.Variable"]:
"""Retrieves all variables that are at this management group level.
This operation retrieves the list of all variables applicable to the management group.
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Variable or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.Variable]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.VariableListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_variables_list_for_management_group_request(
management_group_id=management_group_id,
api_version=api_version,
template_url=self.list_for_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("VariableListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_for_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/variables"} # type: ignore
class VariableValuesOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.resource.policy.v2021_06_01.PolicyClient`'s
:attr:`variable_values` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def delete( # pylint: disable=inconsistent-return-statements
self, variable_name: str, variable_value_name: str, **kwargs: Any
) -> None:
"""Deletes a variable value.
This operation deletes a variable value, given its name, the subscription it was created in,
and the variable it belongs to. The scope of a variable value is the part of its ID preceding
'/providers/Microsoft.Authorization/variables/{variableName}'.
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:param variable_value_name: The name of the variable value to operate on. Required.
:type variable_value_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_variable_values_delete_request(
variable_name=variable_name,
variable_value_name=variable_value_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/variables/{variableName}/values/{variableValueName}"} # type: ignore
@overload
def create_or_update(
self,
variable_name: str,
variable_value_name: str,
parameters: _models.VariableValue,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.VariableValue:
"""Creates or updates a variable value.
This operation creates or updates a variable value with the given subscription and name for a
        given variable. Variable values are scoped to the variable for which they are created.
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:param variable_value_name: The name of the variable value to operate on. Required.
:type variable_value_name: str
:param parameters: Parameters for the variable value. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.VariableValue
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VariableValue or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.VariableValue
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def create_or_update(
self,
variable_name: str,
variable_value_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.VariableValue:
"""Creates or updates a variable value.
This operation creates or updates a variable value with the given subscription and name for a
        given variable. Variable values are scoped to the variable for which they are created.
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:param variable_value_name: The name of the variable value to operate on. Required.
:type variable_value_name: str
:param parameters: Parameters for the variable value. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VariableValue or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.VariableValue
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def create_or_update(
self, variable_name: str, variable_value_name: str, parameters: Union[_models.VariableValue, IO], **kwargs: Any
) -> _models.VariableValue:
"""Creates or updates a variable value.
This operation creates or updates a variable value with the given subscription and name for a
        given variable. Variable values are scoped to the variable for which they are created.
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:param variable_value_name: The name of the variable value to operate on. Required.
:type variable_value_name: str
        :param parameters: Parameters for the variable value. Is either a model type or an IO type.
Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.VariableValue or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VariableValue or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.VariableValue
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.VariableValue]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "VariableValue")
request = build_variable_values_create_or_update_request(
variable_name=variable_name,
variable_value_name=variable_value_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("VariableValue", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("VariableValue", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/variables/{variableName}/values/{variableValueName}"} # type: ignore
@distributed_trace
def get(self, variable_name: str, variable_value_name: str, **kwargs: Any) -> _models.VariableValue:
"""Retrieves a variable value.
        This operation retrieves a single variable value, given its name, the subscription it was
        created in, and the variable it was created for.
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:param variable_value_name: The name of the variable value to operate on. Required.
:type variable_value_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VariableValue or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.VariableValue
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.VariableValue]
request = build_variable_values_get_request(
variable_name=variable_name,
variable_value_name=variable_value_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("VariableValue", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/variables/{variableName}/values/{variableValueName}"} # type: ignore
@distributed_trace
def list(self, variable_name: str, **kwargs: Any) -> Iterable["_models.VariableValue"]:
"""List variable values for a variable.
This operation retrieves the list of all variable values associated with the given variable
that is at a subscription level.
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VariableValue or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.VariableValue]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.VariableValueListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_variable_values_list_request(
variable_name=variable_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("VariableValueListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/variables/{variableName}/values"} # type: ignore
@distributed_trace
def list_for_management_group(
self, management_group_id: str, variable_name: str, **kwargs: Any
) -> Iterable["_models.VariableValue"]:
"""List variable values at management group level.
        This operation retrieves the list of all variable values applicable to the variable indicated at
the management group scope.
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VariableValue or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2021_06_01.models.VariableValue]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.VariableValueListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_variable_values_list_for_management_group_request(
management_group_id=management_group_id,
variable_name=variable_name,
api_version=api_version,
template_url=self.list_for_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("VariableValueListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_for_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/variables/{variableName}/values"} # type: ignore
@distributed_trace
def delete_at_management_group( # pylint: disable=inconsistent-return-statements
self, management_group_id: str, variable_name: str, variable_value_name: str, **kwargs: Any
) -> None:
"""Deletes a variable value.
This operation deletes a variable value, given its name, the management group it was created
in, and the variable it belongs to. The scope of a variable value is the part of its ID
preceding '/providers/Microsoft.Authorization/variables/{variableName}'.
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:param variable_value_name: The name of the variable value to operate on. Required.
:type variable_value_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_variable_values_delete_at_management_group_request(
management_group_id=management_group_id,
variable_name=variable_name,
variable_value_name=variable_value_name,
api_version=api_version,
template_url=self.delete_at_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_at_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/variables/{variableName}/values/{variableValueName}"} # type: ignore
@overload
def create_or_update_at_management_group(
self,
management_group_id: str,
variable_name: str,
variable_value_name: str,
parameters: _models.VariableValue,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.VariableValue:
"""Creates or updates a variable value.
This operation creates or updates a variable value with the given management group and name for
        a given variable. Variable values are scoped to the variable for which they are created.
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:param variable_value_name: The name of the variable value to operate on. Required.
:type variable_value_name: str
:param parameters: Parameters for the variable value. Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.VariableValue
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VariableValue or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.VariableValue
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def create_or_update_at_management_group(
self,
management_group_id: str,
variable_name: str,
variable_value_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.VariableValue:
"""Creates or updates a variable value.
This operation creates or updates a variable value with the given management group and name for
        a given variable. Variable values are scoped to the variable for which they are created.
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:param variable_value_name: The name of the variable value to operate on. Required.
:type variable_value_name: str
:param parameters: Parameters for the variable value. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VariableValue or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.VariableValue
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def create_or_update_at_management_group(
self,
management_group_id: str,
variable_name: str,
variable_value_name: str,
parameters: Union[_models.VariableValue, IO],
**kwargs: Any
) -> _models.VariableValue:
"""Creates or updates a variable value.
This operation creates or updates a variable value with the given management group and name for
        a given variable. Variable values are scoped to the variable for which they are created.
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:param variable_value_name: The name of the variable value to operate on. Required.
:type variable_value_name: str
        :param parameters: Parameters for the variable value. Is either a model type or an IO type.
Required.
:type parameters: ~azure.mgmt.resource.policy.v2021_06_01.models.VariableValue or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VariableValue or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.VariableValue
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.VariableValue]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "VariableValue")
request = build_variable_values_create_or_update_at_management_group_request(
management_group_id=management_group_id,
variable_name=variable_name,
variable_value_name=variable_value_name,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create_or_update_at_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("VariableValue", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("VariableValue", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_at_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/variables/{variableName}/values/{variableValueName}"} # type: ignore
@distributed_trace
def get_at_management_group(
self, management_group_id: str, variable_name: str, variable_value_name: str, **kwargs: Any
) -> _models.VariableValue:
"""Retrieves a variable value.
        This operation retrieves a single variable value, given its name, the management group it
        was created in, and the variable it was created for.
:param management_group_id: The ID of the management group. Required.
:type management_group_id: str
:param variable_name: The name of the variable to operate on. Required.
:type variable_name: str
:param variable_value_name: The name of the variable value to operate on. Required.
:type variable_value_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VariableValue or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2021_06_01.models.VariableValue
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.VariableValue]
request = build_variable_values_get_at_management_group_request(
management_group_id=management_group_id,
variable_name=variable_name,
variable_value_name=variable_value_name,
api_version=api_version,
template_url=self.get_at_management_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("VariableValue", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_at_management_group.metadata = {"url": "/providers/Microsoft.Management/managementGroups/{managementGroupId}/providers/Microsoft.Authorization/variables/{variableName}/values/{variableValueName}"} # type: ignore
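# Illustrative usage sketch (not part of the generated client): it assumes
# azure-identity is installed; "<subscription-id>", "myVariable" and "myValue"
# are placeholders.
#
#     from azure.identity import DefaultAzureCredential
#     from azure.mgmt.resource.policy.v2021_06_01 import PolicyClient
#
#     client = PolicyClient(DefaultAzureCredential(), "<subscription-id>")
#     for variable in client.variables.list():
#         print(variable.name)
#     value = client.variable_values.get("myVariable", "myValue")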
| {
"content_hash": "58bb4584d66182ba104c0499a36182c9",
"timestamp": "",
"source": "github",
"line_count": 7088,
"max_line_length": 267,
"avg_line_length": 47.67127539503386,
"alnum_prop": 0.6552794663415153,
"repo_name": "Azure/azure-sdk-for-python",
"id": "2b54a968098a0a4bdaeb108c7beb05054827f725",
"size": "338394",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/resources/azure-mgmt-resource/azure/mgmt/resource/policy/v2021_06_01/operations/_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
import bisect
from collections import defaultdict
import io
import json
import logging
import zipfile
from babelfish import Language
from guessit import guessit
from requests import Session
from . import ParserBeautifulSoup, Provider
from .. import __short_version__
from ..cache import SHOW_EXPIRATION_TIME, region
from ..exceptions import AuthenticationError, ConfigurationError, ProviderError
from ..subtitle import Subtitle, fix_line_ending, guess_matches
from ..utils import sanitize
from ..video import Episode, Movie
logger = logging.getLogger(__name__)
class SubsCenterSubtitle(Subtitle):
"""SubsCenter Subtitle."""
provider_name = 'subscenter'
def __init__(self, language, hearing_impaired, page_link, series, season, episode, title, subtitle_id, subtitle_key,
downloaded, releases):
super(SubsCenterSubtitle, self).__init__(language, hearing_impaired, page_link)
self.series = series
self.season = season
self.episode = episode
self.title = title
self.subtitle_id = subtitle_id
self.subtitle_key = subtitle_key
self.downloaded = downloaded
self.releases = releases
@property
def id(self):
return str(self.subtitle_id)
def get_matches(self, video):
matches = set()
# episode
if isinstance(video, Episode):
# series
if video.series and sanitize(self.series) == sanitize(video.series):
matches.add('series')
# season
if video.season and self.season == video.season:
matches.add('season')
# episode
if video.episode and self.episode == video.episode:
matches.add('episode')
# guess
for release in self.releases:
matches |= guess_matches(video, guessit(release, {'type': 'episode'}))
# movie
elif isinstance(video, Movie):
# guess
for release in self.releases:
matches |= guess_matches(video, guessit(release, {'type': 'movie'}))
# title
if video.title and sanitize(self.title) == sanitize(video.title):
matches.add('title')
return matches
class SubsCenterProvider(Provider):
"""SubsCenter Provider."""
languages = {Language.fromalpha2(l) for l in ['he']}
server_url = 'http://subscenter.cinemast.com/he/'
def __init__(self, username=None, password=None):
        if (username is not None and password is None) or (username is None and password is not None):
            raise ConfigurationError('Username and password must be specified together')
self.username = username
self.password = password
self.logged_in = False
def initialize(self):
self.session = Session()
self.session.headers['User-Agent'] = 'Subliminal/%s' % __short_version__
# login
if self.username is not None and self.password is not None:
logger.debug('Logging in')
url = self.server_url + 'subscenter/accounts/login/'
# retrieve CSRF token
self.session.get(url)
csrf_token = self.session.cookies['csrftoken']
# actual login
data = {'username': self.username, 'password': self.password, 'csrfmiddlewaretoken': csrf_token}
r = self.session.post(url, data, allow_redirects=False, timeout=10)
if r.status_code != 302:
raise AuthenticationError(self.username)
logger.info('Logged in')
self.logged_in = True
def terminate(self):
# logout
if self.logged_in:
logger.info('Logging out')
r = self.session.get(self.server_url + 'subscenter/accounts/logout/', timeout=10)
r.raise_for_status()
logger.info('Logged out')
self.logged_in = False
self.session.close()
@region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME)
def _search_url_titles(self, title):
"""Search the URL titles by kind for the given `title`.
:param str title: title to search for.
:return: the URL titles by kind.
:rtype: collections.defaultdict
"""
# make the search
logger.info('Searching title name for %r', title)
r = self.session.get(self.server_url + 'subtitle/search/', params={'q': title}, timeout=10)
r.raise_for_status()
# get the suggestions
soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])
links = soup.select('#processes div.generalWindowTop a')
logger.debug('Found %d suggestions', len(links))
url_titles = defaultdict(list)
for link in links:
parts = link.attrs['href'].split('/')
url_titles[parts[-3]].append(parts[-2])
return url_titles
def query(self, title, season=None, episode=None):
# search for the url title
url_titles = self._search_url_titles(title)
# episode
if season and episode:
if 'series' not in url_titles:
logger.error('No URL title found for series %r', title)
return []
url_title = url_titles['series'][0]
logger.debug('Using series title %r', url_title)
url = self.server_url + 'cinemast/data/series/sb/{}/{}/{}/'.format(url_title, season, episode)
page_link = self.server_url + 'subtitle/series/{}/{}/{}/'.format(url_title, season, episode)
else:
if 'movie' not in url_titles:
logger.error('No URL title found for movie %r', title)
return []
url_title = url_titles['movie'][0]
logger.debug('Using movie title %r', url_title)
url = self.server_url + 'cinemast/data/movie/sb/{}/'.format(url_title)
page_link = self.server_url + 'subtitle/movie/{}/'.format(url_title)
# get the list of subtitles
logger.debug('Getting the list of subtitles')
r = self.session.get(url)
r.raise_for_status()
results = json.loads(r.text)
# loop over results
subtitles = {}
for language_code, language_data in results.items():
for quality_data in language_data.values():
for quality, subtitles_data in quality_data.items():
for subtitle_item in subtitles_data.values():
# read the item
language = Language.fromalpha2(language_code)
hearing_impaired = bool(subtitle_item['hearing_impaired'])
subtitle_id = subtitle_item['id']
subtitle_key = subtitle_item['key']
downloaded = subtitle_item['downloaded']
release = subtitle_item['subtitle_version']
# add the release and increment downloaded count if we already have the subtitle
if subtitle_id in subtitles:
logger.debug('Found additional release %r for subtitle %d', release, subtitle_id)
bisect.insort_left(subtitles[subtitle_id].releases, release) # deterministic order
subtitles[subtitle_id].downloaded += downloaded
continue
# otherwise create it
subtitle = SubsCenterSubtitle(language, hearing_impaired, page_link, title, season, episode,
title, subtitle_id, subtitle_key, downloaded, [release])
logger.debug('Found subtitle %r', subtitle)
subtitles[subtitle_id] = subtitle
return subtitles.values()
def list_subtitles(self, video, languages):
season = episode = None
title = video.title
if isinstance(video, Episode):
title = video.series
season = video.season
episode = video.episode
return [s for s in self.query(title, season, episode) if s.language in languages]
def download_subtitle(self, subtitle):
# download
url = self.server_url + 'subtitle/download/{}/{}/'.format(subtitle.language.alpha2, subtitle.subtitle_id)
params = {'v': subtitle.releases[0], 'key': subtitle.subtitle_key}
r = self.session.get(url, params=params, headers={'Referer': subtitle.page_link}, timeout=10)
r.raise_for_status()
# open the zip
with zipfile.ZipFile(io.BytesIO(r.content)) as zf:
# remove some filenames from the namelist
namelist = [n for n in zf.namelist() if not n.endswith('.txt')]
if len(namelist) > 1:
raise ProviderError('More than one file to unzip')
subtitle.content = fix_line_ending(zf.read(namelist[0]))
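# Minimal usage sketch (assumptions: a reachable SubsCenter server and a
# release name to guess from; 'Show.S01E01.720p.mkv' is a placeholder):
#
#     from subliminal import Video
#
#     provider = SubsCenterProvider()
#     provider.initialize()
#     video = Video.fromname('Show.S01E01.720p.mkv')
#     subtitles = provider.list_subtitles(video, {Language.fromalpha2('he')})
#     if subtitles:
#         provider.download_subtitle(subtitles[0])
#     provider.terminate()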
| {
"content_hash": "5603096772a0d567d61e4feb4c5bbcaa",
"timestamp": "",
"source": "github",
"line_count": 227,
"max_line_length": 120,
"avg_line_length": 39.440528634361236,
"alnum_prop": 0.5872891768122417,
"repo_name": "neo1691/subliminal",
"id": "ac620a3d1f6f601c5031d20b156e545e8feb37a6",
"size": "8977",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "subliminal/providers/subscenter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "317334"
}
],
"symlink_target": ""
} |
from copy import copy
from optparse import Option, OptionValueError
def _check_mesos_cluster(option, opt, value):
cluster_name = value
if option.clusters and cluster_name in option.clusters:
return option.clusters[cluster_name]
elif option.cluster_provider:
return option.cluster_provider(cluster_name)
cluster_list = ""
if option.clusters:
cluster_list = 'Valid options for clusters are %s' % ' '.join(option.clusters)
raise OptionValueError(
'%s is not a valid cluster for the %s option. %s' % (value, opt, cluster_list))
class ClusterOption(Option):
"""A command-line Option that requires a valid cluster name and returns a Cluster object.
Use in an @app.command_option decorator to avoid boilerplate. For example:
CLUSTER_PATH = os.path.expanduser('~/.clusters')
CLUSTERS = Clusters.from_json(CLUSTER_PATH)
@app.command
@app.command_option(ClusterOption('--cluster', default='smf1-test', clusters=CLUSTERS))
def get_health(args, options):
if options.cluster.zk_server:
do_something(options.cluster)
@app.command
@app.command_option(ClusterOption('-s',
'--source_cluster',
default='smf1-test',
clusters=CLUSTERS,
help='Source cluster to pull metadata from.'))
@app.command_option(ClusterOption('-d',
'--dest_cluster',
clusters=CLUSTERS,
default='smf1-test'))
def copy_metadata(args, options):
if not options.source_cluster:
print('required option source_cluster missing!')
metadata_copy(options.source_cluster, options.dest_cluster)
"""
# Needed since we're creating a new type for validation - see optparse docs.
TYPES = copy(Option.TYPES) + ('mesos_cluster',)
TYPE_CHECKER = copy(Option.TYPE_CHECKER)
TYPE_CHECKER['mesos_cluster'] = _check_mesos_cluster
def __init__(self, *opt_str, **attrs):
"""
*opt_str: Same meaning as in twitter.common.options.Option, at least one is required.
**attrs: See twitter.common.options.Option, with the following caveats:
Exactly one of the following must be provided:
clusters: A static Clusters object from which to pick clusters.
cluster_provider: A function that takes a cluster name and returns a Cluster object.
"""
self.clusters = attrs.pop('clusters', None)
self.cluster_provider = attrs.pop('cluster_provider', None)
if not (self.clusters is not None) ^ (self.cluster_provider is not None):
raise ValueError('Must specify exactly one of clusters and cluster_provider.')
default_attrs = dict(
default=None,
action='store',
type='mesos_cluster',
help='Mesos cluster to use (Default: %%default)'
)
combined_attrs = default_attrs
combined_attrs.update(attrs) # Defensive copy
Option.__init__(self, *opt_str, **combined_attrs) # old-style superclass
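# Hedged sketch of standalone use with optparse; CLUSTERS is a hypothetical
# Clusters mapping of name -> Cluster object that contains 'smf1-test':
#
#     from optparse import OptionParser
#
#     parser = OptionParser()
#     parser.add_option(ClusterOption('--cluster', clusters=CLUSTERS))
#     options, args = parser.parse_args(['--cluster', 'smf1-test'])
#     print(options.cluster)  # the resolved Cluster object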
| {
"content_hash": "791afa89f5481f2a70b59fa4a8c73ca2",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 91,
"avg_line_length": 36.30379746835443,
"alnum_prop": 0.6886331938633193,
"repo_name": "mkhutornenko/incubator-aurora",
"id": "77202c286529e59576cb8aeb1c3e24f86ee5cb27",
"size": "3417",
"binary": false,
"copies": "15",
"ref": "refs/heads/master",
"path": "src/main/python/apache/aurora/common/cluster_option.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2391"
},
{
"name": "Groovy",
"bytes": "15108"
},
{
"name": "Java",
"bytes": "1620408"
},
{
"name": "JavaScript",
"bytes": "71008"
},
{
"name": "Python",
"bytes": "1327488"
},
{
"name": "Ruby",
"bytes": "4252"
},
{
"name": "Shell",
"bytes": "53305"
}
],
"symlink_target": ""
} |
import myfunctions
import random
#Ask user for inputs and check validity
while True:
qns = int(input("How many problems would you like to attempt? "))
if qns <= 0:
print("Invalid number, try again\n")
continue
else:
break
while True:
width = int(input("How wide do you want your digits to be? 5-10: "))
if width < 5 or width > 10:
print("Invalid width, try again\n")
continue
else:
break
while True:
drill = str.lower(input("Would you like to activate 'drill' mode? yes or no: "))
if drill != "yes" and drill != "no":
print("Invalid response, try again\n")
continue
else:
break
print("\nHere we go!")
#Define variables to track score and statistics
tscore = 0
addition = 0
subtraction = 0
multiplication = 0
division = 0
addition_score = 0
subtraction_score = 0
multiplication_score = 0
division_score = 0
#Set number of questions
for i in range(qns):
print("\nWhat is .....\n")
#Define parameters
x = random.randint(0, 9)
op = random.randint(1, 4)
y = random.randint(0, 9)
#Check for valid division equation
if op == 4:
if y == 0:
y = random.randint(1, 9)
while x % y != 0:
x = random.randint(0, 9)
y = random.randint(1, 9)
    #Display first number via a lookup table of digit-drawing functions
    digit_functions = [myfunctions.number_0, myfunctions.number_1,
                       myfunctions.number_2, myfunctions.number_3,
                       myfunctions.number_4, myfunctions.number_5,
                       myfunctions.number_6, myfunctions.number_7,
                       myfunctions.number_8, myfunctions.number_9]
    digit_functions[x](width)
#Display operator
if op == 1:
op = "+"
myfunctions.plus(width)
addition += 1
elif op == 2:
op = "-"
myfunctions.minus(width)
subtraction += 1
elif op == 3:
op = "*"
myfunctions.multiply(width)
multiplication += 1
elif op == 4:
op = "/"
myfunctions.divide(width)
division += 1
    #Display second number
    digit_functions[y](width)
#Ask user for answer and check answer
if drill == "no":
z = int(input("= "))
        if myfunctions.check_answer(x, y, z, op):
print("Correct!")
tscore += 1
if op == "+":
addition_score += 1
if op == "-":
subtraction_score += 1
if op == "*":
multiplication_score += 1
if op == "/":
division_score += 1
else:
print("Sorry, that's not correct.")
elif drill == "yes":
while True:
z = int(input("= "))
            if not myfunctions.check_answer(x, y, z, op):
print("Sorry, that's not correct.")
if op == "+":
addition_score += 1
if op == "-":
subtraction_score += 1
if op == "*":
multiplication_score += 1
if op == "/":
division_score += 1
continue
else:
print("Correct!")
break
#Display score
if drill == "no":
print("\nYou got %d out of %d correct!" %(tscore, qns))
for operator, count, score in zip(["addition", "subtraction", "multiplication", "division"], [addition, subtraction, multiplication, division], [addition_score, subtraction_score, multiplication_score, division_score]):
if count == 0:
print("\nNo %s problems presented" %(operator))
else:
print("\nTotal %s problems presented: %d" %(operator, count))
print("Correct %s problems: %d (%s)" %(operator, score, format(score/count, ".1%")))
elif drill == "yes":
for operator, count, score in zip(["addition", "subtraction", "multiplication", "division"], [addition, subtraction, multiplication, division], [addition_score, subtraction_score, multiplication_score, division_score]):
if score == 0:
praise = "(perfect!)"
else:
praise = ""
if count == 0:
print("\nNo %s problems presented" %(operator))
else:
print("\nTotal %s problems presented: %d" %(operator, count))
print("# of extra attempts needed: %d %s" %(score, praise))
| {
"content_hash": "1c8cfcecee1e05b506bd545ad0865ec6",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 223,
"avg_line_length": 30.03409090909091,
"alnum_prop": 0.5107832009080591,
"repo_name": "sojournexx/python",
"id": "b4f467de862ac5d1ca87acf2d7d8a6057e9f8502",
"size": "5318",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Assignments/TanAndrew_assign6.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "48441"
}
],
"symlink_target": ""
} |
from module_base import ModuleBase
from module_mixins import FilenameViewModuleMixin
import module_utils
import vtk
class vtiRDR(FilenameViewModuleMixin, ModuleBase):
def __init__(self, module_manager):
# call parent constructor
ModuleBase.__init__(self, module_manager)
self._reader = vtk.vtkXMLImageDataReader()
# ctor for this specific mixin
FilenameViewModuleMixin.__init__(
self,
'Select a filename',
'VTK Image Data (*.vti)|*.vti|All files (*)|*',
{'vtkXMLImageDataReader': self._reader,
'Module (self)' : self})
module_utils.setup_vtk_object_progress(
self, self._reader,
'Reading VTK ImageData')
# set up some defaults
self._config.filename = ''
# there is no view yet...
self._module_manager.sync_module_logic_with_config(self)
def close(self):
del self._reader
FilenameViewModuleMixin.close(self)
def get_input_descriptions(self):
return ()
def set_input(self, idx, input_stream):
        raise Exception('This module does not accept any input.')
def get_output_descriptions(self):
return ('vtkImageData',)
def get_output(self, idx):
return self._reader.GetOutput()
def logic_to_config(self):
filename = self._reader.GetFileName()
if filename == None:
filename = ''
self._config.filename = filename
def config_to_logic(self):
self._reader.SetFileName(self._config.filename)
def view_to_config(self):
self._config.filename = self._getViewFrameFilename()
def config_to_view(self):
self._setViewFrameFilename(self._config.filename)
def execute_module(self):
# get the vtkPolyDataReader to try and execute
if len(self._reader.GetFileName()):
self._reader.Update()
def streaming_execute_module(self):
if len(self._reader.GetFileName()):
self._reader.UpdateInformation()
self._reader.GetOutput().SetUpdateExtentToWholeExtent()
self._reader.Update()
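# Standalone sketch of the reader this module wraps ('volume.vti' is a
# placeholder filename):
#
#     reader = vtk.vtkXMLImageDataReader()
#     reader.SetFileName('volume.vti')
#     reader.Update()
#     image_data = reader.GetOutput()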
| {
"content_hash": "b508aa2e2167c2d33641ec154d421e34",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 67,
"avg_line_length": 27.743589743589745,
"alnum_prop": 0.6030499075785583,
"repo_name": "chrisidefix/devide",
"id": "4fdd5bc150789a8c98d7fe31f8011c5630fbb523",
"size": "2172",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "modules/readers/vtiRDR.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Diff",
"bytes": "1373"
},
{
"name": "NSIS",
"bytes": "2786"
},
{
"name": "Python",
"bytes": "3104368"
},
{
"name": "Shell",
"bytes": "7369"
}
],
"symlink_target": ""
} |
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = os.environ.get(
'SECRET_KEY') or 'WILL.I.AM tour with Wale featuring Leon_Omosh'
MAIL_SERVER = 'smtp.googlemail.com'
MAIL_PORT = 587
MAIL_USE_TLS = True
MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
WANANCHI_MAIL_SUBJECT_PREFIX = '[CHUDI INVESTMENTS]'
WANANCHI_MAIL_SENDER = 'CHUDI INVESTMENTS ADMININSTRATOR'
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
SQLALCHEMY_TRACK_MODIFICATIONS = True
WANANCHI_ADMIN = os.environ.get('CHUDI_ADMIN')
username = os.environ.get('username')
api_key = os.environ.get('api_key')
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.environ.get('DEV_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-dev.sqlite')
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-test.sqlite')
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data.sqlite')
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'default': DevelopmentConfig
}
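# Typical consumption in a Flask application factory (sketch; create_app and
# the Flask import are assumptions, not defined in this file):
#
#     from flask import Flask
#
#     def create_app(config_name='default'):
#         app = Flask(__name__)
#         app.config.from_object(config[config_name])
#         config[config_name].init_app(app)
#         return app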
| {
"content_hash": "68de6b4f7cea71dede044a691a8082fe",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 72,
"avg_line_length": 29.551020408163264,
"alnum_prop": 0.669889502762431,
"repo_name": "Kimanicodes/wananchi",
"id": "c847e6d17e7e48b21665b02b83a4ecedb4af39b2",
"size": "1448",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "356373"
},
{
"name": "HTML",
"bytes": "195521"
},
{
"name": "JavaScript",
"bytes": "938345"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "96355"
}
],
"symlink_target": ""
} |
"""RequestContext: context for requests that persist through all of nova."""
import copy
import uuid
from nova.openstack.common import local
from nova.openstack.common import log as logging
from nova.openstack.common import timeutils
from nova import policy
LOG = logging.getLogger(__name__)
def generate_request_id():
return 'req-' + str(uuid.uuid4())
class RequestContext(object):
"""Security context and request information.
Represents the user taking a given action within the system.
"""
def __init__(self, user_id, project_id, is_admin=None, read_deleted="no",
roles=None, remote_address=None, timestamp=None,
request_id=None, auth_token=None, overwrite=True,
quota_class=None, user_name=None, project_name=None,
service_catalog=None, instance_lock_checked=False, **kwargs):
"""
:param read_deleted: 'no' indicates deleted records are hidden, 'yes'
indicates deleted records are visible, 'only' indicates that
*only* deleted records are visible.
:param overwrite: Set to False to ensure that the greenthread local
copy of the index is not overwritten.
:param kwargs: Extra arguments that might be present, but we ignore
because they possibly came in from older rpc messages.
"""
if kwargs:
LOG.warn(_('Arguments dropped when creating context: %s') %
str(kwargs))
self.user_id = user_id
self.project_id = project_id
self.roles = roles or []
self.is_admin = is_admin
if self.is_admin is None:
self.is_admin = policy.check_is_admin(self.roles)
self.read_deleted = read_deleted
self.remote_address = remote_address
if not timestamp:
timestamp = timeutils.utcnow()
if isinstance(timestamp, basestring):
timestamp = timeutils.parse_strtime(timestamp)
self.timestamp = timestamp
if not request_id:
request_id = generate_request_id()
self.request_id = request_id
self.auth_token = auth_token
self.service_catalog = service_catalog
self.instance_lock_checked = instance_lock_checked
# NOTE(markmc): this attribute is currently only used by the
# rs_limits turnstile pre-processor.
# See https://lists.launchpad.net/openstack/msg12200.html
self.quota_class = quota_class
self.user_name = user_name
self.project_name = project_name
if overwrite or not hasattr(local.store, 'context'):
self.update_store()
def _get_read_deleted(self):
return self._read_deleted
def _set_read_deleted(self, read_deleted):
if read_deleted not in ('no', 'yes', 'only'):
raise ValueError(_("read_deleted can only be one of 'no', "
"'yes' or 'only', not %r") % read_deleted)
self._read_deleted = read_deleted
def _del_read_deleted(self):
del self._read_deleted
read_deleted = property(_get_read_deleted, _set_read_deleted,
_del_read_deleted)
def update_store(self):
local.store.context = self
def to_dict(self):
return {'user_id': self.user_id,
'project_id': self.project_id,
'is_admin': self.is_admin,
'read_deleted': self.read_deleted,
'roles': self.roles,
'remote_address': self.remote_address,
'timestamp': timeutils.strtime(self.timestamp),
'request_id': self.request_id,
'auth_token': self.auth_token,
'quota_class': self.quota_class,
'user_name': self.user_name,
'service_catalog': self.service_catalog,
'project_name': self.project_name,
'instance_lock_checked': self.instance_lock_checked}
@classmethod
def from_dict(cls, values):
return cls(**values)
def elevated(self, read_deleted=None, overwrite=False):
"""Return a version of this context with admin flag set."""
context = copy.copy(self)
context.is_admin = True
if 'admin' not in context.roles:
context.roles.append('admin')
if read_deleted is not None:
context.read_deleted = read_deleted
return context
def get_admin_context(read_deleted="no"):
return RequestContext(user_id=None,
project_id=None,
is_admin=True,
read_deleted=read_deleted,
overwrite=False)
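# Sketch of typical construction and elevation (the IDs and role below are
# illustrative):
#
#     ctxt = RequestContext(user_id='fake-user', project_id='fake-project',
#                           roles=['member'])
#     admin_ctxt = ctxt.elevated()
#     assert admin_ctxt.is_admin
#     assert 'admin' in admin_ctxt.roles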
| {
"content_hash": "e146c1486978d27205be6d9505a986de",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 78,
"avg_line_length": 35.54887218045113,
"alnum_prop": 0.5930626057529611,
"repo_name": "houshengbo/nova_vmware_compute_driver",
"id": "094e2bffbc33804d992949d79295234811de502c",
"size": "5537",
"binary": false,
"copies": "2",
"ref": "refs/heads/attach-detach-VMware-iSCSI-driver",
"path": "nova/context.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "7173520"
},
{
"name": "Shell",
"bytes": "15478"
}
],
"symlink_target": ""
} |
import os
import sys
import math
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
LOG_PATH = "logs-2.txt"
SEARCHSTRING = "NARF: "
SAVENAME = "plot2"
x = []
y = []
z = []
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
f = open(LOG_PATH, "r")
lines = f.readlines()
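# Each line is assumed to contain "NARF: <x>;<y>;<z>" with semicolon-separated
# floats, e.g. "... NARF: 0.12;3.40;-1.75"; the format is inferred from the
# parsing below.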
for line in lines:
pos = line.index(SEARCHSTRING) + len(SEARCHSTRING)
    payload = line[pos:-2]  # strip trailing CRLF
    # the last line has no CRLF, so this clips real characters there; don't care
foo = payload.split(";")
x.append(float(foo[0]))
y.append(float(foo[1]))
z.append(float(foo[2]))
ax.scatter(x, y, zs=z) #, c="r", marker="s", label="ground truth")
plt.savefig(os.path.join(".", SAVENAME + ".png"), bbox_inches='tight')
plt.show()
| {
"content_hash": "1e2ad518b7a9cee8b5af0dc04a74fa22",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 70,
"avg_line_length": 21.87878787878788,
"alnum_prop": 0.6260387811634349,
"repo_name": "mcguenther/MIScreen",
"id": "60d3340a6b6436fc1738ac4dab7f5a3bcff7839a",
"size": "722",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "analysis/plot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "201719"
},
{
"name": "Python",
"bytes": "722"
}
],
"symlink_target": ""
} |
"""
Tools for managing data for use with `~windspharm.standard.VectorWind`
(or indeed `spharm.Spharmt`).
"""
# Copyright (c) 2012-2013 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
import numpy as np
def __order_dims(d, inorder):
if 'x' not in inorder or 'y' not in inorder:
raise ValueError('a latitude-longitude grid is required')
lonpos = inorder.lower().find('x')
latpos = inorder.lower().find('y')
d = np.rollaxis(d, lonpos)
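    # Rolling longitude to the front shifts latitude one slot later if
    # latitude originally preceded longitude, so adjust its index.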
if latpos < lonpos:
latpos += 1
d = np.rollaxis(d, latpos)
outorder = inorder.replace('x', '')
outorder = outorder.replace('y', '')
outorder = 'yx' + outorder
return d, outorder
def __reshape(d):
out = d.reshape(d.shape[:2] + (np.prod(d.shape[2:]),))
return out, d.shape
def prep_data(data, dimorder):
"""
Prepare data for input to `~windspharm.standard.VectorWind` (or to
`spharm.Spharmt` method calls).
Returns a dictionary of intermediate information that can be passed
to `recover_data` or `get_recovery` to recover the original shape
and order of the data.
**Arguments:**
*data*
Data array. The array must be at least 2D.
*dimorder*
String specifying the order of dimensions in the data array. The
characters 'x' and 'y' represent longitude and latitude
respectively. Any other characters can be used to represent
other dimensions.
**Returns:**
*pdata*
*data* reshaped/reordered to (latitude, longitude, other).
*info*
A dictionary of information required to recover *data*.
**See also:**
`recover_data`, `get_recovery`.
**Examples:**
Prepare an array with dimensions (12, 17, 73, 144) where the
dimensions are (time, level, latitude, longitude)::
pdata, info = prep_data(data, 'tzyx')
Prepare an array with dimensions (144, 16, 73, 21) where the first
dimension is longitude and the third dimension is latitude. The
characters used to represent the other dimensions are arbitrary::
pdata, info = prep_data(data, 'xayb')
"""
# Returns the prepared data and some data info to help data recovery.
pdata, intorder = __order_dims(data, dimorder)
pdata, intshape = __reshape(pdata)
info = dict(intermediate_shape=intshape,
intermediate_order=intorder,
original_order=dimorder)
return pdata, info
def recover_data(pdata, info):
"""
Recover the shape and dimension order of an array output from
`~windspharm.standard.VectorWind` methods (or from `spharm.Spharmt`
methods).
This function performs the opposite of `prep_data`.
For recovering the shape of multiple variables, see `get_recovery`.
**Arguments:**
*pdata*
Data array with either 2 or 3 dimensions. The first two
dimensions are latitude and longitude respectively.
*info*
Information dictionary output from `prep_data`.
**Returns:**
*data*
The data reshaped/reordered.
**See also:**
`prep_data`, `get_recovery`.
**Example:**
Recover the original input shape and dimension order of an array
processed with `prep_data` or an output of
    `~windspharm.standard.VectorWind` or `spharm.Spharmt` method calls on
such data::
data = recover_data(pdata, info)
"""
# Convert to intermediate shape (full dimensionality, windspharm order).
data = pdata.reshape(info['intermediate_shape'])
# Re-order dimensions correctly.
rolldims = np.array([info['intermediate_order'].index(dim)
for dim in info['original_order'][::-1]])
    for i in range(len(rolldims)):
# Roll the axis to the front.
data = np.rollaxis(data, rolldims[i])
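        # Rolling an axis to the front displaces every axis that sat before
        # it by one position, so bump those recorded indices to match.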
rolldims = np.where(rolldims < rolldims[i], rolldims + 1, rolldims)
return data
__recover_docstring_template = """Shape/dimension recovery.
Recovers variable shape/dimension according to:
{!s}
Returns a `list` of variables.
"""
def get_recovery(info):
"""
Return a function that can be used to recover the shape and
dimension order of multiple arrays output from
`~windspharm.standard.VectorWind` methods (or from `spharm.Spharmt`
methods) according to a single dictionary of recovery information.
**Argument:**
*info*
Information dictionary output from `prep_data`.
**Returns:**
*recover*
A function used to recover arrays.
**See also:**
`recover_data`, `prep_data`.
**Example:**
Generate a function to recover the original input shape and
dimension order of arrays processed with `prep_data` and outputs of
`~windspharm.standard.VectorWind` method calls on this data::
u, info = prep_data(u, 'tzyx')
v, info = prep_data(v, 'tzyx')
w = VectorWind(u, v)
sf, vp = w.sfvp()
recover = get_recovery(info)
u, v, sf, vp = recover(u, v, sf, vp)
"""
def __recover(*args):
return [recover_data(arg, info) for arg in args]
info_nice = ["'{!s}': {!s}".format(key, value)
for key, value in info.items()]
__recover.__name__ = 'recover'
__recover.__doc__ = __recover_docstring_template.format(
'\n'.join(info_nice))
return __recover
def reverse_latdim(u, v, axis=0):
"""
Reverse the order of the latitude dimension of zonal and meridional
wind components.
**Arguments:**
*u*, *v*
Zonal and meridional wind components respectively.
**Optional argument:**
*axis*
Index of the latitude dimension. This dimension will be reversed
in the input arrays. Defaults to 0 (the first dimension).
**Returns:**
*ur*, *vr*
Zonal and meridional wind components with the latitude dimensions
reversed. These are always copies of the input.
**See also:**
`order_latdim`.
**Examples:**
Reverse the dimension corresponding to latitude when it is the first
dimension of the inputs::
u, v = reverse_latdim(u, v)
Reverse the dimension corresponding to latitude when it is the third
dimension of the inputs::
u, v = reverse_latdim(u, v, axis=2)
"""
    slicelist = [slice(0, None)] * u.ndim
    slicelist[axis] = slice(None, None, -1)
    # NumPy requires a tuple, not a list, for multi-axis indexing
    u = u.copy()[tuple(slicelist)]
    v = v.copy()[tuple(slicelist)]
return u, v
def order_latdim(latdim, u, v, axis=0):
"""Ensure the latitude dimension is north-to-south.
Returns copies of the latitude dimension and wind components
with the latitude dimension going from north to south. If the
latitude dimension is already in this order then the output will
just be copies of the input.
**Arguments:**
*latdim*
Array of latitude values.
*u*, *v*
Zonal and meridional wind components respectively.
**Keyword argument:**
*axis*
Index of the latitude dimension in the zonal and meridional wind
components. Defaults to 0 (the first dimension).
**Returns:**
*latdimr*
Possibly reversed *latdim*, always a copy of *latdim*.
*ur*, *vr*
Possibly reversed *u* and *v* respectively. Always copies of *u*
and *v* respectively.
**See also:**
`reverse_latdim`.
**Examples:**
Order the latitude dimension when latitude is the first dimension of
the wind components::
latdim, u, v = order_latdim(latdim, u, v)
Order the latitude dimension when latitude is the third dimension of
the wind components::
latdim, u, v = order_latdim(latdim, u, v, axis=2)
"""
latdim = latdim.copy()
if latdim[0] < latdim[-1]:
latdim = latdim[::-1]
# reverse_latdim() will make copies of u and v
u, v = reverse_latdim(u, v, axis=axis)
else:
# we return copies from this function
u, v = u.copy(), v.copy()
return latdim, u, v
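# --- Usage sketch (added for illustration; not part of the original module) ---
# A self-contained round-trip check of the helpers above on synthetic data;
# the array sizes follow the docstring examples and only numpy is assumed.
if __name__ == '__main__':
    lat = np.linspace(-90, 90, 73)          # deliberately south-to-north
    u = np.random.randn(12, 17, 73, 144)    # (time, level, lat, lon)
    v = np.random.randn(12, 17, 73, 144)
    # Flip to the north-to-south ordering spherical harmonics code expects.
    lat_ns, u_ns, v_ns = order_latdim(lat, u, v, axis=2)
    assert lat_ns[0] > lat_ns[-1]
    # Collapse to (lat, lon, other) and then recover the original layout.
    pdata, info = prep_data(u_ns, 'tzyx')
    assert pdata.shape == (73, 144, 12 * 17)
    recovered = recover_data(pdata, info)
    assert np.array_equal(recovered, u_ns)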
| {
"content_hash": "79dd80e39c90b5044330af3b152bf16b",
"timestamp": "",
"source": "github",
"line_count": 313,
"max_line_length": 79,
"avg_line_length": 28.782747603833865,
"alnum_prop": 0.6517926517926518,
"repo_name": "nicolasfauchereau/windspharm",
"id": "f4384940c0bb836a6b9a7fb7f31fe1a7ca0e357d",
"size": "9009",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/windspharm/tools.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "132549"
}
],
"symlink_target": ""
} |