repo_name: stringclasses (4 values)
method_name: stringlengths (3 to 72)
method_code: stringlengths (87 to 3.59k)
method_summary: stringlengths (12 to 196)
original_method_code: stringlengths (129 to 8.98k)
method_path: stringlengths (15 to 136)
Azure/azure-sdk-for-python
ServiceBusManagementService.get_metrics_rollups_relay
def get_metrics_rollups_relay(self, name, relay_name, metric): response = self._perform_get( self._get_get_metrics_rollup_relay_path(name, relay_name, metric), None) return _MinidomXmlToObject.convert_response_to_feeds( response, partial( _ServiceBusManagementXmlSerializer.xml_to_metrics, object_type=MetricRollups ) )
This operation gets rollup data for a Service Bus relay metric. Rollup data includes the time granularity for the telemetry aggregation as well as the retention settings for each time granularity.
def get_metrics_rollups_relay(self, name, relay_name, metric): ''' This operation gets rollup data for Service Bus metrics relay. Rollup data includes the time granularity for the telemetry aggregation as well as the retention settings for each time granularity. name: Name of the service bus namespace. relay_name: Name of the service bus relay in this namespace. metric: name of a supported metric ''' response = self._perform_get( self._get_get_metrics_rollup_relay_path(name, relay_name, metric), None) return _MinidomXmlToObject.convert_response_to_feeds( response, partial( _ServiceBusManagementXmlSerializer.xml_to_metrics, object_type=MetricRollups ) )
azure-servicemanagement-legacy/azure/servicemanagement/servicebusmanagementservice.py
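A minimal usage sketch for this rollup query, assuming a ServiceBusManagementService built from a subscription ID and a management certificate; the namespace, relay, and metric names are placeholders.

from azure.servicemanagement import ServiceBusManagementService

# Assumed constructor arguments; adjust to your subscription and certificate.
sbms = ServiceBusManagementService('<subscription-id>', cert_file='mycert.pem')

# The metric name is illustrative; pass a metric supported by your relay.
rollups = sbms.get_metrics_rollups_relay('mynamespace', 'myrelay', 'connections.total')
for rollup in rollups:
    print(rollup)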
Azure/azure-sdk-for-python
create
def create(env_dir, system_site_packages=False, clear=False, symlinks=False, with_pip=False, prompt=None): builder = ExtendedEnvBuilder(system_site_packages=system_site_packages, clear=clear, symlinks=symlinks, with_pip=with_pip, prompt=prompt) builder.create(env_dir) return builder.context
Create a virtual environment in a directory.
def create(env_dir, system_site_packages=False, clear=False, symlinks=False, with_pip=False, prompt=None): """Create a virtual environment in a directory.""" builder = ExtendedEnvBuilder(system_site_packages=system_site_packages, clear=clear, symlinks=symlinks, with_pip=with_pip, prompt=prompt) builder.create(env_dir) return builder.context
azure-sdk-tools/packaging_tools/venvtools.py
Azure/azure-sdk-for-python
create_venv_with_package
def create_venv_with_package(packages): with tempfile.TemporaryDirectory() as tempdir: myenv = create(tempdir, with_pip=True) pip_call = [ myenv.env_exe, "-m", "pip", "install", ] subprocess.check_call(pip_call + ['-U', 'pip']) if packages: subprocess.check_call(pip_call + packages) yield myenv
Create a venv with these packages in a temp dir and yield the env. packages should be an iterable of pip version instructions (e.g. package~=1.2.3)
def create_venv_with_package(packages): """Create a venv with these packages in a temp dir and yield the env. packages should be an iterable of pip version instructions (e.g. package~=1.2.3) """ with tempfile.TemporaryDirectory() as tempdir: myenv = create(tempdir, with_pip=True) pip_call = [ myenv.env_exe, "-m", "pip", "install", ] subprocess.check_call(pip_call + ['-U', 'pip']) if packages: subprocess.check_call(pip_call + packages) yield myenv
azure-sdk-tools/packaging_tools/venvtools.py
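Given the yield, the original function is presumably wrapped with contextlib.contextmanager; a usage sketch under that assumption (the import path and the pinned package are illustrative):

import subprocess

from packaging_tools.venvtools import create_venv_with_package

# Build a throwaway venv with a pinned package and run a check inside it;
# the temporary directory (and the venv in it) is removed when the block exits.
with create_venv_with_package(["packaging~=21.0"]) as venv:
    subprocess.check_call([venv.env_exe, "-c", "import packaging; print('ok')"])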
Azure/azure-sdk-for-python
SqlDatabaseManagementService.create_server
def create_server(self, admin_login, admin_password, location): _validate_not_none('admin_login', admin_login) _validate_not_none('admin_password', admin_password) _validate_not_none('location', location) response = self.perform_post( self._get_servers_path(), _SqlManagementXmlSerializer.create_server_to_xml( admin_login, admin_password, location ) ) return _SqlManagementXmlSerializer.xml_to_create_server_response( response.body)
Create a new Azure SQL Database server.
def create_server(self, admin_login, admin_password, location): ''' Create a new Azure SQL Database server. admin_login: The administrator login name for the new server. admin_password: The administrator login password for the new server. location: The region to deploy the new server. ''' _validate_not_none('admin_login', admin_login) _validate_not_none('admin_password', admin_password) _validate_not_none('location', location) response = self.perform_post( self._get_servers_path(), _SqlManagementXmlSerializer.create_server_to_xml( admin_login, admin_password, location ) ) return _SqlManagementXmlSerializer.xml_to_create_server_response( response.body)
azure-servicemanagement-legacy/azure/servicemanagement/sqldatabasemanagementservice.py
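A hedged sketch of provisioning a server with this call; the SqlDatabaseManagementService constructor arguments and the attribute on the parsed response are assumptions, and the credentials and region are placeholders.

from azure.servicemanagement import SqlDatabaseManagementService

sql = SqlDatabaseManagementService('<subscription-id>', cert_file='mycert.pem')

result = sql.create_server('myadmin', '<strong-password>', 'West US')
# The parsed response is assumed to expose the generated server name.
print(result.server_name)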
Azure/azure-sdk-for-python
SqlDatabaseManagementService.set_server_admin_password
def set_server_admin_password(self, server_name, admin_password): _validate_not_none('server_name', server_name) _validate_not_none('admin_password', admin_password) return self._perform_post( self._get_servers_path(server_name) + '?op=ResetPassword', _SqlManagementXmlSerializer.set_server_admin_password_to_xml( admin_password ) )
Reset the administrator password for a server.
def set_server_admin_password(self, server_name, admin_password): ''' Reset the administrator password for a server. server_name: Name of the server to change the password. admin_password: The new administrator password for the server. ''' _validate_not_none('server_name', server_name) _validate_not_none('admin_password', admin_password) return self._perform_post( self._get_servers_path(server_name) + '?op=ResetPassword', _SqlManagementXmlSerializer.set_server_admin_password_to_xml( admin_password ) )
azure-servicemanagement-legacy/azure/servicemanagement/sqldatabasemanagementservice.py
Azure/azure-sdk-for-python
SqlDatabaseManagementService.list_quotas
def list_quotas(self, server_name): _validate_not_none('server_name', server_name) response = self._perform_get(self._get_quotas_path(server_name), None) return _MinidomXmlToObject.parse_service_resources_response( response, ServerQuota)
Gets quotas for an Azure SQL Database Server.
def list_quotas(self, server_name): ''' Gets quotas for an Azure SQL Database Server. server_name: Name of the server. ''' _validate_not_none('server_name', server_name) response = self._perform_get(self._get_quotas_path(server_name), None) return _MinidomXmlToObject.parse_service_resources_response( response, ServerQuota)
azure-servicemanagement-legacy/azure/servicemanagement/sqldatabasemanagementservice.py
Azure/azure-sdk-for-python
SqlDatabaseManagementService.get_server_event_logs
def get_server_event_logs(self, server_name, start_date, interval_size_in_minutes, event_types=''): _validate_not_none('server_name', server_name) _validate_not_none('start_date', start_date) _validate_not_none('interval_size_in_minutes', interval_size_in_minutes) _validate_not_none('event_types', event_types) path = self._get_server_event_logs_path(server_name) + \ '?startDate={0}&intervalSizeInMinutes={1}&eventTypes={2}'.format( start_date, interval_size_in_minutes, event_types) response = self._perform_get(path, None) return _MinidomXmlToObject.parse_service_resources_response( response, EventLog)
Gets the event logs for an Azure SQL Database Server.
def get_server_event_logs(self, server_name, start_date, interval_size_in_minutes, event_types=''): ''' Gets the event logs for an Azure SQL Database Server. server_name: Name of the server to retrieve the event logs from. start_date: The starting date and time of the events to retrieve in UTC format, for example '2011-09-28 16:05:00'. interval_size_in_minutes: Size of the event logs to retrieve (in minutes). Valid values are: 5, 60, or 1440. event_types: The event type of the log entries you want to retrieve. Valid values are: - connection_successful - connection_failed - connection_terminated - deadlock - throttling - throttling_long_transaction To return all event types pass in an empty string. ''' _validate_not_none('server_name', server_name) _validate_not_none('start_date', start_date) _validate_not_none('interval_size_in_minutes', interval_size_in_minutes) _validate_not_none('event_types', event_types) path = self._get_server_event_logs_path(server_name) + \ '?startDate={0}&intervalSizeInMinutes={1}&eventTypes={2}'.format( start_date, interval_size_in_minutes, event_types) response = self._perform_get(path, None) return _MinidomXmlToObject.parse_service_resources_response( response, EventLog)
azure-servicemanagement-legacy/azure/servicemanagement/sqldatabasemanagementservice.py
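A call sketch that follows the docstring's parameter guidance; 'sql' is the SqlDatabaseManagementService instance from the create_server sketch above, and the server name is a placeholder.

logs = sql.get_server_event_logs(
    'myserver01',
    '2011-09-28 16:05:00',            # start date/time in UTC, per the docstring
    60,                               # interval size in minutes: 5, 60, or 1440
    event_types='connection_failed',  # pass '' to return all event types
)
for entry in logs:
    print(entry)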
Azure/azure-sdk-for-python
SqlDatabaseManagementService.create_firewall_rule
def create_firewall_rule(self, server_name, name, start_ip_address, end_ip_address): _validate_not_none('server_name', server_name) _validate_not_none('name', name) _validate_not_none('start_ip_address', start_ip_address) _validate_not_none('end_ip_address', end_ip_address) return self._perform_post( self._get_firewall_rules_path(server_name), _SqlManagementXmlSerializer.create_firewall_rule_to_xml( name, start_ip_address, end_ip_address ) )
Creates an Azure SQL Database server firewall rule.
def create_firewall_rule(self, server_name, name, start_ip_address, end_ip_address): ''' Creates an Azure SQL Database server firewall rule. server_name: Name of the server to set the firewall rule on. name: The name of the new firewall rule. start_ip_address: The lowest IP address in the range of the server-level firewall setting. IP addresses equal to or greater than this can attempt to connect to the server. The lowest possible IP address is 0.0.0.0. end_ip_address: The highest IP address in the range of the server-level firewall setting. IP addresses equal to or less than this can attempt to connect to the server. The highest possible IP address is 255.255.255.255. ''' _validate_not_none('server_name', server_name) _validate_not_none('name', name) _validate_not_none('start_ip_address', start_ip_address) _validate_not_none('end_ip_address', end_ip_address) return self._perform_post( self._get_firewall_rules_path(server_name), _SqlManagementXmlSerializer.create_firewall_rule_to_xml( name, start_ip_address, end_ip_address ) )
azure-servicemanagement-legacy/azure/servicemanagement/sqldatabasemanagementservice.py
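A short sketch of opening the server-level firewall for a single client address ('sql' as above; the server name, rule name, and addresses are placeholders):

# Equal start and end addresses allow exactly one client IP; widen the range as needed.
sql.create_firewall_rule('myserver01', 'office', '203.0.113.10', '203.0.113.10')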
Azure/azure-sdk-for-python
SqlDatabaseManagementService.update_firewall_rule
def update_firewall_rule(self, server_name, name, start_ip_address, end_ip_address): _validate_not_none('server_name', server_name) _validate_not_none('name', name) _validate_not_none('start_ip_address', start_ip_address) _validate_not_none('end_ip_address', end_ip_address) return self._perform_put( self._get_firewall_rules_path(server_name, name), _SqlManagementXmlSerializer.update_firewall_rule_to_xml( name, start_ip_address, end_ip_address ) )
Update a firewall rule for an Azure SQL Database server.
def update_firewall_rule(self, server_name, name, start_ip_address, end_ip_address): ''' Update a firewall rule for an Azure SQL Database server. server_name: Name of the server to set the firewall rule on. name: The name of the firewall rule to update. start_ip_address: The lowest IP address in the range of the server-level firewall setting. IP addresses equal to or greater than this can attempt to connect to the server. The lowest possible IP address is 0.0.0.0. end_ip_address: The highest IP address in the range of the server-level firewall setting. IP addresses equal to or less than this can attempt to connect to the server. The highest possible IP address is 255.255.255.255. ''' _validate_not_none('server_name', server_name) _validate_not_none('name', name) _validate_not_none('start_ip_address', start_ip_address) _validate_not_none('end_ip_address', end_ip_address) return self._perform_put( self._get_firewall_rules_path(server_name, name), _SqlManagementXmlSerializer.update_firewall_rule_to_xml( name, start_ip_address, end_ip_address ) )
azure-servicemanagement-legacy/azure/servicemanagement/sqldatabasemanagementservice.py
Azure/azure-sdk-for-python
SqlDatabaseManagementService.delete_firewall_rule
def delete_firewall_rule(self, server_name, name): _validate_not_none('server_name', server_name) _validate_not_none('name', name) return self._perform_delete( self._get_firewall_rules_path(server_name, name))
Deletes an Azure SQL Database server firewall rule.
def delete_firewall_rule(self, server_name, name): ''' Deletes an Azure SQL Database server firewall rule. server_name: Name of the server with the firewall rule you want to delete. name: Name of the firewall rule you want to delete. ''' _validate_not_none('server_name', server_name) _validate_not_none('name', name) return self._perform_delete( self._get_firewall_rules_path(server_name, name))
azure-servicemanagement-legacy/azure/servicemanagement/sqldatabasemanagementservice.py
Azure/azure-sdk-for-python
SqlDatabaseManagementService.list_firewall_rules
def list_firewall_rules(self, server_name): _validate_not_none('server_name', server_name) response = self._perform_get(self._get_firewall_rules_path(server_name), None) return _MinidomXmlToObject.parse_service_resources_response( response, FirewallRule)
Retrieves the set of firewall rules for an Azure SQL Database Server.
def list_firewall_rules(self, server_name): ''' Retrieves the set of firewall rules for an Azure SQL Database Server. server_name: Name of the server. ''' _validate_not_none('server_name', server_name) response = self._perform_get(self._get_firewall_rules_path(server_name), None) return _MinidomXmlToObject.parse_service_resources_response( response, FirewallRule)
azure-servicemanagement-legacy/azure/servicemanagement/sqldatabasemanagementservice.py
Azure/azure-sdk-for-python
SqlDatabaseManagementService.list_service_level_objectives
def list_service_level_objectives(self, server_name): _validate_not_none('server_name', server_name) response = self._perform_get( self._get_service_objectives_path(server_name), None) return _MinidomXmlToObject.parse_service_resources_response( response, ServiceObjective)
Gets the service level objectives for an Azure SQL Database server.
def list_service_level_objectives(self, server_name): ''' Gets the service level objectives for an Azure SQL Database server. server_name: Name of the server. ''' _validate_not_none('server_name', server_name) response = self._perform_get( self._get_service_objectives_path(server_name), None) return _MinidomXmlToObject.parse_service_resources_response( response, ServiceObjective)
azure-servicemanagement-legacy/azure/servicemanagement/sqldatabasemanagementservice.py
Azure/azure-sdk-for-python
SqlDatabaseManagementService.create_database
def create_database(self, server_name, name, service_objective_id, edition=None, collation_name=None, max_size_bytes=None): _validate_not_none('server_name', server_name) _validate_not_none('name', name) _validate_not_none('service_objective_id', service_objective_id) return self._perform_post( self._get_databases_path(server_name), _SqlManagementXmlSerializer.create_database_to_xml( name, service_objective_id, edition, collation_name, max_size_bytes ) )
Creates a new Azure SQL Database.
def create_database(self, server_name, name, service_objective_id, edition=None, collation_name=None, max_size_bytes=None): ''' Creates a new Azure SQL Database. server_name: Name of the server to contain the new database. name: Required. The name for the new database. See Naming Requirements in Azure SQL Database General Guidelines and Limitations and Database Identifiers for more information. service_objective_id: Required. The GUID corresponding to the performance level for Edition. See List Service Level Objectives for current values. edition: Optional. The Service Tier (Edition) for the new database. If omitted, the default is Web. Valid values are Web, Business, Basic, Standard, and Premium. See Azure SQL Database Service Tiers (Editions) and Web and Business Edition Sunset FAQ for more information. collation_name: Optional. The database collation. This can be any collation supported by SQL. If omitted, the default collation is used. See SQL Server Collation Support in Azure SQL Database General Guidelines and Limitations for more information. max_size_bytes: Optional. Sets the maximum size, in bytes, for the database. This value must be within the range of allowed values for Edition. If omitted, the default value for the edition is used. See Azure SQL Database Service Tiers (Editions) for current maximum databases sizes. Convert MB or GB values to bytes. 1 MB = 1048576 bytes. 1 GB = 1073741824 bytes. ''' _validate_not_none('server_name', server_name) _validate_not_none('name', name) _validate_not_none('service_objective_id', service_objective_id) return self._perform_post( self._get_databases_path(server_name), _SqlManagementXmlSerializer.create_database_to_xml( name, service_objective_id, edition, collation_name, max_size_bytes ) )
azure-servicemanagement-legacy/azure/servicemanagement/sqldatabasemanagementservice.py
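A hedged sketch of creating a database after looking up a service level objective ('sql' as above); the attribute name on the ServiceObjective items is an assumption.

objective = next(iter(sql.list_service_level_objectives('myserver01')))
sql.create_database(
    'myserver01',
    'mydatabase',
    objective.id,                               # assumed attribute holding the objective GUID
    edition='Standard',
    collation_name='SQL_Latin1_General_CP1_CI_AS',
    max_size_bytes=1 * 1073741824,              # 1 GB = 1073741824 bytes, per the docstring
)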
Azure/azure-sdk-for-python
SqlDatabaseManagementService.update_database
def update_database(self, server_name, name, new_database_name=None, service_objective_id=None, edition=None, max_size_bytes=None): _validate_not_none('server_name', server_name) _validate_not_none('name', name) return self._perform_put( self._get_databases_path(server_name, name), _SqlManagementXmlSerializer.update_database_to_xml( new_database_name, service_objective_id, edition, max_size_bytes ) )
Updates existing database details.
def update_database(self, server_name, name, new_database_name=None, service_objective_id=None, edition=None, max_size_bytes=None): ''' Updates existing database details. server_name: Name of the server to contain the new database. name: Required. The name for the new database. See Naming Requirements in Azure SQL Database General Guidelines and Limitations and Database Identifiers for more information. new_database_name: Optional. The new name for the new database. service_objective_id: Optional. The new service level to apply to the database. For more information about service levels, see Azure SQL Database Service Tiers and Performance Levels. Use List Service Level Objectives to get the correct ID for the desired service objective. edition: Optional. The new edition for the new database. max_size_bytes: Optional. The new size of the database in bytes. For information on available sizes for each edition, see Azure SQL Database Service Tiers (Editions). ''' _validate_not_none('server_name', server_name) _validate_not_none('name', name) return self._perform_put( self._get_databases_path(server_name, name), _SqlManagementXmlSerializer.update_database_to_xml( new_database_name, service_objective_id, edition, max_size_bytes ) )
azure-servicemanagement-legacy/azure/servicemanagement/sqldatabasemanagementservice.py
Azure/azure-sdk-for-python
SqlDatabaseManagementService.delete_database
def delete_database(self, server_name, name): return self._perform_delete(self._get_databases_path(server_name, name))
Deletes an Azure SQL Database.
def delete_database(self, server_name, name): ''' Deletes an Azure SQL Database. server_name: Name of the server where the database is located. name: Name of the database to delete. ''' return self._perform_delete(self._get_databases_path(server_name, name))
azure-servicemanagement-legacy/azure/servicemanagement/sqldatabasemanagementservice.py
Azure/azure-sdk-for-python
SqlDatabaseManagementService.list_databases
def list_databases(self, name): response = self._perform_get(self._get_list_databases_path(name), None) return _MinidomXmlToObject.parse_service_resources_response( response, Database)
List the SQL databases defined on the specified server.
def list_databases(self, name): ''' List the SQL databases defined on the specified server name ''' response = self._perform_get(self._get_list_databases_path(name), None) return _MinidomXmlToObject.parse_service_resources_response( response, Database)
azure-servicemanagement-legacy/azure/servicemanagement/sqldatabasemanagementservice.py
Azure/azure-sdk-for-python
TopLevelDomainsOperations.list_agreements
def list_agreements( self, name, include_privacy=None, for_transfer=None, custom_headers=None, raw=False, **operation_config): agreement_option = models.TopLevelDomainAgreementOption(include_privacy=include_privacy, for_transfer=for_transfer) def internal_paging(next_link=None, raw=False): if not next_link: url = self.list_agreements.metadata['url'] path_format_arguments = { 'name': self._serialize.url("name", name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') else: url = next_link query_parameters = {} header_parameters = {} header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') body_content = self._serialize.body(agreement_option, 'TopLevelDomainAgreementOption') request = self._client.post(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.DefaultErrorResponseException(self._deserialize, response) return response deserialized = models.TldLegalAgreementPaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} client_raw_response = models.TldLegalAgreementPaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized
Gets all legal agreements that the user needs to accept before purchasing a domain.
def list_agreements( self, name, include_privacy=None, for_transfer=None, custom_headers=None, raw=False, **operation_config): """Gets all legal agreements that user needs to accept before purchasing a domain. Gets all legal agreements that user needs to accept before purchasing a domain. :param name: Name of the top-level domain. :type name: str :param include_privacy: If <code>true</code>, then the list of agreements will include agreements for domain privacy as well; otherwise, <code>false</code>. :type include_privacy: bool :param for_transfer: If <code>true</code>, then the list of agreements will include agreements for domain transfer as well; otherwise, <code>false</code>. :type for_transfer: bool :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: An iterator like instance of TldLegalAgreement :rtype: ~azure.mgmt.web.models.TldLegalAgreementPaged[~azure.mgmt.web.models.TldLegalAgreement] :raises: :class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>` """ agreement_option = models.TopLevelDomainAgreementOption(include_privacy=include_privacy, for_transfer=for_transfer) def internal_paging(next_link=None, raw=False): if not next_link: # Construct URL url = self.list_agreements.metadata['url'] path_format_arguments = { 'name': self._serialize.url("name", name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') else: url = next_link query_parameters = {} # Construct headers header_parameters = {} header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct body body_content = self._serialize.body(agreement_option, 'TopLevelDomainAgreementOption') # Construct and send request request = self._client.post(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.DefaultErrorResponseException(self._deserialize, response) return response # Deserialize response deserialized = models.TldLegalAgreementPaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} client_raw_response = models.TldLegalAgreementPaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized
azure-mgmt-web/azure/mgmt/web/operations/top_level_domains_operations.py
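A sketch of consuming the paged result; 'web_client' is assumed to be an already-configured azure.mgmt.web WebSiteManagementClient, and the attribute read from each TldLegalAgreement item is an assumption.

agreements = web_client.top_level_domains.list_agreements('com', include_privacy=True)
for agreement in agreements:          # the paged object lazily fetches further pages
    print(agreement.title)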
Azure/azure-sdk-for-python
SessionReceiver.get_session_state
async def get_session_state(self): await self._can_run() response = await self._mgmt_request_response( REQUEST_RESPONSE_GET_SESSION_STATE_OPERATION, {'session-id': self.session_id}, mgmt_handlers.default) session_state = response.get(b'session-state') if isinstance(session_state, six.binary_type): session_state = session_state.decode('UTF-8') return session_state
Get the session state.
async def get_session_state(self): """Get the session state. Returns None if no state has been set. :rtype: str Example: .. literalinclude:: ../examples/async_examples/test_examples_async.py :start-after: [START set_session_state] :end-before: [END set_session_state] :language: python :dedent: 4 :caption: Getting and setting the state of a session. """ await self._can_run() response = await self._mgmt_request_response( REQUEST_RESPONSE_GET_SESSION_STATE_OPERATION, {'session-id': self.session_id}, mgmt_handlers.default) session_state = response.get(b'session-state') if isinstance(session_state, six.binary_type): session_state = session_state.decode('UTF-8') return session_state
azure-servicebus/azure/servicebus/aio/async_receive_handler.py
Azure/azure-sdk-for-python
SessionReceiver.set_session_state
async def set_session_state(self, state): await self._can_run() state = state.encode(self.encoding) if isinstance(state, six.text_type) else state return await self._mgmt_request_response( REQUEST_RESPONSE_SET_SESSION_STATE_OPERATION, {'session-id': self.session_id, 'session-state': bytearray(state)}, mgmt_handlers.default)
Set the session state.
async def set_session_state(self, state): """Set the session state. :param state: The state value. :type state: str or bytes or bytearray Example: .. literalinclude:: ../examples/async_examples/test_examples_async.py :start-after: [START set_session_state] :end-before: [END set_session_state] :language: python :dedent: 4 :caption: Getting and setting the state of a session. """ await self._can_run() state = state.encode(self.encoding) if isinstance(state, six.text_type) else state return await self._mgmt_request_response( REQUEST_RESPONSE_SET_SESSION_STATE_OPERATION, {'session-id': self.session_id, 'session-state': bytearray(state)}, mgmt_handlers.default)
azure-servicebus/azure/servicebus/aio/async_receive_handler.py
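A hedged sketch of round-tripping session state with the two calls above; 'receiver' is assumed to be an already-open session-aware receiver from this async SDK.

import json

async def bump_counter(receiver):
    current = await receiver.get_session_state()           # None if no state has been set
    state = json.loads(current) if current else {"count": 0}
    state["count"] += 1
    await receiver.set_session_state(json.dumps(state))    # str values are encoded before sending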
Azure/azure-sdk-for-python
ReservationOperations.merge
def merge( self, reservation_order_id, sources=None, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._merge_initial( reservation_order_id=reservation_order_id, sources=sources, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): deserialized = self._deserialize('[ReservationResponse]', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
Merges two `Reservation`s. Merge the specified `Reservation`s into a new `Reservation`. The two `Reservation`s being merged must have same properties.
def merge( self, reservation_order_id, sources=None, custom_headers=None, raw=False, polling=True, **operation_config): """Merges two `Reservation`s. Merge the specified `Reservation`s into a new `Reservation`. The two `Reservation`s being merged must have same properties. :param reservation_order_id: Order Id of the reservation :type reservation_order_id: str :param sources: Format of the resource id should be /providers/Microsoft.Capacity/reservationOrders/{reservationOrderId}/reservations/{reservationId} :type sources: list[str] :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns list or ClientRawResponse<list> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[list[~azure.mgmt.reservations.models.ReservationResponse]] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[list[~azure.mgmt.reservations.models.ReservationResponse]]] :raises: :class:`ErrorException<azure.mgmt.reservations.models.ErrorException>` """ raw_result = self._merge_initial( reservation_order_id=reservation_order_id, sources=sources, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): deserialized = self._deserialize('[ReservationResponse]', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
azure-mgmt-reservations/azure/mgmt/reservations/operations/reservation_operations.py
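A sketch of waiting on the long-running merge; 'reservation_client' is assumed to be an already-configured azure.mgmt.reservations client, the IDs are placeholders, and the source id format follows the docstring.

sources = [
    "/providers/Microsoft.Capacity/reservationOrders/<order-id>/reservations/<reservation-id-1>",
    "/providers/Microsoft.Capacity/reservationOrders/<order-id>/reservations/<reservation-id-2>",
]
poller = reservation_client.reservation.merge('<order-id>', sources=sources)
merged = poller.result()   # blocks until the operation completes; a list of ReservationResponse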
Azure/azure-sdk-for-python
HttpBearerChallenge._validate_challenge
def _validate_challenge(self, challenge): bearer_string = 'Bearer ' if not challenge: raise ValueError('Challenge cannot be empty') challenge = challenge.strip() if not challenge.startswith(bearer_string): raise ValueError('Challenge is not Bearer') return challenge[len(bearer_string):]
Verifies that the challenge is a Bearer challenge and returns the key=value pairs.
def _validate_challenge(self, challenge): """ Verifies that the challenge is a Bearer challenge and returns the key=value pairs. """ bearer_string = 'Bearer ' if not challenge: raise ValueError('Challenge cannot be empty') challenge = challenge.strip() if not challenge.startswith(bearer_string): raise ValueError('Challenge is not Bearer') return challenge[len(bearer_string):]
azure-keyvault/azure/keyvault/http_bearer_challenge.py
Azure/azure-sdk-for-python
WorkspacesOperations.purge
def purge( self, resource_group_name, workspace_name, table, filters, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._purge_initial( resource_group_name=resource_group_name, workspace_name=workspace_name, table=table, filters=filters, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): deserialized = self._deserialize('object', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
Purges data in a Log Analytics workspace by a set of user-defined filters.
def purge( self, resource_group_name, workspace_name, table, filters, custom_headers=None, raw=False, polling=True, **operation_config): """Purges data in an Log Analytics workspace by a set of user-defined filters. :param resource_group_name: The name of the resource group to get. The name is case insensitive. :type resource_group_name: str :param workspace_name: Log Analytics workspace name :type workspace_name: str :param table: Table from which to purge data. :type table: str :param filters: The set of columns and filters (queries) to run over them to purge the resulting data. :type filters: list[~azure.mgmt.loganalytics.models.WorkspacePurgeBodyFilters] :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns object or ClientRawResponse<object> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[object] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[object]] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ raw_result = self._purge_initial( resource_group_name=resource_group_name, workspace_name=workspace_name, table=table, filters=filters, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): deserialized = self._deserialize('object', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/workspaces_operations.py
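A hedged sketch of issuing a purge; 'la_client' is assumed to be an already-configured azure.mgmt.loganalytics client, and the filter field names follow the WorkspacePurgeBodyFilters model named in the docstring.

from azure.mgmt.loganalytics.models import WorkspacePurgeBodyFilters

filters = [WorkspacePurgeBodyFilters(column='TimeGenerated', operator='<', value='2018-01-01T00:00:00')]
poller = la_client.workspaces.purge('my-resource-group', 'my-workspace', 'Heartbeat', filters)
result = poller.result()   # blocks until the long-running operation completes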
Azure/azure-sdk-for-python
_error_handler
def _error_handler(error): if error.condition == b'com.microsoft:server-busy': return errors.ErrorAction(retry=True, backoff=4) if error.condition == b'com.microsoft:timeout': return errors.ErrorAction(retry=True, backoff=2) if error.condition == b'com.microsoft:operation-cancelled': return errors.ErrorAction(retry=True) if error.condition == b"com.microsoft:container-close": return errors.ErrorAction(retry=True, backoff=4) if error.condition in _NO_RETRY_ERRORS: return errors.ErrorAction(retry=False) return errors.ErrorAction(retry=True)
Handle connection and service errors. Called internally when an event has failed to send so we can parse the error to determine whether we should attempt to retry sending the event again.
def _error_handler(error): """Handle connection and service errors. Called internally when an event has failed to send so we can parse the error to determine whether we should attempt to retry sending the event again. Returns the action to take according to error type. :param error: The error received in the send attempt. :type error: Exception :rtype: ~uamqp.errors.ErrorAction """ if error.condition == b'com.microsoft:server-busy': return errors.ErrorAction(retry=True, backoff=4) if error.condition == b'com.microsoft:timeout': return errors.ErrorAction(retry=True, backoff=2) if error.condition == b'com.microsoft:operation-cancelled': return errors.ErrorAction(retry=True) if error.condition == b"com.microsoft:container-close": return errors.ErrorAction(retry=True, backoff=4) if error.condition in _NO_RETRY_ERRORS: return errors.ErrorAction(retry=False) return errors.ErrorAction(retry=True)
azure-servicebus/azure/servicebus/common/errors.py
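A sketch of how the returned ErrorAction might drive a retry loop, assuming it runs in the same module as _error_handler; this is illustrative, not the SDK's actual retry implementation, and 'send_once' is a placeholder callable.

import time

def send_with_retry(send_once, max_attempts=3):
    for attempt in range(max_attempts):
        try:
            return send_once()
        except Exception as error:                  # expected to carry a .condition attribute
            action = _error_handler(error)
            if not action.retry or attempt == max_attempts - 1:
                raise
            time.sleep(action.backoff or 0)         # honour the suggested backoff, if any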
Azure/azure-sdk-for-python
ServiceBusService.create_queue
def create_queue(self, queue_name, queue=None, fail_on_exist=False): _validate_not_none('queue_name', queue_name) request = HTTPRequest() request.method = 'PUT' request.host = self._get_host() request.path = '/' + _str(queue_name) + '' request.body = _get_request_body(_convert_queue_to_xml(queue)) request.path, request.query = self._httpclient._update_request_uri_query(request) request.headers = self._update_service_bus_header(request) if not fail_on_exist: try: self._perform_request(request) return True except AzureHttpError as ex: _dont_fail_on_exist(ex) return False else: self._perform_request(request) return True
Creates a new queue. Once created, this queue's resource manifest is immutable.
def create_queue(self, queue_name, queue=None, fail_on_exist=False): ''' Creates a new queue. Once created, this queue's resource manifest is immutable. queue_name: Name of the queue to create. queue: Queue object to create. fail_on_exist: Specify whether to throw an exception when the queue exists. ''' _validate_not_none('queue_name', queue_name) request = HTTPRequest() request.method = 'PUT' request.host = self._get_host() request.path = '/' + _str(queue_name) + '' request.body = _get_request_body(_convert_queue_to_xml(queue)) request.path, request.query = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers = self._update_service_bus_header(request) if not fail_on_exist: try: self._perform_request(request) return True except AzureHttpError as ex: _dont_fail_on_exist(ex) return False else: self._perform_request(request) return True
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
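A hedged sketch of creating a queue with the legacy control client; the constructor keywords and the Queue model field shown are assumptions based on the legacy API shape.

from azure.servicebus.control_client import ServiceBusService, Queue

bus = ServiceBusService(
    service_namespace='mynamespace',
    shared_access_key_name='RootManageSharedAccessKey',
    shared_access_key_value='<key>',
)

queue = Queue(max_size_in_megabytes=1024)       # assumed Queue property
created = bus.create_queue('taskqueue', queue=queue, fail_on_exist=False)
print('created' if created else 'already existed')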
Azure/azure-sdk-for-python
ServiceBusService.delete_queue
def delete_queue(self, queue_name, fail_not_exist=False): _validate_not_none('queue_name', queue_name) request = HTTPRequest() request.method = 'DELETE' request.host = self._get_host() request.path = '/' + _str(queue_name) + '' request.path, request.query = self._httpclient._update_request_uri_query(request) request.headers = self._update_service_bus_header(request) if not fail_not_exist: try: self._perform_request(request) return True except AzureHttpError as ex: _dont_fail_not_exist(ex) return False else: self._perform_request(request) return True
Deletes an existing queue. This operation will also remove all associated state including messages in the queue.
def delete_queue(self, queue_name, fail_not_exist=False): ''' Deletes an existing queue. This operation will also remove all associated state including messages in the queue. queue_name: Name of the queue to delete. fail_not_exist: Specify whether to throw an exception if the queue doesn't exist. ''' _validate_not_none('queue_name', queue_name) request = HTTPRequest() request.method = 'DELETE' request.host = self._get_host() request.path = '/' + _str(queue_name) + '' request.path, request.query = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers = self._update_service_bus_header(request) if not fail_not_exist: try: self._perform_request(request) return True except AzureHttpError as ex: _dont_fail_not_exist(ex) return False else: self._perform_request(request) return True
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
Azure/azure-sdk-for-python
ServiceBusService.get_queue
def get_queue(self, queue_name): _validate_not_none('queue_name', queue_name) request = HTTPRequest() request.method = 'GET' request.host = self._get_host() request.path = '/' + _str(queue_name) + '' request.path, request.query = self._httpclient._update_request_uri_query(request) request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _convert_response_to_queue(response)
Retrieves an existing queue.
def get_queue(self, queue_name): ''' Retrieves an existing queue. queue_name: Name of the queue. ''' _validate_not_none('queue_name', queue_name) request = HTTPRequest() request.method = 'GET' request.host = self._get_host() request.path = '/' + _str(queue_name) + '' request.path, request.query = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _convert_response_to_queue(response)
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
Azure/azure-sdk-for-python
ServiceBusService.create_topic
def create_topic(self, topic_name, topic=None, fail_on_exist=False): _validate_not_none('topic_name', topic_name) request = HTTPRequest() request.method = 'PUT' request.host = self._get_host() request.path = '/' + _str(topic_name) + '' request.body = _get_request_body(_convert_topic_to_xml(topic)) request.path, request.query = self._httpclient._update_request_uri_query(request) request.headers = self._update_service_bus_header(request) if not fail_on_exist: try: self._perform_request(request) return True except AzureHttpError as ex: _dont_fail_on_exist(ex) return False else: self._perform_request(request) return True
Creates a new topic. Once created, this topic resource manifest is immutable.
def create_topic(self, topic_name, topic=None, fail_on_exist=False): ''' Creates a new topic. Once created, this topic resource manifest is immutable. topic_name: Name of the topic to create. topic: Topic object to create. fail_on_exist: Specify whether to throw an exception when the topic exists. ''' _validate_not_none('topic_name', topic_name) request = HTTPRequest() request.method = 'PUT' request.host = self._get_host() request.path = '/' + _str(topic_name) + '' request.body = _get_request_body(_convert_topic_to_xml(topic)) request.path, request.query = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers = self._update_service_bus_header(request) if not fail_on_exist: try: self._perform_request(request) return True except AzureHttpError as ex: _dont_fail_on_exist(ex) return False else: self._perform_request(request) return True
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
Azure/azure-sdk-for-python
ServiceBusService.get_topic
def get_topic(self, topic_name): _validate_not_none('topic_name', topic_name) request = HTTPRequest() request.method = 'GET' request.host = self._get_host() request.path = '/' + _str(topic_name) + '' request.path, request.query = self._httpclient._update_request_uri_query(request) request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _convert_response_to_topic(response)
Retrieves the description for the specified topic.
def get_topic(self, topic_name): ''' Retrieves the description for the specified topic. topic_name: Name of the topic. ''' _validate_not_none('topic_name', topic_name) request = HTTPRequest() request.method = 'GET' request.host = self._get_host() request.path = '/' + _str(topic_name) + '' request.path, request.query = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _convert_response_to_topic(response)
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
Azure/azure-sdk-for-python
ServiceBusService.create_rule
def create_rule(self, topic_name, subscription_name, rule_name, rule=None, fail_on_exist=False): _validate_not_none('topic_name', topic_name) _validate_not_none('subscription_name', subscription_name) _validate_not_none('rule_name', rule_name) request = HTTPRequest() request.method = 'PUT' request.host = self._get_host() request.path = '/' + _str(topic_name) + '/subscriptions/' + \ _str(subscription_name) + \ '/rules/' + _str(rule_name) + '' request.body = _get_request_body(_convert_rule_to_xml(rule)) request.path, request.query = self._httpclient._update_request_uri_query(request) request.headers = self._update_service_bus_header(request) if not fail_on_exist: try: self._perform_request(request) return True except AzureHttpError as ex: _dont_fail_on_exist(ex) return False else: self._perform_request(request) return True
Creates a new rule. Once created, this rule's resource manifest is immutable.
def create_rule(self, topic_name, subscription_name, rule_name, rule=None, fail_on_exist=False): ''' Creates a new rule. Once created, this rule's resource manifest is immutable. topic_name: Name of the topic. subscription_name: Name of the subscription. rule_name: Name of the rule. fail_on_exist: Specify whether to throw an exception when the rule exists. ''' _validate_not_none('topic_name', topic_name) _validate_not_none('subscription_name', subscription_name) _validate_not_none('rule_name', rule_name) request = HTTPRequest() request.method = 'PUT' request.host = self._get_host() request.path = '/' + _str(topic_name) + '/subscriptions/' + \ _str(subscription_name) + \ '/rules/' + _str(rule_name) + '' request.body = _get_request_body(_convert_rule_to_xml(rule)) request.path, request.query = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers = self._update_service_bus_header(request) if not fail_on_exist: try: self._perform_request(request) return True except AzureHttpError as ex: _dont_fail_on_exist(ex) return False else: self._perform_request(request) return True
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
Azure/azure-sdk-for-python
ServiceBusService.get_rule
def get_rule(self, topic_name, subscription_name, rule_name): _validate_not_none('topic_name', topic_name) _validate_not_none('subscription_name', subscription_name) _validate_not_none('rule_name', rule_name) request = HTTPRequest() request.method = 'GET' request.host = self._get_host() request.path = '/' + _str(topic_name) + '/subscriptions/' + \ _str(subscription_name) + \ '/rules/' + _str(rule_name) + '' request.path, request.query = self._httpclient._update_request_uri_query(request) request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _convert_response_to_rule(response)
Retrieves the description for the specified rule.
def get_rule(self, topic_name, subscription_name, rule_name): ''' Retrieves the description for the specified rule. topic_name: Name of the topic. subscription_name: Name of the subscription. rule_name: Name of the rule. ''' _validate_not_none('topic_name', topic_name) _validate_not_none('subscription_name', subscription_name) _validate_not_none('rule_name', rule_name) request = HTTPRequest() request.method = 'GET' request.host = self._get_host() request.path = '/' + _str(topic_name) + '/subscriptions/' + \ _str(subscription_name) + \ '/rules/' + _str(rule_name) + '' request.path, request.query = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _convert_response_to_rule(response)
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
Azure/azure-sdk-for-python
ServiceBusService.list_rules
def list_rules(self, topic_name, subscription_name): _validate_not_none('topic_name', topic_name) _validate_not_none('subscription_name', subscription_name) request = HTTPRequest() request.method = 'GET' request.host = self._get_host() request.path = '/' + \ _str(topic_name) + '/subscriptions/' + \ _str(subscription_name) + '/rules/' request.path, request.query = self._httpclient._update_request_uri_query(request) request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _ETreeXmlToObject.convert_response_to_feeds( response, _convert_etree_element_to_rule)
Retrieves the rules that exist under the specified subscription.
def list_rules(self, topic_name, subscription_name): ''' Retrieves the rules that exist under the specified subscription. topic_name: Name of the topic. subscription_name: Name of the subscription. ''' _validate_not_none('topic_name', topic_name) _validate_not_none('subscription_name', subscription_name) request = HTTPRequest() request.method = 'GET' request.host = self._get_host() request.path = '/' + \ _str(topic_name) + '/subscriptions/' + \ _str(subscription_name) + '/rules/' request.path, request.query = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _ETreeXmlToObject.convert_response_to_feeds( response, _convert_etree_element_to_rule)
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
Azure/azure-sdk-for-python
ServiceBusService.create_subscription
def create_subscription(self, topic_name, subscription_name, subscription=None, fail_on_exist=False): _validate_not_none('topic_name', topic_name) _validate_not_none('subscription_name', subscription_name) request = HTTPRequest() request.method = 'PUT' request.host = self._get_host() request.path = '/' + \ _str(topic_name) + '/subscriptions/' + _str(subscription_name) + '' request.body = _get_request_body( _convert_subscription_to_xml(subscription)) request.path, request.query = self._httpclient._update_request_uri_query(request) request.headers = self._update_service_bus_header(request) if not fail_on_exist: try: self._perform_request(request) return True except AzureHttpError as ex: _dont_fail_on_exist(ex) return False else: self._perform_request(request) return True
Creates a new subscription. Once created, this subscription resource manifest is immutable.
def create_subscription(self, topic_name, subscription_name, subscription=None, fail_on_exist=False): ''' Creates a new subscription. Once created, this subscription resource manifest is immutable. topic_name: Name of the topic. subscription_name: Name of the subscription. fail_on_exist: Specify whether throw exception when subscription exists. ''' _validate_not_none('topic_name', topic_name) _validate_not_none('subscription_name', subscription_name) request = HTTPRequest() request.method = 'PUT' request.host = self._get_host() request.path = '/' + \ _str(topic_name) + '/subscriptions/' + _str(subscription_name) + '' request.body = _get_request_body( _convert_subscription_to_xml(subscription)) request.path, request.query = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers = self._update_service_bus_header(request) if not fail_on_exist: try: self._perform_request(request) return True except AzureHttpError as ex: _dont_fail_on_exist(ex) return False else: self._perform_request(request) return True
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
Azure/azure-sdk-for-python
ServiceBusService.get_subscription
def get_subscription(self, topic_name, subscription_name): _validate_not_none('topic_name', topic_name) _validate_not_none('subscription_name', subscription_name) request = HTTPRequest() request.method = 'GET' request.host = self._get_host() request.path = '/' + \ _str(topic_name) + '/subscriptions/' + _str(subscription_name) + '' request.path, request.query = self._httpclient._update_request_uri_query(request) request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _convert_response_to_subscription(response)
Gets an existing subscription.
def get_subscription(self, topic_name, subscription_name): ''' Gets an existing subscription. topic_name: Name of the topic. subscription_name: Name of the subscription. ''' _validate_not_none('topic_name', topic_name) _validate_not_none('subscription_name', subscription_name) request = HTTPRequest() request.method = 'GET' request.host = self._get_host() request.path = '/' + \ _str(topic_name) + '/subscriptions/' + _str(subscription_name) + '' request.path, request.query = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _convert_response_to_subscription(response)
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
Azure/azure-sdk-for-python
ServiceBusService.list_subscriptions
def list_subscriptions(self, topic_name): _validate_not_none('topic_name', topic_name) request = HTTPRequest() request.method = 'GET' request.host = self._get_host() request.path = '/' + _str(topic_name) + '/subscriptions/' request.path, request.query = self._httpclient._update_request_uri_query(request) request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _ETreeXmlToObject.convert_response_to_feeds( response, _convert_etree_element_to_subscription)
Retrieves the subscriptions in the specified topic.
def list_subscriptions(self, topic_name): ''' Retrieves the subscriptions in the specified topic. topic_name: Name of the topic. ''' _validate_not_none('topic_name', topic_name) request = HTTPRequest() request.method = 'GET' request.host = self._get_host() request.path = '/' + _str(topic_name) + '/subscriptions/' request.path, request.query = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _ETreeXmlToObject.convert_response_to_feeds( response, _convert_etree_element_to_subscription)
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
Azure/azure-sdk-for-python
ServiceBusService.receive_queue_message
def receive_queue_message(self, queue_name, peek_lock=True, timeout=60): if peek_lock: return self.peek_lock_queue_message(queue_name, timeout) return self.read_delete_queue_message(queue_name, timeout)
Receive a message from a queue for processing.
def receive_queue_message(self, queue_name, peek_lock=True, timeout=60): ''' Receive a message from a queue for processing. queue_name: Name of the queue. peek_lock: Optional. True to retrieve and lock the message. False to read and delete the message. Default is True (lock). timeout: Optional. The timeout parameter is expressed in seconds. ''' if peek_lock: return self.peek_lock_queue_message(queue_name, timeout) return self.read_delete_queue_message(queue_name, timeout)
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
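A peek-lock receive sketch with the legacy control client ('bus' as in the create_queue sketch above); the body check and the delete()/unlock() calls on the returned message are assumptions based on the legacy Message API, and 'process' is a placeholder for application logic.

msg = bus.receive_queue_message('taskqueue', peek_lock=True, timeout=5)
if msg.body is not None:                # body is assumed to be None when nothing was received
    try:
        process(msg.body)
        msg.delete()                    # remove the locked message once handled
    except Exception:
        msg.unlock()                    # release the lock so another receiver can retry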
Azure/azure-sdk-for-python
ServiceBusService.receive_subscription_message
def receive_subscription_message(self, topic_name, subscription_name, peek_lock=True, timeout=60): if peek_lock: return self.peek_lock_subscription_message(topic_name, subscription_name, timeout) return self.read_delete_subscription_message(topic_name, subscription_name, timeout)
Receive a message from a subscription for processing.
def receive_subscription_message(self, topic_name, subscription_name, peek_lock=True, timeout=60): ''' Receive a message from a subscription for processing. topic_name: Name of the topic. subscription_name: Name of the subscription. peek_lock: Optional. True to retrieve and lock the message. False to read and delete the message. Default is True (lock). timeout: Optional. The timeout parameter is expressed in seconds. ''' if peek_lock: return self.peek_lock_subscription_message(topic_name, subscription_name, timeout) return self.read_delete_subscription_message(topic_name, subscription_name, timeout)
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
Azure/azure-sdk-for-python
ServiceBusService.create_event_hub
def create_event_hub(self, hub_name, hub=None, fail_on_exist=False): _validate_not_none('hub_name', hub_name) request = HTTPRequest() request.method = 'PUT' request.host = self._get_host() request.path = '/' + _str(hub_name) + '?api-version=2014-01' request.body = _get_request_body(_convert_event_hub_to_xml(hub)) request.path, request.query = self._httpclient._update_request_uri_query(request) request.headers = self._update_service_bus_header(request) if not fail_on_exist: try: self._perform_request(request) return True except AzureHttpError as ex: _dont_fail_on_exist(ex) return False else: self._perform_request(request) return True
Creates a new Event Hub.
def create_event_hub(self, hub_name, hub=None, fail_on_exist=False): ''' Creates a new Event Hub. hub_name: Name of event hub. hub: Optional. Event hub properties. Instance of EventHub class. hub.message_retention_in_days: Number of days to retain the events for this Event Hub. hub.status: Status of the Event Hub (enabled or disabled). hub.user_metadata: User metadata. hub.partition_count: Number of shards on the Event Hub. fail_on_exist: Specify whether to throw an exception when the event hub exists. ''' _validate_not_none('hub_name', hub_name) request = HTTPRequest() request.method = 'PUT' request.host = self._get_host() request.path = '/' + _str(hub_name) + '?api-version=2014-01' request.body = _get_request_body(_convert_event_hub_to_xml(hub)) request.path, request.query = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers = self._update_service_bus_header(request) if not fail_on_exist: try: self._perform_request(request) return True except AzureHttpError as ex: _dont_fail_on_exist(ex) return False else: self._perform_request(request) return True
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
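A hedged sketch of creating an Event Hub with explicit properties; it assumes the EventHub model is importable alongside ServiceBusService and that its keyword arguments match the properties listed in the docstring above:

```python
from azure.servicebus.control_client import ServiceBusService, EventHub

sbs = ServiceBusService(
    'mynamespace',
    shared_access_key_name='RootManageSharedAccessKey',
    shared_access_key_value='<key>')

hub = EventHub(message_retention_in_days=3, partition_count=4)
# fail_on_exist=False returns False instead of raising if the hub exists.
created = sbs.create_event_hub('myhub', hub=hub, fail_on_exist=False)
print('created' if created else 'already exists')
```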
Azure/azure-sdk-for-python
ServiceBusService.update_event_hub
def update_event_hub(self, hub_name, hub=None): _validate_not_none('hub_name', hub_name) request = HTTPRequest() request.method = 'PUT' request.host = self._get_host() request.path = '/' + _str(hub_name) + '?api-version=2014-01' request.body = _get_request_body(_convert_event_hub_to_xml(hub)) request.path, request.query = self._httpclient._update_request_uri_query(request) request.headers.append(('If-Match', '*')) request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _convert_response_to_event_hub(response)
Updates an Event Hub.
def update_event_hub(self, hub_name, hub=None): ''' Updates an Event Hub. hub_name: Name of event hub. hub: Optional. Event hub properties. Instance of EventHub class. hub.message_retention_in_days: Number of days to retain the events for this Event Hub. ''' _validate_not_none('hub_name', hub_name) request = HTTPRequest() request.method = 'PUT' request.host = self._get_host() request.path = '/' + _str(hub_name) + '?api-version=2014-01' request.body = _get_request_body(_convert_event_hub_to_xml(hub)) request.path, request.query = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers.append(('If-Match', '*')) request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _convert_response_to_event_hub(response)
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
Azure/azure-sdk-for-python
ServiceBusService.get_event_hub
def get_event_hub(self, hub_name): _validate_not_none('hub_name', hub_name) request = HTTPRequest() request.method = 'GET' request.host = self._get_host() request.path = '/' + _str(hub_name) + '' request.path, request.query = self._httpclient._update_request_uri_query(request) request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _convert_response_to_event_hub(response)
Retrieves an existing event hub.
def get_event_hub(self, hub_name): ''' Retrieves an existing event hub. hub_name: Name of the event hub. ''' _validate_not_none('hub_name', hub_name) request = HTTPRequest() request.method = 'GET' request.host = self._get_host() request.path = '/' + _str(hub_name) + '' request.path, request.query = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _convert_response_to_event_hub(response)
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
Azure/azure-sdk-for-python
ServiceBusService.send_event
def send_event(self, hub_name, message, device_id=None, broker_properties=None): _validate_not_none('hub_name', hub_name) request = HTTPRequest() request.method = 'POST' request.host = self._get_host() if device_id: request.path = '/{0}/publishers/{1}/messages?api-version=2014-01'.format(hub_name, device_id) else: request.path = '/{0}/messages?api-version=2014-01'.format(hub_name) if broker_properties: request.headers.append( ('BrokerProperties', str(broker_properties))) request.body = _get_request_body(message) request.path, request.query = self._httpclient._update_request_uri_query(request) request.headers = self._update_service_bus_header(request) self._perform_request(request)
Sends a new message event to an Event Hub.
def send_event(self, hub_name, message, device_id=None, broker_properties=None): ''' Sends a new message event to an Event Hub. ''' _validate_not_none('hub_name', hub_name) request = HTTPRequest() request.method = 'POST' request.host = self._get_host() if device_id: request.path = '/{0}/publishers/{1}/messages?api-version=2014-01'.format(hub_name, device_id) else: request.path = '/{0}/messages?api-version=2014-01'.format(hub_name) if broker_properties: request.headers.append( ('BrokerProperties', str(broker_properties))) request.body = _get_request_body(message) request.path, request.query = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers = self._update_service_bus_header(request) self._perform_request(request)
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
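A short hedged sketch of publishing an event, reusing an `sbs` client as constructed above; the hub name, payload, and device id are placeholders:

```python
import json

payload = json.dumps({'temperature': 21.5, 'deviceId': 'device-001'})

# Publish to the hub directly...
sbs.send_event('myhub', payload)

# ...or on behalf of a specific publisher/device endpoint.
sbs.send_event('myhub', payload, device_id='device-001')
```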
Azure/azure-sdk-for-python
ServiceBusService._update_service_bus_header
def _update_service_bus_header(self, request): if request.method in ['PUT', 'POST', 'MERGE', 'DELETE']: request.headers.append(('Content-Length', str(len(request.body)))) if not request.method in ['GET', 'HEAD']: for name, _ in request.headers: if name.lower() == 'content-type': break else: request.headers.append( ('Content-Type', 'application/atom+xml;type=entry;charset=utf-8')) self.authentication.sign_request(request, self._httpclient) return request.headers
Add additional headers for Service Bus.
def _update_service_bus_header(self, request): ''' Add additional headers for Service Bus. ''' if request.method in ['PUT', 'POST', 'MERGE', 'DELETE']: request.headers.append(('Content-Length', str(len(request.body)))) # if it is not GET or HEAD request, must set content-type. if not request.method in ['GET', 'HEAD']: for name, _ in request.headers: if name.lower() == 'content-type': break else: request.headers.append( ('Content-Type', 'application/atom+xml;type=entry;charset=utf-8')) # Adds authorization header for authentication. self.authentication.sign_request(request, self._httpclient) return request.headers
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
Azure/azure-sdk-for-python
ServiceBusWrapTokenAuthentication._get_authorization
def _get_authorization(self, request, httpclient): return 'WRAP access_token="' + \ self._get_token(request.host, request.path, httpclient) + '"'
Return the signed authorization string containing the WRAP access token.
def _get_authorization(self, request, httpclient): ''' return the signed string with token. ''' return 'WRAP access_token="' + \ self._get_token(request.host, request.path, httpclient) + '"'
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
Azure/azure-sdk-for-python
ServiceBusWrapTokenAuthentication._token_is_expired
def _token_is_expired(self, token): time_pos_begin = token.find('ExpiresOn=') + len('ExpiresOn=') time_pos_end = token.find('&', time_pos_begin) token_expire_time = int(token[time_pos_begin:time_pos_end]) time_now = time.mktime(time.localtime()) return (token_expire_time - time_now) < 30
Check whether the token has expired or is about to expire.
def _token_is_expired(self, token): # pylint: disable=no-self-use ''' Check if token expires or not. ''' time_pos_begin = token.find('ExpiresOn=') + len('ExpiresOn=') time_pos_end = token.find('&', time_pos_begin) token_expire_time = int(token[time_pos_begin:time_pos_end]) time_now = time.mktime(time.localtime()) # Adding 30 seconds so the token wouldn't be expired when we send the # token to server. return (token_expire_time - time_now) < 30
azure-servicebus/azure/servicebus/control_client/servicebusservice.py
Azure/azure-sdk-for-python
ManagedClustersOperations.reset_service_principal_profile
def reset_service_principal_profile( self, resource_group_name, resource_name, client_id, secret=None, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._reset_service_principal_profile_initial( resource_group_name=resource_group_name, resource_name=resource_name, client_id=client_id, secret=secret, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
Reset the service principal profile of a managed cluster. This updates the service principal profile for the managed cluster.
def reset_service_principal_profile( self, resource_group_name, resource_name, client_id, secret=None, custom_headers=None, raw=False, polling=True, **operation_config): """Reset Service Principal Profile of a managed cluster. Update the service principal Profile for a managed cluster. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param resource_name: The name of the managed cluster resource. :type resource_name: str :param client_id: The ID for the service principal. :type client_id: str :param secret: The secret password associated with the service principal in plain text. :type secret: str :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns None or ClientRawResponse<None> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ raw_result = self._reset_service_principal_profile_initial( resource_group_name=resource_group_name, resource_name=resource_name, client_id=client_id, secret=secret, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
azure-mgmt-containerservice/azure/mgmt/containerservice/v2018_08_01_preview/operations/managed_clusters_operations.py
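A hedged sketch of driving the long-running operation; it assumes a ContainerServiceClient whose operations group is exposed as `managed_clusters` (attribute name assumed) and uses placeholder resource names:

```python
# 'client' is an already-constructed azure.mgmt.containerservice client.
poller = client.managed_clusters.reset_service_principal_profile(
    resource_group_name='my-rg',
    resource_name='my-aks-cluster',
    client_id='<service-principal-app-id>',
    secret='<service-principal-secret>')

poller.wait()          # block until the long-running operation finishes
print(poller.done())   # True once the service principal has been reset
```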
Azure/azure-sdk-for-python
Message.delete
def delete(self): if self._queue_name: self.service_bus_service.delete_queue_message( self._queue_name, self.broker_properties['SequenceNumber'], self.broker_properties['LockToken']) elif self._topic_name and self._subscription_name: self.service_bus_service.delete_subscription_message( self._topic_name, self._subscription_name, self.broker_properties['SequenceNumber'], self.broker_properties['LockToken']) else: raise AzureServiceBusPeekLockError(_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_DELETE)
Deletes the message when a queue name, or a topic name and subscription name, is set.
def delete(self): ''' Deletes itself if find queue name or topic name and subscription name. ''' if self._queue_name: self.service_bus_service.delete_queue_message( self._queue_name, self.broker_properties['SequenceNumber'], self.broker_properties['LockToken']) elif self._topic_name and self._subscription_name: self.service_bus_service.delete_subscription_message( self._topic_name, self._subscription_name, self.broker_properties['SequenceNumber'], self.broker_properties['LockToken']) else: raise AzureServiceBusPeekLockError(_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_DELETE)
azure-servicebus/azure/servicebus/control_client/models.py
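A hedged sketch of the peek-lock lifecycle that delete() completes; `process` is a hypothetical application handler and `sbs` a ServiceBusService client as constructed earlier:

```python
msg = sbs.receive_queue_message('taskqueue', peek_lock=True, timeout=5)
if msg.body is not None:
    try:
        process(msg.body)   # hypothetical application handler
        msg.delete()        # completes the message: it will not be redelivered
    except Exception:
        msg.unlock()        # releases the lock so another receiver can retry
```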
Azure/azure-sdk-for-python
Message.unlock
def unlock(self): if self._queue_name: self.service_bus_service.unlock_queue_message( self._queue_name, self.broker_properties['SequenceNumber'], self.broker_properties['LockToken']) elif self._topic_name and self._subscription_name: self.service_bus_service.unlock_subscription_message( self._topic_name, self._subscription_name, self.broker_properties['SequenceNumber'], self.broker_properties['LockToken']) else: raise AzureServiceBusPeekLockError(_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_UNLOCK)
Unlocks the message when a queue name, or a topic name and subscription name, is set.
def unlock(self): ''' Unlocks itself if find queue name or topic name and subscription name. ''' if self._queue_name: self.service_bus_service.unlock_queue_message( self._queue_name, self.broker_properties['SequenceNumber'], self.broker_properties['LockToken']) elif self._topic_name and self._subscription_name: self.service_bus_service.unlock_subscription_message( self._topic_name, self._subscription_name, self.broker_properties['SequenceNumber'], self.broker_properties['LockToken']) else: raise AzureServiceBusPeekLockError(_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_UNLOCK)
azure-servicebus/azure/servicebus/control_client/models.py
Azure/azure-sdk-for-python
Message.renew_lock
def renew_lock(self): if self._queue_name: self.service_bus_service.renew_lock_queue_message( self._queue_name, self.broker_properties['SequenceNumber'], self.broker_properties['LockToken']) elif self._topic_name and self._subscription_name: self.service_bus_service.renew_lock_subscription_message( self._topic_name, self._subscription_name, self.broker_properties['SequenceNumber'], self.broker_properties['LockToken']) else: raise AzureServiceBusPeekLockError(_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_RENEW_LOCK)
Renews the lock on the message when a queue name, or a topic name and subscription name, is set.
def renew_lock(self): ''' Renew lock on itself if find queue name or topic name and subscription name. ''' if self._queue_name: self.service_bus_service.renew_lock_queue_message( self._queue_name, self.broker_properties['SequenceNumber'], self.broker_properties['LockToken']) elif self._topic_name and self._subscription_name: self.service_bus_service.renew_lock_subscription_message( self._topic_name, self._subscription_name, self.broker_properties['SequenceNumber'], self.broker_properties['LockToken']) else: raise AzureServiceBusPeekLockError(_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_RENEW_LOCK)
azure-servicebus/azure/servicebus/control_client/models.py
Azure/azure-sdk-for-python
Message.add_headers
def add_headers(self, request): if self.custom_properties: for name, value in self.custom_properties.items(): request.headers.append((name, self._serialize_escaped_properties_value(value))) request.headers.append(('Content-Type', self.type)) if self.broker_properties: if hasattr(self.broker_properties, 'items'): broker_properties = {name: self._serialize_basic_properties_value(value) for name, value in self.broker_properties.items()} broker_properties = json.dumps(broker_properties) else: broker_properties = self.broker_properties request.headers.append( ('BrokerProperties', str(broker_properties))) return request.headers
Add additional headers to the request for a message request.
def add_headers(self, request): ''' add addtional headers to request for message request.''' # Adds custom properties if self.custom_properties: for name, value in self.custom_properties.items(): request.headers.append((name, self._serialize_escaped_properties_value(value))) # Adds content-type request.headers.append(('Content-Type', self.type)) # Adds BrokerProperties if self.broker_properties: if hasattr(self.broker_properties, 'items'): broker_properties = {name: self._serialize_basic_properties_value(value) for name, value in self.broker_properties.items()} broker_properties = json.dumps(broker_properties) else: broker_properties = self.broker_properties request.headers.append( ('BrokerProperties', str(broker_properties))) return request.headers
azure-servicebus/azure/servicebus/control_client/models.py
Azure/azure-sdk-for-python
Message.as_batch_body
def as_batch_body(self): if sys.version_info >= (3,) and isinstance(self.body, bytes): body = self.body.decode('utf-8') else: body = self.body result = {'Body': body} if self.custom_properties: result['UserProperties'] = {name: self._serialize_basic_properties_value(value) for name, value in self.custom_properties.items()} if self.broker_properties: result['BrokerProperties'] = {name: self._serialize_basic_properties_value(value) for name, value in self.broker_properties.items()} return result
Return the current message in the format expected by a batch request body.
def as_batch_body(self): ''' return the current message as expected by batch body format''' if sys.version_info >= (3,) and isinstance(self.body, bytes): # It HAS to be string to be serialized in JSON body = self.body.decode('utf-8') else: # Python 2.7 people handle this themself body = self.body result = {'Body': body} # Adds custom properties if self.custom_properties: result['UserProperties'] = {name: self._serialize_basic_properties_value(value) for name, value in self.custom_properties.items()} # Adds BrokerProperties if self.broker_properties: result['BrokerProperties'] = {name: self._serialize_basic_properties_value(value) for name, value in self.broker_properties.items()} return result
azure-servicebus/azure/servicebus/control_client/models.py
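A hedged sketch showing the batch-body shape produced by as_batch_body(); the Message constructor keywords are assumed from the control-client models:

```python
import json
from azure.servicebus.control_client import Message

m = Message(b'hello world', custom_properties={'priority': 1})
entry = m.as_batch_body()
# entry looks roughly like {'Body': 'hello world', 'UserProperties': {'priority': 1}}
batch_payload = json.dumps([entry])  # a batch request body is a JSON array of such entries
print(batch_payload)
```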
Azure/azure-sdk-for-python
ServiceFabricClientAPIs.get_repair_task_list
def get_repair_task_list( self, task_id_filter=None, state_filter=None, executor_filter=None, custom_headers=None, raw=False, **operation_config): api_version = "6.0" url = self.get_repair_task_list.metadata['url'] query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') if task_id_filter is not None: query_parameters['TaskIdFilter'] = self._serialize.query("task_id_filter", task_id_filter, 'str') if state_filter is not None: query_parameters['StateFilter'] = self._serialize.query("state_filter", state_filter, 'int') if executor_filter is not None: query_parameters['ExecutorFilter'] = self._serialize.query("executor_filter", executor_filter, 'str') header_parameters = {} header_parameters['Accept'] = 'application/json' if custom_headers: header_parameters.update(custom_headers) request = self._client.get(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.FabricErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('[RepairTask]', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized
Gets a list of repair tasks matching the given filters. This API supports the Service Fabric platform; it is not meant to be used directly from your code.
def get_repair_task_list( self, task_id_filter=None, state_filter=None, executor_filter=None, custom_headers=None, raw=False, **operation_config): """Gets a list of repair tasks matching the given filters. This API supports the Service Fabric platform; it is not meant to be used directly from your code. :param task_id_filter: The repair task ID prefix to be matched. :type task_id_filter: str :param state_filter: A bitwise-OR of the following values, specifying which task states should be included in the result list. - 1 - Created - 2 - Claimed - 4 - Preparing - 8 - Approved - 16 - Executing - 32 - Restoring - 64 - Completed :type state_filter: int :param executor_filter: The name of the repair executor whose claimed tasks should be included in the list. :type executor_filter: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: list or ClientRawResponse if raw=true :rtype: list[~azure.servicefabric.models.RepairTask] or ~msrest.pipeline.ClientRawResponse :raises: :class:`FabricErrorException<azure.servicefabric.models.FabricErrorException>` """ api_version = "6.0" # Construct URL url = self.get_repair_task_list.metadata['url'] # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') if task_id_filter is not None: query_parameters['TaskIdFilter'] = self._serialize.query("task_id_filter", task_id_filter, 'str') if state_filter is not None: query_parameters['StateFilter'] = self._serialize.query("state_filter", state_filter, 'int') if executor_filter is not None: query_parameters['ExecutorFilter'] = self._serialize.query("executor_filter", executor_filter, 'str') # Construct headers header_parameters = {} header_parameters['Accept'] = 'application/json' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.FabricErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('[RepairTask]', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized
azure-servicefabric/azure/servicefabric/service_fabric_client_ap_is.py
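A hedged sketch of listing repair tasks; it assumes a ServiceFabricClientAPIs client (`sf_client`) already authenticated against the cluster HTTP gateway, and the RepairTask attribute names are assumptions:

```python
# Bitwise-OR of task states: 1=Created, 2=Claimed, 4=Preparing (see above).
tasks = sf_client.get_repair_task_list(
    task_id_filter='Azure/',
    state_filter=1 | 2 | 4,
    executor_filter='MyRepairExecutor')

for task in tasks:
    print(task.task_id, task.state)   # attribute names assumed from RepairTask
```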
Azure/azure-sdk-for-python
ServiceFabricClientAPIs.submit_property_batch
def submit_property_batch( self, name_id, timeout=60, operations=None, custom_headers=None, raw=False, **operation_config): property_batch_description_list = models.PropertyBatchDescriptionList(operations=operations) api_version = "6.0" url = self.submit_property_batch.metadata['url'] path_format_arguments = { 'nameId': self._serialize.url("name_id", name_id, 'str', skip_quote=True) } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') if timeout is not None: query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'long', maximum=4294967295, minimum=1) header_parameters = {} header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) body_content = self._serialize.body(property_batch_description_list, 'PropertyBatchDescriptionList') request = self._client.post(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 409]: raise models.FabricErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('SuccessfulPropertyBatchInfo', response) if response.status_code == 409: deserialized = self._deserialize('FailedPropertyBatchInfo', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized
Submits a property batch. Submits a batch of property operations. Either all or none of the operations will be committed.
def submit_property_batch( self, name_id, timeout=60, operations=None, custom_headers=None, raw=False, **operation_config): """Submits a property batch. Submits a batch of property operations. Either all or none of the operations will be committed. :param name_id: The Service Fabric name, without the 'fabric:' URI scheme. :type name_id: str :param timeout: The server timeout for performing the operation in seconds. This timeout specifies the time duration that the client is willing to wait for the requested operation to complete. The default value for this parameter is 60 seconds. :type timeout: long :param operations: A list of the property batch operations to be executed. :type operations: list[~azure.servicefabric.models.PropertyBatchOperation] :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: PropertyBatchInfo or ClientRawResponse if raw=true :rtype: ~azure.servicefabric.models.PropertyBatchInfo or ~msrest.pipeline.ClientRawResponse :raises: :class:`FabricErrorException<azure.servicefabric.models.FabricErrorException>` """ property_batch_description_list = models.PropertyBatchDescriptionList(operations=operations) api_version = "6.0" # Construct URL url = self.submit_property_batch.metadata['url'] path_format_arguments = { 'nameId': self._serialize.url("name_id", name_id, 'str', skip_quote=True) } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') if timeout is not None: query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'long', maximum=4294967295, minimum=1) # Construct headers header_parameters = {} header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct body body_content = self._serialize.body(property_batch_description_list, 'PropertyBatchDescriptionList') # Construct and send request request = self._client.post(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 409]: raise models.FabricErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('SuccessfulPropertyBatchInfo', response) if response.status_code == 409: deserialized = self._deserialize('FailedPropertyBatchInfo', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized
azure-servicefabric/azure/servicefabric/service_fabric_client_ap_is.py
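A hedged sketch of submitting a property batch; the concrete PropertyBatchOperation subclasses are not shown here, so `operations` is assumed to be a pre-built list of such models:

```python
# 'operations' is a list of azure.servicefabric.models.PropertyBatchOperation
# instances built elsewhere (concrete operation classes assumed).
result = sf_client.submit_property_batch(
    name_id='myapp/configuration',   # Service Fabric name without the 'fabric:' scheme
    operations=operations,
    timeout=60)

# HTTP 200 deserializes to SuccessfulPropertyBatchInfo, 409 to FailedPropertyBatchInfo.
print(type(result).__name__)
```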
Azure/azure-sdk-for-python
_general_error_handler
def _general_error_handler(http_error): message = str(http_error) if http_error.respbody is not None: message += '\n' + http_error.respbody.decode('utf-8-sig') raise AzureHttpError(message, http_error.status)
Simple error handler for Azure HTTP errors.
def _general_error_handler(http_error): ''' Simple error handler for azure.''' message = str(http_error) if http_error.respbody is not None: message += '\n' + http_error.respbody.decode('utf-8-sig') raise AzureHttpError(message, http_error.status)
azure-servicemanagement-legacy/azure/servicemanagement/_common_error.py
Azure/azure-sdk-for-python
WebAppsOperations.start_web_site_network_trace_operation
def start_web_site_network_trace_operation( self, resource_group_name, name, duration_in_seconds=None, max_frame_length=None, sas_url=None, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._start_web_site_network_trace_operation_initial( resource_group_name=resource_group_name, name=name, duration_in_seconds=duration_in_seconds, max_frame_length=max_frame_length, sas_url=sas_url, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): deserialized = self._deserialize('[NetworkTrace]', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
Start capturing network packets for the site.
def start_web_site_network_trace_operation( self, resource_group_name, name, duration_in_seconds=None, max_frame_length=None, sas_url=None, custom_headers=None, raw=False, polling=True, **operation_config): """Start capturing network packets for the site. Start capturing network packets for the site. :param resource_group_name: Name of the resource group to which the resource belongs. :type resource_group_name: str :param name: The name of the web app. :type name: str :param duration_in_seconds: The duration to keep capturing in seconds. :type duration_in_seconds: int :param max_frame_length: The maximum frame length in bytes (Optional). :type max_frame_length: int :param sas_url: The Blob URL to store capture file. :type sas_url: str :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns list or ClientRawResponse<list> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[list[~azure.mgmt.web.models.NetworkTrace]] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[list[~azure.mgmt.web.models.NetworkTrace]]] :raises: :class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>` """ raw_result = self._start_web_site_network_trace_operation_initial( resource_group_name=resource_group_name, name=name, duration_in_seconds=duration_in_seconds, max_frame_length=max_frame_length, sas_url=sas_url, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): deserialized = self._deserialize('[NetworkTrace]', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
azure-mgmt-web/azure/mgmt/web/operations/web_apps_operations.py
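A hedged sketch of starting a network trace; it assumes a WebSiteManagementClient whose operations group is exposed as `web_apps` (attribute name assumed) and a pre-generated blob SAS URL:

```python
poller = web_client.web_apps.start_web_site_network_trace_operation(
    resource_group_name='my-rg',
    name='my-webapp',
    duration_in_seconds=60,
    max_frame_length=4096,
    sas_url='<blob-container-sas-url>')

traces = poller.result()   # list of NetworkTrace once the operation completes
for trace in traces:
    print(trace)
```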
Azure/azure-sdk-for-python
WebAppsOperations.list_slot_differences_slot
def list_slot_differences_slot( self, resource_group_name, name, slot, target_slot, preserve_vnet, custom_headers=None, raw=False, **operation_config): slot_swap_entity = models.CsmSlotEntity(target_slot=target_slot, preserve_vnet=preserve_vnet) def internal_paging(next_link=None, raw=False): if not next_link: url = self.list_slot_differences_slot.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'), 'name': self._serialize.url("name", name, 'str'), 'slot': self._serialize.url("slot", slot, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') else: url = next_link query_parameters = {} header_parameters = {} header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') body_content = self._serialize.body(slot_swap_entity, 'CsmSlotEntity') request = self._client.post(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.DefaultErrorResponseException(self._deserialize, response) return response deserialized = models.SlotDifferencePaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} client_raw_response = models.SlotDifferencePaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized
Get the difference in configuration settings between two web app slots.
def list_slot_differences_slot( self, resource_group_name, name, slot, target_slot, preserve_vnet, custom_headers=None, raw=False, **operation_config): """Get the difference in configuration settings between two web app slots. Get the difference in configuration settings between two web app slots. :param resource_group_name: Name of the resource group to which the resource belongs. :type resource_group_name: str :param name: Name of the app. :type name: str :param slot: Name of the source slot. If a slot is not specified, the production slot is used as the source slot. :type slot: str :param target_slot: Destination deployment slot during swap operation. :type target_slot: str :param preserve_vnet: <code>true</code> to preserve Virtual Network to the slot during swap; otherwise, <code>false</code>. :type preserve_vnet: bool :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: An iterator like instance of SlotDifference :rtype: ~azure.mgmt.web.models.SlotDifferencePaged[~azure.mgmt.web.models.SlotDifference] :raises: :class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>` """ slot_swap_entity = models.CsmSlotEntity(target_slot=target_slot, preserve_vnet=preserve_vnet) def internal_paging(next_link=None, raw=False): if not next_link: # Construct URL url = self.list_slot_differences_slot.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'), 'name': self._serialize.url("name", name, 'str'), 'slot': self._serialize.url("slot", slot, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') else: url = next_link query_parameters = {} # Construct headers header_parameters = {} header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct body body_content = self._serialize.body(slot_swap_entity, 'CsmSlotEntity') # Construct and send request request = self._client.post(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.DefaultErrorResponseException(self._deserialize, response) return response # Deserialize response deserialized = models.SlotDifferencePaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} client_raw_response = models.SlotDifferencePaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized
azure-mgmt-web/azure/mgmt/web/operations/web_apps_operations.py
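A hedged sketch of iterating the paged result; resource names are placeholders and the `web_apps` attribute is assumed as above:

```python
differences = web_client.web_apps.list_slot_differences_slot(
    resource_group_name='my-rg',
    name='my-webapp',
    slot='staging',            # source slot
    target_slot='production',  # destination slot for the prospective swap
    preserve_vnet=True)

# SlotDifferencePaged pages lazily as it is iterated.
for diff in differences:
    print(diff)
```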
Azure/azure-sdk-for-python
WebAppsOperations.swap_slot_slot
def swap_slot_slot( self, resource_group_name, name, slot, target_slot, preserve_vnet, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._swap_slot_slot_initial( resource_group_name=resource_group_name, name=name, slot=slot, target_slot=target_slot, preserve_vnet=preserve_vnet, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
Swaps two deployment slots of an app.
def swap_slot_slot( self, resource_group_name, name, slot, target_slot, preserve_vnet, custom_headers=None, raw=False, polling=True, **operation_config): """Swaps two deployment slots of an app. Swaps two deployment slots of an app. :param resource_group_name: Name of the resource group to which the resource belongs. :type resource_group_name: str :param name: Name of the app. :type name: str :param slot: Name of the source slot. If a slot is not specified, the production slot is used as the source slot. :type slot: str :param target_slot: Destination deployment slot during swap operation. :type target_slot: str :param preserve_vnet: <code>true</code> to preserve Virtual Network to the slot during swap; otherwise, <code>false</code>. :type preserve_vnet: bool :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns None or ClientRawResponse<None> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ raw_result = self._swap_slot_slot_initial( resource_group_name=resource_group_name, name=name, slot=slot, target_slot=target_slot, preserve_vnet=preserve_vnet, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
azure-mgmt-web/azure/mgmt/web/operations/web_apps_operations.py
Azure/azure-sdk-for-python
EventsOperations.get_by_type
def get_by_type( self, app_id, event_type, timespan=None, filter=None, search=None, orderby=None, select=None, skip=None, top=None, format=None, count=None, apply=None, custom_headers=None, raw=False, **operation_config): url = self.get_by_type.metadata['url'] path_format_arguments = { 'appId': self._serialize.url("app_id", app_id, 'str'), 'eventType': self._serialize.url("event_type", event_type, 'str') } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} if timespan is not None: query_parameters['timespan'] = self._serialize.query("timespan", timespan, 'str') if filter is not None: query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') if search is not None: query_parameters['$search'] = self._serialize.query("search", search, 'str') if orderby is not None: query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') if select is not None: query_parameters['$select'] = self._serialize.query("select", select, 'str') if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') if top is not None: query_parameters['$top'] = self._serialize.query("top", top, 'int') if format is not None: query_parameters['$format'] = self._serialize.query("format", format, 'str') if count is not None: query_parameters['$count'] = self._serialize.query("count", count, 'bool') if apply is not None: query_parameters['$apply'] = self._serialize.query("apply", apply, 'str') header_parameters = {} header_parameters['Accept'] = 'application/json' if custom_headers: header_parameters.update(custom_headers) request = self._client.get(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.ErrorResponseException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('EventsResults', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized
Execute an OData query for events.
def get_by_type( self, app_id, event_type, timespan=None, filter=None, search=None, orderby=None, select=None, skip=None, top=None, format=None, count=None, apply=None, custom_headers=None, raw=False, **operation_config): """Execute OData query. Executes an OData query for events. :param app_id: ID of the application. This is Application ID from the API Access settings blade in the Azure portal. :type app_id: str :param event_type: The type of events to query; either a standard event type (`traces`, `customEvents`, `pageViews`, `requests`, `dependencies`, `exceptions`, `availabilityResults`) or `$all` to query across all event types. Possible values include: '$all', 'traces', 'customEvents', 'pageViews', 'browserTimings', 'requests', 'dependencies', 'exceptions', 'availabilityResults', 'performanceCounters', 'customMetrics' :type event_type: str or ~azure.applicationinsights.models.EventType :param timespan: Optional. The timespan over which to retrieve events. This is an ISO8601 time period value. This timespan is applied in addition to any that are specified in the Odata expression. :type timespan: str :param filter: An expression used to filter the returned events :type filter: str :param search: A free-text search expression to match for whether a particular event should be returned :type search: str :param orderby: A comma-separated list of properties with \\"asc\\" (the default) or \\"desc\\" to control the order of returned events :type orderby: str :param select: Limits the properties to just those requested on each returned event :type select: str :param skip: The number of items to skip over before returning events :type skip: int :param top: The number of events to return :type top: int :param format: Format for the returned events :type format: str :param count: Request a count of matching items included with the returned events :type count: bool :param apply: An expression used for aggregation over returned events :type apply: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:return: EventsResults or ClientRawResponse if raw=true :rtype: ~azure.applicationinsights.models.EventsResults or ~msrest.pipeline.ClientRawResponse :raises: :class:`ErrorResponseException<azure.applicationinsights.models.ErrorResponseException>` """ # Construct URL url = self.get_by_type.metadata['url'] path_format_arguments = { 'appId': self._serialize.url("app_id", app_id, 'str'), 'eventType': self._serialize.url("event_type", event_type, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} if timespan is not None: query_parameters['timespan'] = self._serialize.query("timespan", timespan, 'str') if filter is not None: query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') if search is not None: query_parameters['$search'] = self._serialize.query("search", search, 'str') if orderby is not None: query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') if select is not None: query_parameters['$select'] = self._serialize.query("select", select, 'str') if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') if top is not None: query_parameters['$top'] = self._serialize.query("top", top, 'int') if format is not None: query_parameters['$format'] = self._serialize.query("format", format, 'str') if count is not None: query_parameters['$count'] = self._serialize.query("count", count, 'bool') if apply is not None: query_parameters['$apply'] = self._serialize.query("apply", apply, 'str') # Construct headers header_parameters = {} header_parameters['Accept'] = 'application/json' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.ErrorResponseException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('EventsResults', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized
azure-applicationinsights/azure/applicationinsights/operations/events_operations.py
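A hedged sketch of querying recent request events; it assumes an Application Insights data client whose operations group is exposed as `events` (attribute name assumed), and that EventsResults exposes the matched events via `.value`:

```python
results = ai_client.events.get_by_type(
    app_id='<application-id>',     # Application ID from the API Access blade
    event_type='requests',
    timespan='PT12H',              # ISO8601 period: the last 12 hours
    top=10,
    orderby='timestamp desc')

for event in results.value:        # '.value' assumed on EventsResults
    print(event)
```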
Azure/azure-sdk-for-python
LargeFaceListOperations.add_face_from_stream
def add_face_from_stream( self, large_face_list_id, image, user_data=None, target_face=None, custom_headers=None, raw=False, callback=None, **operation_config): url = self.add_face_from_stream.metadata['url'] path_format_arguments = { 'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True), 'largeFaceListId': self._serialize.url("large_face_list_id", large_face_list_id, 'str', max_length=64, pattern=r'^[a-z0-9-_]+$') } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} if user_data is not None: query_parameters['userData'] = self._serialize.query("user_data", user_data, 'str', max_length=1024) if target_face is not None: query_parameters['targetFace'] = self._serialize.query("target_face", target_face, '[int]', div=',') header_parameters = {} header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/octet-stream' if custom_headers: header_parameters.update(custom_headers) body_content = self._client.stream_upload(image, callback) request = self._client.post(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.APIErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('PersistedFace', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized
Add a face to a large face list. The input face is specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face, and persistedFaceId will not expire.
def add_face_from_stream( self, large_face_list_id, image, user_data=None, target_face=None, custom_headers=None, raw=False, callback=None, **operation_config): """Add a face to a large face list. The input face is specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face, and persistedFaceId will not expire. :param large_face_list_id: Id referencing a particular large face list. :type large_face_list_id: str :param image: An image stream. :type image: Generator :param user_data: User-specified data about the face for any purpose. The maximum length is 1KB. :type user_data: str :param target_face: A face rectangle to specify the target face to be added to a person in the format of "targetFace=left,top,width,height". E.g. "targetFace=10,10,100,100". If there is more than one face in the image, targetFace is required to specify which face to add. No targetFace means there is only one face detected in the entire image. :type target_face: list[int] :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param callback: When specified, will be called with each chunk of data that is streamed. The callback should take two arguments, the bytes of the current chunk of data and the response object. If the data is uploading, response will be None. :type callback: Callable[Bytes, response=None] :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: PersistedFace or ClientRawResponse if raw=true :rtype: ~azure.cognitiveservices.vision.face.models.PersistedFace or ~msrest.pipeline.ClientRawResponse :raises: :class:`APIErrorException<azure.cognitiveservices.vision.face.models.APIErrorException>` """ # Construct URL url = self.add_face_from_stream.metadata['url'] path_format_arguments = { 'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True), 'largeFaceListId': self._serialize.url("large_face_list_id", large_face_list_id, 'str', max_length=64, pattern=r'^[a-z0-9-_]+$') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} if user_data is not None: query_parameters['userData'] = self._serialize.query("user_data", user_data, 'str', max_length=1024) if target_face is not None: query_parameters['targetFace'] = self._serialize.query("target_face", target_face, '[int]', div=',') # Construct headers header_parameters = {} header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/octet-stream' if custom_headers: header_parameters.update(custom_headers) # Construct body body_content = self._client.stream_upload(image, callback) # Construct and send request request = self._client.post(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.APIErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('PersistedFace', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized
azure-cognitiveservices-vision-face/azure/cognitiveservices/vision/face/operations/large_face_list_operations.py
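A hedged sketch of adding a face from a local image stream; it assumes a FaceClient whose large-face-list operations are exposed as `large_face_list` (attribute name assumed) and a placeholder image path:

```python
with open('face.jpg', 'rb') as image_stream:
    persisted = face_client.large_face_list.add_face_from_stream(
        large_face_list_id='my-face-list',
        image=image_stream,
        user_data='front-door camera',
        target_face=[10, 10, 100, 100])  # left, top, width, height

print(persisted.persisted_face_id)       # attribute assumed from PersistedFace
```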
Azure/azure-sdk-for-python
KeyVaultAuthBase._handle_redirect
def _handle_redirect(self, r, **kwargs): if r.is_redirect: self._thread_local.auth_attempted = False
Reset auth_attempted on redirects.
def _handle_redirect(self, r, **kwargs): """Reset auth_attempted on redirects.""" if r.is_redirect: self._thread_local.auth_attempted = False
azure-keyvault/azure/keyvault/key_vault_authentication.py
Azure/azure-sdk-for-python
MigrationConfigsOperations.create_and_start_migration
def create_and_start_migration( self, resource_group_name, namespace_name, target_namespace, post_migration_name, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._create_and_start_migration_initial( resource_group_name=resource_group_name, namespace_name=namespace_name, target_namespace=target_namespace, post_migration_name=post_migration_name, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): deserialized = self._deserialize('MigrationConfigProperties', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
Creates a migration configuration and starts migration of entities from a Standard to a Premium namespace.
def create_and_start_migration( self, resource_group_name, namespace_name, target_namespace, post_migration_name, custom_headers=None, raw=False, polling=True, **operation_config): """Creates Migration configuration and starts migration of entities from Standard to Premium namespace. :param resource_group_name: Name of the Resource group within the Azure subscription. :type resource_group_name: str :param namespace_name: The namespace name :type namespace_name: str :param target_namespace: Existing premium Namespace ARM Id name which has no entities, will be used for migration :type target_namespace: str :param post_migration_name: Name to access Standard Namespace after migration :type post_migration_name: str :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns MigrationConfigProperties or ClientRawResponse<MigrationConfigProperties> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.servicebus.models.MigrationConfigProperties] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.servicebus.models.MigrationConfigProperties]] :raises: :class:`ErrorResponseException<azure.mgmt.servicebus.models.ErrorResponseException>` """ raw_result = self._create_and_start_migration_initial( resource_group_name=resource_group_name, namespace_name=namespace_name, target_namespace=target_namespace, post_migration_name=post_migration_name, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): deserialized = self._deserialize('MigrationConfigProperties', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
azure-mgmt-servicebus/azure/mgmt/servicebus/operations/migration_configs_operations.py
Azure/azure-sdk-for-python
EventGridClient.publish_events
def publish_events( self, topic_hostname, events, custom_headers=None, raw=False, **operation_config): url = self.publish_events.metadata['url'] path_format_arguments = { 'topicHostname': self._serialize.url("topic_hostname", topic_hostname, 'str', skip_quote=True) } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) body_content = self._serialize.body(events, '[EventGridEvent]') request = self._client.post(url, query_parameters) response = self._client.send( request, header_parameters, body_content, stream=False, **operation_config) if response.status_code not in [200]: raise HttpOperationError(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response
Publishes a batch of events to an Azure Event Grid topic.
def publish_events( self, topic_hostname, events, custom_headers=None, raw=False, **operation_config): """Publishes a batch of events to an Azure Event Grid topic. :param topic_hostname: The host name of the topic, e.g. topic1.westus2-1.eventgrid.azure.net :type topic_hostname: str :param events: An array of events to be published to Event Grid. :type events: list[~azure.eventgrid.models.EventGridEvent] :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: None or ClientRawResponse if raw=true :rtype: None or ~msrest.pipeline.ClientRawResponse :raises: :class:`HttpOperationError<msrest.exceptions.HttpOperationError>` """ # Construct URL url = self.publish_events.metadata['url'] path_format_arguments = { 'topicHostname': self._serialize.url("topic_hostname", topic_hostname, 'str', skip_quote=True) } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct body body_content = self._serialize.body(events, '[EventGridEvent]') # Construct and send request request = self._client.post(url, query_parameters) response = self._client.send( request, header_parameters, body_content, stream=False, **operation_config) if response.status_code not in [200]: raise HttpOperationError(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response
azure-eventgrid/azure/eventgrid/event_grid_client.py
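A hedged usage sketch for publish_events; the topic hostname and key are placeholders, and TopicCredentials from msrest is assumed to be the authentication type paired with this client.

import datetime
from msrest.authentication import TopicCredentials
from azure.eventgrid import EventGridClient
from azure.eventgrid.models import EventGridEvent

client = EventGridClient(TopicCredentials("<topic-key>"))
event = EventGridEvent(
    id="9f21b31f-0001-0001-0001-000000000001",
    subject="myapp/orders/42",
    data={"order_id": 42},
    event_type="MyApp.OrderCreated",
    event_time=datetime.datetime.utcnow(),
    data_version="1.0")
client.publish_events("topic1.westus2-1.eventgrid.azure.net", events=[event])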
Azure/azure-sdk-for-python
DefaultProfile.use
def use(self, profile): if not isinstance(profile, (KnownProfiles, ProfileDefinition)): raise ValueError("Can only set as default a ProfileDefinition or a KnownProfiles") type(self).profile = profile
Define a new default profile.
def use(self, profile): """Define a new default profile.""" if not isinstance(profile, (KnownProfiles, ProfileDefinition)): raise ValueError("Can only set as default a ProfileDefinition or a KnownProfiles") type(self).profile = profile
azure-common/azure/profiles/__init__.py
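A minimal sketch of how this is typically driven through KnownProfiles; the specific profile chosen here is only illustrative.

from azure.profiles import KnownProfiles

KnownProfiles.default.use(KnownProfiles.v2017_03_09_profile)
# multi-API clients created after this point resolve to the 2017-03-09 profile
KnownProfiles.default.use(KnownProfiles.latest)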
Azure/azure-sdk-for-python
PolicyTrackedResourcesOperations.list_query_results_for_management_group
def list_query_results_for_management_group( self, management_group_name, query_options=None, custom_headers=None, raw=False, **operation_config): top = None if query_options is not None: top = query_options.top filter = None if query_options is not None: filter = query_options.filter def internal_paging(next_link=None, raw=False): if not next_link: url = self.list_query_results_for_management_group.metadata['url'] path_format_arguments = { 'managementGroupsNamespace': self._serialize.url("self.management_groups_namespace", self.management_groups_namespace, 'str'), 'managementGroupName': self._serialize.url("management_group_name", management_group_name, 'str'), 'policyTrackedResourcesResource': self._serialize.url("self.policy_tracked_resources_resource", self.policy_tracked_resources_resource, 'str') } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') if top is not None: query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=0) if filter is not None: query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') else: url = next_link query_parameters = {} header_parameters = {} header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') request = self._client.post(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.QueryFailureException(self._deserialize, response) return response deserialized = models.PolicyTrackedResourcePaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} client_raw_response = models.PolicyTrackedResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized
Queries policy tracked resources under the management group.
def list_query_results_for_management_group( self, management_group_name, query_options=None, custom_headers=None, raw=False, **operation_config): """Queries policy tracked resources under the management group. :param management_group_name: Management group name. :type management_group_name: str :param query_options: Additional parameters for the operation :type query_options: ~azure.mgmt.policyinsights.models.QueryOptions :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: An iterator like instance of PolicyTrackedResource :rtype: ~azure.mgmt.policyinsights.models.PolicyTrackedResourcePaged[~azure.mgmt.policyinsights.models.PolicyTrackedResource] :raises: :class:`QueryFailureException<azure.mgmt.policyinsights.models.QueryFailureException>` """ top = None if query_options is not None: top = query_options.top filter = None if query_options is not None: filter = query_options.filter def internal_paging(next_link=None, raw=False): if not next_link: # Construct URL url = self.list_query_results_for_management_group.metadata['url'] path_format_arguments = { 'managementGroupsNamespace': self._serialize.url("self.management_groups_namespace", self.management_groups_namespace, 'str'), 'managementGroupName': self._serialize.url("management_group_name", management_group_name, 'str'), 'policyTrackedResourcesResource': self._serialize.url("self.policy_tracked_resources_resource", self.policy_tracked_resources_resource, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') if top is not None: query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=0) if filter is not None: query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') else: url = next_link query_parameters = {} # Construct headers header_parameters = {} header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request request = self._client.post(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.QueryFailureException(self._deserialize, response) return response # Deserialize response deserialized = models.PolicyTrackedResourcePaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} client_raw_response = models.PolicyTrackedResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized
azure-mgmt-policyinsights/azure/mgmt/policyinsights/operations/policy_tracked_resources_operations.py
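A hedged usage sketch, assuming `client` is an already-constructed PolicyInsightsClient; the management group name and the $filter expression are placeholders.

from azure.mgmt.policyinsights.models import QueryOptions

options = QueryOptions(top=10, filter="PolicyDefinitionAction eq 'deployifnotexists'")
for tracked_resource in client.policy_tracked_resources.list_query_results_for_management_group(
        "my-management-group", query_options=options):
    print(tracked_resource)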
Azure/azure-sdk-for-python
ServiceBusMixin.create_queue
def create_queue( self, queue_name, lock_duration=30, max_size_in_megabytes=None, requires_duplicate_detection=False, requires_session=False, default_message_time_to_live=None, dead_lettering_on_message_expiration=False, duplicate_detection_history_time_window=None, max_delivery_count=None, enable_batched_operations=None): queue_properties = Queue( lock_duration="PT{}S".format(int(lock_duration)), max_size_in_megabytes=max_size_in_megabytes, requires_duplicate_detection=requires_duplicate_detection, requires_session=requires_session, default_message_time_to_live=default_message_time_to_live, dead_lettering_on_message_expiration=dead_lettering_on_message_expiration, duplicate_detection_history_time_window=duplicate_detection_history_time_window, max_delivery_count=max_delivery_count, enable_batched_operations=enable_batched_operations) try: return self.mgmt_client.create_queue(queue_name, queue=queue_properties, fail_on_exist=True) except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e)
Create a queue entity.
def create_queue( self, queue_name, lock_duration=30, max_size_in_megabytes=None, requires_duplicate_detection=False, requires_session=False, default_message_time_to_live=None, dead_lettering_on_message_expiration=False, duplicate_detection_history_time_window=None, max_delivery_count=None, enable_batched_operations=None): """Create a queue entity. :param queue_name: The name of the new queue. :type queue_name: str :param lock_duration: The lock durection in seconds for each message in the queue. :type lock_duration: int :param max_size_in_megabytes: The max size to allow the queue to grow to. :type max_size_in_megabytes: int :param requires_duplicate_detection: Whether the queue will require every message with a specified time frame to have a unique ID. Non-unique messages will be discarded. Default value is False. :type requires_duplicate_detection: bool :param requires_session: Whether the queue will be sessionful, and therefore require all message to have a Session ID and be received by a sessionful receiver. Default value is False. :type requires_session: bool :param default_message_time_to_live: The length of time a message will remain in the queue before it is either discarded or moved to the dead letter queue. :type default_message_time_to_live: ~datetime.timedelta :param dead_lettering_on_message_expiration: Whether to move expired messages to the dead letter queue. Default value is False. :type dead_lettering_on_message_expiration: bool :param duplicate_detection_history_time_window: The period within which all incoming messages must have a unique message ID. :type duplicate_detection_history_time_window: ~datetime.timedelta :param max_delivery_count: The maximum number of times a message will attempt to be delivered before it is moved to the dead letter queue. :type max_delivery_count: int :param enable_batched_operations: :type: enable_batched_operations: bool :raises: ~azure.servicebus.common.errors.ServiceBusConnectionError if the namespace is not found. :raises: ~azure.common.AzureConflictHttpError if a queue of the same name already exists. """ queue_properties = Queue( lock_duration="PT{}S".format(int(lock_duration)), max_size_in_megabytes=max_size_in_megabytes, requires_duplicate_detection=requires_duplicate_detection, requires_session=requires_session, default_message_time_to_live=default_message_time_to_live, dead_lettering_on_message_expiration=dead_lettering_on_message_expiration, duplicate_detection_history_time_window=duplicate_detection_history_time_window, max_delivery_count=max_delivery_count, enable_batched_operations=enable_batched_operations) try: return self.mgmt_client.create_queue(queue_name, queue=queue_properties, fail_on_exist=True) except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e)
azure-servicebus/azure/servicebus/common/mixins.py
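A hedged sketch of calling the mixin from a namespace-level ServiceBusClient; the connection string, queue name, and settings are placeholders.

from azure.servicebus import ServiceBusClient

client = ServiceBusClient.from_connection_string("<namespace-connection-string>")
client.create_queue(
    "my-queue",
    lock_duration=60,
    max_size_in_megabytes=1024,
    dead_lettering_on_message_expiration=True,
    max_delivery_count=5)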
Azure/azure-sdk-for-python
ServiceBusMixin.delete_queue
def delete_queue(self, queue_name, fail_not_exist=False): try: return self.mgmt_client.delete_queue(queue_name, fail_not_exist=fail_not_exist) except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e) except azure.common.AzureMissingResourceHttpError as e: raise ServiceBusResourceNotFound("Specificed queue '{}' does not exist.".format(queue_name), e)
Delete a queue entity by name.
def delete_queue(self, queue_name, fail_not_exist=False): """Delete a queue entity. :param queue_name: The name of the queue to delete. :type queue_name: str :param fail_not_exist: Whether to raise an exception if the named queue is not found. If set to True, a ServiceBusResourceNotFound will be raised. Default value is False. :type fail_not_exist: bool :raises: ~azure.servicebus.common.errors.ServiceBusConnectionError if the namesapce is not found. :raises: ~azure.servicebus.common.errors.ServiceBusResourceNotFound if the queue is not found and `fail_not_exist` is set to True. """ try: return self.mgmt_client.delete_queue(queue_name, fail_not_exist=fail_not_exist) except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e) except azure.common.AzureMissingResourceHttpError as e: raise ServiceBusResourceNotFound("Specificed queue '{}' does not exist.".format(queue_name), e)
azure-servicebus/azure/servicebus/common/mixins.py
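Continuing the sketch above, deletion is a single call; with fail_not_exist=True a missing queue raises ServiceBusResourceNotFound instead of returning quietly.

client.delete_queue("my-queue", fail_not_exist=True)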
Azure/azure-sdk-for-python
ServiceBusMixin.create_topic
def create_topic( self, topic_name, default_message_time_to_live=None, max_size_in_megabytes=None, requires_duplicate_detection=None, duplicate_detection_history_time_window=None, enable_batched_operations=None): topic_properties = Topic( max_size_in_megabytes=max_size_in_megabytes, requires_duplicate_detection=requires_duplicate_detection, default_message_time_to_live=default_message_time_to_live, duplicate_detection_history_time_window=duplicate_detection_history_time_window, enable_batched_operations=enable_batched_operations) try: return self.mgmt_client.create_topic(topic_name, topic=topic_properties, fail_on_exist=True) except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e)
Create a topic entity.
def create_topic( self, topic_name, default_message_time_to_live=None, max_size_in_megabytes=None, requires_duplicate_detection=None, duplicate_detection_history_time_window=None, enable_batched_operations=None): """Create a topic entity. :param topic_name: The name of the new topic. :type topic_name: str :param max_size_in_megabytes: The max size to allow the topic to grow to. :type max_size_in_megabytes: int :param requires_duplicate_detection: Whether the topic will require every message with a specified time frame to have a unique ID. Non-unique messages will be discarded. Default value is False. :type requires_duplicate_detection: bool :param default_message_time_to_live: The length of time a message will remain in the topic before it is either discarded or moved to the dead letter queue. :type default_message_time_to_live: ~datetime.timedelta :param duplicate_detection_history_time_window: The period within which all incoming messages must have a unique message ID. :type duplicate_detection_history_time_window: ~datetime.timedelta :param enable_batched_operations: :type: enable_batched_operations: bool :raises: ~azure.servicebus.common.errors.ServiceBusConnectionError if the namespace is not found. :raises: ~azure.common.AzureConflictHttpError if a topic of the same name already exists. """ topic_properties = Topic( max_size_in_megabytes=max_size_in_megabytes, requires_duplicate_detection=requires_duplicate_detection, default_message_time_to_live=default_message_time_to_live, duplicate_detection_history_time_window=duplicate_detection_history_time_window, enable_batched_operations=enable_batched_operations) try: return self.mgmt_client.create_topic(topic_name, topic=topic_properties, fail_on_exist=True) except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e)
azure-servicebus/azure/servicebus/common/mixins.py
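A hedged sketch for the topic variant, reusing the same `client` from the earlier queue example; values are placeholders.

client.create_topic(
    "my-topic",
    max_size_in_megabytes=1024,
    requires_duplicate_detection=False,
    enable_batched_operations=True)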
Azure/azure-sdk-for-python
ServiceBusMixin.delete_topic
def delete_topic(self, topic_name, fail_not_exist=False): try: return self.mgmt_client.delete_topic(topic_name, fail_not_exist=fail_not_exist) except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e) except azure.common.AzureMissingResourceHttpError as e: raise ServiceBusResourceNotFound("Specificed queue does not exist.", e)
Delete a topic entity.
def delete_topic(self, topic_name, fail_not_exist=False): """Delete a topic entity. :param topic_name: The name of the topic to delete. :type topic_name: str :param fail_not_exist: Whether to raise an exception if the named topic is not found. If set to True, a ServiceBusResourceNotFound will be raised. Default value is False. :type fail_not_exist: bool :raises: ~azure.servicebus.common.errors.ServiceBusConnectionError if the namesapce is not found. :raises: ~azure.servicebus.common.errors.ServiceBusResourceNotFound if the topic is not found and `fail_not_exist` is set to True. """ try: return self.mgmt_client.delete_topic(topic_name, fail_not_exist=fail_not_exist) except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e) except azure.common.AzureMissingResourceHttpError as e: raise ServiceBusResourceNotFound("Specificed queue does not exist.", e)
azure-servicebus/azure/servicebus/common/mixins.py
Azure/azure-sdk-for-python
ServiceBusMixin.create_subscription
def create_subscription( self, topic_name, subscription_name, lock_duration=30, requires_session=None, default_message_time_to_live=None, dead_lettering_on_message_expiration=None, dead_lettering_on_filter_evaluation_exceptions=None, enable_batched_operations=None, max_delivery_count=None): sub_properties = Subscription( lock_duration="PT{}S".format(int(lock_duration)), requires_session=requires_session, default_message_time_to_live=default_message_time_to_live, dead_lettering_on_message_expiration=dead_lettering_on_message_expiration, dead_lettering_on_filter_evaluation_exceptions=dead_lettering_on_filter_evaluation_exceptions, max_delivery_count=max_delivery_count, enable_batched_operations=enable_batched_operations) try: return self.mgmt_client.create_subscription( topic_name, subscription_name, subscription=sub_properties, fail_on_exist=True) except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e)
Create a subscription entity.
def create_subscription( self, topic_name, subscription_name, lock_duration=30, requires_session=None, default_message_time_to_live=None, dead_lettering_on_message_expiration=None, dead_lettering_on_filter_evaluation_exceptions=None, enable_batched_operations=None, max_delivery_count=None): """Create a subscription entity. :param topic_name: The name of the topic under which to create the subscription. :param subscription_name: The name of the new subscription. :type subscription_name: str :param lock_duration: The lock durection in seconds for each message in the subscription. :type lock_duration: int :param requires_session: Whether the subscription will be sessionful, and therefore require all message to have a Session ID and be received by a sessionful receiver. Default value is False. :type requires_session: bool :param default_message_time_to_live: The length of time a message will remain in the subscription before it is either discarded or moved to the dead letter queue. :type default_message_time_to_live: ~datetime.timedelta :param dead_lettering_on_message_expiration: Whether to move expired messages to the dead letter queue. Default value is False. :type dead_lettering_on_message_expiration: bool :param dead_lettering_on_filter_evaluation_exceptions: Whether to move messages that error on filtering into the dead letter queue. Default is False, and the messages will be discarded. :type dead_lettering_on_filter_evaluation_exceptions: bool :param max_delivery_count: The maximum number of times a message will attempt to be delivered before it is moved to the dead letter queue. :type max_delivery_count: int :param enable_batched_operations: :type: enable_batched_operations: bool :raises: ~azure.servicebus.common.errors.ServiceBusConnectionError if the namespace is not found. :raises: ~azure.common.AzureConflictHttpError if a queue of the same name already exists. """ sub_properties = Subscription( lock_duration="PT{}S".format(int(lock_duration)), requires_session=requires_session, default_message_time_to_live=default_message_time_to_live, dead_lettering_on_message_expiration=dead_lettering_on_message_expiration, dead_lettering_on_filter_evaluation_exceptions=dead_lettering_on_filter_evaluation_exceptions, max_delivery_count=max_delivery_count, enable_batched_operations=enable_batched_operations) try: return self.mgmt_client.create_subscription( topic_name, subscription_name, subscription=sub_properties, fail_on_exist=True) except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e)
azure-servicebus/azure/servicebus/common/mixins.py
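A hedged sketch, again assuming `client` is the ServiceBusClient from the earlier examples; topic and subscription names are placeholders.

client.create_subscription(
    "my-topic",
    "my-subscription",
    lock_duration=30,
    max_delivery_count=5,
    dead_lettering_on_message_expiration=True)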
Azure/azure-sdk-for-python
BaseClient.from_connection_string
def from_connection_string(cls, conn_str, name=None, **kwargs): address, policy, key, entity = parse_conn_str(conn_str) entity = name or entity address = build_uri(address, entity) name = address.split('/')[-1] return cls(address, name, shared_access_key_name=policy, shared_access_key_value=key, **kwargs)
Create a Client from a Service Bus connection string.
def from_connection_string(cls, conn_str, name=None, **kwargs): """Create a Client from a Service Bus connection string. :param conn_str: The connection string. :type conn_str: str :param name: The name of the entity, if the 'EntityName' property is not included in the connection string. """ address, policy, key, entity = parse_conn_str(conn_str) entity = name or entity address = build_uri(address, entity) name = address.split('/')[-1] return cls(address, name, shared_access_key_name=policy, shared_access_key_value=key, **kwargs)
azure-servicebus/azure/servicebus/common/mixins.py
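A hedged sketch using an entity-level client; the connection string is a placeholder, and the entity name is supplied via `name` because no EntityPath is embedded in the string.

from azure.servicebus import QueueClient

conn_str = ("Endpoint=sb://<namespace>.servicebus.windows.net/;"
            "SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=<key>")
queue_client = QueueClient.from_connection_string(conn_str, name="my-queue")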
Azure/azure-sdk-for-python
BaseClient.get_properties
def get_properties(self): try: self.entity = self._get_entity() self.properties = dict(self.entity) if hasattr(self.entity, 'requires_session'): self.requires_session = self.entity.requires_session return self.properties except AzureServiceBusResourceNotFound: raise ServiceBusResourceNotFound("Specificed queue does not exist.") except azure.common.AzureHttpError: self.entity = None self.properties = {} self.requires_session = False except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace not found", e)
Perform an operation to update the properties of the entity.
def get_properties(self): """Perform an operation to update the properties of the entity. :returns: The properties of the entity as a dictionary. :rtype: dict[str, Any] :raises: ~azure.servicebus.common.errors.ServiceBusResourceNotFound if the entity does not exist. :raises: ~azure.servicebus.common.errors.ServiceBusConnectionError if the endpoint cannot be reached. :raises: ~azure.common.AzureHTTPError if the credentials are invalid. """ try: self.entity = self._get_entity() self.properties = dict(self.entity) if hasattr(self.entity, 'requires_session'): self.requires_session = self.entity.requires_session return self.properties except AzureServiceBusResourceNotFound: raise ServiceBusResourceNotFound("Specificed queue does not exist.") except azure.common.AzureHttpError: self.entity = None self.properties = {} self.requires_session = False except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace not found", e)
azure-servicebus/azure/servicebus/common/mixins.py
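Continuing from the QueueClient above, a short hedged sketch; the exact keys in the returned dictionary depend on the entity description returned by the service.

props = queue_client.get_properties()
print(props)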
Azure/azure-sdk-for-python
SessionMixin.expired
def expired(self): if self.locked_until and self.locked_until <= datetime.datetime.now(): return True return False
Whether the receiver's lock on a particular session has expired.
def expired(self): """Whether the receivers lock on a particular session has expired. :rtype: bool """ if self.locked_until and self.locked_until <= datetime.datetime.now(): return True return False
azure-servicebus/azure/servicebus/common/mixins.py
Azure/azure-sdk-for-python
SessionOperations.create
def create( self, resource_group_name, node_name, session, user_name=None, password=None, retention_period=None, credential_data_format=None, encryption_certificate_thumbprint=None, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._create_initial( resource_group_name=resource_group_name, node_name=node_name, session=session, user_name=user_name, password=password, retention_period=retention_period, credential_data_format=credential_data_format, encryption_certificate_thumbprint=encryption_certificate_thumbprint, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): deserialized = self._deserialize('SessionResource', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
Creates a session for a node.
def create( self, resource_group_name, node_name, session, user_name=None, password=None, retention_period=None, credential_data_format=None, encryption_certificate_thumbprint=None, custom_headers=None, raw=False, polling=True, **operation_config): """Creates a session for a node. :param resource_group_name: The resource group name uniquely identifies the resource group within the user subscriptionId. :type resource_group_name: str :param node_name: The node name (256 characters maximum). :type node_name: str :param session: The sessionId from the user. :type session: str :param user_name: Encrypted User name to be used to connect to node. :type user_name: str :param password: Encrypted Password associated with user name. :type password: str :param retention_period: Session retention period. Possible values include: 'Session', 'Persistent' :type retention_period: str or ~azure.mgmt.servermanager.models.RetentionPeriod :param credential_data_format: Credential data format. Possible values include: 'RsaEncrypted' :type credential_data_format: str or ~azure.mgmt.servermanager.models.CredentialDataFormat :param encryption_certificate_thumbprint: Encryption certificate thumbprint. :type encryption_certificate_thumbprint: str :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns SessionResource or ClientRawResponse<SessionResource> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.servermanager.models.SessionResource] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.servermanager.models.SessionResource]] :raises: :class:`ErrorException<azure.mgmt.servermanager.models.ErrorException>` """ raw_result = self._create_initial( resource_group_name=resource_group_name, node_name=node_name, session=session, user_name=user_name, password=password, retention_period=retention_period, credential_data_format=credential_data_format, encryption_certificate_thumbprint=encryption_certificate_thumbprint, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): deserialized = self._deserialize('SessionResource', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
azure-mgmt-servermanager/azure/mgmt/servermanager/operations/session_operations.py
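A hedged long-running-operation sketch, assuming the operations group is exposed as `client.session` on a ServerManagement client; the names and pre-encrypted credentials are placeholders.

poller = client.session.create(
    resource_group_name="my-rg",
    node_name="my-node",
    session="b2f5c2a0-session",
    user_name="<rsa-encrypted-user>",
    password="<rsa-encrypted-password>",
    credential_data_format="RsaEncrypted")
session_resource = poller.result()  # block until the session is created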
Azure/azure-sdk-for-python
SubscriptionFactoryOperations.create_subscription
def create_subscription( self, billing_account_name, invoice_section_name, body, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._create_subscription_initial( billing_account_name=billing_account_name, invoice_section_name=invoice_section_name, body=body, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): header_dict = { 'Location': 'str', 'Retry-After': 'int', } deserialized = self._deserialize('SubscriptionCreationResult', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) client_raw_response.add_headers(header_dict) return client_raw_response return deserialized lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
Creates an Azure subscription.
def create_subscription( self, billing_account_name, invoice_section_name, body, custom_headers=None, raw=False, polling=True, **operation_config): """Creates an Azure subscription. :param billing_account_name: The name of the commerce root billing account. :type billing_account_name: str :param invoice_section_name: The name of the invoice section. :type invoice_section_name: str :param body: The subscription creation parameters. :type body: ~azure.mgmt.subscription.models.SubscriptionCreationParameters :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns SubscriptionCreationResult or ClientRawResponse<SubscriptionCreationResult> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.subscription.models.SubscriptionCreationResult] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.subscription.models.SubscriptionCreationResult]] :raises: :class:`ErrorResponseException<azure.mgmt.subscription.models.ErrorResponseException>` """ raw_result = self._create_subscription_initial( billing_account_name=billing_account_name, invoice_section_name=invoice_section_name, body=body, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): header_dict = { 'Location': 'str', 'Retry-After': 'int', } deserialized = self._deserialize('SubscriptionCreationResult', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) client_raw_response.add_headers(header_dict) return client_raw_response return deserialized lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
azure-mgmt-subscription/azure/mgmt/subscription/operations/subscription_factory_operations.py
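A hedged sketch, assuming `client` is the subscription management client with this operations group exposed as `subscription_factory`, and that `creation_parameters` is an already-populated SubscriptionCreationParameters; the billing account and invoice section names are placeholders.

poller = client.subscription_factory.create_subscription(
    "<billing-account-name>", "<invoice-section-name>", creation_parameters)
new_subscription = poller.result()
print(new_subscription)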
Azure/azure-sdk-for-python
LogAnalyticsOperations.export_request_rate_by_interval
def export_request_rate_by_interval( self, parameters, location, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._export_request_rate_by_interval_initial( parameters=parameters, location=location, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): deserialized = self._deserialize('LogAnalyticsOperationResult', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
Export logs that show API requests made by this subscription in the given time window, to surface throttling activity.
def export_request_rate_by_interval( self, parameters, location, custom_headers=None, raw=False, polling=True, **operation_config): """Export logs that show Api requests made by this subscription in the given time window to show throttling activities. :param parameters: Parameters supplied to the LogAnalytics getRequestRateByInterval Api. :type parameters: ~azure.mgmt.compute.v2018_04_01.models.RequestRateByIntervalInput :param location: The location upon which virtual-machine-sizes is queried. :type location: str :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns LogAnalyticsOperationResult or ClientRawResponse<LogAnalyticsOperationResult> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.compute.v2018_04_01.models.LogAnalyticsOperationResult] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.compute.v2018_04_01.models.LogAnalyticsOperationResult]] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ raw_result = self._export_request_rate_by_interval_initial( parameters=parameters, location=location, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): deserialized = self._deserialize('LogAnalyticsOperationResult', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
azure-mgmt-compute/azure/mgmt/compute/v2018_04_01/operations/log_analytics_operations.py
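A hedged sketch, assuming `compute_client` targets the 2018-04-01 compute API and `params` is an already-populated RequestRateByIntervalInput (destination container SAS URI, time window, and aggregation interval).

poller = compute_client.log_analytics.export_request_rate_by_interval(params, "westus2")
log_result = poller.result()
print(log_result)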
Azure/azure-sdk-for-python
_handle_output
def _handle_output(results_queue): results = [] while results_queue: queue_item = results_queue.pop() results.append(queue_item) return results
Scan output for exceptions. If there is an output from an add task collection call, add it to the results.
def _handle_output(results_queue): """Scan output for exceptions If there is an output from an add task collection call add it to the results. :param results_queue: Queue containing results of attempted add_collection's :type results_queue: collections.deque :return: list of TaskAddResults :rtype: list[~TaskAddResult] """ results = [] while results_queue: queue_item = results_queue.pop() results.append(queue_item) return results
azure-batch/azure/batch/custom/patch.py
Azure/azure-sdk-for-python
_TaskWorkflowManager._bulk_add_tasks
def _bulk_add_tasks(self, results_queue, chunk_tasks_to_add): try: add_collection_response = self._original_add_collection( self._client, self._job_id, chunk_tasks_to_add, self._task_add_collection_options, self._custom_headers, self._raw) except BatchErrorException as e: if e.error.code == "RequestBodyTooLarge": if len(chunk_tasks_to_add) == 1: failed_task = chunk_tasks_to_add.pop() self.errors.appendleft(e) _LOGGER.error("Failed to add task with ID %s due to the body" " exceeding the maximum request size", failed_task.id) else: midpoint = int(len(chunk_tasks_to_add) / 2) with self._max_tasks_lock: if midpoint < self._max_tasks_per_request: self._max_tasks_per_request = midpoint _LOGGER.info("Amount of tasks per request reduced from %s to %s due to the" " request body being too large", str(self._max_tasks_per_request), str(midpoint)) self.tasks_to_add.extendleft(chunk_tasks_to_add[midpoint:]) self._bulk_add_tasks(results_queue, chunk_tasks_to_add[:midpoint]) elif 500 <= e.response.status_code <= 599: self.tasks_to_add.extendleft(chunk_tasks_to_add) else: self.tasks_to_add.extendleft(chunk_tasks_to_add) self.errors.appendleft(e) except Exception as e: self.tasks_to_add.extendleft(chunk_tasks_to_add) self.errors.appendleft(e) else: try: add_collection_response = add_collection_response.output except AttributeError: pass for task_result in add_collection_response.value: if task_result.status == TaskAddStatus.server_error: with self._pending_queue_lock: for task in chunk_tasks_to_add: if task.id == task_result.task_id: self.tasks_to_add.appendleft(task) elif (task_result.status == TaskAddStatus.client_error and not task_result.error.code == "TaskExists"): self.failure_tasks.appendleft(task_result) else: results_queue.appendleft(task_result)
Adds a chunk of tasks to the job. Retries the chunk if the body exceeds the maximum request size, and retries individual tasks that failed due to server errors.
def _bulk_add_tasks(self, results_queue, chunk_tasks_to_add): """Adds a chunk of tasks to the job Retry chunk if body exceeds the maximum request size and retry tasks if failed due to server errors. :param results_queue: Queue to place the return value of the request :type results_queue: collections.deque :param chunk_tasks_to_add: Chunk of at most 100 tasks with retry details :type chunk_tasks_to_add: list[~TrackedCloudTask] """ try: add_collection_response = self._original_add_collection( self._client, self._job_id, chunk_tasks_to_add, self._task_add_collection_options, self._custom_headers, self._raw) except BatchErrorException as e: # In case of a chunk exceeding the MaxMessageSize split chunk in half # and resubmit smaller chunk requests # TODO: Replace string with constant variable once available in SDK if e.error.code == "RequestBodyTooLarge": # pylint: disable=no-member # In this case the task is misbehaved and will not be able to be added due to: # 1) The task exceeding the max message size # 2) A single cell of the task exceeds the per-cell limit, or # 3) Sum of all cells exceeds max row limit if len(chunk_tasks_to_add) == 1: failed_task = chunk_tasks_to_add.pop() self.errors.appendleft(e) _LOGGER.error("Failed to add task with ID %s due to the body" " exceeding the maximum request size", failed_task.id) else: # Assumption: Tasks are relatively close in size therefore if one batch exceeds size limit # we should decrease the initial task collection size to avoid repeating the error # Midpoint is lower bounded by 1 due to above base case midpoint = int(len(chunk_tasks_to_add) / 2) # Restrict one thread at a time to do this compare and set, # therefore forcing max_tasks_per_request to be strictly decreasing with self._max_tasks_lock: if midpoint < self._max_tasks_per_request: self._max_tasks_per_request = midpoint _LOGGER.info("Amount of tasks per request reduced from %s to %s due to the" " request body being too large", str(self._max_tasks_per_request), str(midpoint)) # Not the most efficient solution for all cases, but the goal of this is to handle this # exception and have it work in all cases where tasks are well behaved # Behavior retries as a smaller chunk and # appends extra tasks to queue to be picked up by another thread . 
self.tasks_to_add.extendleft(chunk_tasks_to_add[midpoint:]) self._bulk_add_tasks(results_queue, chunk_tasks_to_add[:midpoint]) # Retry server side errors elif 500 <= e.response.status_code <= 599: self.tasks_to_add.extendleft(chunk_tasks_to_add) else: # Re-add to pending queue as unknown status / don't have result self.tasks_to_add.extendleft(chunk_tasks_to_add) # Unknown State - don't know if tasks failed to add or were successful self.errors.appendleft(e) except Exception as e: # pylint: disable=broad-except # Re-add to pending queue as unknown status / don't have result self.tasks_to_add.extendleft(chunk_tasks_to_add) # Unknown State - don't know if tasks failed to add or were successful self.errors.appendleft(e) else: try: add_collection_response = add_collection_response.output except AttributeError: pass for task_result in add_collection_response.value: # pylint: disable=no-member if task_result.status == TaskAddStatus.server_error: # Server error will be retried with self._pending_queue_lock: for task in chunk_tasks_to_add: if task.id == task_result.task_id: self.tasks_to_add.appendleft(task) elif (task_result.status == TaskAddStatus.client_error and not task_result.error.code == "TaskExists"): # Client error will be recorded unless Task already exists self.failure_tasks.appendleft(task_result) else: results_queue.appendleft(task_result)
azure-batch/azure/batch/custom/patch.py
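The interesting part of the method above is the halve-and-retry strategy for oversized request bodies. The standalone sketch below illustrates that idea only; it is not the SDK's actual retry path, which also re-queues work across threads and records errors.

class PayloadTooLarge(Exception):
    """Stand-in for the service's RequestBodyTooLarge error."""

def add_with_splitting(submit, tasks):
    # Try the whole chunk first; if the body is too large, split it in half and retry each part.
    try:
        submit(tasks)
    except PayloadTooLarge:
        if len(tasks) == 1:
            raise  # a single task over the limit cannot be split any further
        midpoint = len(tasks) // 2
        add_with_splitting(submit, tasks[:midpoint])
        add_with_splitting(submit, tasks[midpoint:])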
Azure/azure-sdk-for-python
_TaskWorkflowManager.task_collection_thread_handler
def task_collection_thread_handler(self, results_queue): while self.tasks_to_add and not self.errors: max_tasks = self._max_tasks_per_request chunk_tasks_to_add = [] with self._pending_queue_lock: while len(chunk_tasks_to_add) < max_tasks and self.tasks_to_add: chunk_tasks_to_add.append(self.tasks_to_add.pop()) if chunk_tasks_to_add: self._bulk_add_tasks(results_queue, chunk_tasks_to_add)
Main method for a worker to run. Pops a chunk of tasks off the collection of pending tasks to be added and submits them to be added.
def task_collection_thread_handler(self, results_queue): """Main method for worker to run Pops a chunk of tasks off the collection of pending tasks to be added and submits them to be added. :param collections.deque results_queue: Queue for worker to output results to """ # Add tasks until either we run out or we run into an unexpected error while self.tasks_to_add and not self.errors: max_tasks = self._max_tasks_per_request # local copy chunk_tasks_to_add = [] with self._pending_queue_lock: while len(chunk_tasks_to_add) < max_tasks and self.tasks_to_add: chunk_tasks_to_add.append(self.tasks_to_add.pop()) if chunk_tasks_to_add: self._bulk_add_tasks(results_queue, chunk_tasks_to_add)
azure-batch/azure/batch/custom/patch.py
Azure/azure-sdk-for-python
build_config
def build_config(config : Dict[str, Any]) -> Dict[str, str]: result = config.copy() is_stable = result.pop("is_stable", False) if is_stable: result["classifier"] = "Development Status :: 5 - Production/Stable" else: result["classifier"] = "Development Status :: 4 - Beta" package_name = result["package_name"] result["package_nspkg"] = result.pop( "package_nspkg", package_name[:package_name.rindex('-')]+"-nspkg" ) result['is_arm'] = result.pop("is_arm", True) result['need_msrestazure'] = result.pop("need_msrestazure", True) package_parts = result["package_nspkg"][:-len('-nspkg')].split('-') result['nspkg_names'] = [ ".".join(package_parts[:i+1]) for i in range(len(package_parts)) ] result['init_names'] = [ "/".join(package_parts[:i+1])+"/__init__.py" for i in range(len(package_parts)) ] return result
Will build the actual config for Jinja2, based on SDK config.
def build_config(config : Dict[str, Any]) -> Dict[str, str]: """Will build the actual config for Jinja2, based on SDK config. """ result = config.copy() # Manage the classifier stable/beta is_stable = result.pop("is_stable", False) if is_stable: result["classifier"] = "Development Status :: 5 - Production/Stable" else: result["classifier"] = "Development Status :: 4 - Beta" # Manage the nspkg package_name = result["package_name"] result["package_nspkg"] = result.pop( "package_nspkg", package_name[:package_name.rindex('-')]+"-nspkg" ) # ARM? result['is_arm'] = result.pop("is_arm", True) # Do I need msrestazure for this package? result['need_msrestazure'] = result.pop("need_msrestazure", True) # Pre-compute some Jinja variable that are complicated to do inside the templates package_parts = result["package_nspkg"][:-len('-nspkg')].split('-') result['nspkg_names'] = [ ".".join(package_parts[:i+1]) for i in range(len(package_parts)) ] result['init_names'] = [ "/".join(package_parts[:i+1])+"/__init__.py" for i in range(len(package_parts)) ] # Return result return result
azure-sdk-tools/packaging_tools/__init__.py
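A short hedged example of what the helper produces; the input dictionary mimics a typical SDK packaging config, and the expected values follow directly from the code above.

conf = build_config({
    "package_name": "azure-mgmt-example",
    "is_stable": False,
})
print(conf["classifier"])     # Development Status :: 4 - Beta
print(conf["package_nspkg"])  # azure-mgmt-nspkg
print(conf["nspkg_names"])    # ['azure', 'azure.mgmt']
print(conf["init_names"])     # ['azure/__init__.py', 'azure/mgmt/__init__.py']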
Azure/azure-sdk-for-python
GlobalUsersOperations.reset_password
def reset_password( self, user_name, reset_password_payload, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._reset_password_initial( user_name=user_name, reset_password_payload=reset_password_payload, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
Resets the user password on an environment. This operation can take a while to complete.
def reset_password( self, user_name, reset_password_payload, custom_headers=None, raw=False, polling=True, **operation_config): """Resets the user password on an environment This operation can take a while to complete. :param user_name: The name of the user. :type user_name: str :param reset_password_payload: Represents the payload for resetting passwords. :type reset_password_payload: ~azure.mgmt.labservices.models.ResetPasswordPayload :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns None or ClientRawResponse<None> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ raw_result = self._reset_password_initial( user_name=user_name, reset_password_payload=reset_password_payload, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
azure-mgmt-labservices/azure/mgmt/labservices/operations/global_users_operations.py
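A hedged sketch, assuming `client` exposes this operations group as `global_users` and `payload` is an already-populated ResetPasswordPayload; the user name is a placeholder.

poller = client.global_users.reset_password("student-user", payload)
poller.wait()  # block until the password reset completes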
Azure/azure-sdk-for-python
GlobalUsersOperations.start_environment
def start_environment( self, user_name, environment_id, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._start_environment_initial( user_name=user_name, environment_id=environment_id, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
Starts an environment by starting all resources inside the environment. This operation can take a while to complete.
def start_environment( self, user_name, environment_id, custom_headers=None, raw=False, polling=True, **operation_config): """Starts an environment by starting all resources inside the environment. This operation can take a while to complete. :param user_name: The name of the user. :type user_name: str :param environment_id: The resourceId of the environment :type environment_id: str :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns None or ClientRawResponse<None> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ raw_result = self._start_environment_initial( user_name=user_name, environment_id=environment_id, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
azure-mgmt-labservices/azure/mgmt/labservices/operations/global_users_operations.py
Azure/azure-sdk-for-python
_create_message
def _create_message(response, service_instance): respbody = response.body custom_properties = {} broker_properties = None message_type = None message_location = None for name, value in response.headers: if name.lower() == 'brokerproperties': broker_properties = json.loads(value) elif name.lower() == 'content-type': message_type = value elif name.lower() == 'location': message_location = value elif name.lower() not in ['transfer-encoding', 'server', 'date', 'strict-transport-security']: if '"' in value: value = value[1:-1].replace('\\"', '"') try: custom_properties[name] = datetime.strptime( value, '%a, %d %b %Y %H:%M:%S GMT') except ValueError: custom_properties[name] = value elif value.lower() == 'true': custom_properties[name] = True elif value.lower() == 'false': custom_properties[name] = False else: try: float_value = float(value) if str(int(float_value)) == value: custom_properties[name] = int(value) else: custom_properties[name] = float_value except ValueError: pass if message_type is None: message = Message( respbody, service_instance, message_location, custom_properties, 'application/atom+xml;type=entry;charset=utf-8', broker_properties) else: message = Message(respbody, service_instance, message_location, custom_properties, message_type, broker_properties) return message
Create message from response.
def _create_message(response, service_instance): ''' Create message from response. response: response from Service Bus cloud server. service_instance: the Service Bus client. ''' respbody = response.body custom_properties = {} broker_properties = None message_type = None message_location = None # gets all information from respheaders. for name, value in response.headers: if name.lower() == 'brokerproperties': broker_properties = json.loads(value) elif name.lower() == 'content-type': message_type = value elif name.lower() == 'location': message_location = value # Exclude common HTTP headers to avoid noise. List # is not exhaustive. At worst, custom properties will contains # an unexpected content generated by the webserver and not the customer. elif name.lower() not in ['transfer-encoding', 'server', 'date', 'strict-transport-security']: # Follow the spec: # https://docs.microsoft.com/rest/api/servicebus/message-headers-and-properties if '"' in value: value = value[1:-1].replace('\\"', '"') try: custom_properties[name] = datetime.strptime( value, '%a, %d %b %Y %H:%M:%S GMT') except ValueError: custom_properties[name] = value elif value.lower() == 'true': custom_properties[name] = True elif value.lower() == 'false': custom_properties[name] = False else: # in theory, only int or float try: # int('3.1') doesn't work so need to get float('3.14') first float_value = float(value) if str(int(float_value)) == value: custom_properties[name] = int(value) else: custom_properties[name] = float_value except ValueError: # If we are here, this header does not respect the spec. # Could be an unexpected HTTP header or an invalid # header value. In both case we ignore without failing. pass if message_type is None: message = Message( respbody, service_instance, message_location, custom_properties, 'application/atom+xml;type=entry;charset=utf-8', broker_properties) else: message = Message(respbody, service_instance, message_location, custom_properties, message_type, broker_properties) return message
azure-servicebus/azure/servicebus/control_client/_serialization.py
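The header-to-property coercion rules described in the comments above can be summarised in a small standalone sketch. This is an illustration only, not the SDK function; in particular, the SDK silently drops values it cannot coerce to a number, while this sketch keeps the raw string.

from datetime import datetime

def coerce_header_value(value):
    # Quoted values are either RFC 1123 dates or plain strings.
    if value.startswith('"') and value.endswith('"'):
        text = value[1:-1].replace('\\"', '"')
        try:
            return datetime.strptime(text, '%a, %d %b %Y %H:%M:%S GMT')
        except ValueError:
            return text
    if value.lower() in ('true', 'false'):
        return value.lower() == 'true'
    try:
        as_float = float(value)
        return int(as_float) if str(int(as_float)) == value else as_float
    except ValueError:
        return value  # not a date, bool, or number: keep as-is (the SDK ignores such values)

coerce_header_value('"Mon, 01 Jan 2018 00:00:00 GMT"')  # -> datetime(2018, 1, 1, 0, 0)
coerce_header_value('3.14')                             # -> 3.14
coerce_header_value('false')                            # -> False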
Azure/azure-sdk-for-python
_convert_etree_element_to_rule
def _convert_etree_element_to_rule(entry_element): rule = Rule() rule_element = entry_element.find('./atom:content/sb:RuleDescription', _etree_sb_feed_namespaces) if rule_element is not None: filter_element = rule_element.find('./sb:Filter', _etree_sb_feed_namespaces) if filter_element is not None: rule.filter_type = filter_element.attrib.get( _make_etree_ns_attr_name(_etree_sb_feed_namespaces['i'], 'type'), None) sql_exp_element = filter_element.find('./sb:SqlExpression', _etree_sb_feed_namespaces) if sql_exp_element is not None: rule.filter_expression = sql_exp_element.text action_element = rule_element.find('./sb:Action', _etree_sb_feed_namespaces) if action_element is not None: rule.action_type = action_element.attrib.get( _make_etree_ns_attr_name(_etree_sb_feed_namespaces['i'], 'type'), None) sql_exp_element = action_element.find('./sb:SqlExpression', _etree_sb_feed_namespaces) if sql_exp_element is not None: rule.action_expression = sql_exp_element.text for name, value in _ETreeXmlToObject.get_entry_properties_from_element( entry_element, True, '/rules').items(): setattr(rule, name, value) return rule
Converts an entry element to a Rule object.
def _convert_etree_element_to_rule(entry_element): ''' Converts entry element to rule object. The format of xml for rule: <entry xmlns='http://www.w3.org/2005/Atom'> <content type='application/xml'> <RuleDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"> <Filter i:type="SqlFilterExpression"> <SqlExpression>MyProperty='XYZ'</SqlExpression> </Filter> <Action i:type="SqlFilterAction"> <SqlExpression>set MyProperty2 = 'ABC'</SqlExpression> </Action> </RuleDescription> </content> </entry> ''' rule = Rule() rule_element = entry_element.find('./atom:content/sb:RuleDescription', _etree_sb_feed_namespaces) if rule_element is not None: filter_element = rule_element.find('./sb:Filter', _etree_sb_feed_namespaces) if filter_element is not None: rule.filter_type = filter_element.attrib.get( _make_etree_ns_attr_name(_etree_sb_feed_namespaces['i'], 'type'), None) sql_exp_element = filter_element.find('./sb:SqlExpression', _etree_sb_feed_namespaces) if sql_exp_element is not None: rule.filter_expression = sql_exp_element.text action_element = rule_element.find('./sb:Action', _etree_sb_feed_namespaces) if action_element is not None: rule.action_type = action_element.attrib.get( _make_etree_ns_attr_name(_etree_sb_feed_namespaces['i'], 'type'), None) sql_exp_element = action_element.find('./sb:SqlExpression', _etree_sb_feed_namespaces) if sql_exp_element is not None: rule.action_expression = sql_exp_element.text # extract id, updated and name value from feed entry and set them of rule. for name, value in _ETreeXmlToObject.get_entry_properties_from_element( entry_element, True, '/rules').items(): setattr(rule, name, value) return rule
azure-servicebus/azure/servicebus/control_client/_serialization.py
Azure/azure-sdk-for-python
_convert_etree_element_to_queue
def _convert_etree_element_to_queue(entry_element): queue = Queue() invalid_queue = True queue_element = entry_element.find('./atom:content/sb:QueueDescription', _etree_sb_feed_namespaces) if queue_element is not None: mappings = [ ('LockDuration', 'lock_duration', None), ('MaxSizeInMegabytes', 'max_size_in_megabytes', int), ('RequiresDuplicateDetection', 'requires_duplicate_detection', _parse_bool), ('RequiresSession', 'requires_session', _parse_bool), ('DefaultMessageTimeToLive', 'default_message_time_to_live', None), ('DeadLetteringOnMessageExpiration', 'dead_lettering_on_message_expiration', _parse_bool), ('DuplicateDetectionHistoryTimeWindow', 'duplicate_detection_history_time_window', None), ('EnableBatchedOperations', 'enable_batched_operations', _parse_bool), ('MaxDeliveryCount', 'max_delivery_count', int), ('MessageCount', 'message_count', int), ('SizeInBytes', 'size_in_bytes', int), ] for mapping in mappings: if _read_etree_element(queue_element, mapping[0], queue, mapping[1], mapping[2]): invalid_queue = False if invalid_queue: raise AzureServiceBusResourceNotFound(_ERROR_QUEUE_NOT_FOUND) for name, value in _ETreeXmlToObject.get_entry_properties_from_element( entry_element, True).items(): setattr(queue, name, value) return queue
Converts an entry element to a Queue object. The expected QueueDescription XML format is shown in the method docstring.
def _convert_etree_element_to_queue(entry_element): ''' Converts entry element to queue object. The format of xml response for queue: <QueueDescription xmlns=\"http://schemas.microsoft.com/netservices/2010/10/servicebus/connect\"> <MaxSizeInBytes>10000</MaxSizeInBytes> <DefaultMessageTimeToLive>PT5M</DefaultMessageTimeToLive> <LockDuration>PT2M</LockDuration> <RequiresGroupedReceives>False</RequiresGroupedReceives> <SupportsDuplicateDetection>False</SupportsDuplicateDetection> ... </QueueDescription> ''' queue = Queue() # get node for each attribute in Queue class, if nothing found then the # response is not valid xml for Queue. invalid_queue = True queue_element = entry_element.find('./atom:content/sb:QueueDescription', _etree_sb_feed_namespaces) if queue_element is not None: mappings = [ ('LockDuration', 'lock_duration', None), ('MaxSizeInMegabytes', 'max_size_in_megabytes', int), ('RequiresDuplicateDetection', 'requires_duplicate_detection', _parse_bool), ('RequiresSession', 'requires_session', _parse_bool), ('DefaultMessageTimeToLive', 'default_message_time_to_live', None), ('DeadLetteringOnMessageExpiration', 'dead_lettering_on_message_expiration', _parse_bool), ('DuplicateDetectionHistoryTimeWindow', 'duplicate_detection_history_time_window', None), ('EnableBatchedOperations', 'enable_batched_operations', _parse_bool), ('MaxDeliveryCount', 'max_delivery_count', int), ('MessageCount', 'message_count', int), ('SizeInBytes', 'size_in_bytes', int), ] for mapping in mappings: if _read_etree_element(queue_element, mapping[0], queue, mapping[1], mapping[2]): invalid_queue = False if invalid_queue: raise AzureServiceBusResourceNotFound(_ERROR_QUEUE_NOT_FOUND) # extract id, updated and name value from feed entry and set them of queue. for name, value in _ETreeXmlToObject.get_entry_properties_from_element( entry_element, True).items(): setattr(queue, name, value) return queue
azure-servicebus/azure/servicebus/control_client/_serialization.py
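The queue conversion is driven by a mapping table of (XML element name, attribute name, converter) tuples handed to _read_etree_element. A hedged, stdlib-only sketch of that idiom, with a hypothetical _read_sketch helper standing in for the internal one:

import xml.etree.ElementTree as ET

NS = {'sb': 'http://schemas.microsoft.com/netservices/2010/10/servicebus/connect'}

def _parse_bool_sketch(value):
    # Service Bus serializes booleans as the strings 'true' / 'false'.
    return value.lower() == 'true'

def _read_sketch(src, xml_name, target, attr_name, converter):
    # Hypothetical stand-in for _read_etree_element: copy one child element's
    # text onto the target object, applying the converter when one is given.
    node = src.find('./sb:{}'.format(xml_name), NS)
    if node is None or node.text is None:
        return False
    setattr(target, attr_name, converter(node.text) if converter else node.text)
    return True

desc = ET.fromstring(
    '<QueueDescription xmlns="{}">'
    '<MaxSizeInMegabytes>1024</MaxSizeInMegabytes>'
    '<RequiresSession>false</RequiresSession>'
    '</QueueDescription>'.format(NS['sb']))

class QueueSketch(object):
    pass

queue, found_any = QueueSketch(), False
for xml_name, attr_name, converter in [
        ('MaxSizeInMegabytes', 'max_size_in_megabytes', int),
        ('RequiresSession', 'requires_session', _parse_bool_sketch)]:
    found_any |= _read_sketch(desc, xml_name, queue, attr_name, converter)
print(found_any, queue.max_size_in_megabytes, queue.requires_session)  # True 1024 False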
Azure/azure-sdk-for-python
_convert_etree_element_to_topic
def _convert_etree_element_to_topic(entry_element): topic = Topic() invalid_topic = True topic_element = entry_element.find('./atom:content/sb:TopicDescription', _etree_sb_feed_namespaces) if topic_element is not None: mappings = [ ('DefaultMessageTimeToLive', 'default_message_time_to_live', None), ('MaxSizeInMegabytes', 'max_size_in_megabytes', int), ('RequiresDuplicateDetection', 'requires_duplicate_detection', _parse_bool), ('DuplicateDetectionHistoryTimeWindow', 'duplicate_detection_history_time_window', None), ('EnableBatchedOperations', 'enable_batched_operations', _parse_bool), ('SizeInBytes', 'size_in_bytes', int), ] for mapping in mappings: if _read_etree_element(topic_element, mapping[0], topic, mapping[1], mapping[2]): invalid_topic = False if invalid_topic: raise AzureServiceBusResourceNotFound(_ERROR_TOPIC_NOT_FOUND) for name, value in _ETreeXmlToObject.get_entry_properties_from_element( entry_element, True).items(): setattr(topic, name, value) return topic
Converts an entry element to a Topic object. The expected topic entry XML format is shown in the method docstring.
def _convert_etree_element_to_topic(entry_element): '''Converts entry element to topic The xml format for topic: <entry xmlns='http://www.w3.org/2005/Atom'> <content type='application/xml'> <TopicDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"> <DefaultMessageTimeToLive>P10675199DT2H48M5.4775807S</DefaultMessageTimeToLive> <MaxSizeInMegabytes>1024</MaxSizeInMegabytes> <RequiresDuplicateDetection>false</RequiresDuplicateDetection> <DuplicateDetectionHistoryTimeWindow>P7D</DuplicateDetectionHistoryTimeWindow> <DeadLetteringOnFilterEvaluationExceptions>true</DeadLetteringOnFilterEvaluationExceptions> </TopicDescription> </content> </entry> ''' topic = Topic() invalid_topic = True topic_element = entry_element.find('./atom:content/sb:TopicDescription', _etree_sb_feed_namespaces) if topic_element is not None: mappings = [ ('DefaultMessageTimeToLive', 'default_message_time_to_live', None), ('MaxSizeInMegabytes', 'max_size_in_megabytes', int), ('RequiresDuplicateDetection', 'requires_duplicate_detection', _parse_bool), ('DuplicateDetectionHistoryTimeWindow', 'duplicate_detection_history_time_window', None), ('EnableBatchedOperations', 'enable_batched_operations', _parse_bool), ('SizeInBytes', 'size_in_bytes', int), ] for mapping in mappings: if _read_etree_element(topic_element, mapping[0], topic, mapping[1], mapping[2]): invalid_topic = False if invalid_topic: raise AzureServiceBusResourceNotFound(_ERROR_TOPIC_NOT_FOUND) # extract id, updated and name value from feed entry and set them of topic. for name, value in _ETreeXmlToObject.get_entry_properties_from_element( entry_element, True).items(): setattr(topic, name, value) return topic
azure-servicebus/azure/servicebus/control_client/_serialization.py
Azure/azure-sdk-for-python
_convert_etree_element_to_subscription
def _convert_etree_element_to_subscription(entry_element): subscription = Subscription() subscription_element = entry_element.find('./atom:content/sb:SubscriptionDescription', _etree_sb_feed_namespaces) if subscription_element is not None: mappings = [ ('LockDuration', 'lock_duration', None), ('RequiresSession', 'requires_session', _parse_bool), ('DefaultMessageTimeToLive', 'default_message_time_to_live', None), ('DeadLetteringOnFilterEvaluationExceptions', 'dead_lettering_on_filter_evaluation_exceptions', _parse_bool), ('DeadLetteringOnMessageExpiration', 'dead_lettering_on_message_expiration', _parse_bool), ('EnableBatchedOperations', 'enable_batched_operations', _parse_bool), ('MaxDeliveryCount', 'max_delivery_count', int), ('MessageCount', 'message_count', int), ] for mapping in mappings: _read_etree_element(subscription_element, mapping[0], subscription, mapping[1], mapping[2]) for name, value in _ETreeXmlToObject.get_entry_properties_from_element( entry_element, True, '/subscriptions').items(): setattr(subscription, name, value) return subscription
Converts an entry element to a Subscription object. The expected subscription entry XML format is shown in the method docstring.
def _convert_etree_element_to_subscription(entry_element): '''Converts entry element to subscription The xml format for subscription: <entry xmlns='http://www.w3.org/2005/Atom'> <content type='application/xml'> <SubscriptionDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"> <LockDuration>PT5M</LockDuration> <RequiresSession>false</RequiresSession> <DefaultMessageTimeToLive>P10675199DT2H48M5.4775807S</DefaultMessageTimeToLive> <DeadLetteringOnMessageExpiration>false</DeadLetteringOnMessageExpiration> <DeadLetteringOnFilterEvaluationExceptions>true</DeadLetteringOnFilterEvaluationExceptions> </SubscriptionDescription> </content> </entry> ''' subscription = Subscription() subscription_element = entry_element.find('./atom:content/sb:SubscriptionDescription', _etree_sb_feed_namespaces) if subscription_element is not None: mappings = [ ('LockDuration', 'lock_duration', None), ('RequiresSession', 'requires_session', _parse_bool), ('DefaultMessageTimeToLive', 'default_message_time_to_live', None), ('DeadLetteringOnFilterEvaluationExceptions', 'dead_lettering_on_filter_evaluation_exceptions', _parse_bool), # pylint: disable=line-too-long ('DeadLetteringOnMessageExpiration', 'dead_lettering_on_message_expiration', _parse_bool), ('EnableBatchedOperations', 'enable_batched_operations', _parse_bool), ('MaxDeliveryCount', 'max_delivery_count', int), ('MessageCount', 'message_count', int), ] for mapping in mappings: _read_etree_element(subscription_element, mapping[0], subscription, mapping[1], mapping[2]) for name, value in _ETreeXmlToObject.get_entry_properties_from_element( entry_element, True, '/subscriptions').items(): setattr(subscription, name, value) return subscription
azure-servicebus/azure/servicebus/control_client/_serialization.py
Azure/azure-sdk-for-python
CertificateOperations.create
def create( self, resource_group_name, account_name, certificate_name, parameters, if_match=None, if_none_match=None, custom_headers=None, raw=False, **operation_config): raw_result = self._create_initial( resource_group_name=resource_group_name, account_name=account_name, certificate_name=certificate_name, parameters=parameters, if_match=if_match, if_none_match=if_none_match, custom_headers=custom_headers, raw=True, **operation_config ) if raw: return raw_result def long_running_send(): return raw_result.response def get_long_running_status(status_link, headers=None): request = self._client.get(status_link) if headers: request.headers.update(headers) header_parameters = {} header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id'] return self._client.send( request, header_parameters, stream=False, **operation_config) def get_long_running_output(response): if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp header_dict = { 'ETag': 'str', } deserialized = self._deserialize('Certificate', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) client_raw_response.add_headers(header_dict) return client_raw_response return deserialized long_running_operation_timeout = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) return AzureOperationPoller( long_running_send, get_long_running_output, get_long_running_status, long_running_operation_timeout)
Creates a new certificate inside the specified account.
def create( self, resource_group_name, account_name, certificate_name, parameters, if_match=None, if_none_match=None, custom_headers=None, raw=False, **operation_config): """Creates a new certificate inside the specified account. :param resource_group_name: The name of the resource group that contains the Batch account. :type resource_group_name: str :param account_name: The name of the Batch account. :type account_name: str :param certificate_name: The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. :type certificate_name: str :param parameters: Additional parameters for certificate creation. :type parameters: ~azure.mgmt.batch.models.CertificateCreateOrUpdateParameters :param if_match: The entity state (ETag) version of the certificate to update. A value of "*" can be used to apply the operation only if the certificate already exists. If omitted, this operation will always be applied. :type if_match: str :param if_none_match: Set to '*' to allow a new certificate to be created, but to prevent updating an existing certificate. Other values will be ignored. :type if_none_match: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :return: An instance of AzureOperationPoller that returns Certificate or ClientRawResponse if raw=true :rtype: ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.batch.models.Certificate] or ~msrest.pipeline.ClientRawResponse :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ raw_result = self._create_initial( resource_group_name=resource_group_name, account_name=account_name, certificate_name=certificate_name, parameters=parameters, if_match=if_match, if_none_match=if_none_match, custom_headers=custom_headers, raw=True, **operation_config ) if raw: return raw_result # Construct and send request def long_running_send(): return raw_result.response def get_long_running_status(status_link, headers=None): request = self._client.get(status_link) if headers: request.headers.update(headers) header_parameters = {} header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id'] return self._client.send( request, header_parameters, stream=False, **operation_config) def get_long_running_output(response): if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp header_dict = { 'ETag': 'str', } deserialized = self._deserialize('Certificate', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) client_raw_response.add_headers(header_dict) return client_raw_response return deserialized long_running_operation_timeout = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) return AzureOperationPoller( long_running_send, get_long_running_output, get_long_running_status, long_running_operation_timeout)
azure-mgmt-batch/azure/mgmt/batch/operations/certificate_operations.py
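A brief usage sketch under stated assumptions: batch_client is an already-constructed BatchManagementClient whose certificate operations group is exposed as 'certificate', cert_params is a CertificateCreateOrUpdateParameters instance, and the resource names are hypothetical. Because create returns an AzureOperationPoller, the caller blocks on result() for the deserialized Certificate.

# Assumptions: 'batch_client' and 'cert_params' already exist; names are hypothetical.
poller = batch_client.certificate.create(
    resource_group_name='my-rg',
    account_name='mybatchaccount',
    certificate_name='SHA1-a3d1c5',   # algorithm-thumbprint form, per the docstring
    parameters=cert_params)
certificate = poller.result()          # block until the long-running operation finishes
print(certificate.id)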
Azure/azure-sdk-for-python
CertificateOperations.delete
def delete( self, resource_group_name, account_name, certificate_name, custom_headers=None, raw=False, **operation_config): raw_result = self._delete_initial( resource_group_name=resource_group_name, account_name=account_name, certificate_name=certificate_name, custom_headers=custom_headers, raw=True, **operation_config ) if raw: return raw_result def long_running_send(): return raw_result.response def get_long_running_status(status_link, headers=None): request = self._client.get(status_link) if headers: request.headers.update(headers) header_parameters = {} header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id'] return self._client.send( request, header_parameters, stream=False, **operation_config) def get_long_running_output(response): if response.status_code not in [200, 202, 204]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp if raw: client_raw_response = ClientRawResponse(None, response) client_raw_response.add_headers({ 'Location': 'str', 'Retry-After': 'int', }) return client_raw_response long_running_operation_timeout = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) return AzureOperationPoller( long_running_send, get_long_running_output, get_long_running_status, long_running_operation_timeout)
Deletes the specified certificate.
def delete( self, resource_group_name, account_name, certificate_name, custom_headers=None, raw=False, **operation_config): """Deletes the specified certificate. :param resource_group_name: The name of the resource group that contains the Batch account. :type resource_group_name: str :param account_name: The name of the Batch account. :type account_name: str :param certificate_name: The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. :type certificate_name: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :return: An instance of AzureOperationPoller that returns None or ClientRawResponse if raw=true :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or ~msrest.pipeline.ClientRawResponse :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ raw_result = self._delete_initial( resource_group_name=resource_group_name, account_name=account_name, certificate_name=certificate_name, custom_headers=custom_headers, raw=True, **operation_config ) if raw: return raw_result # Construct and send request def long_running_send(): return raw_result.response def get_long_running_status(status_link, headers=None): request = self._client.get(status_link) if headers: request.headers.update(headers) header_parameters = {} header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id'] return self._client.send( request, header_parameters, stream=False, **operation_config) def get_long_running_output(response): if response.status_code not in [200, 202, 204]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp if raw: client_raw_response = ClientRawResponse(None, response) client_raw_response.add_headers({ 'Location': 'str', 'Retry-After': 'int', }) return client_raw_response long_running_operation_timeout = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) return AzureOperationPoller( long_running_send, get_long_running_output, get_long_running_status, long_running_operation_timeout)
azure-mgmt-batch/azure/mgmt/batch/operations/certificate_operations.py
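Deletion follows the same polling pattern. A short sketch, reusing the assumed batch_client and hypothetical names from the create example above; since no body is returned, the caller can block with wait() instead of result().

# Same assumed 'batch_client' and hypothetical names as in the create sketch above.
delete_poller = batch_client.certificate.delete('my-rg', 'mybatchaccount', 'SHA1-a3d1c5')
delete_poller.wait()            # block; the operation returns no body on success
print(delete_poller.done())     # True once the service has reported 200/202/204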
Azure/azure-sdk-for-python
get_client_from_cli_profile
def get_client_from_cli_profile(client_class, **kwargs): cloud = get_cli_active_cloud() parameters = {} if 'credentials' not in kwargs or 'subscription_id' not in kwargs: resource, _ = _client_resource(client_class, cloud) credentials, subscription_id, tenant_id = get_azure_cli_credentials(resource=resource, with_tenant=True) parameters.update({ 'credentials': kwargs.get('credentials', credentials), 'subscription_id': kwargs.get('subscription_id', subscription_id) }) args = get_arg_spec(client_class.__init__).args if 'adla_job_dns_suffix' in args and 'adla_job_dns_suffix' not in kwargs: parameters['adla_job_dns_suffix'] = cloud.suffixes.azure_datalake_analytics_catalog_and_job_endpoint elif 'base_url' in args and 'base_url' not in kwargs: _, base_url = _client_resource(client_class, cloud) if base_url: parameters['base_url'] = base_url else: parameters['base_url'] = cloud.endpoints.resource_manager if 'tenant_id' in args and 'tenant_id' not in kwargs: parameters['tenant_id'] = tenant_id parameters.update(kwargs) return _instantiate_client(client_class, **parameters)
Return an SDK client initialized with the current CLI credentials, the CLI default subscription, and the CLI default cloud. This method automatically fills in the credentials, subscription_id, and base_url client parameters.
def get_client_from_cli_profile(client_class, **kwargs): """Return a SDK client initialized with current CLI credentials, CLI default subscription and CLI default cloud. This method will fill automatically the following client parameters: - credentials - subscription_id - base_url Parameters provided in kwargs will override CLI parameters and be passed directly to the client. :Example: .. code:: python from azure.common.client_factory import get_client_from_cli_profile from azure.mgmt.compute import ComputeManagementClient client = get_client_from_cli_profile(ComputeManagementClient) .. versionadded:: 1.1.6 :param client_class: A SDK client class :return: An instantiated client :raises: ImportError if azure-cli-core package is not available """ cloud = get_cli_active_cloud() parameters = {} if 'credentials' not in kwargs or 'subscription_id' not in kwargs: resource, _ = _client_resource(client_class, cloud) credentials, subscription_id, tenant_id = get_azure_cli_credentials(resource=resource, with_tenant=True) parameters.update({ 'credentials': kwargs.get('credentials', credentials), 'subscription_id': kwargs.get('subscription_id', subscription_id) }) args = get_arg_spec(client_class.__init__).args if 'adla_job_dns_suffix' in args and 'adla_job_dns_suffix' not in kwargs: # Datalake # Let it raise here with AttributeError at worst, this would mean this cloud does not define # ADL endpoint and no manual suffix was given parameters['adla_job_dns_suffix'] = cloud.suffixes.azure_datalake_analytics_catalog_and_job_endpoint elif 'base_url' in args and 'base_url' not in kwargs: _, base_url = _client_resource(client_class, cloud) if base_url: parameters['base_url'] = base_url else: parameters['base_url'] = cloud.endpoints.resource_manager if 'tenant_id' in args and 'tenant_id' not in kwargs: parameters['tenant_id'] = tenant_id parameters.update(kwargs) return _instantiate_client(client_class, **parameters)
azure-common/azure/common/client_factory.py
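A small sketch of the kwargs-override behaviour described above: credentials still come from the CLI cache, while an explicitly passed subscription_id takes precedence over the CLI default (the id shown is a hypothetical placeholder).

from azure.common.client_factory import get_client_from_cli_profile
from azure.mgmt.compute import ComputeManagementClient

# Credentials still come from the CLI cache; the explicit kwarg below overrides
# the CLI default subscription (the id is a hypothetical placeholder).
client = get_client_from_cli_profile(
    ComputeManagementClient,
    subscription_id='bfc42d3a-65ca-11e7-95cf-ecb1d756380e')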
Azure/azure-sdk-for-python
get_client_from_json_dict
def get_client_from_json_dict(client_class, config_dict, **kwargs): is_graphrbac = client_class.__name__ == 'GraphRbacManagementClient' parameters = { 'subscription_id': config_dict.get('subscriptionId'), 'base_url': config_dict.get('resourceManagerEndpointUrl'), 'tenant_id': config_dict.get('tenantId') } if is_graphrbac: parameters['base_url'] = config_dict['activeDirectoryGraphResourceId'] if 'credentials' not in kwargs: if is_graphrbac: resource = config_dict['activeDirectoryGraphResourceId'] else: if "activeDirectoryResourceId" not in config_dict and 'resourceManagerEndpointUrl' not in config_dict: raise ValueError("Need activeDirectoryResourceId or resourceManagerEndpointUrl key") resource = config_dict.get('activeDirectoryResourceId', config_dict['resourceManagerEndpointUrl']) authority_url = config_dict['activeDirectoryEndpointUrl'] is_adfs = bool(re.match('.+(/adfs|/adfs/)$', authority_url, re.I)) if is_adfs: authority_url = authority_url.rstrip('/') else: authority_url = authority_url + '/' + config_dict['tenantId'] context = adal.AuthenticationContext( authority_url, api_version=None, validate_authority=not is_adfs ) parameters['credentials'] = AdalAuthentication( context.acquire_token_with_client_credentials, resource, config_dict['clientId'], config_dict['clientSecret'] ) parameters.update(kwargs) return _instantiate_client(client_class, **parameters)
Return an SDK client initialized with a JSON auth dict. The easiest way to obtain this content is to run 'az ad sp create-for-rbac --sdk-auth' with the Azure CLI.
def get_client_from_json_dict(client_class, config_dict, **kwargs): """Return a SDK client initialized with a JSON auth dict. The easiest way to obtain this content is to call the following CLI commands: .. code:: bash az ad sp create-for-rbac --sdk-auth This method will fill automatically the following client parameters: - credentials - subscription_id - base_url - tenant_id Parameters provided in kwargs will override parameters and be passed directly to the client. :Example: .. code:: python from azure.common.client_factory import get_client_from_auth_file from azure.mgmt.compute import ComputeManagementClient config_dict = { "clientId": "ad735158-65ca-11e7-ba4d-ecb1d756380e", "clientSecret": "b70bb224-65ca-11e7-810c-ecb1d756380e", "subscriptionId": "bfc42d3a-65ca-11e7-95cf-ecb1d756380e", "tenantId": "c81da1d8-65ca-11e7-b1d1-ecb1d756380e", "activeDirectoryEndpointUrl": "https://login.microsoftonline.com", "resourceManagerEndpointUrl": "https://management.azure.com/", "activeDirectoryGraphResourceId": "https://graph.windows.net/", "sqlManagementEndpointUrl": "https://management.core.windows.net:8443/", "galleryEndpointUrl": "https://gallery.azure.com/", "managementEndpointUrl": "https://management.core.windows.net/" } client = get_client_from_json_dict(ComputeManagementClient, config_dict) .. versionadded:: 1.1.7 :param client_class: A SDK client class :param dict config_dict: A config dict. :return: An instantiated client """ is_graphrbac = client_class.__name__ == 'GraphRbacManagementClient' parameters = { 'subscription_id': config_dict.get('subscriptionId'), 'base_url': config_dict.get('resourceManagerEndpointUrl'), 'tenant_id': config_dict.get('tenantId') # GraphRbac } if is_graphrbac: parameters['base_url'] = config_dict['activeDirectoryGraphResourceId'] if 'credentials' not in kwargs: # Get the right resource for Credentials if is_graphrbac: resource = config_dict['activeDirectoryGraphResourceId'] else: if "activeDirectoryResourceId" not in config_dict and 'resourceManagerEndpointUrl' not in config_dict: raise ValueError("Need activeDirectoryResourceId or resourceManagerEndpointUrl key") resource = config_dict.get('activeDirectoryResourceId', config_dict['resourceManagerEndpointUrl']) authority_url = config_dict['activeDirectoryEndpointUrl'] is_adfs = bool(re.match('.+(/adfs|/adfs/)$', authority_url, re.I)) if is_adfs: authority_url = authority_url.rstrip('/') # workaround: ADAL is known to reject auth urls with trailing / else: authority_url = authority_url + '/' + config_dict['tenantId'] context = adal.AuthenticationContext( authority_url, api_version=None, validate_authority=not is_adfs ) parameters['credentials'] = AdalAuthentication( context.acquire_token_with_client_credentials, resource, config_dict['clientId'], config_dict['clientSecret'] ) parameters.update(kwargs) return _instantiate_client(client_class, **parameters)
azure-common/azure/common/client_factory.py
Azure/azure-sdk-for-python
get_client_from_auth_file
def get_client_from_auth_file(client_class, auth_path=None, **kwargs): auth_path = auth_path or os.environ['AZURE_AUTH_LOCATION'] with io.open(auth_path, 'r', encoding='utf-8-sig') as auth_fd: config_dict = json.load(auth_fd) return get_client_from_json_dict(client_class, config_dict, **kwargs)
Return an SDK client initialized with an auth file. The easiest way to obtain this file is to run 'az ad sp create-for-rbac --sdk-auth' with the Azure CLI.
def get_client_from_auth_file(client_class, auth_path=None, **kwargs): """Return a SDK client initialized with auth file. The easiest way to obtain this file is to call the following CLI commands: .. code:: bash az ad sp create-for-rbac --sdk-auth You can specific the file path directly, or fill the environment variable AZURE_AUTH_LOCATION. File must be UTF-8. This method will fill automatically the following client parameters: - credentials - subscription_id - base_url Parameters provided in kwargs will override parameters and be passed directly to the client. :Example: .. code:: python from azure.common.client_factory import get_client_from_auth_file from azure.mgmt.compute import ComputeManagementClient client = get_client_from_auth_file(ComputeManagementClient) Example of file: .. code:: json { "clientId": "ad735158-65ca-11e7-ba4d-ecb1d756380e", "clientSecret": "b70bb224-65ca-11e7-810c-ecb1d756380e", "subscriptionId": "bfc42d3a-65ca-11e7-95cf-ecb1d756380e", "tenantId": "c81da1d8-65ca-11e7-b1d1-ecb1d756380e", "activeDirectoryEndpointUrl": "https://login.microsoftonline.com", "resourceManagerEndpointUrl": "https://management.azure.com/", "activeDirectoryGraphResourceId": "https://graph.windows.net/", "sqlManagementEndpointUrl": "https://management.core.windows.net:8443/", "galleryEndpointUrl": "https://gallery.azure.com/", "managementEndpointUrl": "https://management.core.windows.net/" } .. versionadded:: 1.1.7 :param client_class: A SDK client class :param str auth_path: Path to the file. :return: An instantiated client :raises: KeyError if AZURE_AUTH_LOCATION is not an environment variable and no path is provided :raises: FileNotFoundError if provided file path does not exists :raises: json.JSONDecodeError if provided file is not JSON valid :raises: UnicodeDecodeError if file is not UTF8 compliant """ auth_path = auth_path or os.environ['AZURE_AUTH_LOCATION'] with io.open(auth_path, 'r', encoding='utf-8-sig') as auth_fd: config_dict = json.load(auth_fd) return get_client_from_json_dict(client_class, config_dict, **kwargs)
azure-common/azure/common/client_factory.py
Azure/azure-sdk-for-python
_ETreeXmlToObject.get_entry_properties_from_element
def get_entry_properties_from_element(element, include_id, id_prefix_to_skip=None, use_title_as_id=False): properties = {} etag = element.attrib.get(_make_etree_ns_attr_name(_etree_entity_feed_namespaces['m'], 'etag'), None) if etag is not None: properties['etag'] = etag updated = element.findtext('./atom:updated', '', _etree_entity_feed_namespaces) if updated: properties['updated'] = updated author_name = element.findtext('./atom:author/atom:name', '', _etree_entity_feed_namespaces) if author_name: properties['author'] = author_name if include_id: if use_title_as_id: title = element.findtext('./atom:title', '', _etree_entity_feed_namespaces) if title: properties['name'] = title else: element_id = element.findtext('./atom:id', '', _etree_entity_feed_namespaces) if element_id: properties['name'] = _get_readable_id(element_id, id_prefix_to_skip) return properties
Get entry properties (etag, updated, author, name) from an ElementTree entry element.
def get_entry_properties_from_element(element, include_id, id_prefix_to_skip=None, use_title_as_id=False): ''' get properties from element tree element ''' properties = {} etag = element.attrib.get(_make_etree_ns_attr_name(_etree_entity_feed_namespaces['m'], 'etag'), None) if etag is not None: properties['etag'] = etag updated = element.findtext('./atom:updated', '', _etree_entity_feed_namespaces) if updated: properties['updated'] = updated author_name = element.findtext('./atom:author/atom:name', '', _etree_entity_feed_namespaces) if author_name: properties['author'] = author_name if include_id: if use_title_as_id: title = element.findtext('./atom:title', '', _etree_entity_feed_namespaces) if title: properties['name'] = title else: element_id = element.findtext('./atom:id', '', _etree_entity_feed_namespaces) if element_id: properties['name'] = _get_readable_id(element_id, id_prefix_to_skip) return properties
azure-servicebus/azure/servicebus/control_client/_common_serialization.py
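A stdlib-only sketch of the individual lookups this helper performs on an Atom entry: the m:etag attribute, atom:updated, atom:author/atom:name, and the title used as the name when use_title_as_id is set. The namespace URIs are what _etree_entity_feed_namespaces is assumed to map to.

import xml.etree.ElementTree as ET

# Namespace URIs that _etree_entity_feed_namespaces is assumed to contain.
NS = {
    'atom': 'http://www.w3.org/2005/Atom',
    'm': 'http://schemas.microsoft.com/ado/2007/08/dataservices/metadata',
}

entry = ET.fromstring(
    '<entry xmlns="http://www.w3.org/2005/Atom" '
    'xmlns:m="http://schemas.microsoft.com/ado/2007/08/dataservices/metadata" '
    'm:etag="W/&quot;1&quot;">'
    '<updated>2013-07-16T00:00:00Z</updated>'
    '<author><name>contoso</name></author>'
    '<title>myqueue</title>'
    '</entry>')

props = {}
etag = entry.attrib.get('{%s}etag' % NS['m'])            # namespaced attribute lookup
if etag is not None:
    props['etag'] = etag
props['updated'] = entry.findtext('./atom:updated', '', NS)
props['author'] = entry.findtext('./atom:author/atom:name', '', NS)
props['name'] = entry.findtext('./atom:title', '', NS)   # the use_title_as_id branch
print(props)   # etag, updated, author and name recovered from the feed entry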
Azure/azure-sdk-for-python
DpsCertificateOperations.delete
def delete( self, resource_group_name, if_match, provisioning_service_name, certificate_name, certificatename=None, certificateraw_bytes=None, certificateis_verified=None, certificatepurpose=None, certificatecreated=None, certificatelast_updated=None, certificatehas_private_key=None, certificatenonce=None, custom_headers=None, raw=False, **operation_config): url = self.delete.metadata['url'] path_format_arguments = { 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'provisioningServiceName': self._serialize.url("provisioning_service_name", provisioning_service_name, 'str'), 'certificateName': self._serialize.url("certificate_name", certificate_name, 'str') } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} if certificatename is not None: query_parameters['certificate.name'] = self._serialize.query("certificatename", certificatename, 'str') if certificateraw_bytes is not None: query_parameters['certificate.rawBytes'] = self._serialize.query("certificateraw_bytes", certificateraw_bytes, 'bytearray') if certificateis_verified is not None: query_parameters['certificate.isVerified'] = self._serialize.query("certificateis_verified", certificateis_verified, 'bool') if certificatepurpose is not None: query_parameters['certificate.purpose'] = self._serialize.query("certificatepurpose", certificatepurpose, 'str') if certificatecreated is not None: query_parameters['certificate.created'] = self._serialize.query("certificatecreated", certificatecreated, 'iso-8601') if certificatelast_updated is not None: query_parameters['certificate.lastUpdated'] = self._serialize.query("certificatelast_updated", certificatelast_updated, 'iso-8601') if certificatehas_private_key is not None: query_parameters['certificate.hasPrivateKey'] = self._serialize.query("certificatehas_private_key", certificatehas_private_key, 'bool') if certificatenonce is not None: query_parameters['certificate.nonce'] = self._serialize.query("certificatenonce", certificatenonce, 'str') query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') request = self._client.delete(url, query_parameters) response = self._client.send(request, header_parameters, stream=False, **operation_config) if response.status_code not in [200, 204]: raise models.ErrorDetailsException(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response
Delete the Provisioning Service Certificate. Deletes the specified certificate associated with the Provisioning Service.
def delete( self, resource_group_name, if_match, provisioning_service_name, certificate_name, certificatename=None, certificateraw_bytes=None, certificateis_verified=None, certificatepurpose=None, certificatecreated=None, certificatelast_updated=None, certificatehas_private_key=None, certificatenonce=None, custom_headers=None, raw=False, **operation_config): """Delete the Provisioning Service Certificate. Deletes the specified certificate assosciated with the Provisioning Service. :param resource_group_name: Resource group identifier. :type resource_group_name: str :param if_match: ETag of the certificate :type if_match: str :param provisioning_service_name: The name of the provisioning service. :type provisioning_service_name: str :param certificate_name: This is a mandatory field, and is the logical name of the certificate that the provisioning service will access by. :type certificate_name: str :param certificatename: This is optional, and it is the Common Name of the certificate. :type certificatename: str :param certificateraw_bytes: Raw data within the certificate. :type certificateraw_bytes: bytearray :param certificateis_verified: Indicates if certificate has been verified by owner of the private key. :type certificateis_verified: bool :param certificatepurpose: A description that mentions the purpose of the certificate. Possible values include: 'clientAuthentication', 'serverAuthentication' :type certificatepurpose: str or ~azure.mgmt.iothubprovisioningservices.models.CertificatePurpose :param certificatecreated: Time the certificate is created. :type certificatecreated: datetime :param certificatelast_updated: Time the certificate is last updated. :type certificatelast_updated: datetime :param certificatehas_private_key: Indicates if the certificate contains a private key. :type certificatehas_private_key: bool :param certificatenonce: Random number generated to indicate Proof of Possession. :type certificatenonce: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:return: None or ClientRawResponse if raw=true :rtype: None or ~msrest.pipeline.ClientRawResponse :raises: :class:`ErrorDetailsException<azure.mgmt.iothubprovisioningservices.models.ErrorDetailsException>` """ # Construct URL url = self.delete.metadata['url'] path_format_arguments = { 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'provisioningServiceName': self._serialize.url("provisioning_service_name", provisioning_service_name, 'str'), 'certificateName': self._serialize.url("certificate_name", certificate_name, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} if certificatename is not None: query_parameters['certificate.name'] = self._serialize.query("certificatename", certificatename, 'str') if certificateraw_bytes is not None: query_parameters['certificate.rawBytes'] = self._serialize.query("certificateraw_bytes", certificateraw_bytes, 'bytearray') if certificateis_verified is not None: query_parameters['certificate.isVerified'] = self._serialize.query("certificateis_verified", certificateis_verified, 'bool') if certificatepurpose is not None: query_parameters['certificate.purpose'] = self._serialize.query("certificatepurpose", certificatepurpose, 'str') if certificatecreated is not None: query_parameters['certificate.created'] = self._serialize.query("certificatecreated", certificatecreated, 'iso-8601') if certificatelast_updated is not None: query_parameters['certificate.lastUpdated'] = self._serialize.query("certificatelast_updated", certificatelast_updated, 'iso-8601') if certificatehas_private_key is not None: query_parameters['certificate.hasPrivateKey'] = self._serialize.query("certificatehas_private_key", certificatehas_private_key, 'bool') if certificatenonce is not None: query_parameters['certificate.nonce'] = self._serialize.query("certificatenonce", certificatenonce, 'str') query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request request = self._client.delete(url, query_parameters) response = self._client.send(request, header_parameters, stream=False, **operation_config) if response.status_code not in [200, 204]: raise models.ErrorDetailsException(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response
azure-mgmt-iothubprovisioningservices/azure/mgmt/iothubprovisioningservices/operations/dps_certificate_operations.py
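A hedged usage sketch: dps_client is assumed to be an already-constructed IotDpsClient whose certificate operations group is exposed as dps_certificate, the resource names and ETag are hypothetical, and the optional certificate.* query parameters are simply left unset.

# Assumptions: 'dps_client' already exists and exposes the 'dps_certificate'
# operations group; the resource names and ETag below are hypothetical.
dps_client.dps_certificate.delete(
    resource_group_name='my-rg',
    if_match='AAAAAAAAAAE=',            # ETag of the certificate to delete
    provisioning_service_name='my-dps',
    certificate_name='my-ca-cert')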
Azure/azure-sdk-for-python
ServiceBusClient.get_queue
def get_queue(self, queue_name): try: queue = self.mgmt_client.get_queue(queue_name) except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e) except AzureServiceBusResourceNotFound: raise ServiceBusResourceNotFound("Specified queue does not exist.") return QueueClient.from_entity( self._get_host(), queue, shared_access_key_name=self.shared_access_key_name, shared_access_key_value=self.shared_access_key_value, mgmt_client=self.mgmt_client, debug=self.debug)
Get a client for a queue entity.
def get_queue(self, queue_name): """Get a client for a queue entity. :param queue_name: The name of the queue. :type queue_name: str :rtype: ~azure.servicebus.servicebus_client.QueueClient :raises: ~azure.servicebus.common.errors.ServiceBusConnectionError if the namespace is not found. :raises: ~azure.servicebus.common.errors.ServiceBusResourceNotFound if the queue is not found. Example: .. literalinclude:: ../examples/test_examples.py :start-after: [START get_queue_client] :end-before: [END get_queue_client] :language: python :dedent: 8 :caption: Get the specific queue client from Service Bus client """ try: queue = self.mgmt_client.get_queue(queue_name) except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e) except AzureServiceBusResourceNotFound: raise ServiceBusResourceNotFound("Specificed queue does not exist.") return QueueClient.from_entity( self._get_host(), queue, shared_access_key_name=self.shared_access_key_name, shared_access_key_value=self.shared_access_key_value, mgmt_client=self.mgmt_client, debug=self.debug)
azure-servicebus/azure/servicebus/servicebus_client.py
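A usage sketch assuming the azure-servicebus 0.50-style client; the namespace, key name and key value shown are hypothetical placeholders.

from azure.servicebus import ServiceBusClient

# The namespace and key values are hypothetical placeholders.
sb_client = ServiceBusClient(
    service_namespace='mynamespace',
    shared_access_key_name='RootManageSharedAccessKey',
    shared_access_key_value='<key value>')
queue_client = sb_client.get_queue('myqueue')   # raises if the namespace or queue is missing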
Azure/azure-sdk-for-python
ServiceBusClient.list_queues
def list_queues(self): try: queues = self.mgmt_client.list_queues() except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e) queue_clients = [] for queue in queues: queue_clients.append(QueueClient.from_entity( self._get_host(), queue, shared_access_key_name=self.shared_access_key_name, shared_access_key_value=self.shared_access_key_value, mgmt_client=self.mgmt_client, debug=self.debug)) return queue_clients
Get clients for all queue entities in the namespace.
def list_queues(self): """Get clients for all queue entities in the namespace. :rtype: list[~azure.servicebus.servicebus_client.QueueClient] :raises: ~azure.servicebus.common.errors.ServiceBusConnectionError if the namespace is not found. Example: .. literalinclude:: ../examples/test_examples.py :start-after: [START list_queues] :end-before: [END list_queues] :language: python :dedent: 4 :caption: List the queues from Service Bus client """ try: queues = self.mgmt_client.list_queues() except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e) queue_clients = [] for queue in queues: queue_clients.append(QueueClient.from_entity( self._get_host(), queue, shared_access_key_name=self.shared_access_key_name, shared_access_key_value=self.shared_access_key_value, mgmt_client=self.mgmt_client, debug=self.debug)) return queue_clients
azure-servicebus/azure/servicebus/servicebus_client.py
Azure/azure-sdk-for-python
ServiceBusClient.get_topic
def get_topic(self, topic_name): try: topic = self.mgmt_client.get_topic(topic_name) except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e) except AzureServiceBusResourceNotFound: raise ServiceBusResourceNotFound("Specified topic does not exist.") return TopicClient.from_entity( self._get_host(), topic, shared_access_key_name=self.shared_access_key_name, shared_access_key_value=self.shared_access_key_value, debug=self.debug)
Get a client for a topic entity.
def get_topic(self, topic_name): """Get a client for a topic entity. :param topic_name: The name of the topic. :type topic_name: str :rtype: ~azure.servicebus.servicebus_client.TopicClient :raises: ~azure.servicebus.common.errors.ServiceBusConnectionError if the namespace is not found. :raises: ~azure.servicebus.common.errors.ServiceBusResourceNotFound if the topic is not found. Example: .. literalinclude:: ../examples/test_examples.py :start-after: [START get_topic_client] :end-before: [END get_topic_client] :language: python :dedent: 8 :caption: Get the specific topic client from Service Bus client """ try: topic = self.mgmt_client.get_topic(topic_name) except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e) except AzureServiceBusResourceNotFound: raise ServiceBusResourceNotFound("Specificed topic does not exist.") return TopicClient.from_entity( self._get_host(), topic, shared_access_key_name=self.shared_access_key_name, shared_access_key_value=self.shared_access_key_value, debug=self.debug)
azure-servicebus/azure/servicebus/servicebus_client.py
Azure/azure-sdk-for-python
ServiceBusClient.list_topics
def list_topics(self): try: topics = self.mgmt_client.list_topics() except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e) topic_clients = [] for topic in topics: topic_clients.append(TopicClient.from_entity( self._get_host(), topic, shared_access_key_name=self.shared_access_key_name, shared_access_key_value=self.shared_access_key_value, debug=self.debug)) return topic_clients
Get clients for all topic entities in the namespace.
def list_topics(self): """Get a client for all topic entities in the namespace. :rtype: list[~azure.servicebus.servicebus_client.TopicClient] :raises: ~azure.servicebus.common.errors.ServiceBusConnectionError if the namespace is not found. Example: .. literalinclude:: ../examples/test_examples.py :start-after: [START list_topics] :end-before: [END list_topics] :language: python :dedent: 4 :caption: List the topics from Service Bus client """ try: topics = self.mgmt_client.list_topics() except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e) topic_clients = [] for topic in topics: topic_clients.append(TopicClient.from_entity( self._get_host(), topic, shared_access_key_name=self.shared_access_key_name, shared_access_key_value=self.shared_access_key_value, debug=self.debug)) return topic_clients
azure-servicebus/azure/servicebus/servicebus_client.py
Azure/azure-sdk-for-python
ReceiveClientMixin.settle_deferred_messages
def settle_deferred_messages(self, settlement, messages, **kwargs): if (self.entity and self.requires_session) or kwargs.get('session'): raise ValueError("Sessionful deferred messages can only be settled within a locked receive session.") if settlement.lower() not in ['completed', 'suspended', 'abandoned']: raise ValueError("Settlement must be one of: 'completed', 'suspended', 'abandoned'") if not messages: raise ValueError("At least one message must be specified.") message = { 'disposition-status': settlement.lower(), 'lock-tokens': types.AMQPArray([m.lock_token for m in messages])} with BaseHandler(self.entity_uri, self.auth_config, debug=self.debug, **kwargs) as handler: return handler._mgmt_request_response( REQUEST_RESPONSE_UPDATE_DISPOSTION_OPERATION, message, mgmt_handlers.default)
Settle messages that have been previously deferred.
def settle_deferred_messages(self, settlement, messages, **kwargs): """Settle messages that have been previously deferred. :param settlement: How the messages are to be settled. This must be a string of one of the following values: 'completed', 'suspended', 'abandoned'. :type settlement: str :param messages: A list of deferred messages to be settled. :type messages: list[~azure.servicebus.common.message.DeferredMessage] Example: .. literalinclude:: ../examples/test_examples.py :start-after: [START settle_deferred_messages_service_bus] :end-before: [END settle_deferred_messages_service_bus] :language: python :dedent: 8 :caption: Settle deferred messages. """ if (self.entity and self.requires_session) or kwargs.get('session'): raise ValueError("Sessionful deferred messages can only be settled within a locked receive session.") if settlement.lower() not in ['completed', 'suspended', 'abandoned']: raise ValueError("Settlement must be one of: 'completed', 'suspended', 'abandoned'") if not messages: raise ValueError("At least one message must be specified.") message = { 'disposition-status': settlement.lower(), 'lock-tokens': types.AMQPArray([m.lock_token for m in messages])} with BaseHandler(self.entity_uri, self.auth_config, debug=self.debug, **kwargs) as handler: return handler._mgmt_request_response( # pylint: disable=protected-access REQUEST_RESPONSE_UPDATE_DISPOSTION_OPERATION, message, mgmt_handlers.default)
azure-servicebus/azure/servicebus/servicebus_client.py
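A sketch of the intended flow under stated assumptions: queue_client is a QueueClient for a non-sessionful queue and sequence_numbers were recorded when the messages were originally deferred; the deferred messages are collected with receive_deferred_messages and then settled in one call.

# Assumptions: 'queue_client' is a QueueClient for a non-sessionful queue and
# 'sequence_numbers' were recorded when the messages were originally deferred.
deferred = queue_client.receive_deferred_messages(sequence_numbers)
queue_client.settle_deferred_messages('completed', deferred)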
Azure/azure-sdk-for-python
WebsiteManagementService.get_site
def get_site(self, webspace_name, website_name): return self._perform_get(self._get_sites_details_path(webspace_name, website_name), Site)
Get the details of a web site defined on this webspace.
def get_site(self, webspace_name, website_name): ''' Get the details of a web site defined on this webspace. webspace_name: The name of the webspace. website_name: The name of the website. ''' return self._perform_get(self._get_sites_details_path(webspace_name, website_name), Site)
azure-servicemanagement-legacy/azure/servicemanagement/websitemanagementservice.py
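A usage sketch for the legacy service management client; the subscription id, certificate path and entity names are hypothetical placeholders.

from azure.servicemanagement import WebsiteManagementService

# Subscription id and management certificate path are hypothetical placeholders.
wms = WebsiteManagementService(
    '00000000-0000-0000-0000-000000000000',
    'path/to/management_certificate.pem')
site = wms.get_site('mywebspace', 'mysite')
print(site.name)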
Azure/azure-sdk-for-python
WebsiteManagementService.create_site
def create_site(self, webspace_name, website_name, geo_region, host_names, plan='VirtualDedicatedPlan', compute_mode='Shared', server_farm=None, site_mode=None): xml = _XmlSerializer.create_website_to_xml(webspace_name, website_name, geo_region, plan, host_names, compute_mode, server_farm, site_mode) return self._perform_post( self._get_sites_path(webspace_name), xml, Site)
Create a website.
def create_site(self, webspace_name, website_name, geo_region, host_names, plan='VirtualDedicatedPlan', compute_mode='Shared', server_farm=None, site_mode=None): ''' Create a website. webspace_name: The name of the webspace. website_name: The name of the website. geo_region: The geographical region of the webspace that will be created. host_names: An array of fully qualified domain names for website. Only one hostname can be specified in the azurewebsites.net domain. The hostname should match the name of the website. Custom domains can only be specified for Shared or Standard websites. plan: This value must be 'VirtualDedicatedPlan'. compute_mode: This value should be 'Shared' for the Free or Paid Shared offerings, or 'Dedicated' for the Standard offering. The default value is 'Shared'. If you set it to 'Dedicated', you must specify a value for the server_farm parameter. server_farm: The name of the Server Farm associated with this website. This is a required value for Standard mode. site_mode: Can be None, 'Limited' or 'Basic'. This value is 'Limited' for the Free offering, and 'Basic' for the Paid Shared offering. Standard mode does not use the site_mode parameter; it uses the compute_mode parameter. ''' xml = _XmlSerializer.create_website_to_xml(webspace_name, website_name, geo_region, plan, host_names, compute_mode, server_farm, site_mode) return self._perform_post( self._get_sites_path(webspace_name), xml, Site)
azure-servicemanagement-legacy/azure/servicemanagement/websitemanagementservice.py
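A hedged example of a Free/Shared-tier call, reusing the wms instance assumed in the get_site sketch above; the webspace, site name and region are hypothetical, and the single host name stays in the azurewebsites.net domain to match the site name as the docstring requires.

# 'wms' is a WebsiteManagementService instance as in the earlier sketch; the
# webspace, site name and region are hypothetical. A Free-tier site keeps
# compute_mode='Shared' and its only host name in the azurewebsites.net domain.
wms.create_site(
    'mywebspace',
    'mysite',
    'West US',
    ['mysite.azurewebsites.net'],
    site_mode='Limited')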
Azure/azure-sdk-for-python
WebsiteManagementService.delete_site
def delete_site(self, webspace_name, website_name, delete_empty_server_farm=False, delete_metrics=False): path = self._get_sites_details_path(webspace_name, website_name) query = '' if delete_empty_server_farm: query += '&deleteEmptyServerFarm=true' if delete_metrics: query += '&deleteMetrics=true' if query: path = path + '?' + query.lstrip('&') return self._perform_delete(path)
Delete a website.
def delete_site(self, webspace_name, website_name, delete_empty_server_farm=False, delete_metrics=False): ''' Delete a website. webspace_name: The name of the webspace. website_name: The name of the website. delete_empty_server_farm: If the site being deleted is the last web site in a server farm, you can delete the server farm by setting this to True. delete_metrics: To also delete the metrics for the site that you are deleting, you can set this to True. ''' path = self._get_sites_details_path(webspace_name, website_name) query = '' if delete_empty_server_farm: query += '&deleteEmptyServerFarm=true' if delete_metrics: query += '&deleteMetrics=true' if query: path = path + '?' + query.lstrip('&') return self._perform_delete(path)
azure-servicemanagement-legacy/azure/servicemanagement/websitemanagementservice.py
Azure/azure-sdk-for-python
WebsiteManagementService.update_site
def update_site(self, webspace_name, website_name, state=None): xml = _XmlSerializer.update_website_to_xml(state) return self._perform_put( self._get_sites_details_path(webspace_name, website_name), xml, as_async=True)
Update a web site.
def update_site(self, webspace_name, website_name, state=None): ''' Update a web site. webspace_name: The name of the webspace. website_name: The name of the website. state: The wanted state ('Running' or 'Stopped' accepted) ''' xml = _XmlSerializer.update_website_to_xml(state) return self._perform_put( self._get_sites_details_path(webspace_name, website_name), xml, as_async=True)
azure-servicemanagement-legacy/azure/servicemanagement/websitemanagementservice.py
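A short sketch of stopping and restarting a site with the two accepted state values; wms is the WebsiteManagementService instance assumed in the earlier sketch and the names are hypothetical.

# 'wms' is a WebsiteManagementService instance as in the earlier sketch.
wms.update_site('mywebspace', 'mysite', state='Stopped')   # stop the site
wms.update_site('mywebspace', 'mysite', state='Running')   # start it again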