# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
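# A `cls` callback, when supplied, is called with the raw PipelineResponse, the
# deserialized body, and a (possibly empty) dict of response headers, and its
# return value replaces the normal return value. A minimal sketch (illustrative
# only; `network_client` is a hypothetical authenticated NetworkManagementClient):
#
#     def with_status(pipeline_response, deserialized, headers):
#         return pipeline_response.http_response.status_code, deserialized
#
#     code, gateway = network_client.virtual_network_gateways.get(
#         "my-rg", "my-gateway", cls=with_status
#     )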
class VirtualNetworkGatewaysOperations(object):
"""VirtualNetworkGatewaysOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _create_or_update_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VirtualNetworkGateway"
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetworkGateway"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VirtualNetworkGateway')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VirtualNetworkGateway"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualNetworkGateway"]
"""Creates or updates a virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to the create or update virtual network gateway operation.
:type parameters: ~azure.mgmt.network.v2018_06_01.models.VirtualNetworkGateway
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_06_01.models.VirtualNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
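# Usage sketch (illustrative; `network_client` is a hypothetical authenticated
# NetworkManagementClient and `gateway_params` a populated VirtualNetworkGateway
# model):
#
#     poller = network_client.virtual_network_gateways.begin_create_or_update(
#         resource_group_name="my-rg",
#         virtual_network_gateway_name="my-gateway",
#         parameters=gateway_params,
#     )
#     gateway = poller.result()  # blocks until the LRO reaches a terminal state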
def get(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetworkGateway"
"""Gets the specified virtual network gateway by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_06_01.models.VirtualNetworkGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
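# Usage sketch (illustrative; same hypothetical `network_client` as above).
# `get` is a plain synchronous call, so no poller is involved:
#
#     gateway = network_client.virtual_network_gateways.get("my-rg", "my-gateway")
#     print(gateway.name, gateway.provisioning_state)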
def _delete_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified virtual network gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
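# Usage sketch (illustrative): the poller state can be captured with
# `continuation_token()` and the operation resumed later, e.g. after a process
# restart, via the `continuation_token` keyword documented above:
#
#     poller = network_client.virtual_network_gateways.begin_delete("my-rg", "my-gateway")
#     token = poller.continuation_token()
#     # ...later, possibly in another process...
#     resumed = network_client.virtual_network_gateways.begin_delete(
#         "my-rg", "my-gateway", continuation_token=token
#     )
#     resumed.wait()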
def _update_tags_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetworkGateway"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def begin_update_tags(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualNetworkGateway"]
"""Updates a virtual network gateway tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to update virtual network gateway tags.
:type parameters: ~azure.mgmt.network.v2018_06_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_06_01.models.VirtualNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_tags_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
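# Usage sketch (illustrative): the PATCH body only carries tags, so a
# lightweight TagsObject is passed instead of a full gateway model:
#
#     from azure.mgmt.network.v2018_06_01.models import TagsObject
#
#     poller = network_client.virtual_network_gateways.begin_update_tags(
#         "my-rg", "my-gateway", TagsObject(tags={"env": "prod"})
#     )
#     gateway = poller.result()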
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.VirtualNetworkGatewayListResult"]
"""Gets all virtual network gateways by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either VirtualNetworkGatewayListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_06_01.models.VirtualNetworkGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGatewayListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGatewayListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways'} # type: ignore
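# Usage sketch (illustrative): the returned ItemPaged follows `next_link`
# across pages transparently, so callers simply iterate:
#
#     for gateway in network_client.virtual_network_gateways.list("my-rg"):
#         print(gateway.name)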
def list_connections(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.VirtualNetworkGatewayListConnectionsResult"]
"""Gets all the connections in a virtual network gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either VirtualNetworkGatewayListConnectionsResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_06_01.models.VirtualNetworkGatewayListConnectionsResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGatewayListConnectionsResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_connections.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGatewayListConnectionsResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_connections.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/connections'} # type: ignore
def _reset_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
gateway_vip=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Optional["_models.VirtualNetworkGateway"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VirtualNetworkGateway"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self._reset_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if gateway_vip is not None:
query_parameters['gatewayVip'] = self._serialize.query("gateway_vip", gateway_vip, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_reset_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset'} # type: ignore
def begin_reset(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
gateway_vip=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualNetworkGateway"]
"""Resets the primary of the virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param gateway_vip: The virtual network gateway VIP address supplied to the begin-reset
 operation of an active-active feature enabled gateway.
:type gateway_vip: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_06_01.models.VirtualNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._reset_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
gateway_vip=gateway_vip,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset'} # type: ignore
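# Usage sketch (illustrative): for an active-active gateway, `gateway_vip`
# selects which instance to reset; omit it for an active-standby gateway:
#
#     poller = network_client.virtual_network_gateways.begin_reset(
#         "my-rg", "my-gateway", gateway_vip="203.0.113.10"
#     )
#     poller.wait()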
def _reset_vpn_client_shared_key_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
# Construct URL
url = self._reset_vpn_client_shared_key_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_reset_vpn_client_shared_key_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/resetvpnclientsharedkey'} # type: ignore
def begin_reset_vpn_client_shared_key(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Resets the VPN client shared key of the virtual network gateway in the specified resource
group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._reset_vpn_client_shared_key_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset_vpn_client_shared_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/resetvpnclientsharedkey'} # type: ignore
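# Usage sketch (illustrative): the polling behaviour of any begin_* method can
# be tuned through keywords; `polling_interval` overrides the default delay
# when no Retry-After header is returned, and `polling=False` skips polling
# entirely:
#
#     poller = network_client.virtual_network_gateways.begin_reset_vpn_client_shared_key(
#         "my-rg", "my-gateway", polling_interval=10
#     )
#     poller.result()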
def _generatevpnclientpackage_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VpnClientParameters"
**kwargs # type: Any
):
# type: (...) -> Optional[str]
cls = kwargs.pop('cls', None) # type: ClsType[Optional[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._generatevpnclientpackage_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VpnClientParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_generatevpnclientpackage_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage'} # type: ignore
def begin_generatevpnclientpackage(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VpnClientParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller[str]
"""Generates VPN client package for P2S client of the virtual network gateway in the specified
resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to the generate virtual network gateway VPN client
package operation.
:type parameters: ~azure.mgmt.network.v2018_06_01.models.VpnClientParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._generatevpnclientpackage_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_generatevpnclientpackage.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage'} # type: ignore
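# Usage sketch (illustrative; the `VpnClientParameters` field shown is an
# assumption about a typical request): the LRO result is a plain string,
# typically a URL for the generated client package:
#
#     from azure.mgmt.network.v2018_06_01.models import VpnClientParameters
#
#     params = VpnClientParameters(processor_architecture="Amd64")
#     poller = network_client.virtual_network_gateways.begin_generatevpnclientpackage(
#         "my-rg", "my-gateway", params
#     )
#     package_url = poller.result()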
def _generate_vpn_profile_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VpnClientParameters"
**kwargs # type: Any
):
# type: (...) -> Optional[str]
cls = kwargs.pop('cls', None) # type: ClsType[Optional[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._generate_vpn_profile_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VpnClientParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_generate_vpn_profile_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnprofile'} # type: ignore
def begin_generate_vpn_profile(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VpnClientParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller[str]
"""Generates VPN profile for P2S client of the virtual network gateway in the specified resource
group. Used for IKEV2 and radius based authentication.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to the generate virtual network gateway VPN client
package operation.
:type parameters: ~azure.mgmt.network.v2018_06_01.models.VpnClientParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._generate_vpn_profile_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_generate_vpn_profile.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnprofile'} # type: ignore
def _get_vpn_profile_package_url_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional[str]
cls = kwargs.pop('cls', None) # type: ClsType[Optional[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self._get_vpn_profile_package_url_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_vpn_profile_package_url_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getvpnprofilepackageurl'} # type: ignore
def begin_get_vpn_profile_package_url(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[str]
"""Gets pre-generated VPN profile for P2S client of the virtual network gateway in the specified
resource group. The profile needs to be generated first using generateVpnProfile.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_vpn_profile_package_url_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_vpn_profile_package_url.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getvpnprofilepackageurl'} # type: ignore
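# Usage sketch (illustrative; `vpn_client_params` is a hypothetical
# VpnClientParameters instance): the two profile operations chain naturally;
# generate the profile first, then fetch its download URL:
#
#     network_client.virtual_network_gateways.begin_generate_vpn_profile(
#         "my-rg", "my-gateway", vpn_client_params
#     ).result()
#     url = network_client.virtual_network_gateways.begin_get_vpn_profile_package_url(
#         "my-rg", "my-gateway"
#     ).result()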
def _get_bgp_peer_status_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
peer=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Optional["_models.BgpPeerStatusListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.BgpPeerStatusListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self._get_bgp_peer_status_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if peer is not None:
query_parameters['peer'] = self._serialize.query("peer", peer, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('BgpPeerStatusListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_bgp_peer_status_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getBgpPeerStatus'} # type: ignore
def begin_get_bgp_peer_status(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
peer=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.BgpPeerStatusListResult"]
"""The GetBgpPeerStatus operation retrieves the status of all BGP peers.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param peer: The IP address of the peer to retrieve the status of.
:type peer: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True to use the default ARMPolling method,
         False for no polling, or your own initialized polling object for a custom polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either BgpPeerStatusListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_06_01.models.BgpPeerStatusListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.BgpPeerStatusListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_bgp_peer_status_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
peer=peer,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('BgpPeerStatusListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_bgp_peer_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getBgpPeerStatus'} # type: ignore
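    # Illustrative usage sketch (not part of the generated code). It assumes a
    # NetworkManagementClient instance named ``network_client`` and that
    # BgpPeerStatusListResult exposes a ``value`` list of peer entries:
    #
    #   poller = network_client.virtual_network_gateways.begin_get_bgp_peer_status(
    #       resource_group_name="my-rg",
    #       virtual_network_gateway_name="my-gateway",
    #       peer="10.0.0.4",
    #   )
    #   bgp_status = poller.result()  # blocks until the LRO reaches a terminal state
    #   for peer_status in bgp_status.value:
    #       print(peer_status.neighbor, peer_status.state)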
def supported_vpn_devices(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> str
"""Gets a xml format representation for supported vpn devices.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: str, or the result of cls(response)
:rtype: str
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self.supported_vpn_devices.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
supported_vpn_devices.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/supportedvpndevices'} # type: ignore
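    # Illustrative usage sketch (``network_client`` is an assumed
    # NetworkManagementClient instance):
    #
    #   xml_devices = network_client.virtual_network_gateways.supported_vpn_devices(
    #       resource_group_name="my-rg",
    #       virtual_network_gateway_name="my-gateway",
    #   )
    #   # Synchronous call; returns the supported device catalog as an XML string.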
def _get_learned_routes_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["_models.GatewayRouteListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.GatewayRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self._get_learned_routes_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_learned_routes_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getLearnedRoutes'} # type: ignore
def begin_get_learned_routes(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.GatewayRouteListResult"]
"""This operation retrieves a list of routes the virtual network gateway has learned, including
routes learned from BGP peers.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True to use the default ARMPolling method,
         False for no polling, or your own initialized polling object for a custom polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either GatewayRouteListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_06_01.models.GatewayRouteListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.GatewayRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_learned_routes_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_learned_routes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getLearnedRoutes'} # type: ignore
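    # Illustrative sketch of the ``continuation_token`` keyword documented above:
    # serialize the poller state, then resume it later (how the token is
    # persisted in between is left open here):
    #
    #   poller = network_client.virtual_network_gateways.begin_get_learned_routes(
    #       "my-rg", "my-gateway")
    #   token = poller.continuation_token()
    #   resumed = network_client.virtual_network_gateways.begin_get_learned_routes(
    #       "my-rg", "my-gateway", continuation_token=token)
    #   learned = resumed.result()  # GatewayRouteListResult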
def _get_advertised_routes_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
peer, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["_models.GatewayRouteListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.GatewayRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self._get_advertised_routes_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['peer'] = self._serialize.query("peer", peer, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_advertised_routes_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getAdvertisedRoutes'} # type: ignore
def begin_get_advertised_routes(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
peer, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.GatewayRouteListResult"]
"""This operation retrieves a list of routes the virtual network gateway is advertising to the
specified peer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param peer: The IP address of the peer.
:type peer: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True to use the default ARMPolling method,
         False for no polling, or your own initialized polling object for a custom polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either GatewayRouteListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_06_01.models.GatewayRouteListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.GatewayRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_advertised_routes_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
peer=peer,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_advertised_routes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getAdvertisedRoutes'} # type: ignore
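    # Illustrative sketch of the polling keywords documented above:
    #
    #   poller = network_client.virtual_network_gateways.begin_get_advertised_routes(
    #       "my-rg", "my-gateway", peer="10.0.0.4",
    #       polling_interval=5,  # seconds between polls when no Retry-After header
    #   )
    #   advertised = poller.result()
    #   # polling=False would skip polling and return right after the initial POST.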
def _set_vpnclient_ipsec_parameters_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
vpnclient_ipsec_params, # type: "_models.VpnClientIPsecParameters"
**kwargs # type: Any
):
# type: (...) -> Optional["_models.VpnClientIPsecParameters"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VpnClientIPsecParameters"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._set_vpnclient_ipsec_parameters_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(vpnclient_ipsec_params, 'VpnClientIPsecParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VpnClientIPsecParameters', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_set_vpnclient_ipsec_parameters_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/setvpnclientipsecparameters'} # type: ignore
def begin_set_vpnclient_ipsec_parameters(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
vpnclient_ipsec_params, # type: "_models.VpnClientIPsecParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VpnClientIPsecParameters"]
"""The Set VpnclientIpsecParameters operation sets the vpnclient ipsec policy for P2S client of
virtual network gateway in the specified resource group through Network resource provider.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
        :param vpnclient_ipsec_params: Parameters supplied to the Begin Set VPN client IPsec
         parameters operation for the virtual network gateway P2S client, through the Network
         resource provider.
:type vpnclient_ipsec_params: ~azure.mgmt.network.v2018_06_01.models.VpnClientIPsecParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True to use the default ARMPolling method,
         False for no polling, or your own initialized polling object for a custom polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VpnClientIPsecParameters or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_06_01.models.VpnClientIPsecParameters]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnClientIPsecParameters"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._set_vpnclient_ipsec_parameters_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
vpnclient_ipsec_params=vpnclient_ipsec_params,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VpnClientIPsecParameters', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_set_vpnclient_ipsec_parameters.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/setvpnclientipsecparameters'} # type: ignore
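    # Illustrative sketch of building the request model (attribute names are
    # assumed to match the VpnClientIPsecParameters model; the concrete values
    # are illustrative only):
    #
    #   from azure.mgmt.network.v2018_06_01 import models
    #   params = models.VpnClientIPsecParameters(
    #       sa_life_time_seconds=86472,
    #       sa_data_size_kilobytes=429497,
    #       ipsec_encryption="AES256",
    #       ipsec_integrity="SHA256",
    #       ike_encryption="AES256",
    #       ike_integrity="SHA384",
    #       dh_group="DHGroup2",
    #       pfs_group="PFS2",
    #   )
    #   poller = network_client.virtual_network_gateways.begin_set_vpnclient_ipsec_parameters(
    #       "my-rg", "my-gateway", params)
    #   applied = poller.result()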
def _get_vpnclient_ipsec_parameters_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.VpnClientIPsecParameters"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnClientIPsecParameters"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self._get_vpnclient_ipsec_parameters_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VpnClientIPsecParameters', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_vpnclient_ipsec_parameters_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getvpnclientipsecparameters'} # type: ignore
def begin_get_vpnclient_ipsec_parameters(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VpnClientIPsecParameters"]
"""The Get VpnclientIpsecParameters operation retrieves information about the vpnclient ipsec
policy for P2S client of virtual network gateway in the specified resource group through
Network resource provider.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The virtual network gateway name.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True to use the default ARMPolling method,
         False for no polling, or your own initialized polling object for a custom polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VpnClientIPsecParameters or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_06_01.models.VpnClientIPsecParameters]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnClientIPsecParameters"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_vpnclient_ipsec_parameters_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VpnClientIPsecParameters', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_vpnclient_ipsec_parameters.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getvpnclientipsecparameters'} # type: ignore
def vpn_device_configuration_script(
self,
resource_group_name, # type: str
virtual_network_gateway_connection_name, # type: str
parameters, # type: "_models.VpnDeviceScriptParameters"
**kwargs # type: Any
):
# type: (...) -> str
"""Gets a xml format representation for vpn device configuration script.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The name of the virtual network gateway
connection for which the configuration script is generated.
:type virtual_network_gateway_connection_name: str
:param parameters: Parameters supplied to the generate vpn device script operation.
:type parameters: ~azure.mgmt.network.v2018_06_01.models.VpnDeviceScriptParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: str, or the result of cls(response)
:rtype: str
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.vpn_device_configuration_script.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayConnectionName': self._serialize.url("virtual_network_gateway_connection_name", virtual_network_gateway_connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VpnDeviceScriptParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
vpn_device_configuration_script.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}/vpndeviceconfigurationscript'} # type: ignore
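    # Illustrative usage sketch (the vendor/device values are assumptions and
    # would need to match an entry returned by supported_vpn_devices):
    #
    #   from azure.mgmt.network.v2018_06_01 import models
    #   script = network_client.virtual_network_gateways.vpn_device_configuration_script(
    #       resource_group_name="my-rg",
    #       virtual_network_gateway_connection_name="my-connection",
    #       parameters=models.VpnDeviceScriptParameters(
    #           vendor="Cisco", device_family="ISR", firmware_version="15.1"),
    #   )
    #   # ``script`` holds the device configuration as a plain string.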
| mit |
mseroczynski/platformio | platformio/ide/projectgenerator.py | 4 | 4287 | # Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
import json
import os
import re
from os.path import abspath, basename, expanduser, isdir, join, relpath
import bottle
from platformio import util
class ProjectGenerator(object):
def __init__(self, project_dir, ide, board=None):
self.project_dir = project_dir
self.ide = ide
self.board = board
self._tplvars = {}
self._gather_tplvars()
@staticmethod
def get_supported_ides():
tpls_dir = join(util.get_source_dir(), "ide", "tpls")
return sorted([d for d in os.listdir(tpls_dir)
if isdir(join(tpls_dir, d))])
@util.memoized
def get_project_env(self):
data = {"env_name": "PlatformIO"}
with util.cd(self.project_dir):
config = util.get_project_config()
for section in config.sections():
if not section.startswith("env:"):
continue
data = {"env_name": section[4:]}
for k, v in config.items(section):
data[k] = v
if self.board and self.board == data.get("board"):
break
return data
@util.memoized
def get_project_build_data(self):
data = {
"defines": [],
"includes": [],
"cxx_path": None
}
envdata = self.get_project_env()
if "env_name" not in envdata:
return data
result = util.exec_command(
["platformio", "-f", "run", "-t", "idedata",
"-e", envdata['env_name'], "-d", self.project_dir]
)
if result['returncode'] != 0 or '"includes":' not in result['out']:
return data
output = result['out']
try:
start_index = output.index('\n{"')
stop_index = output.rindex('}')
data = json.loads(output[start_index + 1:stop_index + 1])
except ValueError:
pass
return data
def get_project_name(self):
return basename(self.project_dir)
def get_srcfiles(self):
result = []
with util.cd(self.project_dir):
for root, _, files in os.walk(util.get_projectsrc_dir()):
for f in files:
result.append(relpath(join(root, f)))
return result
def get_tpls(self):
tpls = []
tpls_dir = join(util.get_source_dir(), "ide", "tpls", self.ide)
for root, _, files in os.walk(tpls_dir):
for f in files:
if not f.endswith(".tpl"):
continue
_relpath = root.replace(tpls_dir, "")
if _relpath.startswith(os.sep):
_relpath = _relpath[1:]
tpls.append((_relpath, join(root, f)))
return tpls
def generate(self):
for _relpath, _path in self.get_tpls():
tpl_dir = self.project_dir
if _relpath:
tpl_dir = join(self.project_dir, _relpath)
if not isdir(tpl_dir):
os.makedirs(tpl_dir)
file_name = basename(_path)[:-4]
with open(join(tpl_dir, file_name), "w") as f:
f.write(self._render_tpl(_path).encode("utf8"))
def _render_tpl(self, tpl_path):
content = ""
with open(tpl_path) as f:
content = f.read()
return bottle.template(content, **self._tplvars)
def _gather_tplvars(self):
self._tplvars.update(self.get_project_env())
self._tplvars.update(self.get_project_build_data())
self._tplvars.update({
"project_name": self.get_project_name(),
"srcfiles": self.get_srcfiles(),
"user_home_dir": abspath(expanduser("~")),
"project_dir": self.project_dir,
"systype": util.get_systype(),
"platformio_path": self._fix_os_path(
util.where_is_program("platformio")),
"env_pathsep": os.pathsep,
"env_path": self._fix_os_path(os.getenv("PATH"))
})
@staticmethod
def _fix_os_path(path):
return (re.sub(r"[\\]+", '\\' * 4, path) if "windows" in
util.get_systype() else path)
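# Illustrative usage sketch (project path, IDE name and board id are assumptions):
#
#   generator = ProjectGenerator("/path/to/project", "eclipse", board="uno")
#   generator.generate()  # renders every matching *.tpl into the project dir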
| mit |
chrisdroid/nexmon | utilities/wireshark/tools/dftestlib/util.py | 31 | 1618 | # Copyright (c) 2013 by Gilbert Ramirez <gram@alumni.rice.edu>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import subprocess, sys
SUCCESS = 0
def exec_cmdv(cmdv, cwd=None, stdin=None):
"""Run the commands in cmdv, returning (retval, output),
where output is stdout and stderr combined.
If cwd is given, the child process runs in that directory.
If a filehandle is passed as stdin, it is used as stdin.
If there is an OS-level error, None is the retval."""
try:
output = subprocess.check_output(cmdv, stderr=subprocess.STDOUT,
cwd=cwd, stdin=stdin)
retval = SUCCESS
# If file isn't executable
except OSError as e:
return (None, str(e))
# If process returns non-zero
except subprocess.CalledProcessError as e:
output = e.output
retval = e.returncode
if sys.version_info[0] >= 3:
output = output.decode('utf-8')
return (retval, output)
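# Illustrative usage sketch (the command vector is an assumption):
#
#   retval, output = exec_cmdv(["dftest", "ip.addr == 127.0.0.1"])
#   if retval is None:
#       print("could not launch process: %s" % output)
#   elif retval != SUCCESS:
#       print("process exited with %d" % retval)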
| gpl-3.0 |
ArnossArnossi/django | django/contrib/staticfiles/management/commands/runserver.py | 216 | 1360 | from django.conf import settings
from django.contrib.staticfiles.handlers import StaticFilesHandler
from django.core.management.commands.runserver import \
Command as RunserverCommand
class Command(RunserverCommand):
help = "Starts a lightweight Web server for development and also serves static files."
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument('--nostatic', action="store_false", dest='use_static_handler', default=True,
help='Tells Django to NOT automatically serve static files at STATIC_URL.')
parser.add_argument('--insecure', action="store_true", dest='insecure_serving', default=False,
help='Allows serving static files even if DEBUG is False.')
def get_handler(self, *args, **options):
"""
Returns the static files serving handler wrapping the default handler,
if static files should be served. Otherwise just returns the default
handler.
"""
handler = super(Command, self).get_handler(*args, **options)
use_static_handler = options.get('use_static_handler', True)
insecure_serving = options.get('insecure_serving', False)
if use_static_handler and (settings.DEBUG or insecure_serving):
return StaticFilesHandler(handler)
return handler
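# Illustrative invocations of the flags defined above (shell sketch):
#
#   python manage.py runserver                # serve static files at STATIC_URL
#   python manage.py runserver --nostatic     # disable automatic static serving
#   python manage.py runserver --insecure     # serve static files even if DEBUG=False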
| bsd-3-clause |
Sumith1896/sympy | sympy/series/tests/test_demidovich.py | 116 | 4679 | from sympy import limit, Symbol, oo, sqrt, Rational, log, exp, cos, sin, tan, \
pi, asin, together, root
# Numbers listed with the tests refer to problem numbers in the book
# "Anti-demidovich, problemas resueltos, Ed. URSS"
x = Symbol("x")
def test_leadterm():
assert (3 + 2*x**(log(3)/log(2) - 1)).leadterm(x) == (3, 0)
def root3(x):
return root(x, 3)
def root4(x):
return root(x, 4)
def test_Limits_simple_0():
assert limit((2**(x + 1) + 3**(x + 1))/(2**x + 3**x), x, oo) == 3 # 175
def test_Limits_simple_1():
assert limit((x + 1)*(x + 2)*(x + 3)/x**3, x, oo) == 1 # 172
assert limit(sqrt(x + 1) - sqrt(x), x, oo) == 0 # 179
    assert limit((2*x - 3)*(3*x + 5)*(4*x - 6)/(3*x**3 + x - 1), x, oo) == 8 # Example 1
    assert limit(x/root3(x**3 + 10), x, oo) == 1 # Example 2
assert limit((x + 1)**2/(x**2 + 1), x, oo) == 1 # 181
def test_Limits_simple_2():
assert limit(1000*x/(x**2 - 1), x, oo) == 0 # 182
assert limit((x**2 - 5*x + 1)/(3*x + 7), x, oo) == oo # 183
assert limit((2*x**2 - x + 3)/(x**3 - 8*x + 5), x, oo) == 0 # 184
assert limit((2*x**2 - 3*x - 4)/sqrt(x**4 + 1), x, oo) == 2 # 186
assert limit((2*x + 3)/(x + root3(x)), x, oo) == 2 # 187
assert limit(x**2/(10 + x*sqrt(x)), x, oo) == oo # 188
assert limit(root3(x**2 + 1)/(x + 1), x, oo) == 0 # 189
assert limit(sqrt(x)/sqrt(x + sqrt(x + sqrt(x))), x, oo) == 1 # 190
def test_Limits_simple_3a():
a = Symbol('a')
#issue 3513
assert together(limit((x**2 - (a + 1)*x + a)/(x**3 - a**3), x, a)) == \
(a - 1)/(3*a**2) # 196
def test_Limits_simple_3b():
h = Symbol("h")
assert limit(((x + h)**3 - x**3)/h, h, 0) == 3*x**2 # 197
assert limit((1/(1 - x) - 3/(1 - x**3)), x, 1) == -1 # 198
    assert limit((sqrt(1 + x) - 1)/(root3(1 + x) - 1), x, 0) == Rational(3)/2 # Example 4
assert limit((sqrt(x) - 1)/(x - 1), x, 1) == Rational(1)/2 # 199
assert limit((sqrt(x) - 8)/(root3(x) - 4), x, 64) == 3 # 200
assert limit((root3(x) - 1)/(root4(x) - 1), x, 1) == Rational(4)/3 # 201
assert limit(
(root3(x**2) - 2*root3(x) + 1)/(x - 1)**2, x, 1) == Rational(1)/9 # 202
def test_Limits_simple_4a():
a = Symbol('a')
    assert limit((sqrt(x) - sqrt(a))/(x - a), x, a) == 1/(2*sqrt(a)) # Example 5
assert limit((sqrt(x) - 1)/(root3(x) - 1), x, 1) == Rational(3)/2 # 205
assert limit((sqrt(1 + x) - sqrt(1 - x))/x, x, 0) == 1 # 207
assert limit(sqrt(x**2 - 5*x + 6) - x, x, oo) == -Rational(5)/2 # 213
def test_limits_simple_4aa():
assert limit(x*(sqrt(x**2 + 1) - x), x, oo) == Rational(1)/2 # 214
def test_Limits_simple_4b():
#issue 3511
assert limit(x - root3(x**3 - 1), x, oo) == 0 # 215
def test_Limits_simple_4c():
assert limit(log(1 + exp(x))/x, x, -oo) == 0 # 267a
assert limit(log(1 + exp(x))/x, x, oo) == 1 # 267b
def test_bounded():
assert limit(sin(x)/x, x, oo) == 0 # 216b
assert limit(x*sin(1/x), x, 0) == 0 # 227a
def test_f1a():
h = Symbol("h")
#issue 3508:
    assert limit((sin(2*x)/x)**(1 + x), x, 0) == 2 # Example 7
def test_f1a2():
#issue 3509:
    assert limit(((x - 1)/(x + 1))**x, x, oo) == exp(-2) # Example 9
def test_f1b():
m = Symbol("m")
n = Symbol("n")
h = Symbol("h")
a = Symbol("a")
assert limit(sin(x)/x, x, 2) == sin(2)/2 # 216a
assert limit(sin(3*x)/x, x, 0) == 3 # 217
assert limit(sin(5*x)/sin(2*x), x, 0) == Rational(5)/2 # 218
assert limit(sin(pi*x)/sin(3*pi*x), x, 0) == Rational(1)/3 # 219
assert limit(x*sin(pi/x), x, oo) == pi # 220
assert limit((1 - cos(x))/x**2, x, 0) == Rational(1, 2) # 221
assert limit(x*sin(1/x), x, oo) == 1 # 227b
assert limit((cos(m*x) - cos(n*x))/x**2, x, 0) == ((n**2 - m**2)/2) # 232
assert limit((tan(x) - sin(x))/x**3, x, 0) == Rational(1, 2) # 233
assert limit((x - sin(2*x))/(x + sin(3*x)), x, 0) == -Rational(1, 4) # 237
assert limit((1 - sqrt(cos(x)))/x**2, x, 0) == Rational(1, 4) # 239
assert limit((sqrt(1 + sin(x)) - sqrt(1 - sin(x)))/x, x, 0) == 1 # 240
    assert limit((1 + h/x)**x, x, oo) == exp(h) # Example 9
assert limit((sin(x) - sin(a))/(x - a), x, a) == cos(a) # 222, *176
assert limit((cos(x) - cos(a))/(x - a), x, a) == -sin(a) # 223
assert limit((sin(x + h) - sin(x))/h, h, 0) == cos(x) # 225
def test_f2a():
    assert limit(((x + 1)/(2*x + 1))**(x**2), x, oo) == 0 # Example 8
def test_f2():
assert limit((sqrt(
cos(x)) - root3(cos(x)))/(sin(x)**2), x, 0) == -Rational(1, 12) # *184
def test_f3():
a = Symbol('a')
#issue 3504
assert limit(asin(a*x)/x, x, 0) == a
| bsd-3-clause |
cragusa/cocoma | bin/Logger.py | 1 | 5704 | #!/usr/bin/env python
#Copyright 2012-2013 SAP Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is part of the COCOMA framework
#
# COCOMA is a framework for COntrolled COntentious and MAlicious patterns
#
import psutil,time,Library,logging,EMQproducer
from datetime import datetime as dt
from logging import handlers
from EMQproducer import Producer
global producer
producer = Producer()
global myName
myName = "Logger"
emulationEndLogger = None
def singleLogger(elementName,level=None,filename=None):
#file writing handler
producer=Producer()
HOMEPATH= Library.getHomepath()
global emulationEndLogger
emulationEndLogger=Library.loggerSet("Logger")
    def logLevelGet():
        # Map the configured core log level onto logging module constants;
        # anything other than "debug" falls back to INFO.
        LogLevel = Library.readLogLevel("coreloglevel")
        if LogLevel == "debug":
            LOG_LEVEL = logging.DEBUG
        else:
            LOG_LEVEL = logging.INFO
        return LOG_LEVEL
if level==None:
level=logLevelGet()
fileLogger=logging.getLogger(elementName)
fileLogger.setLevel(level)
#we do not add additional handlers if they are there
if not len(fileLogger.handlers):
#adding producer handler
#bHandler= EMQproducer.BroadcastLogHandler(elementName,producer)
#fileLogger.addHandler(bHandler)
#EMQproducer.StreamAndBroadcastHandler("TEST",producer)
if filename == None:
#setting log rotation for 10 files each up to 10000000 bytes (10MB)
fileHandler = handlers.RotatingFileHandler(HOMEPATH+"/logs/COCOMAlogfile.csv",'a', 10000000, 10)
fileLoggerFormatter=logging.Formatter ('%(asctime)s;%(name)s;%(levelname)s;%(message)s',datefmt='%m/%d/%Y %H:%M:%S')
fileHandler.setFormatter(fileLoggerFormatter)
fileLogger.addHandler(fileHandler)
#cli writing handler
cliLoggerFormatter=logging.Formatter ('%(asctime)s - [%(name)s] - %(levelname)s : %(message)s',datefmt='%m/%d/%Y %H:%M:%S')
cliHandler = logging.StreamHandler()
cliHandler.setFormatter(cliLoggerFormatter)
fileLogger.addHandler(cliHandler)
else:
fileHandler= logging.FileHandler(HOMEPATH+"/logs/"+str(filename))
fileLoggerFormatter=logging.Formatter ('%(asctime)s;%(name)s;%(levelname)s;%(message)s',datefmt='%m/%d/%Y %H:%M:%S')
fileHandler.setFormatter(fileLoggerFormatter)
fileLogger.addHandler(fileHandler)
return fileLogger
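# Illustrative usage sketch (element names and messages are assumptions):
#
#   log = singleLogger("Scheduler")
#   log.info("scheduler started")  # rotating COCOMAlogfile.csv plus stdout
#   jobLog = singleLogger("Job-1", logging.DEBUG, "job1.log")  # dedicated log file
#   jobLog.debug("job details")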
# Logger job that collects system stats during an emulation, run by the scheduler
def emulationEnd(emulationName):
"""
IN: job that executes at the end of emulation
DOING: just producing logger notification
OUT: nothing
"""
try:
print "Emulation Time expired, removing extra jobs and stopping running processes"
global emulationEndLogger
msg = {"Action":"Emulation finished","EmulationName":str(emulationName)}
producer.sendmsg(myName,msg)
emulationEndLogger.info(msg)
#emulationEndLogger.info("Emulation '"+str(emulationName)+"' finished.")
Library.removeExtraJobs(emulationName)
Library.killRemainingProcesses()
Library.deleteFiles("/tmp/stressapptestFile", "*") # Remove any stressappTest files left behind from I/O loading
return True
except:
return False
def loadMon(duration,interval,emulationID,emulationName,emuStartTime):
HOMEPATH= Library.getHomepath()
emulationName=str(emulationName)
interval=int(interval)
'''
starting cpu monitoring in the loop
'''
iterationsNo=int(duration)/int(interval)
try:
f = open(HOMEPATH+"/logs/"+str(emulationID)+"-"+str(emulationName)+"-res"+"_"+str(emuStartTime)+".csv", 'a')
f.write(emulationName+";\nCountdown;Time;CPU(%);MEM(%);IOread(bytes);IOwrite(bytes);NET(bytes_sent)\n")
#start time
initTime=time.time()
while iterationsNo !=0:
CPU=str(psutil.cpu_percent(interval, False))
#MEM=str(psutil.virtual_memory().percent)
MEM=str(psutil.avail_virtmem())
IOr=str(psutil.disk_io_counters().read_time)
IOw=str(psutil.disk_io_counters().write_time)
NET=str(psutil.network_io_counters(False).bytes_sent)
#print (emulationName+";\nTime;CPU(%);MEM(%);IOread(bytes);IOwrite(bytes);NET(bytes_sent)\n"+str(time.time())+";"+CPU+";"+MEM+";"+IOr+";"+IOw+";"+NET)
probeTime=time.time()-initTime
timeStamp=dt.now()
f.write(str(int(probeTime))+";"+str(timeStamp.strftime("%Y-%m-%d %H:%M:%S.%f"))+";"+CPU+";"+MEM+";"+IOr+";"+IOw+";"+NET+"\n")
iterationsNo=iterationsNo-1
    except Exception,e:
        print "Unable to create log file\nError: ",e
    finally:
        if 'f' in locals():
            f.close()  # release the log file handle
if __name__ == '__main__':
    duration = 20
    interval = 1
    emulationID = 1  # placeholder values for a manual test run
    emulationName = "Emulation-1"
    emuStartTime = time.strftime("%Y%m%d-%H%M%S")
    loadMon(duration, interval, emulationID, emulationName, emuStartTime)
| apache-2.0 |
gabelula/b-counted | .google_appengine/lib/django/django/utils/tzinfo.py | 34 | 1455 | "Implementation of tzinfo classes for use with datetime.datetime."
import time
from datetime import timedelta, tzinfo
class FixedOffset(tzinfo):
"Fixed offset in minutes east from UTC."
def __init__(self, offset):
self.__offset = timedelta(minutes=offset)
self.__name = "%+03d%02d" % (offset // 60, offset % 60)
def __repr__(self):
return self.__name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
return timedelta(0)
class LocalTimezone(tzinfo):
"Proxy timezone information from time module."
def __init__(self, dt):
tzinfo.__init__(self, dt)
self._tzname = time.tzname[self._isdst(dt)]
def __repr__(self):
return self._tzname
def utcoffset(self, dt):
if self._isdst(dt):
return timedelta(seconds=-time.altzone)
else:
return timedelta(seconds=-time.timezone)
def dst(self, dt):
if self._isdst(dt):
return timedelta(seconds=-time.altzone) - timedelta(seconds=-time.timezone)
else:
return timedelta(0)
def tzname(self, dt):
return time.tzname[self._isdst(dt)]
def _isdst(self, dt):
tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, -1)
stamp = time.mktime(tt)
tt = time.localtime(stamp)
return tt.tm_isdst > 0
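# Illustrative usage sketch:
#
#   from datetime import datetime
#   now = datetime.now()
#   print now.replace(tzinfo=LocalTimezone(now)).isoformat()
#   print now.replace(tzinfo=FixedOffset(330)).isoformat()  # UTC+05:30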
| apache-2.0 |
MattDevo/edk2 | AppPkg/Applications/Python/Python-2.7.2/Lib/_abcoll.py | 56 | 15273 | # Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Abstract Base Classes (ABCs) for collections, according to PEP 3119.
DON'T USE THIS MODULE DIRECTLY! The classes here should be imported
via collections; they are defined here only to alleviate certain
bootstrapping issues. Unit tests are in test_collections.
"""
from abc import ABCMeta, abstractmethod
import sys
__all__ = ["Hashable", "Iterable", "Iterator",
"Sized", "Container", "Callable",
"Set", "MutableSet",
"Mapping", "MutableMapping",
"MappingView", "KeysView", "ItemsView", "ValuesView",
"Sequence", "MutableSequence",
]
### ONE-TRICK PONIES ###
def _hasattr(C, attr):
try:
return any(attr in B.__dict__ for B in C.__mro__)
except AttributeError:
# Old-style class
return hasattr(C, attr)
class Hashable:
__metaclass__ = ABCMeta
@abstractmethod
def __hash__(self):
return 0
@classmethod
def __subclasshook__(cls, C):
if cls is Hashable:
try:
for B in C.__mro__:
if "__hash__" in B.__dict__:
if B.__dict__["__hash__"]:
return True
break
except AttributeError:
# Old-style class
if getattr(C, "__hash__", None):
return True
return NotImplemented
class Iterable:
__metaclass__ = ABCMeta
@abstractmethod
def __iter__(self):
while False:
yield None
@classmethod
def __subclasshook__(cls, C):
if cls is Iterable:
if _hasattr(C, "__iter__"):
return True
return NotImplemented
Iterable.register(str)
class Iterator(Iterable):
@abstractmethod
def next(self):
raise StopIteration
def __iter__(self):
return self
@classmethod
def __subclasshook__(cls, C):
if cls is Iterator:
if _hasattr(C, "next") and _hasattr(C, "__iter__"):
return True
return NotImplemented
class Sized:
__metaclass__ = ABCMeta
@abstractmethod
def __len__(self):
return 0
@classmethod
def __subclasshook__(cls, C):
if cls is Sized:
if _hasattr(C, "__len__"):
return True
return NotImplemented
class Container:
__metaclass__ = ABCMeta
@abstractmethod
def __contains__(self, x):
return False
@classmethod
def __subclasshook__(cls, C):
if cls is Container:
if _hasattr(C, "__contains__"):
return True
return NotImplemented
class Callable:
__metaclass__ = ABCMeta
@abstractmethod
def __call__(self, *args, **kwds):
return False
@classmethod
def __subclasshook__(cls, C):
if cls is Callable:
if _hasattr(C, "__call__"):
return True
return NotImplemented
### SETS ###
class Set(Sized, Iterable, Container):
"""A set is a finite, iterable container.
This class provides concrete generic implementations of all
methods except for __contains__, __iter__ and __len__.
To override the comparisons (presumably for speed, as the
semantics are fixed), all you have to do is redefine __le__ and
then the other operations will automatically follow suit.
"""
def __le__(self, other):
if not isinstance(other, Set):
return NotImplemented
if len(self) > len(other):
return False
for elem in self:
if elem not in other:
return False
return True
def __lt__(self, other):
if not isinstance(other, Set):
return NotImplemented
return len(self) < len(other) and self.__le__(other)
def __gt__(self, other):
if not isinstance(other, Set):
return NotImplemented
return other < self
def __ge__(self, other):
if not isinstance(other, Set):
return NotImplemented
return other <= self
def __eq__(self, other):
if not isinstance(other, Set):
return NotImplemented
return len(self) == len(other) and self.__le__(other)
def __ne__(self, other):
return not (self == other)
@classmethod
def _from_iterable(cls, it):
'''Construct an instance of the class from any iterable input.
Must override this method if the class constructor signature
does not accept an iterable for an input.
'''
return cls(it)
def __and__(self, other):
if not isinstance(other, Iterable):
return NotImplemented
return self._from_iterable(value for value in other if value in self)
def isdisjoint(self, other):
for value in other:
if value in self:
return False
return True
def __or__(self, other):
if not isinstance(other, Iterable):
return NotImplemented
chain = (e for s in (self, other) for e in s)
return self._from_iterable(chain)
def __sub__(self, other):
if not isinstance(other, Set):
if not isinstance(other, Iterable):
return NotImplemented
other = self._from_iterable(other)
return self._from_iterable(value for value in self
if value not in other)
def __xor__(self, other):
if not isinstance(other, Set):
if not isinstance(other, Iterable):
return NotImplemented
other = self._from_iterable(other)
return (self - other) | (other - self)
# Sets are not hashable by default, but subclasses can change this
__hash__ = None
def _hash(self):
"""Compute the hash value of a set.
Note that we don't define __hash__: not all sets are hashable.
But if you define a hashable set type, its __hash__ should
call this function.
        This must be compatible with __eq__.
All sets ought to compare equal if they contain the same
elements, regardless of how they are implemented, and
regardless of the order of the elements; so there's not much
freedom for __eq__ or __hash__. We match the algorithm used
by the built-in frozenset type.
"""
MAX = sys.maxint
MASK = 2 * MAX + 1
n = len(self)
h = 1927868237 * (n + 1)
h &= MASK
for x in self:
hx = hash(x)
h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167
h &= MASK
h = h * 69069 + 907133923
h &= MASK
if h > MAX:
h -= MASK + 1
if h == -1:
h = 590923713
return h
Set.register(frozenset)
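# Illustrative sketch of a user-defined Set that opts into hashing via _hash(),
# as the docstring above suggests (ListBasedSet is not part of this module):
#
#   class ListBasedSet(Set):
#       def __init__(self, iterable):
#           self.elements = lst = []
#           for value in iterable:
#               if value not in lst:
#                   lst.append(value)
#       def __iter__(self):
#           return iter(self.elements)
#       def __contains__(self, value):
#           return value in self.elements
#       def __len__(self):
#           return len(self.elements)
#       __hash__ = Set._hash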
class MutableSet(Set):
@abstractmethod
def add(self, value):
"""Add an element."""
raise NotImplementedError
@abstractmethod
def discard(self, value):
"""Remove an element. Do not raise an exception if absent."""
raise NotImplementedError
def remove(self, value):
"""Remove an element. If not a member, raise a KeyError."""
if value not in self:
raise KeyError(value)
self.discard(value)
def pop(self):
"""Return the popped value. Raise KeyError if empty."""
it = iter(self)
try:
value = next(it)
except StopIteration:
raise KeyError
self.discard(value)
return value
def clear(self):
"""This is slow (creates N new iterators!) but effective."""
try:
while True:
self.pop()
except KeyError:
pass
def __ior__(self, it):
for value in it:
self.add(value)
return self
def __iand__(self, it):
for value in (self - it):
self.discard(value)
return self
def __ixor__(self, it):
if it is self:
self.clear()
else:
if not isinstance(it, Set):
it = self._from_iterable(it)
for value in it:
if value in self:
self.discard(value)
else:
self.add(value)
return self
def __isub__(self, it):
if it is self:
self.clear()
else:
for value in it:
self.discard(value)
return self
MutableSet.register(set)
### MAPPINGS ###
class Mapping(Sized, Iterable, Container):
@abstractmethod
def __getitem__(self, key):
raise KeyError
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def __contains__(self, key):
try:
self[key]
except KeyError:
return False
else:
return True
def iterkeys(self):
return iter(self)
def itervalues(self):
for key in self:
yield self[key]
def iteritems(self):
for key in self:
yield (key, self[key])
def keys(self):
return list(self)
def items(self):
return [(key, self[key]) for key in self]
def values(self):
return [self[key] for key in self]
# Mappings are not hashable by default, but subclasses can change this
__hash__ = None
def __eq__(self, other):
if not isinstance(other, Mapping):
return NotImplemented
return dict(self.items()) == dict(other.items())
def __ne__(self, other):
return not (self == other)
class MappingView(Sized):
def __init__(self, mapping):
self._mapping = mapping
def __len__(self):
return len(self._mapping)
def __repr__(self):
return '{0.__class__.__name__}({0._mapping!r})'.format(self)
class KeysView(MappingView, Set):
@classmethod
def _from_iterable(self, it):
return set(it)
def __contains__(self, key):
return key in self._mapping
def __iter__(self):
for key in self._mapping:
yield key
class ItemsView(MappingView, Set):
@classmethod
def _from_iterable(self, it):
return set(it)
def __contains__(self, item):
key, value = item
try:
v = self._mapping[key]
except KeyError:
return False
else:
return v == value
def __iter__(self):
for key in self._mapping:
yield (key, self._mapping[key])
class ValuesView(MappingView):
def __contains__(self, value):
for key in self._mapping:
if value == self._mapping[key]:
return True
return False
def __iter__(self):
for key in self._mapping:
yield self._mapping[key]
class MutableMapping(Mapping):
@abstractmethod
def __setitem__(self, key, value):
raise KeyError
@abstractmethod
def __delitem__(self, key):
raise KeyError
__marker = object()
def pop(self, key, default=__marker):
try:
value = self[key]
except KeyError:
if default is self.__marker:
raise
return default
else:
del self[key]
return value
def popitem(self):
try:
key = next(iter(self))
except StopIteration:
raise KeyError
value = self[key]
del self[key]
return key, value
def clear(self):
try:
while True:
self.popitem()
except KeyError:
pass
def update(*args, **kwds):
if len(args) > 2:
raise TypeError("update() takes at most 2 positional "
"arguments ({} given)".format(len(args)))
elif not args:
raise TypeError("update() takes at least 1 argument (0 given)")
self = args[0]
other = args[1] if len(args) >= 2 else ()
if isinstance(other, Mapping):
for key in other:
self[key] = other[key]
elif hasattr(other, "keys"):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
def setdefault(self, key, default=None):
try:
return self[key]
except KeyError:
self[key] = default
return default
MutableMapping.register(dict)
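# Illustrative sketch of a minimal concrete MutableMapping; the five abstract
# methods are enough for update(), setdefault(), pop() etc. to work through the
# mixin methods above (DictBackedMapping is not part of this module):
#
#   class DictBackedMapping(MutableMapping):
#       def __init__(self):
#           self._data = {}
#       def __getitem__(self, key):
#           return self._data[key]
#       def __setitem__(self, key, value):
#           self._data[key] = value
#       def __delitem__(self, key):
#           del self._data[key]
#       def __iter__(self):
#           return iter(self._data)
#       def __len__(self):
#           return len(self._data)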
### SEQUENCES ###
class Sequence(Sized, Iterable, Container):
"""All the operations on a read-only sequence.
Concrete subclasses must override __new__ or __init__,
__getitem__, and __len__.
"""
@abstractmethod
def __getitem__(self, index):
raise IndexError
def __iter__(self):
i = 0
try:
while True:
v = self[i]
yield v
i += 1
except IndexError:
return
def __contains__(self, value):
for v in self:
if v == value:
return True
return False
def __reversed__(self):
for i in reversed(range(len(self))):
yield self[i]
def index(self, value):
for i, v in enumerate(self):
if v == value:
return i
raise ValueError
def count(self, value):
return sum(1 for v in self if v == value)
Sequence.register(tuple)
Sequence.register(basestring)
Sequence.register(buffer)
Sequence.register(xrange)
class MutableSequence(Sequence):
@abstractmethod
def __setitem__(self, index, value):
raise IndexError
@abstractmethod
def __delitem__(self, index):
raise IndexError
@abstractmethod
def insert(self, index, value):
raise IndexError
def append(self, value):
self.insert(len(self), value)
def reverse(self):
n = len(self)
for i in range(n//2):
self[i], self[n-i-1] = self[n-i-1], self[i]
def extend(self, values):
for v in values:
self.append(v)
def pop(self, index=-1):
v = self[index]
del self[index]
return v
def remove(self, value):
del self[self.index(value)]
def __iadd__(self, values):
self.extend(values)
return self
MutableSequence.register(list)
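# Hedged example (not part of the original module): a MutableSequence only has
# to supply __getitem__, __setitem__, __delitem__, __len__ and insert();
# append(), extend(), pop(), remove(), reverse() and += are inherited from the
# mixins above. The name _DemoList is illustrative only.
class _DemoList(MutableSequence):
    def __init__(self):
        self._items = []
    def __getitem__(self, index):
        return self._items[index]
    def __setitem__(self, index, value):
        self._items[index] = value
    def __delitem__(self, index):
        del self._items[index]
    def __len__(self):
        return len(self._items)
    def insert(self, index, value):
        self._items.insert(index, value)
# usage: l = _DemoList(); l.append(1); l += [2, 3]; l.remove(2)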
| bsd-2-clause |
ifduyue/sentry | src/social_auth/south_migrations/0002_auto__add_unique_nonce_timestamp_salt_server_url__add_unique_associati.py | 5 | 7052 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from django.conf import settings
from social_auth.utils import custom_user_frozen_models
USER_MODEL = settings.AUTH_USER_MODEL
UID_LENGTH = getattr(settings, 'SOCIAL_AUTH_UID_LENGTH', 255)
NONCE_SERVER_URL_LENGTH = getattr(settings, 'SOCIAL_AUTH_NONCE_SERVER_URL_LENGTH', 255)
ASSOCIATION_SERVER_URL_LENGTH = getattr(settings, 'SOCIAL_AUTH_ASSOCIATION_SERVER_URL_LENGTH', 255)
ASSOCIATION_HANDLE_LENGTH = getattr(settings, 'SOCIAL_AUTH_ASSOCIATION_HANDLE_LENGTH', 255)
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding index on 'Nonce', fields ['timestamp']
db.create_index('social_auth_nonce', ['timestamp'])
# Adding unique constraint on 'Nonce', fields ['timestamp', 'salt', 'server_url']
db.create_unique('social_auth_nonce', ['timestamp', 'salt', 'server_url'])
# Adding index on 'Association', fields ['issued']
db.create_index('social_auth_association', ['issued'])
# Adding unique constraint on 'Association', fields ['handle', 'server_url']
db.create_unique('social_auth_association', ['handle', 'server_url'])
def backwards(self, orm):
# Removing unique constraint on 'Association', fields ['handle', 'server_url']
db.delete_unique('social_auth_association', ['handle', 'server_url'])
# Removing index on 'Association', fields ['issued']
db.delete_index('social_auth_association', ['issued'])
# Removing unique constraint on 'Nonce', fields ['timestamp', 'salt', 'server_url']
db.delete_unique('social_auth_nonce', ['timestamp', 'salt', 'server_url'])
# Removing index on 'Nonce', fields ['timestamp']
db.delete_index('social_auth_nonce', ['timestamp'])
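    # Hedged note (not part of the original migration): with the default table
    # names, forwards() issues roughly the following SQL; the exact statements
    # and generated constraint names vary per database backend:
    #
    #   CREATE INDEX ... ON social_auth_nonce (timestamp);
    #   ALTER TABLE social_auth_nonce ADD UNIQUE (timestamp, salt, server_url);
    #   CREATE INDEX ... ON social_auth_association (issued);
    #   ALTER TABLE social_auth_association ADD UNIQUE (handle, server_url);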
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'social_auth.association': {
'Meta': {'unique_together': "(('server_url', 'handle'),)", 'object_name': 'Association'},
'assoc_type': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'handle': ('django.db.models.fields.CharField', [], {'max_length': str(ASSOCIATION_HANDLE_LENGTH)}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issued': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'lifetime': ('django.db.models.fields.IntegerField', [], {}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'server_url': ('django.db.models.fields.CharField', [], {'max_length': str(ASSOCIATION_SERVER_URL_LENGTH)})
},
'social_auth.nonce': {
'Meta': {'unique_together': "(('server_url', 'timestamp', 'salt'),)", 'object_name': 'Nonce'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'salt': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'server_url': ('django.db.models.fields.CharField', [], {'max_length': str(NONCE_SERVER_URL_LENGTH)}),
'timestamp': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'})
},
'social_auth.usersocialauth': {
'Meta': {'unique_together': "(('provider', 'uid'),)", 'object_name': 'UserSocialAuth'},
'extra_data': ('social_auth.fields.JSONField', [], {'default': "'{}'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'uid': ('django.db.models.fields.CharField', [], {'max_length': str(UID_LENGTH)}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'social_auth'", 'to': "orm['" + USER_MODEL + "']"})
}
}
models.update(custom_user_frozen_models(USER_MODEL))
complete_apps = ['social_auth']
| bsd-3-clause |
DEVSENSE/PTVS | Python/Tests/TestData/VirtualEnv/env/Lib/encodings/iso2022_kr.py | 816 | 1053 | #
# iso2022_kr.py: Python Unicode Codec for ISO2022_KR
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import _codecs_iso2022, codecs
import _multibytecodec as mbc
codec = _codecs_iso2022.getcodec('iso2022_kr')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='iso2022_kr',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
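# Hedged usage sketch (not part of the original codec module): once the
# encodings package can import this file, the codec is reachable by name
# through the normal codecs machinery, e.g.:
#
#   import codecs
#   data = u'\ud55c'.encode('iso2022_kr')          # Korean syllable HAN
#   assert codecs.decode(data, 'iso2022_kr') == u'\ud55c'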
| apache-2.0 |
isc-projects/forge | tests/dhcpv4/ddns/test_ddns_no_tsig_request.py | 1 | 41287 | """DDNS without TSIG"""
# pylint: disable=invalid-name,line-too-long
import pytest
import misc
import srv_control
import srv_msg
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_add_success_Sflag():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
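# Hedged note (not part of the original suite): option 81 packs its flag bits
# as N=0x8, E=0x4, O=0x2, S=0x1 (RFC 4702), so the checks for 'flags', 1 in
# these tests mean only the S bit is set, and 'flags', 3 means S plus the
# server-set O bit. The helper below is illustrative only.
def _fqdn_flags(s=False, o=False, e=False, n=False):
    """Illustrative helper, assuming the RFC 4702 flag bit layout."""
    return (8 if n else 0) | (4 if e else 0) | (2 if o else 0) | (1 if s else 0)
# e.g. _fqdn_flags(s=True) == 1 and _fqdn_flags(s=True, o=True) == 3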
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_add_fail_Sflag():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.exae.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.exae.com.')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_notenabled_Sflag():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', False)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
    # Response option 81 MUST contain flags 10 (N and O bits set).
    # TODO: once flag checks are split out, verify the 'S', 'N' and 'O' bits individually.
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_update
def test_ddns4_notsig_forw_and_rev_update_success_Sflag():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
misc.test_setup()
srv_control.start_srv('DHCP', 'stopped')
srv_control.clear_some_data('leases')
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.11-192.168.50.11')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.11')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.11')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.11')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.11')
srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('11.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '11.50.168.192.in-addr.arpa.')
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_two_dhci_Sflag():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.11')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11')
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client1.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'client1.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
srv_msg.dns_option_content('ANSWER', 'rrname', 'client1.four.example.com.')
# Client 2 add
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.11')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.11')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client2.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.11')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'client2.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.11')
srv_msg.dns_option_content('ANSWER', 'rrname', 'client2.four.example.com.')
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_dhci_conflicts_Sflag():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.11')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('11.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11')
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client1.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'client1.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
srv_msg.dns_option_content('ANSWER', 'rrname', 'client1.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'client1.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
# Client 2 add
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.11')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.11')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client2.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.11')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'client2.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.11')
srv_msg.dns_option_content('ANSWER', 'rrname', 'client2.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('11.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'client2.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '11.50.168.192.in-addr.arpa.')
# Client 2 try to update client's 1 domain
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.11')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.11')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client1.four.example.com.')
srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.11')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 1)
srv_msg.response_check_option_content(81, 'fqdn', 'client1.four.example.com.')
# address and domain name should not be changed!
misc.test_procedure()
srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
srv_msg.dns_option_content('ANSWER', 'rrname', 'client1.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'client1.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
misc.test_procedure()
srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('11.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
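# Hedged note (not part of the original test): the conflict above is most
# likely detected through the DHCID RR that kea-dhcp-ddns stores alongside
# each A/PTR record (RFC 4703). An update for an existing FQDN whose DHCID
# does not match the requesting client's identity is refused, which is why
# client1's records survive while client2's old records are gone.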
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_add_success_withoutflag_override_client():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('override-client-update', True)
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 3)
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.reverse_add
def test_ddns4_notsig_rev_success_withoutflag():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 0)
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.reverse_add
def test_ddns4_notsig_rev_withoutflag_notenabled():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('enable-updates', False)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
    # Response option 81 MUST contain flags 0.
    # TODO: once flag checks are split out, verify the 'S', 'N' and 'O' bits individually.
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.reverse_add
def test_ddns4_notsig_rev_Nflag_override_no_update():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
srv_control.add_ddns_server('127.0.0.1', '53001')
srv_control.add_ddns_server_options('override-no-update', True)
srv_control.add_ddns_server_options('enable-updates', True)
srv_control.add_ddns_server_options('generated-prefix', 'four')
srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
srv_control.use_dns_set_number(20)
srv_control.start_srv('DNS', 'started')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER', expect_include=False)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'FQDN_flags', 'N')
srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
srv_msg.client_does_include('Client', 'fqdn')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.10')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_include_option(81)
srv_msg.response_check_option_content(81, 'flags', 3)
srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')
misc.test_procedure()
srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
misc.test_procedure()
srv_msg.dns_question_record('aa.four.example.com.', 'A', 'IN')
srv_msg.client_send_dns_query()
misc.pass_criteria()
srv_msg.send_wait_for_query('MUST')
srv_msg.dns_option('ANSWER')
srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')
| isc |
HiroIshikawa/21playground | flask-rethink/env/lib/python3.5/site-packages/flask/testsuite/config.py | 556 | 11820 | # -*- coding: utf-8 -*-
"""
flask.testsuite.config
~~~~~~~~~~~~~~~~~~~~~~
Configuration and instances.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import flask
import pkgutil
import unittest
from contextlib import contextmanager
from flask.testsuite import FlaskTestCase
# config keys used for the ConfigTestCase
TEST_KEY = 'foo'
SECRET_KEY = 'devkey'
class ConfigTestCase(FlaskTestCase):
def common_object_test(self, app):
self.assert_equal(app.secret_key, 'devkey')
self.assert_equal(app.config['TEST_KEY'], 'foo')
self.assert_not_in('ConfigTestCase', app.config)
def test_config_from_file(self):
app = flask.Flask(__name__)
app.config.from_pyfile(__file__.rsplit('.', 1)[0] + '.py')
self.common_object_test(app)
def test_config_from_object(self):
app = flask.Flask(__name__)
app.config.from_object(__name__)
self.common_object_test(app)
def test_config_from_class(self):
class Base(object):
TEST_KEY = 'foo'
class Test(Base):
SECRET_KEY = 'devkey'
app = flask.Flask(__name__)
app.config.from_object(Test)
self.common_object_test(app)
def test_config_from_envvar(self):
env = os.environ
try:
os.environ = {}
app = flask.Flask(__name__)
try:
app.config.from_envvar('FOO_SETTINGS')
except RuntimeError as e:
self.assert_true("'FOO_SETTINGS' is not set" in str(e))
else:
                self.fail('expected exception')
self.assert_false(app.config.from_envvar('FOO_SETTINGS', silent=True))
os.environ = {'FOO_SETTINGS': __file__.rsplit('.', 1)[0] + '.py'}
self.assert_true(app.config.from_envvar('FOO_SETTINGS'))
self.common_object_test(app)
finally:
os.environ = env
def test_config_from_envvar_missing(self):
env = os.environ
try:
os.environ = {'FOO_SETTINGS': 'missing.cfg'}
try:
app = flask.Flask(__name__)
app.config.from_envvar('FOO_SETTINGS')
except IOError as e:
msg = str(e)
self.assert_true(msg.startswith('[Errno 2] Unable to load configuration '
'file (No such file or directory):'))
self.assert_true(msg.endswith("missing.cfg'"))
else:
self.fail('expected IOError')
self.assertFalse(app.config.from_envvar('FOO_SETTINGS', silent=True))
finally:
os.environ = env
def test_config_missing(self):
app = flask.Flask(__name__)
try:
app.config.from_pyfile('missing.cfg')
except IOError as e:
msg = str(e)
self.assert_true(msg.startswith('[Errno 2] Unable to load configuration '
'file (No such file or directory):'))
self.assert_true(msg.endswith("missing.cfg'"))
else:
            self.fail('expected IOError')
self.assert_false(app.config.from_pyfile('missing.cfg', silent=True))
def test_session_lifetime(self):
app = flask.Flask(__name__)
app.config['PERMANENT_SESSION_LIFETIME'] = 42
self.assert_equal(app.permanent_session_lifetime.seconds, 42)
class LimitedLoaderMockWrapper(object):
def __init__(self, loader):
self.loader = loader
def __getattr__(self, name):
if name in ('archive', 'get_filename'):
            msg = 'Mocking a loader which does not have `%s`.' % name
raise AttributeError(msg)
return getattr(self.loader, name)
@contextmanager
def patch_pkgutil_get_loader(wrapper_class=LimitedLoaderMockWrapper):
"""Patch pkgutil.get_loader to give loader without get_filename or archive.
This provides for tests where a system has custom loaders, e.g. Google App
Engine's HardenedModulesHook, which have neither the `get_filename` method
nor the `archive` attribute.
"""
old_get_loader = pkgutil.get_loader
def get_loader(*args, **kwargs):
return wrapper_class(old_get_loader(*args, **kwargs))
try:
pkgutil.get_loader = get_loader
yield
finally:
pkgutil.get_loader = old_get_loader
class InstanceTestCase(FlaskTestCase):
def test_explicit_instance_paths(self):
here = os.path.abspath(os.path.dirname(__file__))
try:
flask.Flask(__name__, instance_path='instance')
except ValueError as e:
self.assert_in('must be absolute', str(e))
else:
self.fail('Expected value error')
app = flask.Flask(__name__, instance_path=here)
self.assert_equal(app.instance_path, here)
def test_main_module_paths(self):
        # Test an app with '__main__' as the import name, which falls back to the cwd.
from main_app import app
here = os.path.abspath(os.getcwd())
self.assert_equal(app.instance_path, os.path.join(here, 'instance'))
if 'main_app' in sys.modules:
del sys.modules['main_app']
def test_uninstalled_module_paths(self):
from config_module_app import app
here = os.path.abspath(os.path.dirname(__file__))
self.assert_equal(app.instance_path, os.path.join(here, 'test_apps', 'instance'))
def test_uninstalled_package_paths(self):
from config_package_app import app
here = os.path.abspath(os.path.dirname(__file__))
self.assert_equal(app.instance_path, os.path.join(here, 'test_apps', 'instance'))
def test_installed_module_paths(self):
here = os.path.abspath(os.path.dirname(__file__))
expected_prefix = os.path.join(here, 'test_apps')
real_prefix, sys.prefix = sys.prefix, expected_prefix
site_packages = os.path.join(expected_prefix, 'lib', 'python2.5', 'site-packages')
sys.path.append(site_packages)
try:
import site_app
self.assert_equal(site_app.app.instance_path,
os.path.join(expected_prefix, 'var',
'site_app-instance'))
finally:
sys.prefix = real_prefix
sys.path.remove(site_packages)
if 'site_app' in sys.modules:
del sys.modules['site_app']
def test_installed_module_paths_with_limited_loader(self):
here = os.path.abspath(os.path.dirname(__file__))
expected_prefix = os.path.join(here, 'test_apps')
real_prefix, sys.prefix = sys.prefix, expected_prefix
site_packages = os.path.join(expected_prefix, 'lib', 'python2.5', 'site-packages')
sys.path.append(site_packages)
with patch_pkgutil_get_loader():
try:
import site_app
self.assert_equal(site_app.app.instance_path,
os.path.join(expected_prefix, 'var',
'site_app-instance'))
finally:
sys.prefix = real_prefix
sys.path.remove(site_packages)
if 'site_app' in sys.modules:
del sys.modules['site_app']
def test_installed_package_paths(self):
here = os.path.abspath(os.path.dirname(__file__))
expected_prefix = os.path.join(here, 'test_apps')
real_prefix, sys.prefix = sys.prefix, expected_prefix
installed_path = os.path.join(expected_prefix, 'path')
sys.path.append(installed_path)
try:
import installed_package
self.assert_equal(installed_package.app.instance_path,
os.path.join(expected_prefix, 'var',
'installed_package-instance'))
finally:
sys.prefix = real_prefix
sys.path.remove(installed_path)
if 'installed_package' in sys.modules:
del sys.modules['installed_package']
def test_installed_package_paths_with_limited_loader(self):
here = os.path.abspath(os.path.dirname(__file__))
expected_prefix = os.path.join(here, 'test_apps')
real_prefix, sys.prefix = sys.prefix, expected_prefix
installed_path = os.path.join(expected_prefix, 'path')
sys.path.append(installed_path)
with patch_pkgutil_get_loader():
try:
import installed_package
self.assert_equal(installed_package.app.instance_path,
os.path.join(expected_prefix, 'var',
'installed_package-instance'))
finally:
sys.prefix = real_prefix
sys.path.remove(installed_path)
if 'installed_package' in sys.modules:
del sys.modules['installed_package']
def test_prefix_package_paths(self):
here = os.path.abspath(os.path.dirname(__file__))
expected_prefix = os.path.join(here, 'test_apps')
real_prefix, sys.prefix = sys.prefix, expected_prefix
site_packages = os.path.join(expected_prefix, 'lib', 'python2.5', 'site-packages')
sys.path.append(site_packages)
try:
import site_package
self.assert_equal(site_package.app.instance_path,
os.path.join(expected_prefix, 'var',
'site_package-instance'))
finally:
sys.prefix = real_prefix
sys.path.remove(site_packages)
if 'site_package' in sys.modules:
del sys.modules['site_package']
def test_prefix_package_paths_with_limited_loader(self):
here = os.path.abspath(os.path.dirname(__file__))
expected_prefix = os.path.join(here, 'test_apps')
real_prefix, sys.prefix = sys.prefix, expected_prefix
site_packages = os.path.join(expected_prefix, 'lib', 'python2.5', 'site-packages')
sys.path.append(site_packages)
with patch_pkgutil_get_loader():
try:
import site_package
self.assert_equal(site_package.app.instance_path,
os.path.join(expected_prefix, 'var',
'site_package-instance'))
finally:
sys.prefix = real_prefix
sys.path.remove(site_packages)
if 'site_package' in sys.modules:
del sys.modules['site_package']
def test_egg_installed_paths(self):
here = os.path.abspath(os.path.dirname(__file__))
expected_prefix = os.path.join(here, 'test_apps')
real_prefix, sys.prefix = sys.prefix, expected_prefix
site_packages = os.path.join(expected_prefix, 'lib', 'python2.5', 'site-packages')
egg_path = os.path.join(site_packages, 'SiteEgg.egg')
sys.path.append(site_packages)
sys.path.append(egg_path)
try:
import site_egg # in SiteEgg.egg
self.assert_equal(site_egg.app.instance_path,
os.path.join(expected_prefix, 'var',
'site_egg-instance'))
finally:
sys.prefix = real_prefix
sys.path.remove(site_packages)
sys.path.remove(egg_path)
if 'site_egg' in sys.modules:
del sys.modules['site_egg']
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(ConfigTestCase))
suite.addTest(unittest.makeSuite(InstanceTestCase))
return suite
| mit |
willthames/ansible | lib/ansible/plugins/action/service.py | 31 | 3427 | # (c) 2015, Ansible Inc,
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
TRANSFERS_FILES = False
UNUSED_PARAMS = {
'systemd': ['pattern', 'runlevel', 'sleep', 'arguments', 'args'],
}
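# Illustration (hypothetical play, not executed here): a task like
# - service: name=nginx state=restarted use=systemd pattern=nginx
# has 'pattern' dropped (with a warning) before the systemd module runs,
# since systemd ignores it per UNUSED_PARAMS above.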
def run(self, tmp=None, task_vars=None):
''' handler for service operations '''
self._supports_check_mode = True
self._supports_async = True
result = super(ActionModule, self).run(tmp, task_vars)
module = self._task.args.get('use', 'auto').lower()
if module == 'auto':
try:
if self._task.delegate_to: # if we delegate, we should use delegated host's facts
module = self._templar.template("{{hostvars['%s']['ansible_facts']['ansible_service_mgr']}}" % self._task.delegate_to)
else:
module = self._templar.template('{{ansible_facts["ansible_service_mgr"]}}')
except:
pass # could not get it from template!
if module == 'auto':
facts = self._execute_module(module_name='setup', module_args=dict(gather_subset='!all', filter='ansible_service_mgr'), task_vars=task_vars)
self._display.debug("Facts %s" % facts)
module = facts.get('ansible_facts', {}).get('ansible_service_mgr', 'auto')
if not module or module == 'auto' or module not in self._shared_loader_obj.module_loader:
module = 'service'
if module != 'auto':
# run the detected (or explicitly requested) service module
new_module_args = self._task.args.copy()
if 'use' in new_module_args:
del new_module_args['use']
# for backwards compatibility
if 'state' in new_module_args and new_module_args['state'] == 'running':
self._display.deprecated(msg="state=running is deprecated. Please use state=started", version="2.7")
new_module_args['state'] = 'started'
if module in self.UNUSED_PARAMS:
for unused in self.UNUSED_PARAMS[module]:
if unused in new_module_args:
del new_module_args[unused]
self._display.warning('Ignoring "%s" as it is not used in "%s"' % (unused, module))
self._display.vvvv("Running %s" % module)
result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async))
else:
result['failed'] = True
result['msg'] = 'Could not detect which service manager to use. Try gathering facts or setting the "use" option.'
return result
| gpl-3.0 |
ASCrookes/django | django/contrib/gis/db/backends/mysql/schema.py | 448 | 3048 | import logging
from django.contrib.gis.db.models.fields import GeometryField
from django.db.backends.mysql.schema import DatabaseSchemaEditor
from django.db.utils import OperationalError
logger = logging.getLogger('django.contrib.gis')
class MySQLGISSchemaEditor(DatabaseSchemaEditor):
sql_add_spatial_index = 'CREATE SPATIAL INDEX %(index)s ON %(table)s(%(column)s)'
sql_drop_spatial_index = 'DROP INDEX %(index)s ON %(table)s'
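# With hypothetical model/field names, the templates above render to e.g.:
# CREATE SPATIAL INDEX `myapp_place_geom_id` ON `myapp_place`(`geom`)
# DROP INDEX `myapp_place_geom_id` ON `myapp_place`
# (the *_id index name comes from _create_spatial_index_name below).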
def __init__(self, *args, **kwargs):
super(MySQLGISSchemaEditor, self).__init__(*args, **kwargs)
self.geometry_sql = []
def skip_default(self, field):
return (
super(MySQLGISSchemaEditor, self).skip_default(field) or
# Geometry fields are stored as BLOB/TEXT and can't have defaults.
isinstance(field, GeometryField)
)
def column_sql(self, model, field, include_default=False):
column_sql = super(MySQLGISSchemaEditor, self).column_sql(model, field, include_default)
# MySQL doesn't support spatial indexes on NULL columns
if isinstance(field, GeometryField) and field.spatial_index and not field.null:
qn = self.connection.ops.quote_name
db_table = model._meta.db_table
self.geometry_sql.append(
self.sql_add_spatial_index % {
'index': qn(self._create_spatial_index_name(model, field)),
'table': qn(db_table),
'column': qn(field.column),
}
)
return column_sql
def create_model(self, model):
super(MySQLGISSchemaEditor, self).create_model(model)
self.create_spatial_indexes()
def add_field(self, model, field):
super(MySQLGISSchemaEditor, self).add_field(model, field)
self.create_spatial_indexes()
def remove_field(self, model, field):
if isinstance(field, GeometryField) and field.spatial_index:
qn = self.connection.ops.quote_name
sql = self.sql_drop_spatial_index % {
'index': qn(self._create_spatial_index_name(model, field)),
'table': qn(model._meta.db_table),
}
try:
self.execute(sql)
except OperationalError:
logger.error(
"Couldn't remove spatial index: %s (may be expected "
"if your storage engine doesn't support them)." % sql
)
super(MySQLGISSchemaEditor, self).remove_field(model, field)
def _create_spatial_index_name(self, model, field):
return '%s_%s_id' % (model._meta.db_table, field.column)
def create_spatial_indexes(self):
for sql in self.geometry_sql:
try:
self.execute(sql)
except OperationalError:
logger.error(
"Cannot create SPATIAL INDEX %s. Only MyISAM and (as of "
"MySQL 5.7.5) InnoDB support them." % sql
)
self.geometry_sql = []
| bsd-3-clause |
qedsoftware/commcare-hq | corehq/apps/settings/tests/test_utils.py | 1 | 1321 | import os
from django.test import SimpleTestCase
from corehq.apps.settings.utils import get_temp_file
class GetTempFileTests(SimpleTestCase):
def test_file_closed(self):
"""
Check that an error is not raised if the file is closed by the caller
"""
try:
with get_temp_file() as (fd, name):
os.close(fd)
except Exception as err:
self.fail('Failed with exception "{}"'.format(err))
else:
file_exists = os.access(name, os.F_OK)
self.assertFalse(file_exists)
def test_file_unused(self):
"""
Check that an error is not raised if the file is unused by the caller
"""
try:
with get_temp_file() as (fd, name):
pass
except Exception as err:
self.fail('Failed with exception "{}"'.format(err))
else:
file_exists = os.access(name, os.F_OK)
self.assertFalse(file_exists)
def test_file_deleted(self):
"""
Check that an error is not raised if the file is deleted by the caller
"""
try:
with get_temp_file() as (fd, name):
os.unlink(name)
except Exception as err:
self.fail('Failed with exception "{}"'.format(err))
| bsd-3-clause |
LooseTerrifyingSpaceMonkey/DecMeg2014 | src/benchmark_pooling.py | 1 | 3692 | """DecMeg2014 example code.
Simple prediction of the class labels of the test set by:
- pooling all the training trials of all subjects in one dataset.
- Extracting the MEG data in the first 500ms from when the
stimulus starts.
- Using a linear classifier (logistic regression).
"""
import numpy as np
from sklearn.linear_model import LogisticRegression
from scipy.io import loadmat
def create_features(XX, tmin, tmax, sfreq, tmin_original=-0.5):
"""Creation of the feature space:
- restricting the time window of MEG data to [tmin, tmax]sec.
- Concatenating the 306 timeseries of each trial in one long
vector.
- Normalizing each feature independently (z-scoring).
"""
print "Applying the desired time window."
beginning = np.round((tmin - tmin_original) * sfreq).astype(np.int)
end = np.round((tmax - tmin_original) * sfreq).astype(np.int)
XX = XX[:, :, beginning:end].copy()
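# Worked example with illustrative numbers: for sfreq = 250 Hz and
# tmin_original = -0.5 s, the window [0.0, 0.5] s maps to samples
# round((0.0 + 0.5) * 250) = 125 through round((0.5 + 0.5) * 250) = 250.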
print "2D Reshaping: concatenating all 306 timeseries."
XX = XX.reshape(XX.shape[0], XX.shape[1] * XX.shape[2])
print "Features Normalization."
XX -= XX.mean(0)
XX = np.nan_to_num(XX / XX.std(0))
return XX
if __name__ == '__main__':
print "DecMeg2014: https://www.kaggle.com/c/decoding-the-human-brain"
print
subjects_train = range(1, 7) # use range(1, 17) for all subjects
print "Training on subjects", subjects_train
# We throw away all the MEG data outside the first 0.5sec from when
# the visual stimulus start:
tmin = 0.0
tmax = 0.500
print "Restricting MEG data to the interval [%s, %s]sec." % (tmin, tmax)
X_train = []
y_train = []
X_test = []
ids_test = []
print
print "Creating the trainset."
for subject in subjects_train:
filename = '../data/mat/train_subject%02d.mat' % subject
print "Loading", filename
data = loadmat(filename, squeeze_me=True)
XX = data['X']
yy = data['y']
sfreq = data['sfreq']
tmin_original = data['tmin']
print "Dataset summary:"
print "XX:", XX.shape
print "yy:", yy.shape
print "sfreq:", sfreq
XX = create_features(XX, tmin, tmax, sfreq)
X_train.append(XX)
y_train.append(yy)
X_train = np.vstack(X_train)
y_train = np.concatenate(y_train)
print "Trainset:", X_train.shape
print
print "Creating the testset."
subjects_test = range(17, 24)
for subject in subjects_test:
filename = '../data/mat/test_subject%02d.mat' % subject
print "Loading", filename
data = loadmat(filename, squeeze_me=True)
XX = data['X']
ids = data['Id']
sfreq = data['sfreq']
tmin_original = data['tmin']
print "Dataset summary:"
print "XX:", XX.shape
print "ids:", ids.shape
print "sfreq:", sfreq
XX = create_features(XX, tmin, tmax, sfreq)
X_test.append(XX)
ids_test.append(ids)
X_test = np.vstack(X_test)
ids_test = np.concatenate(ids_test)
print "Testset:", X_test.shape
print
clf = LogisticRegression(random_state=0) # Beware! You need 10Gb RAM to train LogisticRegression on all 16 subjects!
print "Classifier:"
print clf
print "Training."
clf.fit(X_train, y_train)
print "Predicting."
y_pred = clf.predict(X_test)
print
filename_submission = "../output/submissionBenchmarkPooling25s.csv"
print "Creating submission file", filename_submission
f = open(filename_submission, "w")
print >> f, "Id,Prediction"
for i in range(len(y_pred)):
print >> f, str(ids_test[i]) + "," + str(y_pred[i])
f.close()
print "Done." | gpl-2.0 |
anirudhvenkats/clowdflows | workflows/migrations/0026_auto__del_field_workflow_author.py | 6 | 15263 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Workflow.author'
db.delete_column('workflows_workflow', 'author')
def backwards(self, orm):
# Adding field 'Workflow.author'
db.add_column('workflows_workflow', 'author', self.gf('django.db.models.fields.CharField')(default='', max_length=200), keep_default=False)
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'workflows.abstractinput': {
'Meta': {'object_name': 'AbstractInput'},
'default': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'multi': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parameter_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'variable': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'widget': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'inputs'", 'to': "orm['workflows.AbstractWidget']"})
},
'workflows.abstractoption': {
'Meta': {'ordering': "['name']", 'object_name': 'AbstractOption'},
'abstract_input': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['workflows.AbstractInput']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'workflows.abstractoutput': {
'Meta': {'object_name': 'AbstractOutput'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'variable': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'widget': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'outputs'", 'to': "orm['workflows.AbstractWidget']"})
},
'workflows.abstractwidget': {
'Meta': {'object_name': 'AbstractWidget'},
'action': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'widgets'", 'to': "orm['workflows.Category']"}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'has_progress_bar': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('workflows.thumbs.ThumbnailField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'interaction_view': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'interactive': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'post_interact_action': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'treeview_image': ('workflows.thumbs.ThumbnailField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'widgets'", 'null': 'True', 'to': "orm['auth.User']"}),
'visualization_view': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'wsdl': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'wsdl_method': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'})
},
'workflows.category': {
'Meta': {'ordering': "['name']", 'object_name': 'Category'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['workflows.Category']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'categories'", 'null': 'True', 'to': "orm['auth.User']"})
},
'workflows.connection': {
'Meta': {'object_name': 'Connection'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'input': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'connections'", 'to': "orm['workflows.Input']"}),
'output': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'connections'", 'to': "orm['workflows.Output']"}),
'workflow': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'connections'", 'to': "orm['workflows.Workflow']"})
},
'workflows.data': {
'Meta': {'object_name': 'Data'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'workflows.input': {
'Meta': {'object_name': 'Input'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inner_output': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'outer_input_rel'", 'null': 'True', 'to': "orm['workflows.Output']"}),
'multi_id': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'outer_output': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'inner_input_rel'", 'null': 'True', 'to': "orm['workflows.Output']"}),
'parameter': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parameter_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'value': ('picklefield.fields.PickledObjectField', [], {'null': 'True'}),
'variable': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'widget': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'inputs'", 'to': "orm['workflows.Widget']"})
},
'workflows.option': {
'Meta': {'ordering': "['name']", 'object_name': 'Option'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'input': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['workflows.Input']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'workflows.output': {
'Meta': {'object_name': 'Output'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inner_input': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'outer_output_rel'", 'null': 'True', 'to': "orm['workflows.Input']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'outer_input': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'inner_output_rel'", 'null': 'True', 'to': "orm['workflows.Input']"}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'value': ('picklefield.fields.PickledObjectField', [], {'null': 'True'}),
'variable': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'widget': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'outputs'", 'to': "orm['workflows.Widget']"})
},
'workflows.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'active_workflow': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'users'", 'null': 'True', 'to': "orm['workflows.Workflow']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': "orm['auth.User']"})
},
'workflows.widget': {
'Meta': {'object_name': 'Widget'},
'abstract_widget': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'instances'", 'null': 'True', 'to': "orm['workflows.AbstractWidget']"}),
'error': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'finished': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interaction_waiting': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'progress': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'running': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'regular'", 'max_length': '50'}),
'workflow': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'widgets'", 'to': "orm['workflows.Workflow']"}),
'x': ('django.db.models.fields.IntegerField', [], {}),
'y': ('django.db.models.fields.IntegerField', [], {})
},
'workflows.workflow': {
'Meta': {'object_name': 'Workflow'},
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'Untitled workflow'", 'max_length': '200'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'workflows'", 'to': "orm['auth.User']"}),
'widget': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'workflow_link'", 'unique': 'True', 'null': 'True', 'to': "orm['workflows.Widget']"})
}
}
complete_apps = ['workflows']
| gpl-3.0 |
mixman/djangodev | django/contrib/gis/gdal/envelope.py | 94 | 7041 | """
The GDAL/OGR library uses an Envelope structure to hold the bounding
box information for a geometry. The envelope (bounding box) contains
two pairs of coordinates, one for the lower left coordinate and one
for the upper right coordinate:
+----------o Upper right; (max_x, max_y)
| |
| |
| |
Lower left (min_x, min_y) o----------+
"""
from ctypes import Structure, c_double
from django.contrib.gis.gdal.error import OGRException
# The OGR definition of an Envelope is a C structure containing four doubles.
# See the 'ogr_core.h' source file for more information:
# http://www.gdal.org/ogr/ogr__core_8h-source.html
class OGREnvelope(Structure):
"Represents the OGREnvelope C Structure."
_fields_ = [("MinX", c_double),
("MaxX", c_double),
("MinY", c_double),
("MaxY", c_double),
]
class Envelope(object):
"""
The Envelope object is a C structure that contains the minimum and
maximum X, Y coordinates for a rectangle bounding box. The naming
of the variables is compatible with the OGR Envelope structure.
"""
def __init__(self, *args):
"""
The initialization function may take an OGREnvelope structure, 4-element
tuple or list, or 4 individual arguments.
"""
if len(args) == 1:
if isinstance(args[0], OGREnvelope):
# OGREnvelope (a ctypes Structure) was passed in.
self._envelope = args[0]
elif isinstance(args[0], (tuple, list)):
# A tuple was passed in.
if len(args[0]) != 4:
raise OGRException('Incorrect number of tuple elements (%d).' % len(args[0]))
else:
self._from_sequence(args[0])
else:
raise TypeError('Incorrect type of argument: %s' % str(type(args[0])))
elif len(args) == 4:
# Individual parameters passed in.
# Thanks to ww for the help
self._from_sequence(map(float, args))
else:
raise OGRException('Incorrect number (%d) of arguments.' % len(args))
# Checking the x,y coordinates
if self.min_x > self.max_x:
raise OGRException('Envelope minimum X > maximum X.')
if self.min_y > self.max_y:
raise OGRException('Envelope minimum Y > maximum Y.')
def __eq__(self, other):
"""
Returns True if the envelopes are equivalent; can compare against
other Envelopes and 4-tuples.
"""
if isinstance(other, Envelope):
return (self.min_x == other.min_x) and (self.min_y == other.min_y) and \
(self.max_x == other.max_x) and (self.max_y == other.max_y)
elif isinstance(other, tuple) and len(other) == 4:
return (self.min_x == other[0]) and (self.min_y == other[1]) and \
(self.max_x == other[2]) and (self.max_y == other[3])
else:
raise OGRException('Equivalence testing only works with other Envelopes.')
def __str__(self):
"Returns a string representation of the tuple."
return str(self.tuple)
def _from_sequence(self, seq):
"Initializes the C OGR Envelope structure from the given sequence."
self._envelope = OGREnvelope()
self._envelope.MinX = seq[0]
self._envelope.MinY = seq[1]
self._envelope.MaxX = seq[2]
self._envelope.MaxY = seq[3]
def expand_to_include(self, *args):
"""
Modifies the envelope to expand to include the boundaries of
the passed-in 2-tuple (a point), 4-tuple (an extent) or
envelope.
"""
# We provide a number of different signatures for this method,
# and the logic here is all about converting them into a
# 4-tuple single parameter which does the actual work of
# expanding the envelope.
if len(args) == 1:
if isinstance(args[0], Envelope):
return self.expand_to_include(args[0].tuple)
elif hasattr(args[0], 'x') and hasattr(args[0], 'y'):
return self.expand_to_include(args[0].x, args[0].y, args[0].x, args[0].y)
elif isinstance(args[0], (tuple, list)):
# A tuple was passed in.
if len(args[0]) == 2:
return self.expand_to_include((args[0][0], args[0][1], args[0][0], args[0][1]))
elif len(args[0]) == 4:
(minx, miny, maxx, maxy) = args[0]
if minx < self._envelope.MinX:
self._envelope.MinX = minx
if miny < self._envelope.MinY:
self._envelope.MinY = miny
if maxx > self._envelope.MaxX:
self._envelope.MaxX = maxx
if maxy > self._envelope.MaxY:
self._envelope.MaxY = maxy
else:
raise OGRException('Incorrect number of tuple elements (%d).' % len(args[0]))
else:
raise TypeError('Incorrect type of argument: %s' % str(type(args[0])))
elif len(args) == 2:
# An x and an y parameter were passed in
return self.expand_to_include((args[0], args[1], args[0], args[1]))
elif len(args) == 4:
# Individual parameters passed in.
return self.expand_to_include(args)
else:
raise OGRException('Incorrect number (%d) of arguments.' % len(args))
@property
def min_x(self):
"Returns the value of the minimum X coordinate."
return self._envelope.MinX
@property
def min_y(self):
"Returns the value of the minimum Y coordinate."
return self._envelope.MinY
@property
def max_x(self):
"Returns the value of the maximum X coordinate."
return self._envelope.MaxX
@property
def max_y(self):
"Returns the value of the maximum Y coordinate."
return self._envelope.MaxY
@property
def ur(self):
"Returns the upper-right coordinate."
return (self.max_x, self.max_y)
@property
def ll(self):
"Returns the lower-left coordinate."
return (self.min_x, self.min_y)
@property
def tuple(self):
"Returns a tuple representing the envelope."
return (self.min_x, self.min_y, self.max_x, self.max_y)
@property
def wkt(self):
"Returns WKT representing a Polygon for this envelope."
# TODO: Fix significant figures.
return 'POLYGON((%s %s,%s %s,%s %s,%s %s,%s %s))' % \
(self.min_x, self.min_y, self.min_x, self.max_y,
self.max_x, self.max_y, self.max_x, self.min_y,
self.min_x, self.min_y)
| bsd-3-clause |
hilaskis/UAV_MissionPlanner | Lib/site-packages/numpy/polynomial/polyutils.py | 79 | 11109 | """
Utililty objects for the polynomial modules.
This module provides: error and warning objects; a polynomial base class;
and some routines used in both the `polynomial` and `chebyshev` modules.
Error objects
-------------
- `PolyError` -- base class for this sub-package's errors.
- `PolyDomainError` -- raised when domains are "mismatched."
Warning objects
---------------
- `RankWarning` -- raised by a least-squares fit when a rank-deficient
matrix is encountered.
Base class
----------
- `PolyBase` -- The base class for the `Polynomial` and `Chebyshev`
classes.
Functions
---------
- `as_series` -- turns a list of array_likes into 1-D arrays of common
type.
- `trimseq` -- removes trailing zeros.
- `trimcoef` -- removes trailing coefficients that are less than a given
magnitude (thereby removing the corresponding terms).
- `getdomain` -- returns a domain appropriate for a given set of abscissae.
- `mapdomain` -- maps points between domains.
- `mapparms` -- parameters of the linear map between domains.
"""
from __future__ import division
__all__ = ['RankWarning', 'PolyError', 'PolyDomainError', 'PolyBase',
'as_series', 'trimseq', 'trimcoef', 'getdomain', 'mapdomain',
'mapparms']
import warnings
import numpy as np
import sys
#
# Warnings and Exceptions
#
class RankWarning(UserWarning) :
"""Issued by chebfit when the design matrix is rank deficient."""
pass
class PolyError(Exception) :
"""Base class for errors in this module."""
pass
class PolyDomainError(PolyError) :
"""Issued by the generic Poly class when two domains don't match.
This is raised when a binary operation is passed Poly objects with
different domains.
"""
pass
#
# Base class for all polynomial types
#
class PolyBase(object) :
pass
#
# We need the any function for python < 2.5
#
if sys.version_info[:2] < (2,5) :
def any(iterable) :
for element in iterable:
if element :
return True
return False
#
# Helper functions to convert inputs to 1d arrays
#
def trimseq(seq) :
"""Remove small Poly series coefficients.
Parameters
----------
seq : sequence
Sequence of Poly series coefficients. This routine fails for
empty sequences.
Returns
-------
series : sequence
Subsequence with trailing zeros removed. If the resulting sequence
would be empty, return the first element. The returned sequence may
or may not be a view.
Notes
-----
Do not lose the type info if the sequence contains unknown objects.
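Examples
--------
Illustrative only; any indexable sequence with numeric entries works:
>>> trimseq([0, 1, 2, 0, 0])
[0, 1, 2]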
"""
if len(seq) == 0 :
return seq
else :
for i in range(len(seq) - 1, -1, -1) :
if seq[i] != 0 :
break
return seq[:i+1]
def as_series(alist, trim=True) :
"""
Return argument as a list of 1-d arrays.
The returned list contains array(s) of dtype double, complex double, or
object. A 1-d argument of shape ``(N,)`` is parsed into ``N`` arrays of
size one; a 2-d argument of shape ``(M,N)`` is parsed into ``M`` arrays
of size ``N`` (i.e., is "parsed by row"); and a higher dimensional array
raises a Value Error if it is not first reshaped into either a 1-d or 2-d
array.
Parameters
----------
a : array_like
A 1- or 2-d array_like
trim : boolean, optional
When True, trailing zeros are removed from the inputs.
When False, the inputs are passed through intact.
Returns
-------
[a1, a2,...] : list of 1d-arrays
A copy of the input data as a list of 1-d arrays.
Raises
------
ValueError :
Raised when `as_series` cannot convert its input to 1-d arrays, or at
least one of the resulting arrays is empty.
Examples
--------
>>> from numpy import polynomial as P
>>> a = np.arange(4)
>>> P.as_series(a)
[array([ 0.]), array([ 1.]), array([ 2.]), array([ 3.])]
>>> b = np.arange(6).reshape((2,3))
>>> P.as_series(b)
[array([ 0., 1., 2.]), array([ 3., 4., 5.])]
"""
arrays = [np.array(a, ndmin=1, copy=0) for a in alist]
if min([a.size for a in arrays]) == 0 :
raise ValueError("Coefficient array is empty")
if any([a.ndim != 1 for a in arrays]) :
raise ValueError("Coefficient array is not 1-d")
if trim :
arrays = [trimseq(a) for a in arrays]
if any([a.dtype == np.dtype(object) for a in arrays]) :
ret = []
for a in arrays :
if a.dtype != np.dtype(object) :
tmp = np.empty(len(a), dtype=np.dtype(object))
tmp[:] = a[:]
ret.append(tmp)
else :
ret.append(a.copy())
else :
try :
dtype = np.common_type(*arrays)
except :
raise ValueError("Coefficient arrays have no common type")
ret = [np.array(a, copy=1, dtype=dtype) for a in arrays]
return ret
def trimcoef(c, tol=0) :
"""
Remove "small" "trailing" coefficients from a polynomial.
"Small" means "small in absolute value" and is controlled by the
parameter `tol`; "trailing" means highest order coefficient(s), e.g., in
``[0, 1, 1, 0, 0]`` (which represents ``0 + x + x**2 + 0*x**3 + 0*x**4``)
both the 3-rd and 4-th order coefficients would be "trimmed."
Parameters
----------
c : array_like
1-d array of coefficients, ordered from lowest order to highest.
tol : number, optional
Trailing (i.e., highest order) elements with absolute value less
than or equal to `tol` (default value is zero) are removed.
Returns
-------
trimmed : ndarray
1-d array with trailing zeros removed. If the resulting series
would be empty, a series containing a single zero is returned.
Raises
------
ValueError
If `tol` < 0
See Also
--------
trimseq
Examples
--------
>>> from numpy import polynomial as P
>>> P.trimcoef((0,0,3,0,5,0,0))
array([ 0., 0., 3., 0., 5.])
>>> P.trimcoef((0,0,1e-3,0,1e-5,0,0),1e-3) # item == tol is trimmed
array([ 0.])
>>> i = complex(0,1) # works for complex
>>> P.trimcoef((3e-4,1e-3*(1-i),5e-4,2e-5*(1+i)), 1e-3)
array([ 0.0003+0.j , 0.0010-0.001j])
"""
if tol < 0 :
raise ValueError("tol must be non-negative")
[c] = as_series([c])
[ind] = np.where(np.abs(c) > tol)
if len(ind) == 0 :
return c[:1]*0
else :
return c[:ind[-1] + 1].copy()
def getdomain(x) :
"""
Return a domain suitable for given abscissae.
Find a domain suitable for a polynomial or Chebyshev series
defined at the values supplied.
Parameters
----------
x : array_like
1-d array of abscissae whose domain will be determined.
Returns
-------
domain : ndarray
1-d array containing two values. If the inputs are complex, then
the two returned points are the lower left and upper right corners
of the smallest rectangle (aligned with the axes) in the complex
plane containing the points `x`. If the inputs are real, then the
two points are the ends of the smallest interval containing the
points `x`.
See Also
--------
mapparms, mapdomain
Examples
--------
>>> from numpy.polynomial import polyutils as pu
>>> points = np.arange(4)**2 - 5; points
array([-5, -4, -1, 4])
>>> pu.getdomain(points)
array([-5., 4.])
>>> c = np.exp(complex(0,1)*np.pi*np.arange(12)/6) # unit circle
>>> pu.getdomain(c)
array([-1.-1.j, 1.+1.j])
"""
[x] = as_series([x], trim=False)
if x.dtype.char in np.typecodes['Complex'] :
rmin, rmax = x.real.min(), x.real.max()
imin, imax = x.imag.min(), x.imag.max()
return np.array((complex(rmin, imin), complex(rmax, imax)))
else :
return np.array((x.min(), x.max()))
def mapparms(old, new) :
"""
Linear map parameters between domains.
Return the parameters of the linear map ``offset + scale*x`` that maps
`old` to `new` such that ``old[i] -> new[i]``, ``i = 0, 1``.
Parameters
----------
old, new : array_like
Domains. Each domain must (successfully) convert to a 1-d array
containing precisely two values.
Returns
-------
offset, scale : scalars
The map ``L(x) = offset + scale*x`` maps the first domain to the
second.
See Also
--------
getdomain, mapdomain
Notes
-----
Also works for complex numbers, and thus can be used to calculate the
parameters required to map any line in the complex plane to any other
line therein.
Examples
--------
>>> from numpy import polynomial as P
>>> P.mapparms((-1,1),(-1,1))
(0.0, 1.0)
>>> P.mapparms((1,-1),(-1,1))
(0.0, -1.0)
>>> i = complex(0,1)
>>> P.mapparms((-i,-1),(1,i))
((1+1j), (1+0j))
"""
oldlen = old[1] - old[0]
newlen = new[1] - new[0]
off = (old[1]*new[0] - old[0]*new[1])/oldlen
scl = newlen/oldlen
return off, scl
def mapdomain(x, old, new) :
"""
Apply linear map to input points.
The linear map ``offset + scale*x`` that maps the domain `old` to
the domain `new` is applied to the points `x`.
Parameters
----------
x : array_like
Points to be mapped. If `x` is a subtype of ndarray the subtype
will be preserved.
old, new : array_like
The two domains that determine the map. Each must (successfully)
convert to 1-d arrays containing precisely two values.
Returns
-------
x_out : ndarray
Array of points of the same shape as `x`, after application of the
linear map between the two domains.
See Also
--------
getdomain, mapparms
Notes
-----
Effectively, this implements:
.. math ::
x\\_out = new[0] + m(x - old[0])
where
.. math ::
m = \\frac{new[1]-new[0]}{old[1]-old[0]}
Examples
--------
>>> from numpy import polynomial as P
>>> old_domain = (-1,1)
>>> new_domain = (0,2*np.pi)
>>> x = np.linspace(-1,1,6); x
array([-1. , -0.6, -0.2, 0.2, 0.6, 1. ])
>>> x_out = P.mapdomain(x, old_domain, new_domain); x_out
array([ 0. , 1.25663706, 2.51327412, 3.76991118, 5.02654825,
6.28318531])
>>> x - P.mapdomain(x_out, new_domain, old_domain)
array([ 0., 0., 0., 0., 0., 0.])
Also works for complex numbers (and thus can be used to map any line in
the complex plane to any other line therein).
>>> i = complex(0,1)
>>> old = (-1 - i, 1 + i)
>>> new = (-1 + i, 1 - i)
>>> z = np.linspace(old[0], old[1], 6); z
array([-1.0-1.j , -0.6-0.6j, -0.2-0.2j, 0.2+0.2j, 0.6+0.6j, 1.0+1.j ])
>>> new_z = P.mapdomain(z, old, new); new_z
array([-1.0+1.j , -0.6+0.6j, -0.2+0.2j, 0.2-0.2j, 0.6-0.6j, 1.0-1.j ])
"""
x = np.asanyarray(x)
off, scl = mapparms(old, new)
return off + scl*x
| gpl-2.0 |
denisff/python-for-android | python-modules/zope/zope/interface/tests/test_interface.py | 50 | 16077 | ##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Test Interface implementation
"""
import doctest
import unittest
import sys
class InterfaceTests(unittest.TestCase):
def _makeDerivedInterface(self):
from zope.interface import Interface
from zope.interface import Attribute
class _I1(Interface):
a1 = Attribute("This is an attribute")
def f11():
pass
def f12():
pass
f12.optional = 1
class _I1_(_I1):
pass
class _I1__(_I1_):
pass
class _I2(_I1__):
def f21():
pass
def f22():
pass
f23 = f22
return _I2
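# The hierarchy built above is _I1 <- _I1_ <- _I1__ <- _I2, so _I2.names()
# sees only its own f21/f22/f23 while names(all=True) also picks up the
# inherited a1, f11 and f12 - exactly what test_names below checks.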
def testInterfaceSetOnAttributes(self):
from zope.interface.tests.unitfixtures import FooInterface
self.assertEqual(FooInterface['foobar'].interface,
FooInterface)
self.assertEqual(FooInterface['aMethod'].interface,
FooInterface)
def testClassImplements(self):
from zope.interface.tests.unitfixtures import A
from zope.interface.tests.unitfixtures import B
from zope.interface.tests.unitfixtures import C
from zope.interface.tests.unitfixtures import D
from zope.interface.tests.unitfixtures import E
from zope.interface.tests.unitfixtures import I1
from zope.interface.tests.unitfixtures import I2
from zope.interface.tests.unitfixtures import IC
self.assert_(IC.implementedBy(C))
self.assert_(I1.implementedBy(A))
self.assert_(I1.implementedBy(B))
self.assert_(not I1.implementedBy(C))
self.assert_(I1.implementedBy(D))
self.assert_(I1.implementedBy(E))
self.assert_(not I2.implementedBy(A))
self.assert_(I2.implementedBy(B))
self.assert_(not I2.implementedBy(C))
# No longer after interfacegeddon
# self.assert_(not I2.implementedBy(D))
self.assert_(not I2.implementedBy(E))
def testUtil(self):
from zope.interface import implementedBy
from zope.interface import providedBy
from zope.interface.tests.unitfixtures import A
from zope.interface.tests.unitfixtures import B
from zope.interface.tests.unitfixtures import C
from zope.interface.tests.unitfixtures import I1
from zope.interface.tests.unitfixtures import I2
from zope.interface.tests.unitfixtures import IC
self.assert_(IC in implementedBy(C))
self.assert_(I1 in implementedBy(A))
self.assert_(not I1 in implementedBy(C))
self.assert_(I2 in implementedBy(B))
self.assert_(not I2 in implementedBy(C))
self.assert_(IC in providedBy(C()))
self.assert_(I1 in providedBy(A()))
self.assert_(not I1 in providedBy(C()))
self.assert_(I2 in providedBy(B()))
self.assert_(not I2 in providedBy(C()))
def testObjectImplements(self):
from zope.interface.tests.unitfixtures import A
from zope.interface.tests.unitfixtures import B
from zope.interface.tests.unitfixtures import C
from zope.interface.tests.unitfixtures import D
from zope.interface.tests.unitfixtures import E
from zope.interface.tests.unitfixtures import I1
from zope.interface.tests.unitfixtures import I2
from zope.interface.tests.unitfixtures import IC
self.assert_(IC.providedBy(C()))
self.assert_(I1.providedBy(A()))
self.assert_(I1.providedBy(B()))
self.assert_(not I1.providedBy(C()))
self.assert_(I1.providedBy(D()))
self.assert_(I1.providedBy(E()))
self.assert_(not I2.providedBy(A()))
self.assert_(I2.providedBy(B()))
self.assert_(not I2.providedBy(C()))
# Not after interface geddon
# self.assert_(not I2.providedBy(D()))
self.assert_(not I2.providedBy(E()))
def testDeferredClass(self):
from zope.interface.tests.unitfixtures import A
from zope.interface.exceptions import BrokenImplementation
a = A()
self.assertRaises(BrokenImplementation, a.ma)
def testInterfaceExtendsInterface(self):
from zope.interface.tests.unitfixtures import BazInterface
from zope.interface.tests.unitfixtures import BarInterface
from zope.interface.tests.unitfixtures import BobInterface
from zope.interface.tests.unitfixtures import FunInterface
self.assert_(BazInterface.extends(BobInterface))
self.assert_(BazInterface.extends(BarInterface))
self.assert_(BazInterface.extends(FunInterface))
self.assert_(not BobInterface.extends(FunInterface))
self.assert_(not BobInterface.extends(BarInterface))
self.assert_(BarInterface.extends(FunInterface))
self.assert_(not BarInterface.extends(BazInterface))
def testVerifyImplementation(self):
from zope.interface.verify import verifyClass
from zope.interface import Interface
from zope.interface.tests.unitfixtures import Foo
from zope.interface.tests.unitfixtures import FooInterface
from zope.interface.tests.unitfixtures import I1
self.assert_(verifyClass(FooInterface, Foo))
self.assert_(Interface.providedBy(I1))
def test_names(self):
iface = self._makeDerivedInterface()
names = list(iface.names())
names.sort()
self.assertEqual(names, ['f21', 'f22', 'f23'])
all = list(iface.names(all=True))
all.sort()
self.assertEqual(all, ['a1', 'f11', 'f12', 'f21', 'f22', 'f23'])
def test_namesAndDescriptions(self):
iface = self._makeDerivedInterface()
names = [nd[0] for nd in iface.namesAndDescriptions()]
names.sort()
self.assertEqual(names, ['f21', 'f22', 'f23'])
names = [nd[0] for nd in iface.namesAndDescriptions(1)]
names.sort()
self.assertEqual(names, ['a1', 'f11', 'f12', 'f21', 'f22', 'f23'])
for name, d in iface.namesAndDescriptions(1):
self.assertEqual(name, d.__name__)
def test_getDescriptionFor(self):
iface = self._makeDerivedInterface()
self.assertEqual(iface.getDescriptionFor('f11').__name__, 'f11')
self.assertEqual(iface.getDescriptionFor('f22').__name__, 'f22')
self.assertEqual(iface.queryDescriptionFor('f33', self), self)
self.assertRaises(KeyError, iface.getDescriptionFor, 'f33')
def test___getitem__(self):
iface = self._makeDerivedInterface()
self.assertEqual(iface['f11'].__name__, 'f11')
self.assertEqual(iface['f22'].__name__, 'f22')
self.assertEqual(iface.get('f33', self), self)
self.assertRaises(KeyError, iface.__getitem__, 'f33')
def test___contains__(self):
iface = self._makeDerivedInterface()
self.failUnless('f11' in iface)
self.failIf('f33' in iface)
def test___iter__(self):
iface = self._makeDerivedInterface()
names = list(iter(iface))
names.sort()
self.assertEqual(names, ['a1', 'f11', 'f12', 'f21', 'f22', 'f23'])
def testAttr(self):
iface = self._makeDerivedInterface()
description = iface.getDescriptionFor('a1')
self.assertEqual(description.__name__, 'a1')
self.assertEqual(description.__doc__, 'This is an attribute')
def testFunctionAttributes(self):
# Make sure function attributes become tagged values.
from zope.interface import Interface
class ITest(Interface):
def method():
pass
method.optional = 1
method = ITest['method']
self.assertEqual(method.getTaggedValue('optional'), 1)
def testInvariant(self):
from zope.interface.exceptions import Invalid
from zope.interface import directlyProvides
from zope.interface.tests.unitfixtures import BarGreaterThanFoo
from zope.interface.tests.unitfixtures import ifFooThenBar
from zope.interface.tests.unitfixtures import IInvariant
from zope.interface.tests.unitfixtures import InvariantC
from zope.interface.tests.unitfixtures import ISubInvariant
# set up
o = InvariantC()
directlyProvides(o, IInvariant)
# a helper
def errorsEqual(self, o, error_len, error_msgs, iface=None):
if iface is None:
iface = IInvariant
self.assertRaises(Invalid, iface.validateInvariants, o)
e = []
try:
iface.validateInvariants(o, e)
except Invalid, error:
self.assertEquals(error.args[0], e)
else:
self.fail('validateInvariants should always raise Invalid')
self.assertEquals(len(e), error_len)
msgs = [error.args[0] for error in e]
msgs.sort()
for msg in msgs:
self.assertEquals(msg, error_msgs.pop(0))
# the tests
self.assertEquals(IInvariant.getTaggedValue('invariants'),
[ifFooThenBar])
self.assertEquals(IInvariant.validateInvariants(o), None)
o.bar = 27
self.assertEquals(IInvariant.validateInvariants(o), None)
o.foo = 42
self.assertEquals(IInvariant.validateInvariants(o), None)
del o.bar
errorsEqual(self, o, 1, ['If Foo, then Bar!'])
# nested interfaces with invariants:
self.assertEquals(ISubInvariant.getTaggedValue('invariants'),
[BarGreaterThanFoo])
o = InvariantC()
directlyProvides(o, ISubInvariant)
o.foo = 42
# even though the interface has changed, we should still only have one
# error.
errorsEqual(self, o, 1, ['If Foo, then Bar!'], ISubInvariant)
# however, if we make bar smaller than foo then we'll get the
# new error instead
o.foo = 2
o.bar = 1
errorsEqual(self, o, 1, ['Please, Boo MUST be greater than Foo!'],
ISubInvariant)
# and if we set foo to a positive number and bar to 0, we'll
# get both errors!
o.foo = 1
o.bar = 0
errorsEqual(self, o, 2, ['If Foo, then Bar!',
'Please, Boo MUST be greater than Foo!'],
ISubInvariant)
# for a happy ending, we'll make the invariants happy
o.foo = 1
o.bar = 2
self.assertEquals(IInvariant.validateInvariants(o), None) # woohoo
# now we'll do two invariants on the same interface,
# just to make sure that a small
# multi-invariant interface is at least minimally tested.
o = InvariantC()
directlyProvides(o, IInvariant)
o.foo = 42
old_invariants = IInvariant.getTaggedValue('invariants')
invariants = old_invariants[:]
invariants.append(BarGreaterThanFoo) # if you really need to mutate,
# then this would be the way to do it. Probably a bad idea, though. :-)
IInvariant.setTaggedValue('invariants', invariants)
#
# even though the interface has changed, we should still only have one
# error.
errorsEqual(self, o, 1, ['If Foo, then Bar!'])
# however, if we make bar smaller than foo then we'll get the
# new error instead
o.foo = 2
o.bar = 1
errorsEqual(self, o, 1, ['Please, Boo MUST be greater than Foo!'])
# and if we set foo to a positive number and bar to 0, we'll
# get both errors!
o.foo = 1
o.bar = 0
errorsEqual(self, o, 2, ['If Foo, then Bar!',
'Please, Boo MUST be greater than Foo!'])
# for another happy ending, we'll make the invariants happy again
o.foo = 1
o.bar = 2
self.assertEquals(IInvariant.validateInvariants(o), None) # bliss
# clean up
IInvariant.setTaggedValue('invariants', old_invariants)
def test___doc___element(self):
from zope.interface import Interface
from zope.interface import Attribute
class I(Interface):
"xxx"
self.assertEqual(I.__doc__, "xxx")
self.assertEqual(list(I), [])
class I(Interface):
"xxx"
__doc__ = Attribute('the doc')
self.assertEqual(I.__doc__, "")
self.assertEqual(list(I), ['__doc__'])
def testIssue228(self):
from zope.interface import Interface
# Test for http://collector.zope.org/Zope3-dev/228
if sys.version[0] == '3':
# No old style classes in Python 3, so the test becomes moot.
return
class I(Interface):
"xxx"
class Bad:
__providedBy__ = None
# Old style classes don't have a '__class__' attribute
self.failUnlessRaises(AttributeError, I.providedBy, Bad)
if sys.version_info >= (2, 4):
def test_invariant_as_decorator():
"""Invaiants can be deined in line
>>> from zope.interface.exceptions import Invalid
>>> from zope.interface import Interface
>>> from zope.interface import Attribute
>>> from zope.interface import implements
>>> from zope.interface import invariant
>>> class IRange(Interface):
... min = Attribute("Lower bound")
... max = Attribute("Upper bound")
...
... @invariant
... def range_invariant(ob):
... if ob.max < ob.min:
... raise Invalid('max < min')
>>> class Range(object):
... implements(IRange)
...
... def __init__(self, min, max):
... self.min, self.max = min, max
>>> from zope.interface.exceptions import Invalid
>>> IRange.validateInvariants(Range(1,2))
>>> IRange.validateInvariants(Range(1,1))
>>> try:
... IRange.validateInvariants(Range(2,1))
... except Invalid, e:
... str(e)
'max < min'
"""
def test_description_cache_management():
""" See https://bugs.launchpad.net/zope.interface/+bug/185974
There was a bug where the cache used by Specification.get() was not
cleared when the bases were changed.
>>> from zope.interface import Interface
>>> from zope.interface import Attribute
>>> class I1(Interface):
... a = Attribute('a')
>>> class I2(I1):
... pass
>>> class I3(I2):
... pass
>>> I3.get('a') is I1.get('a')
True
>>> I2.__bases__ = (Interface,)
>>> I3.get('a') is None
True
"""
def test_suite():
suite = unittest.makeSuite(InterfaceTests)
suite.addTest(doctest.DocTestSuite("zope.interface.interface"))
if sys.version_info >= (2, 4):
suite.addTest(doctest.DocTestSuite())
suite.addTest(doctest.DocFileSuite(
'../README.txt',
globs={'__name__': '__main__'},
optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS,
))
suite.addTest(doctest.DocFileSuite(
'../README.ru.txt',
globs={'__name__': '__main__'},
optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS,
))
return suite
| apache-2.0 |
karolciba/playground | markov/baumwelch.py | 1 | 4027 | import numpy as np
# functions and classes go here
def fb_alg(A_mat, O_mat, observ):
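# Forward-backward smoothing. fw[:, t] holds the normalized filtered state
# distribution after t observations (column 0 is the uniform prior), bw[:, t]
# the backward messages, and their elementwise product - renormalized per
# column below - gives the posterior P(state at t | all observations).
# Normalizing each message per step only rescales it, so the posteriors are
# unchanged.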
# set up
k = observ.size
(n,m) = O_mat.shape
prob_mat = np.zeros( (n,k+1) ) # matches fw/bw; fully overwritten below
fw = np.zeros( (n,k+1) )
bw = np.zeros( (n,k+1) )
# forward part
fw[:, 0] = 1.0/n
for obs_ind in xrange(k):
f_row_vec = np.matrix(fw[:,obs_ind])
fw[:, obs_ind+1] = f_row_vec * \
np.matrix(A_mat) * \
np.matrix(np.diag(O_mat[:,observ[obs_ind]]))
fw[:,obs_ind+1] = fw[:,obs_ind+1]/np.sum(fw[:,obs_ind+1])
# backward part
bw[:,-1] = 1.0
for obs_ind in xrange(k, 0, -1):
b_col_vec = np.matrix(bw[:,obs_ind]).transpose()
bw[:, obs_ind-1] = (np.matrix(A_mat) * \
np.matrix(np.diag(O_mat[:,observ[obs_ind-1]])) * \
b_col_vec).transpose()
bw[:,obs_ind-1] = bw[:,obs_ind-1]/np.sum(bw[:,obs_ind-1])
# combine it
prob_mat = np.array(fw)*np.array(bw)
prob_mat = prob_mat/np.sum(prob_mat, 0)
# get out
return prob_mat, fw, bw
def baum_welch( num_states, num_obs, observ ):
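# Baum-Welch is EM for a discrete-output HMM: the E-step runs
# forward-backward under the current (A, O) estimates and fills
# theta[a, b, t] with the unnormalized posterior of an a -> b transition at
# step t; the M-step re-estimates A by row-normalizing the expected
# transition counts and O from expected state occupancy at each observed
# symbol. The loop repeats until (A, O) stop changing.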
# allocate
# A_mat = np.ones( (num_states, num_states) )
A_mat = np.random.random( (num_states, num_states) )
A_mat = A_mat / np.sum(A_mat,1)[:,None]
# O_mat = np.ones( (num_states, num_obs) )
O_mat = np.random.random( (num_states, num_obs) )
O_mat = O_mat / np.sum(O_mat,1)[:,None]
theta = np.zeros( (num_states, num_states, observ.size) )
while True:
old_A = A_mat
old_O = O_mat
A_mat = np.ones( (num_states, num_states) )
O_mat = np.ones( (num_states, num_obs) )
# A_mat = np.random.random( (num_states, num_states) )
# A_mat = A_mat / np.sum(A_mat,1)[:,None]
# O_mat = np.random.random( (num_states, num_obs) )
# O_mat = O_mat / np.sum(O_mat,1)[:,None]
# expectation step, forward and backward probs
P,F,B = fb_alg( old_A, old_O, observ)
# need to get transitional probabilities at each time step too
for a_ind in xrange(num_states):
for b_ind in xrange(num_states):
for t_ind in xrange(observ.size):
theta[a_ind,b_ind,t_ind] = \
F[a_ind,t_ind] * \
B[b_ind,t_ind+1] * \
old_A[a_ind,b_ind] * \
old_O[b_ind, observ[t_ind]]
# form A_mat and O_mat
for a_ind in xrange(num_states):
for b_ind in xrange(num_states):
A_mat[a_ind, b_ind] = np.sum( theta[a_ind, b_ind, :] )/ \
np.sum(P[a_ind,:])
A_mat = A_mat / np.sum(A_mat,1)[:,None]
for a_ind in xrange(num_states):
for o_ind in xrange(num_obs):
right_obs_ind = np.array(np.where(observ == o_ind))+1
O_mat[a_ind, o_ind] = np.sum(P[a_ind,right_obs_ind])/ \
np.sum( P[a_ind,1:])
O_mat = O_mat / np.sum(O_mat,1)[:,None]
# compare
if np.linalg.norm(old_A-A_mat) < .00001 and np.linalg.norm(old_O-O_mat) < .00001:
break
# get out
return A_mat, O_mat
import casino
num_obs = 100
g = casino.casino()
observations1 = [ 1 if g.next()[0].name == 'H' else 0 for x in xrange(num_obs) ]
observations1 = np.array(observations1)
# observations1 = np.random.randn( num_obs )
# observations1[observations1>0] = 1
# observations1[observations1<=0] = 0
# import pdb; pdb.set_trace()
A_mat, O_mat = baum_welch(2,2,observations1)
print "observation 1"
print observations1[:30]
print "trans"
print A_mat
print "emiss"
print O_mat
# observations2 = np.random.random(num_obs)
# observations2[observations2>.15] = 1
# observations2[observations2<=.85] = 0
# A_mat, O_mat = baum_welch(2,2,observations2)
# print "observations2"
# print observations2[:30]
# print A_mat
# print O_mat
# A_mat, O_mat = baum_welch(2,2,np.hstack( (observations1, observations2) ) )
# print A_mat
# print O_mat
| unlicense |
jsgf/xen | tools/python/xen/xend/server/vfbif.py | 43 | 3171 | from xen.xend.server.DevController import DevController
from xen.xend.XendLogging import log
from xen.xend.XendError import VmError
import xen.xend
import os
CONFIG_ENTRIES = ['type', 'vncdisplay', 'vnclisten', 'vncpasswd', 'vncunused',
'display', 'xauthority', 'keymap', 'vnc', 'sdl', 'uuid',
'location', 'protocol', 'opengl']
class VfbifController(DevController):
"""Virtual frame buffer controller. Handles all vfb devices for a domain.
Note that we only support a single vfb per domain at the moment.
"""
def __init__(self, vm):
DevController.__init__(self, vm)
def getDeviceDetails(self, config):
"""@see DevController.getDeviceDetails"""
back = dict([(k, str(config[k])) for k in CONFIG_ENTRIES
if config.has_key(k)])
devid = 0
return (devid, back, {})
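# e.g. a config of {'type': 'vnc', 'vncunused': 1, 'extra': 'x'} (made-up
# values) yields back = {'type': 'vnc', 'vncunused': '1'}: keys outside
# CONFIG_ENTRIES are dropped and values are stringified for xenstore.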
def getDeviceConfiguration(self, devid, transaction = None):
result = DevController.getDeviceConfiguration(self, devid, transaction)
if transaction is None:
devinfo = self.readBackend(devid, *CONFIG_ENTRIES)
else:
devinfo = self.readBackendTxn(transaction, devid, *CONFIG_ENTRIES)
return dict([(CONFIG_ENTRIES[i], devinfo[i])
for i in range(len(CONFIG_ENTRIES))
if devinfo[i] is not None])
def waitForDevice(self, devid):
# is a qemu-dm managed device, don't wait for hotplug for these.
return
def reconfigureDevice(self, _, config):
""" Only allow appending location information of vnc port into
xenstore."""
if 'location' in config:
(devid, back, front) = self.getDeviceDetails(config)
self.writeBackend(devid, 'location', config['location'])
return back.get('uuid')
raise VmError('Refusing to reconfigure device vfb: only "location" may be updated')
def destroyDevice(self, devid, force):
# remove the backend xenstore entries no matter what
# because we kill qemu-dm with extreme prejudice
# not giving it a chance to remove them itself
DevController.destroyDevice(self, devid, True)
def migrate(self, deviceConfig, network, dst, step, domName):
# Handled by qemu-dm so no action needed
return 0
class VkbdifController(DevController):
"""Virtual keyboard controller. Handles all vkbd devices for a domain.
"""
def getDeviceDetails(self, config):
"""@see DevController.getDeviceDetails"""
devid = 0
back = {}
front = {}
return (devid, back, front)
def waitForDevice(self, config):
# is a qemu-dm managed device, don't wait for hotplug for these.
return
def destroyDevice(self, devid, force):
# remove the backend xenstore entries no matter what
# because we kill qemu-dm with extreme prejudice
# not giving it a chance to remove them itself
DevController.destroyDevice(self, devid, True)
def migrate(self, deviceConfig, network, dst, step, domName):
# Handled by qemu-dm so no action needed
return 0
| gpl-2.0 |
LeartS/odoo | addons/google_calendar/__openerp__.py | 31 | 1633 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2012 OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Google Calendar',
'version': '1.0',
'category': 'Tools',
'description': """
The module adds the possibility to synchronize Google Calendar with OpenERP
===========================================================================
""",
'author': 'OpenERP SA',
'website': 'http://www.openerp.com',
'depends': ['google_account', 'calendar'],
'qweb': ['static/src/xml/*.xml'],
'data': [
'res_config_view.xml',
'security/ir.model.access.csv',
'views/google_calendar.xml',
'views/res_users.xml',
],
'demo': [],
'installable': True,
'auto_install': False,
}
| agpl-3.0 |
iotile/coretools | transport_plugins/awsiot/iotile_transport_awsiot/device_adapter.py | 1 | 21488 | import os
import binascii
import base64
import datetime
import logging
import queue
import uuid
from iotile.core.exceptions import IOTileException, ArgumentError, HardwareError
from iotile.core.hw.transport.adapter import DeviceAdapter
from iotile.core.hw.reports.parser import IOTileReportParser
from iotile.core.dev.registry import ComponentRegistry
from .mqtt_client import OrderedAWSIOTClient
from .topic_validator import MQTTTopicValidator
from .connection_manager import ConnectionManager
from . import messages
class AWSIOTDeviceAdapter(DeviceAdapter):
"""A device adapter allowing connections to devices over AWS IoT
Args:
port (string): An optional port string specifying a topic prefix
to use if we are trying to connect to a gateway, otherwise,
we assume that we're connecting directly to a device that
is attached to AWS IoT.
"""
def __init__(self, port):
super(AWSIOTDeviceAdapter, self).__init__()
self.set_config('default_timeout', 5.0)
reg = ComponentRegistry()
endpoint = reg.get_config('awsiot-endpoint')
rootcert = reg.get_config('awsiot-rootcert')
iamuser = reg.get_config('awsiot-iamkey')
iamsecret = reg.get_config('awsiot-iamtoken')
iamsession = reg.get_config('awsiot-session', default=None)
args = {}
args['endpoint'] = endpoint
args['root_certificate'] = rootcert
args['use_websockets'] = True
args['iam_key'] = iamuser
args['iam_secret'] = iamsecret
args['iam_session'] = iamsession
self._logger = logging.getLogger(__name__)
# Port should be a topic prefix that allows us to connect
# only to subset of IOTile devices managed by a gateway
# rather than to directly accessible iotile devices.
if port is None:
port = ""
if len(port) > 0 and port[-1] != '/':
port = port + '/'
self.client = OrderedAWSIOTClient(args)
self.name = str(uuid.uuid4())
self.client.connect(self.name)
self.prefix = port
self.conns = ConnectionManager(self.id)
self.conns.start()
self.client.subscribe(self.prefix + 'devices/+/data/advertisement', self._on_advertisement, ordered=False)
self._deferred = queue.Queue()
self.set_config('minimum_scan_time', 5.0)
self.set_config('probe_supported', True)
self.set_config('probe_required', True)
self.mtu = self.get_config('mtu', 60*1024) # Split script payloads larger than this
self.report_parser = IOTileReportParser()
def connect_async(self, connection_id, connection_string, callback):
"""Connect to a device by its connection_string
This function looks for the device on AWS IOT using the preconfigured
topic prefix and looking for:
<prefix>/devices/connection_string
It then attempts to lock that device for exclusive access and
returns a callback if successful.
Args:
connection_id (int): A unique integer set by the caller for referring to this connection
once created
connection_string (string): A device id of the form d--XXXX-YYYY-ZZZZ-WWWW
callback (callable): A callback function called when the connection has succeeded or
failed
"""
topics = MQTTTopicValidator(self.prefix + 'devices/{}'.format(connection_string))
key = self._generate_key()
name = self.name
conn_message = {'type': 'command', 'operation': 'connect', 'key': key, 'client': name}
context = {'key': key, 'slug': connection_string, 'topics': topics}
self.conns.begin_connection(connection_id, connection_string, callback, context, self.get_config('default_timeout'))
self._bind_topics(topics)
try:
self.client.publish(topics.connect, conn_message)
except IOTileException:
self._unbind_topics(topics)
self.conns.finish_connection(connection_id, False, 'Failed to send connection message')
def disconnect_async(self, conn_id, callback):
"""Asynchronously disconnect from a device that has previously been connected
Args:
conn_id (int): a unique identifier for this connection on the DeviceManager
that owns this adapter.
callback (callable): A function called as callback(conn_id, adapter_id, success, failure_reason)
when the disconnection finishes. Disconnection can only either succeed or timeout.
"""
try:
context = self.conns.get_context(conn_id)
except ArgumentError:
callback(conn_id, self.id, False, "Could not find connection information")
return
self.conns.begin_disconnection(conn_id, callback, self.get_config('default_timeout'))
topics = context['topics']
disconn_message = {'key': context['key'], 'client': self.name, 'type': 'command', 'operation': 'disconnect'}
self.client.publish(topics.action, disconn_message)
def send_script_async(self, conn_id, data, progress_callback, callback):
"""Asynchronously send a a script to this IOTile device
Args:
conn_id (int): A unique identifier that will refer to this connection
data (string): the script to send to the device
progress_callback (callable): A function to be called with status on our progress, called as:
progress_callback(done_count, total_count)
callback (callable): A callback for when we have finished sending the script. The callback will be called as
callback(connection_id, adapter_id, success, failure_reason)
'connection_id': the connection id
'adapter_id': this adapter's id
'success': a bool indicating whether we received a response to our attempted RPC
'failure_reason': a string with the reason for the failure if success == False
"""
try:
context = self.conns.get_context(conn_id)
except ArgumentError:
callback(conn_id, self.id, False, "Could not find connection information")
return
topics = context['topics']
context['progress_callback'] = progress_callback
self.conns.begin_operation(conn_id, 'script', callback, 60.0)
chunks = 1
if len(data) > self.mtu:
chunks = len(data) // self.mtu
if len(data) % self.mtu != 0:
chunks += 1
# Send the script out possibly in multiple chunks if it's larger than our maximum transmit unit
for i in range(0, chunks):
start = i*self.mtu
chunk = data[start:start + self.mtu]
encoded = base64.standard_b64encode(chunk)
script_message = {'key': context['key'], 'client': self.name, 'type': 'command', 'operation': 'send_script',
'script': encoded, 'fragment_count': chunks, 'fragment_index': i}
self.client.publish(topics.action, script_message)
def send_rpc_async(self, conn_id, address, rpc_id, payload, timeout, callback):
"""Asynchronously send an RPC to this IOTile device
Args:
conn_id (int): A unique identifier that will refer to this connection
address (int): the address of the tile that we wish to send the RPC to
rpc_id (int): the 16-bit id of the RPC we want to call
payload (bytearray): the payload of the command
timeout (float): the number of seconds to wait for the RPC to execute
callback (callable): A callback for when we have finished the RPC. The callback will be called as:
callback(connection_id, adapter_id, success, failure_reason, status, payload)
'connection_id': the connection id
'adapter_id': this adapter's id
'success': a bool indicating whether we received a response to our attempted RPC
'failure_reason': a string with the reason for the failure if success == False
'status': the one byte status code returned for the RPC if success == True else None
'payload': a bytearray with the payload returned by RPC if success == True else None
"""
try:
context = self.conns.get_context(conn_id)
except ArgumentError:
callback(conn_id, self.id, False, "Could not find connection information", 0xFF, bytearray())
return
self.conns.begin_operation(conn_id, 'rpc', callback, timeout)
topics = context['topics']
encoded_payload = binascii.hexlify(payload)
rpc_message = {'key': context['key'], 'client': self.name, 'type': 'command', 'operation': 'rpc',
'address': address, 'rpc_id': rpc_id, 'payload': encoded_payload, 'timeout': timeout}
self.client.publish(topics.action, rpc_message)
def _open_rpc_interface(self, conn_id, callback):
"""Enable RPC interface for this IOTile device
Args:
conn_id (int): the unique identifier for the connection
callback (callback): Callback to be called when this command finishes
callback(conn_id, adapter_id, success, failure_reason)
"""
self._open_interface(conn_id, 'rpc', callback)
def _open_streaming_interface(self, conn_id, callback):
"""Enable streaming interface for this IOTile device
Args:
conn_id (int): the unique identifier for the connection
callback (callback): Callback to be called when this command finishes
callback(conn_id, adapter_id, success, failure_reason)
"""
self._open_interface(conn_id, 'streaming', callback)
def _open_tracing_interface(self, conn_id, callback):
"""Enable tracing interface for this IOTile device
Args:
conn_id (int): the unique identifier for the connection
callback (callback): Callback to be called when this command finishes
callback(conn_id, adapter_id, success, failure_reason)
"""
self._open_interface(conn_id, 'tracing', callback)
def _open_script_interface(self, conn_id, callback):
"""Enable script interface for this IOTile device
Args:
conn_id (int): the unique identifier for the connection
callback (callback): Callback to be called when this command finishes
callback(conn_id, adapter_id, success, failure_reason)
"""
self._open_interface(conn_id, 'script', callback)
def _open_interface(self, conn_id, iface, callback):
"""Open an interface on this device
Args:
conn_id (int): the unique identifier for the connection
iface (string): the interface name to open
callback (callback): Callback to be called when this command finishes
callback(conn_id, adapter_id, success, failure_reason)
"""
try:
context = self.conns.get_context(conn_id)
except ArgumentError:
callback(conn_id, self.id, False, "Could not find connection information")
return
self.conns.begin_operation(conn_id, 'open_interface', callback, self.get_config('default_timeout'))
topics = context['topics']
open_iface_message = {'key': context['key'], 'type': 'command', 'operation': 'open_interface', 'client': self.name, 'interface': iface}
self.client.publish(topics.action, open_iface_message)
def stop_sync(self):
"""Synchronously stop this adapter
"""
conn_ids = self.conns.get_connections()
# If we have any open connections, try to close them here before shutting down
for conn in list(conn_ids):
try:
self.disconnect_sync(conn)
except HardwareError:
pass
self.client.disconnect()
self.conns.stop()
def probe_async(self, callback):
"""Probe for visible devices connected to this DeviceAdapter.
Args:
callback (callable): A callback for when the probe operation has completed.
callback should have signature callback(adapter_id, success, failure_reason) where:
success: bool
failure_reason: None if success is True, otherwise a reason for why we could not probe
"""
topics = MQTTTopicValidator(self.prefix)
self.client.publish(topics.probe, {'type': 'command', 'operation': 'probe', 'client': self.name})
callback(self.id, True, None)
def periodic_callback(self):
"""Periodically help maintain adapter internal state
"""
while True:
try:
action = self._deferred.get(False)
action()
except queue.Empty:
break
except Exception:
self._logger.exception('Exception in periodic callback')
def _bind_topics(self, topics):
"""Subscribe to all the topics we need to communication with this device
Args:
topics (MQTTTopicValidator): The topic validator for this device that
we are connecting to.
"""
# FIXME: Allow for these subscriptions to fail and clean up the previous ones
# so that this function is atomic
self.client.subscribe(topics.status, self._on_status_message)
self.client.subscribe(topics.tracing, self._on_trace)
self.client.subscribe(topics.streaming, self._on_report)
self.client.subscribe(topics.response, self._on_response_message)
def _unbind_topics(self, topics):
"""Unsubscribe to all of the topics we needed for communication with device
Args:
topics (MQTTTopicValidator): The topic validator for this device that
we have connected to.
"""
self.client.unsubscribe(topics.status)
self.client.unsubscribe(topics.tracing)
self.client.unsubscribe(topics.streaming)
self.client.unsubscribe(topics.response)
def _generate_key(self):
"""Generate a random 32 byte key and encode it in hex
Returns:
string: Cryptographically random 64 character string
"""
key = os.urandom(32)
return binascii.hexlify(key)
def _find_connection(self, topic):
"""Attempt to find a connection id corresponding with a topic
The device is found by assuming the topic ends in <slug>/[control|data]/channel
Args:
topic (string): The topic we received a message on
Returns:
string: The internal connection id (device slug) associated with this topic
"""
parts = topic.split('/')
if len(parts) < 3:
return None
slug = parts[-3]
return slug
def _on_advertisement(self, sequence, topic, message):
try:
# FIXME: We need a global topic validator to validate these messages
# message = self.topics.validate_message(['advertisement'], message_type, message)
del message['operation']
del message['type']
self._trigger_callback('on_scan', self.id, message, 60.) # FIXME: Get the timeout from somewhere
except IOTileException as exc:
pass
def _on_report(self, sequence, topic, message):
"""Process a report received from a device.
Args:
sequence (int): The sequence number of the packet received
topic (string): The topic this message was received on
message (dict): The message itself
"""
try:
conn_key = self._find_connection(topic)
conn_id = self.conns.get_connection_id(conn_key)
except ArgumentError:
self._logger.warn("Dropping report message that does not correspond with a known connection, topic=%s", topic)
return
try:
rep_msg = messages.ReportNotification.verify(message)
serialized_report = {}
serialized_report['report_format'] = rep_msg['report_format']
serialized_report['encoded_report'] = rep_msg['report']
serialized_report['received_time'] = datetime.datetime.strptime(rep_msg['received_time'].encode().decode(), "%Y%m%dT%H:%M:%S.%fZ")
report = self.report_parser.deserialize_report(serialized_report)
self._trigger_callback('on_report', conn_id, report)
except Exception:
self._logger.exception("Error processing report conn_id=%d", conn_id)
def _on_trace(self, sequence, topic, message):
"""Process a trace received from a device.
Args:
sequence (int): The sequence number of the packet received
topic (string): The topic this message was received on
message (dict): The message itself
"""
try:
conn_key = self._find_connection(topic)
conn_id = self.conns.get_connection_id(conn_key)
except ArgumentError:
self._logger.warn("Dropping trace message that does not correspond with a known connection, topic=%s", topic)
return
try:
tracing = messages.TracingNotification.verify(message)
self._trigger_callback('on_trace', conn_id, tracing['trace'])
except Exception:
self._logger.exception("Error processing trace conn_id=%d", conn_id)
def _on_status_message(self, sequence, topic, message):
"""Process a status message received
Args:
sequence (int): The sequence number of the packet received
topic (string): The topic this message was received on
message (dict): The message itself
"""
self._logger.debug("Received message on (topic=%s): %s" % (topic, message))
try:
conn_key = self._find_connection(topic)
except ArgumentError:
self._logger.warn("Dropping message that does not correspond with a known connection, message=%s", message)
return
if messages.ConnectionResponse.matches(message):
if self.name != message['client']:
self._logger.debug("Connection response received for a different client, client=%s, name=%s", message['client'], self.name)
return
self.conns.finish_connection(conn_key, message['success'], message.get('failure_reason', None))
else:
self._logger.warn("Dropping message that did not correspond with a known schema, message=%s", message)
def _on_response_message(self, sequence, topic, message):
"""Process a response message received
Args:
sequence (int): The sequence number of the packet received
topic (string): The topic this message was received on
message (dict): The message itself
"""
try:
conn_key = self._find_connection(topic)
context = self.conns.get_context(conn_key)
except ArgumentError:
self._logger.warn("Dropping message that does not correspond with a known connection, message=%s", message)
return
if 'client' in message and message['client'] != self.name:
    self._logger.debug("Dropping message that is for another client %s, we are %s", message['client'], self.name)
    return
if messages.DisconnectionResponse.matches(message):
self.conns.finish_disconnection(conn_key, message['success'], message.get('failure_reason', None))
elif messages.OpenInterfaceResponse.matches(message):
self.conns.finish_operation(conn_key, message['success'], message.get('failure_reason', None))
elif messages.RPCResponse.matches(message):
rpc_message = messages.RPCResponse.verify(message)
self.conns.finish_operation(conn_key, rpc_message['success'], rpc_message.get('failure_reason', None), rpc_message.get('status', None), rpc_message.get('payload', None))
elif messages.ProgressNotification.matches(message):
progress_callback = context.get('progress_callback', None)
if progress_callback is not None:
progress_callback(message['done_count'], message['total_count'])
elif messages.ScriptResponse.matches(message):
if 'progress_callback' in context:
del context['progress_callback']
self.conns.finish_operation(conn_key, message['success'], message.get('failure_reason', None))
elif messages.DisconnectionNotification.matches(message):
try:
conn_key = self._find_connection(topic)
conn_id = self.conns.get_connection_id(conn_key)
except ArgumentError:
self._logger.warn("Dropping disconnect notification that does not correspond with a known connection, topic=%s", topic)
return
self.conns.unexpected_disconnect(conn_key)
self._trigger_callback('on_disconnect', self.id, conn_id)
else:
self._logger.warn("Invalid response message received, message=%s", message)
| gpl-3.0 |
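# A minimal sketch of the MTU chunking in send_script_async() above: split
# a payload into ceil(len(data) / mtu) fragments and base64-encode each,
# mirroring the fragment_count/fragment_index fields the adapter publishes.
# Standalone illustration only, not the adapter's actual API.
import base64

def chunk_script(data, mtu=60 * 1024):
    chunks = max(1, (len(data) + mtu - 1) // mtu)  # ceiling division
    for i in range(chunks):
        fragment = data[i * mtu:(i + 1) * mtu]
        yield {'fragment_index': i,
               'fragment_count': chunks,
               'script': base64.standard_b64encode(fragment)}

for msg in chunk_script(b'\x00' * (150 * 1024)):
    print(msg['fragment_index'], msg['fragment_count'], len(msg['script']))
# -> fragments 0..2 of 3 for a 150 KiB payload with a 60 KiB MTU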
cgstudiomap/cgstudiomap | main/parts/odoo/addons/edi/__openerp__.py | 312 | 1911 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Electronic Data Interchange (EDI)',
'version': '1.0',
'category': 'Tools',
'description': """
Provides a common EDI platform that other Applications can use.
===============================================================
OpenERP specifies a generic EDI format for exchanging business documents between
different systems, and provides generic mechanisms to import and export them.
More details about OpenERP's EDI format may be found in the technical OpenERP
documentation at http://doc.openerp.com.
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com/page/api',
'depends': ['base', 'email_template'],
'data' : [
'views/edi.xml',
],
'icon': '/edi/static/src/img/knowledge.png',
'test': ['test/edi_partner_test.yml'],
'qweb': ['static/src/xml/*.xml'],
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Nowheresly/odoo | addons/point_of_sale/wizard/__init__.py | 382 | 1200 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import pos_box
import pos_confirm
import pos_details
import pos_discount
import pos_open_statement
import pos_payment
import pos_session_opening
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
hbrunn/OpenUpgrade | addons/hr_holidays/__init__.py | 442 | 1094 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_holidays
import report
import wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ptmr3/GalaxyNote2_Kernel | tools/perf/scripts/python/syscall-counts-by-pid.py | 11180 | 1927 | # system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts-by-pid.py [comm]\n";
for_comm = None
for_pid = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
try:
for_pid = int(sys.argv[1])
except:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if (for_comm and common_comm != for_comm) or \
(for_pid and common_pid != for_pid ):
return
try:
syscalls[common_comm][common_pid][id] += 1
except TypeError:
syscalls[common_comm][common_pid][id] = 1
def print_syscall_totals():
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events by comm/pid:\n\n",
print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id, val in sorted(syscalls[comm][pid].iteritems(), \
key = lambda(k, v): (v, k), reverse = True):
print " %-38s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
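# A minimal sketch of the autodict() idiom above: that helper comes from
# perf's Core module and auto-vivifies nested keys; collections.defaultdict
# gives the same comm -> pid -> syscall-id counting in plain modern Python.
from collections import defaultdict

syscalls = defaultdict(lambda: defaultdict(lambda: defaultdict(int)))
syscalls['bash'][1234][59] += 1   # comm, pid, syscall id (hypothetical)
syscalls['bash'][1234][59] += 1
for comm, pids in syscalls.items():
    for pid, counts in pids.items():
        for sc_id, count in sorted(counts.items(),
                                   key=lambda kv: kv[1], reverse=True):
            print("%-20s [%d] id=%d count=%d" % (comm, pid, sc_id, count))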
MartinHjelmare/home-assistant | homeassistant/components/simplisafe/alarm_control_panel.py | 6 | 4446 | """Support for SimpliSafe alarm control panels."""
import logging
import re
import homeassistant.components.alarm_control_panel as alarm
from homeassistant.const import (
CONF_CODE, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME,
STATE_ALARM_DISARMED)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import DATA_CLIENT, DOMAIN, TOPIC_UPDATE
_LOGGER = logging.getLogger(__name__)
ATTR_ALARM_ACTIVE = 'alarm_active'
ATTR_TEMPERATURE = 'temperature'
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None):
"""Set up a SimpliSafe alarm control panel based on existing config."""
pass
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up a SimpliSafe alarm control panel based on a config entry."""
systems = hass.data[DOMAIN][DATA_CLIENT][entry.entry_id]
async_add_entities([
SimpliSafeAlarm(system, entry.data.get(CONF_CODE))
for system in systems
], True)
class SimpliSafeAlarm(alarm.AlarmControlPanel):
"""Representation of a SimpliSafe alarm."""
def __init__(self, system, code):
"""Initialize the SimpliSafe alarm."""
self._async_unsub_dispatcher_connect = None
self._attrs = {}
self._code = code
self._system = system
self._state = None
@property
def unique_id(self):
"""Return the unique ID."""
return self._system.system_id
@property
def name(self):
"""Return the name of the device."""
return self._system.address
@property
def code_format(self):
"""Return one or more digits/characters."""
if not self._code:
return None
if isinstance(self._code, str) and re.search('^\\d+$', self._code):
return alarm.FORMAT_NUMBER
return alarm.FORMAT_TEXT
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attrs
def _validate_code(self, code, state):
"""Validate given code."""
check = self._code is None or code == self._code
if not check:
_LOGGER.warning("Wrong code entered for %s", state)
return check
async def async_added_to_hass(self):
"""Register callbacks."""
@callback
def update():
"""Update the state."""
self.async_schedule_update_ha_state(True)
self._async_unsub_dispatcher_connect = async_dispatcher_connect(
self.hass, TOPIC_UPDATE, update)
async def async_will_remove_from_hass(self) -> None:
"""Disconnect dispatcher listener when removed."""
if self._async_unsub_dispatcher_connect:
self._async_unsub_dispatcher_connect()
async def async_alarm_disarm(self, code=None):
"""Send disarm command."""
if not self._validate_code(code, 'disarming'):
return
await self._system.set_off()
async def async_alarm_arm_home(self, code=None):
"""Send arm home command."""
if not self._validate_code(code, 'arming home'):
return
await self._system.set_home()
async def async_alarm_arm_away(self, code=None):
"""Send arm away command."""
if not self._validate_code(code, 'arming away'):
return
await self._system.set_away()
async def async_update(self):
"""Update alarm status."""
from simplipy.system import SystemStates
await self._system.update()
self._attrs[ATTR_ALARM_ACTIVE] = self._system.alarm_going_off
if self._system.temperature:
self._attrs[ATTR_TEMPERATURE] = self._system.temperature
if self._system.state == SystemStates.error:
return
if self._system.state == SystemStates.off:
self._state = STATE_ALARM_DISARMED
elif self._system.state in (SystemStates.home,
SystemStates.home_count):
self._state = STATE_ALARM_ARMED_HOME
elif self._system.state in (SystemStates.away, SystemStates.away_count,
SystemStates.exit_delay):
self._state = STATE_ALARM_ARMED_AWAY
else:
self._state = None
| apache-2.0 |
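# A minimal sketch of the code_format logic above: an all-digit code maps
# to the number-pad format, anything else to free text. FORMAT_NUMBER and
# FORMAT_TEXT stand in for the homeassistant alarm constants.
import re

FORMAT_NUMBER = 'number'
FORMAT_TEXT = 'text'

def code_format(code):
    if not code:
        return None
    if isinstance(code, str) and re.search(r'^\d+$', code):
        return FORMAT_NUMBER
    return FORMAT_TEXT

assert code_format(None) is None
assert code_format('1234') == FORMAT_NUMBER
assert code_format('pa55w0rd') == FORMAT_TEXT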
domenicosolazzo/practice-django | venv/lib/python2.7/site-packages/django/contrib/messages/storage/base.py | 113 | 6286 | from __future__ import unicode_literals
from django.conf import settings
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.contrib.messages import constants, utils
LEVEL_TAGS = utils.get_level_tags()
@python_2_unicode_compatible
class Message(object):
"""
Represents an actual message that can be stored in any of the supported
storage classes (typically session- or cookie-based) and rendered in a view
or template.
"""
def __init__(self, level, message, extra_tags=None):
self.level = int(level)
self.message = message
self.extra_tags = extra_tags
def _prepare(self):
"""
Prepares the message for serialization by forcing the ``message``
and ``extra_tags`` to unicode in case they are lazy translations.
Known "safe" types (None, int, etc.) are not converted (see Django's
``force_text`` implementation for details).
"""
self.message = force_text(self.message, strings_only=True)
self.extra_tags = force_text(self.extra_tags, strings_only=True)
def __eq__(self, other):
return isinstance(other, Message) and self.level == other.level and \
self.message == other.message
def __str__(self):
return force_text(self.message)
def _get_tags(self):
extra_tags = force_text(self.extra_tags, strings_only=True)
if extra_tags and self.level_tag:
return ' '.join([extra_tags, self.level_tag])
elif extra_tags:
return extra_tags
elif self.level_tag:
return self.level_tag
return ''
tags = property(_get_tags)
@property
def level_tag(self):
return force_text(LEVEL_TAGS.get(self.level, ''), strings_only=True)
class BaseStorage(object):
"""
This is the base backend for temporary message storage.
This is not a complete class; to be a usable storage backend, it must be
subclassed and the two methods ``_get`` and ``_store`` overridden.
"""
def __init__(self, request, *args, **kwargs):
self.request = request
self._queued_messages = []
self.used = False
self.added_new = False
super(BaseStorage, self).__init__(*args, **kwargs)
def __len__(self):
return len(self._loaded_messages) + len(self._queued_messages)
def __iter__(self):
self.used = True
if self._queued_messages:
self._loaded_messages.extend(self._queued_messages)
self._queued_messages = []
return iter(self._loaded_messages)
def __contains__(self, item):
return item in self._loaded_messages or item in self._queued_messages
@property
def _loaded_messages(self):
"""
Returns a list of loaded messages, retrieving them first if they have
not been loaded yet.
"""
if not hasattr(self, '_loaded_data'):
messages, all_retrieved = self._get()
self._loaded_data = messages or []
return self._loaded_data
def _get(self, *args, **kwargs):
"""
Retrieves a list of stored messages. Returns a tuple of the messages
and a flag indicating whether or not all the messages originally
intended to be stored in this storage were, in fact, stored and
retrieved; e.g., ``(messages, all_retrieved)``.
**This method must be implemented by a subclass.**
If it is possible to tell if the backend was not used (as opposed to
just containing no messages) then ``None`` should be returned in
place of ``messages``.
"""
raise NotImplementedError('subclasses of BaseStorage must provide a _get() method')
def _store(self, messages, response, *args, **kwargs):
"""
Stores a list of messages, returning a list of any messages which could
not be stored.
One type of object must be able to be stored, ``Message``.
**This method must be implemented by a subclass.**
"""
raise NotImplementedError('subclasses of BaseStorage must provide a _store() method')
def _prepare_messages(self, messages):
"""
Prepares a list of messages for storage.
"""
for message in messages:
message._prepare()
def update(self, response):
"""
Stores all unread messages.
If the backend has yet to be iterated, previously stored messages will
be stored again. Otherwise, only messages added after the last
iteration will be stored.
"""
self._prepare_messages(self._queued_messages)
if self.used:
return self._store(self._queued_messages, response)
elif self.added_new:
messages = self._loaded_messages + self._queued_messages
return self._store(messages, response)
def add(self, level, message, extra_tags=''):
"""
Queues a message to be stored.
The message is only queued if it contained something and its level is
not less than the recording level (``self.level``).
"""
if not message:
return
# Check that the message level is not less than the recording level.
level = int(level)
if level < self.level:
return
# Add the message.
self.added_new = True
message = Message(level, message, extra_tags=extra_tags)
self._queued_messages.append(message)
def _get_level(self):
"""
Returns the minimum recorded level.
The default level is the ``MESSAGE_LEVEL`` setting. If this is
not found, the ``INFO`` level is used.
"""
if not hasattr(self, '_level'):
self._level = getattr(settings, 'MESSAGE_LEVEL', constants.INFO)
return self._level
def _set_level(self, value=None):
"""
Sets a custom minimum recorded level.
If set to ``None``, the default level will be used (see the
``_get_level`` method).
"""
if value is None and hasattr(self, '_level'):
del self._level
else:
self._level = int(value)
level = property(_get_level, _set_level, _set_level)
| mit |
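# A minimal sketch of the subclass contract stated in BaseStorage's
# docstring: override _get() and _store(). An in-memory backend for
# illustration only; it is not a class django.contrib.messages provides.
class MemoryStorage(BaseStorage):
    _saved = []

    def _get(self, *args, **kwargs):
        # Return (messages, all_retrieved); None signals an unused backend.
        return (list(MemoryStorage._saved) or None, True)

    def _store(self, messages, response, *args, **kwargs):
        # Persist everything; return the messages that could not be stored.
        MemoryStorage._saved = list(messages)
        return []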
campbe13/openhatch | vendor/packages/celery/celery/tests/test_backends/test_base.py | 18 | 10279 | from __future__ import absolute_import
from __future__ import with_statement
import sys
import types
from mock import Mock
from nose import SkipTest
from celery.utils import serialization
from celery.utils.serialization import subclass_exception
from celery.utils.serialization import \
find_nearest_pickleable_exception as fnpe
from celery.utils.serialization import UnpickleableExceptionWrapper
from celery.utils.serialization import get_pickleable_exception as gpe
from celery import states
from celery.backends.base import BaseBackend, KeyValueStoreBackend
from celery.backends.base import BaseDictBackend, DisabledBackend
from celery.utils import uuid
from celery.tests.utils import unittest
class wrapobject(object):
def __init__(self, *args, **kwargs):
self.args = args
if sys.version_info >= (3, 0):
Oldstyle = None
else:
Oldstyle = types.ClassType("Oldstyle", (), {})
Unpickleable = subclass_exception("Unpickleable", KeyError, "foo.module")
Impossible = subclass_exception("Impossible", object, "foo.module")
Lookalike = subclass_exception("Lookalike", wrapobject, "foo.module")
b = BaseBackend()
class test_serialization(unittest.TestCase):
def test_create_exception_cls(self):
self.assertTrue(serialization.create_exception_cls("FooError", "m"))
self.assertTrue(serialization.create_exception_cls("FooError",
"m",
KeyError))
class test_BaseBackend_interface(unittest.TestCase):
def test_get_status(self):
with self.assertRaises(NotImplementedError):
b.get_status("SOMExx-N0Nex1stant-IDxx-")
def test__forget(self):
with self.assertRaises(NotImplementedError):
b.forget("SOMExx-N0Nex1stant-IDxx-")
def test_store_result(self):
with self.assertRaises(NotImplementedError):
b.store_result("SOMExx-N0nex1stant-IDxx-", 42, states.SUCCESS)
def test_mark_as_started(self):
with self.assertRaises(NotImplementedError):
b.mark_as_started("SOMExx-N0nex1stant-IDxx-")
def test_reload_task_result(self):
with self.assertRaises(NotImplementedError):
b.reload_task_result("SOMExx-N0nex1stant-IDxx-")
def test_reload_taskset_result(self):
with self.assertRaises(NotImplementedError):
b.reload_taskset_result("SOMExx-N0nex1stant-IDxx-")
def test_get_result(self):
with self.assertRaises(NotImplementedError):
b.get_result("SOMExx-N0nex1stant-IDxx-")
def test_restore_taskset(self):
with self.assertRaises(NotImplementedError):
b.restore_taskset("SOMExx-N0nex1stant-IDxx-")
def test_delete_taskset(self):
with self.assertRaises(NotImplementedError):
b.delete_taskset("SOMExx-N0nex1stant-IDxx-")
def test_save_taskset(self):
with self.assertRaises(NotImplementedError):
b.save_taskset("SOMExx-N0nex1stant-IDxx-", "blergh")
def test_get_traceback(self):
with self.assertRaises(NotImplementedError):
b.get_traceback("SOMExx-N0nex1stant-IDxx-")
def test_forget(self):
with self.assertRaises(NotImplementedError):
b.forget("SOMExx-N0nex1stant-IDxx-")
def test_on_chord_apply(self, unlock="celery.chord_unlock"):
from celery.registry import tasks
p, tasks[unlock] = tasks.get(unlock), Mock()
try:
b.on_chord_apply("dakj221", "sdokqweok")
self.assertTrue(tasks[unlock].apply_async.call_count)
finally:
tasks[unlock] = p
class test_exception_pickle(unittest.TestCase):
def test_oldstyle(self):
if Oldstyle is None:
raise SkipTest("py3k does not support old style classes")
self.assertIsNone(fnpe(Oldstyle()))
def test_BaseException(self):
self.assertIsNone(fnpe(Exception()))
def test_get_pickleable_exception(self):
exc = Exception("foo")
self.assertEqual(gpe(exc), exc)
def test_unpickleable(self):
self.assertIsInstance(fnpe(Unpickleable()), KeyError)
self.assertIsNone(fnpe(Impossible()))
class test_prepare_exception(unittest.TestCase):
def test_unpickleable(self):
x = b.prepare_exception(Unpickleable(1, 2, "foo"))
self.assertIsInstance(x, KeyError)
y = b.exception_to_python(x)
self.assertIsInstance(y, KeyError)
def test_impossible(self):
x = b.prepare_exception(Impossible())
self.assertIsInstance(x, UnpickleableExceptionWrapper)
y = b.exception_to_python(x)
self.assertEqual(y.__class__.__name__, "Impossible")
if sys.version_info < (2, 5):
self.assertTrue(y.__class__.__module__)
else:
self.assertEqual(y.__class__.__module__, "foo.module")
def test_regular(self):
x = b.prepare_exception(KeyError("baz"))
self.assertIsInstance(x, KeyError)
y = b.exception_to_python(x)
self.assertIsInstance(y, KeyError)
class KVBackend(KeyValueStoreBackend):
mget_returns_dict = False
def __init__(self, *args, **kwargs):
self.db = {}
super(KVBackend, self).__init__(KeyValueStoreBackend)
def get(self, key):
return self.db.get(key)
def set(self, key, value):
self.db[key] = value
def mget(self, keys):
if self.mget_returns_dict:
return dict((key, self.get(key)) for key in keys)
else:
return [self.get(key) for key in keys]
def delete(self, key):
self.db.pop(key, None)
class DictBackend(BaseDictBackend):
def __init__(self, *args, **kwargs):
BaseDictBackend.__init__(self, *args, **kwargs)
self._data = {"can-delete": {"result": "foo"}}
def _restore_taskset(self, taskset_id):
if taskset_id == "exists":
return {"result": "taskset"}
def _get_task_meta_for(self, task_id):
if task_id == "task-exists":
return {"result": "task"}
def _delete_taskset(self, taskset_id):
self._data.pop(taskset_id, None)
class test_BaseDictBackend(unittest.TestCase):
def setUp(self):
self.b = DictBackend()
def test_delete_taskset(self):
self.b.delete_taskset("can-delete")
self.assertNotIn("can-delete", self.b._data)
def test_save_taskset(self):
b = BaseDictBackend()
b._save_taskset = Mock()
b.save_taskset("foofoo", "xxx")
b._save_taskset.assert_called_with("foofoo", "xxx")
def test_forget_interface(self):
b = BaseDictBackend()
with self.assertRaises(NotImplementedError):
b.forget("foo")
def test_restore_taskset(self):
self.assertIsNone(self.b.restore_taskset("missing"))
self.assertIsNone(self.b.restore_taskset("missing"))
self.assertEqual(self.b.restore_taskset("exists"), "taskset")
self.assertEqual(self.b.restore_taskset("exists"), "taskset")
self.assertEqual(self.b.restore_taskset("exists", cache=False),
"taskset")
def test_reload_taskset_result(self):
self.b._cache = {}
self.b.reload_taskset_result("exists")
self.b._cache["exists"] = {"result": "taskset"}
def test_reload_task_result(self):
self.b._cache = {}
self.b.reload_task_result("task-exists")
self.b._cache["task-exists"] = {"result": "task"}
class test_KeyValueStoreBackend(unittest.TestCase):
def setUp(self):
self.b = KVBackend()
def test_get_store_delete_result(self):
tid = uuid()
self.b.mark_as_done(tid, "Hello world")
self.assertEqual(self.b.get_result(tid), "Hello world")
self.assertEqual(self.b.get_status(tid), states.SUCCESS)
self.b.forget(tid)
self.assertEqual(self.b.get_status(tid), states.PENDING)
def test_strip_prefix(self):
x = self.b.get_key_for_task("x1b34")
self.assertEqual(self.b._strip_prefix(x), "x1b34")
self.assertEqual(self.b._strip_prefix("x1b34"), "x1b34")
def test_get_many(self):
for is_dict in True, False:
self.b.mget_returns_dict = is_dict
ids = dict((uuid(), i) for i in xrange(10))
for id, i in ids.items():
self.b.mark_as_done(id, i)
it = self.b.get_many(ids.keys())
for i, (got_id, got_state) in enumerate(it):
self.assertEqual(got_state["result"], ids[got_id])
self.assertEqual(i, 9)
self.assertTrue(list(self.b.get_many(ids.keys())))
def test_get_missing_meta(self):
self.assertIsNone(self.b.get_result("xxx-missing"))
self.assertEqual(self.b.get_status("xxx-missing"), states.PENDING)
def test_save_restore_delete_taskset(self):
tid = uuid()
self.b.save_taskset(tid, "Hello world")
self.assertEqual(self.b.restore_taskset(tid), "Hello world")
self.b.delete_taskset(tid)
self.assertIsNone(self.b.restore_taskset(tid))
def test_restore_missing_taskset(self):
self.assertIsNone(self.b.restore_taskset("xxx-nonexistant"))
class test_KeyValueStoreBackend_interface(unittest.TestCase):
def test_get(self):
with self.assertRaises(NotImplementedError):
KeyValueStoreBackend().get("a")
def test_set(self):
with self.assertRaises(NotImplementedError):
KeyValueStoreBackend().set("a", 1)
def test_cleanup(self):
self.assertFalse(KeyValueStoreBackend().cleanup())
def test_delete(self):
with self.assertRaises(NotImplementedError):
KeyValueStoreBackend().delete("a")
def test_mget(self):
with self.assertRaises(NotImplementedError):
KeyValueStoreBackend().mget(["a"])
def test_forget(self):
with self.assertRaises(NotImplementedError):
KeyValueStoreBackend().forget("a")
class test_DisabledBackend(unittest.TestCase):
def test_store_result(self):
DisabledBackend().store_result()
def test_is_disabled(self):
with self.assertRaises(NotImplementedError):
DisabledBackend().get_status("foo")
| agpl-3.0 |
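# A minimal sketch of subclass_exception(name, base, module), used for the
# Unpickleable/Impossible/Lookalike fixtures above: it builds an exception
# class at runtime with the given module path.
MyError = subclass_exception("MyError", KeyError, "my.module")
assert issubclass(MyError, KeyError)
assert MyError.__module__ == "my.module"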
benschmaus/catapult | third_party/gsutil/third_party/boto/tests/integration/route53/test_zone.py | 100 | 7729 | # Copyright (c) 2011 Blue Pines Technologies LLC, Brad Carleton
# www.bluepines.org
# Copyright (c) 2012 42 Lines Inc., Jim Browne
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import time
from tests.compat import unittest
from nose.plugins.attrib import attr
from boto.route53.connection import Route53Connection
from boto.exception import TooManyRecordsException
from boto.vpc import VPCConnection
@attr(route53=True)
class TestRoute53Zone(unittest.TestCase):
@classmethod
def setUpClass(self):
route53 = Route53Connection()
self.base_domain = 'boto-test-%s.com' % str(int(time.time()))
zone = route53.get_zone(self.base_domain)
if zone is not None:
zone.delete()
self.zone = route53.create_zone(self.base_domain)
def test_nameservers(self):
self.zone.get_nameservers()
def test_a(self):
self.zone.add_a(self.base_domain, '102.11.23.1', 80)
record = self.zone.get_a(self.base_domain)
self.assertEquals(record.name, u'%s.' % self.base_domain)
self.assertEquals(record.resource_records, [u'102.11.23.1'])
self.assertEquals(record.ttl, u'80')
self.zone.update_a(self.base_domain, '186.143.32.2', '800')
record = self.zone.get_a(self.base_domain)
self.assertEquals(record.name, u'%s.' % self.base_domain)
self.assertEquals(record.resource_records, [u'186.143.32.2'])
self.assertEquals(record.ttl, u'800')
def test_cname(self):
self.zone.add_cname(
'www.%s' % self.base_domain,
'webserver.%s' % self.base_domain,
200
)
record = self.zone.get_cname('www.%s' % self.base_domain)
self.assertEquals(record.name, u'www.%s.' % self.base_domain)
self.assertEquals(record.resource_records, [
u'webserver.%s.' % self.base_domain
])
self.assertEquals(record.ttl, u'200')
self.zone.update_cname(
'www.%s' % self.base_domain,
'web.%s' % self.base_domain,
45
)
record = self.zone.get_cname('www.%s' % self.base_domain)
self.assertEquals(record.name, u'www.%s.' % self.base_domain)
self.assertEquals(record.resource_records, [
u'web.%s.' % self.base_domain
])
self.assertEquals(record.ttl, u'45')
def test_mx(self):
self.zone.add_mx(
self.base_domain,
[
'10 mx1.%s' % self.base_domain,
'20 mx2.%s' % self.base_domain,
],
1000
)
record = self.zone.get_mx(self.base_domain)
self.assertEquals(set(record.resource_records),
set([u'10 mx1.%s.' % self.base_domain,
u'20 mx2.%s.' % self.base_domain]))
self.assertEquals(record.ttl, u'1000')
self.zone.update_mx(
self.base_domain,
[
'10 mail1.%s' % self.base_domain,
'20 mail2.%s' % self.base_domain,
],
50
)
record = self.zone.get_mx(self.base_domain)
self.assertEquals(set(record.resource_records),
set([u'10 mail1.%s.' % self.base_domain,
'20 mail2.%s.' % self.base_domain]))
self.assertEquals(record.ttl, u'50')
def test_get_records(self):
self.zone.get_records()
def test_get_nameservers(self):
self.zone.get_nameservers()
def test_get_zones(self):
route53 = Route53Connection()
route53.get_zones()
def test_identifiers_wrrs(self):
self.zone.add_a('wrr.%s' % self.base_domain, '1.2.3.4',
identifier=('foo', '20'))
self.zone.add_a('wrr.%s' % self.base_domain, '5.6.7.8',
identifier=('bar', '10'))
wrrs = self.zone.find_records(
'wrr.%s' % self.base_domain,
'A',
all=True
)
self.assertEquals(len(wrrs), 2)
self.zone.delete_a('wrr.%s' % self.base_domain, all=True)
def test_identifiers_lbrs(self):
self.zone.add_a('lbr.%s' % self.base_domain, '4.3.2.1',
identifier=('baz', 'us-east-1'))
self.zone.add_a('lbr.%s' % self.base_domain, '8.7.6.5',
identifier=('bam', 'us-west-1'))
lbrs = self.zone.find_records(
'lbr.%s' % self.base_domain,
'A',
all=True
)
self.assertEquals(len(lbrs), 2)
self.zone.delete_a('lbr.%s' % self.base_domain,
identifier=('bam', 'us-west-1'))
self.zone.delete_a('lbr.%s' % self.base_domain,
identifier=('baz', 'us-east-1'))
def test_toomany_exception(self):
self.zone.add_a('exception.%s' % self.base_domain, '4.3.2.1',
identifier=('baz', 'us-east-1'))
self.zone.add_a('exception.%s' % self.base_domain, '8.7.6.5',
identifier=('bam', 'us-west-1'))
self.assertRaises(TooManyRecordsException,
lambda: self.zone.get_a('exception.%s' %
self.base_domain))
self.zone.delete_a('exception.%s' % self.base_domain, all=True)
@classmethod
def tearDownClass(self):
self.zone.delete_a(self.base_domain)
self.zone.delete_cname('www.%s' % self.base_domain)
self.zone.delete_mx(self.base_domain)
self.zone.delete()
@attr(route53=True)
class TestRoute53PrivateZone(unittest.TestCase):
@classmethod
def setUpClass(self):
time_str = str(int(time.time()))
self.route53 = Route53Connection()
self.base_domain = 'boto-private-zone-test-%s.com' % time_str
self.vpc = VPCConnection()
self.test_vpc = self.vpc.create_vpc(cidr_block='10.11.0.0/16')
# tag the vpc to make it easily identifiable if things go spang
self.test_vpc.add_tag("Name", self.base_domain)
self.zone = self.route53.get_zone(self.base_domain)
if self.zone is not None:
self.zone.delete()
def test_create_private_zone(self):
self.zone = self.route53.create_hosted_zone(self.base_domain,
private_zone=True,
vpc_id=self.test_vpc.id,
vpc_region='us-east-1')
@classmethod
def tearDownClass(self):
if self.zone is not None:
self.zone.delete()
self.test_vpc.delete()
if __name__ == '__main__':
unittest.main(verbosity=3)
| bsd-3-clause |
schef/schef.github.io | source/14/mc-14-04-whf-pid.py | 1 | 2581 | #!/usr/bin/python
# Written by Stjepan Horvat
# ( zvanstefan@gmail.com )
# by the exercises from David Lucal Burge - Perfect Pitch Ear Traning Supercourse
# Thanks to Wojciech M. Zabolotny ( wzab@ise.pw.edu.pl ) for snd-virmidi example
# ( wzab@ise.pw.edu.pl )
import random
import sys
sys.path.append("/home/schef/github/schef.github.io/source/")
from pptraning import *
print ("Exercise: 14-03")
print ("White harmonic fours. Pitch indentification drill. OVR.")
runda = 0
try:
while True:
runda += 1
print ("Possible commands: 1-again, 2-play, 3-next, 4-compare-to-c:")
while True:
notes = []
for i in range(0, 4):
notes.append(random.choice(whiteNotes[7:28]))
# if len(list(set(notes))) == 4:
# break;
if (len(list(set(notes))) == 4 \
and (notes[0]%12 != notes[1]%12) \
and (notes[0]%12 != notes[2]%12) \
and (notes[0]%12 != notes[3]%12) \
and (notes[1]%12 != notes[2]%12) \
and (notes[1]%12 != notes[3]%12) \
and (notes[2]%12 != notes[3]%12) \
):
break;
#notes.sort()
match = False
noteError = None
while not match: #here starts the practice
done = False
#playFourNotes(notes)
playNote(notes[0])
playNote(notes[1])
playNote(notes[2])
playNote(notes[3])
while not done:
n = input("? ")
if n =="1":
#playFourNotes(notes)
playNote(notes[0])
playNote(notes[1])
playNote(notes[2])
playNote(notes[3])
elif n == "3":
print ("Next")
print (str(runda) + ". round.")
done = True
match = True
elif n =="5":
print (num2Name(notes[0]), num2Name(notes[1]), num2Name(notes[2]), num2Name(notes[3]))
elif n =="4":
print ("C the comparrer")
playNote(name2Num("c"))
elif n =="2":
print(num2Name(notes[0]), num2Name(notes[1]), num2Name(notes[2]), num2Name(notes[3]))
elif re.compile("^[0-3] [0-3]$").match(n):
splited = n.split()
playTwoNotes(notes[int(splited[0])], notes[int(splited[1])])
elif re.compile("^[0-3] [0-3] [0-3]$").match(n):
splited = n.split()
playThreeNotes(notes[int(splited[0])], notes[int(splited[1])], notes[int(splited[2])])
elif splitFour.match(n):
splitNote = n.split()
if splitNote[0] == num2Name(notes[0]).lower() and splitNote[1] == num2Name(notes[1]).lower() and splitNote[2] == num2Name(notes[2]).lower() and splitNote[3] == num2Name(notes[3]).lower():
print ("Next")
print (str(runda) + ". round.")
done = True
match = True
except KeyboardInterrupt:
pass
| mit |
kushalbhola/MyStuff | Practice/PythonApplication/env/Lib/site-packages/pandas/tests/io/parser/test_comment.py | 2 | 3819 | """
Tests that comments are properly handled during parsing
for all of the parsers defined in parsers.py
"""
from io import StringIO
import numpy as np
import pytest
from pandas import DataFrame
import pandas.util.testing as tm
@pytest.mark.parametrize("na_values", [None, ["NaN"]])
def test_comment(all_parsers, na_values):
parser = all_parsers
data = """A,B,C
1,2.,4.#hello world
5.,NaN,10.0
"""
expected = DataFrame(
[[1.0, 2.0, 4.0], [5.0, np.nan, 10.0]], columns=["A", "B", "C"]
)
result = parser.read_csv(StringIO(data), comment="#", na_values=na_values)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"read_kwargs", [dict(), dict(lineterminator="*"), dict(delim_whitespace=True)]
)
def test_line_comment(all_parsers, read_kwargs):
parser = all_parsers
data = """# empty
A,B,C
1,2.,4.#hello world
#ignore this line
5.,NaN,10.0
"""
if read_kwargs.get("delim_whitespace"):
data = data.replace(",", " ")
elif read_kwargs.get("lineterminator"):
if parser.engine != "c":
pytest.skip("Custom terminator not supported with Python engine")
data = data.replace("\n", read_kwargs.get("lineterminator"))
read_kwargs["comment"] = "#"
result = parser.read_csv(StringIO(data), **read_kwargs)
expected = DataFrame(
[[1.0, 2.0, 4.0], [5.0, np.nan, 10.0]], columns=["A", "B", "C"]
)
tm.assert_frame_equal(result, expected)
def test_comment_skiprows(all_parsers):
parser = all_parsers
data = """# empty
random line
# second empty line
1,2,3
A,B,C
1,2.,4.
5.,NaN,10.0
"""
# This should ignore the first four lines (including comments).
expected = DataFrame(
[[1.0, 2.0, 4.0], [5.0, np.nan, 10.0]], columns=["A", "B", "C"]
)
result = parser.read_csv(StringIO(data), comment="#", skiprows=4)
tm.assert_frame_equal(result, expected)
def test_comment_header(all_parsers):
parser = all_parsers
data = """# empty
# second empty line
1,2,3
A,B,C
1,2.,4.
5.,NaN,10.0
"""
# Header should begin at the second non-comment line.
expected = DataFrame(
[[1.0, 2.0, 4.0], [5.0, np.nan, 10.0]], columns=["A", "B", "C"]
)
result = parser.read_csv(StringIO(data), comment="#", header=1)
tm.assert_frame_equal(result, expected)
def test_comment_skiprows_header(all_parsers):
parser = all_parsers
data = """# empty
# second empty line
# third empty line
X,Y,Z
1,2,3
A,B,C
1,2.,4.
5.,NaN,10.0
"""
# Skiprows should skip the first 4 lines (including comments),
# while header should start from the second non-commented line,
# starting with line 5.
expected = DataFrame(
[[1.0, 2.0, 4.0], [5.0, np.nan, 10.0]], columns=["A", "B", "C"]
)
result = parser.read_csv(StringIO(data), comment="#", skiprows=4, header=1)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("comment_char", ["#", "~", "&", "^", "*", "@"])
def test_custom_comment_char(all_parsers, comment_char):
parser = all_parsers
data = "a,b,c\n1,2,3#ignore this!\n4,5,6#ignorethistoo"
result = parser.read_csv(
StringIO(data.replace("#", comment_char)), comment=comment_char
)
expected = DataFrame([[1, 2, 3], [4, 5, 6]], columns=["a", "b", "c"])
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("header", ["infer", None])
def test_comment_first_line(all_parsers, header):
# see gh-4623
parser = all_parsers
data = "# notes\na,b,c\n# more notes\n1,2,3"
if header is None:
expected = DataFrame({0: ["a", "1"], 1: ["b", "2"], 2: ["c", "3"]})
else:
expected = DataFrame([[1, 2, 3]], columns=["a", "b", "c"])
result = parser.read_csv(StringIO(data), comment="#", header=header)
tm.assert_frame_equal(result, expected)
| apache-2.0 |
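# A minimal sketch of the behaviour these tests pin down: comment="#" makes
# read_csv skip full-line comments and truncate a row at the first "#".
from io import StringIO
import pandas as pd

data = "# notes\na,b,c\n1,2,3#trailing\n4,5,6\n"
df = pd.read_csv(StringIO(data), comment="#")
print(df)   # two rows, columns a, b, c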
pnedunuri/scikit-learn | examples/applications/plot_model_complexity_influence.py | 323 | 6372 | """
==========================
Model Complexity Influence
==========================
Demonstrate how model complexity influences both prediction accuracy and
computational performance.
The dataset is the Boston Housing dataset (resp. 20 Newsgroups) for
regression (resp. classification).
For each class of models we make the model complexity vary through the choice
of relevant model parameters and measure the influence on both computational
performance (latency) and predictive power (MSE or Hamming Loss).
"""
print(__doc__)
# Author: Eustache Diemert <eustache@diemert.fr>
# License: BSD 3 clause
import time
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1.parasite_axes import host_subplot
from mpl_toolkits.axisartist.axislines import Axes
from scipy.sparse.csr import csr_matrix
from sklearn import datasets
from sklearn.utils import shuffle
from sklearn.metrics import mean_squared_error
from sklearn.svm.classes import NuSVR
from sklearn.ensemble.gradient_boosting import GradientBoostingRegressor
from sklearn.linear_model.stochastic_gradient import SGDClassifier
from sklearn.metrics import hamming_loss
###############################################################################
# Routines
# initialize random generator
np.random.seed(0)
def generate_data(case, sparse=False):
"""Generate regression/classification data."""
bunch = None
if case == 'regression':
bunch = datasets.load_boston()
elif case == 'classification':
bunch = datasets.fetch_20newsgroups_vectorized(subset='all')
X, y = shuffle(bunch.data, bunch.target)
offset = int(X.shape[0] * 0.8)
X_train, y_train = X[:offset], y[:offset]
X_test, y_test = X[offset:], y[offset:]
if sparse:
X_train = csr_matrix(X_train)
X_test = csr_matrix(X_test)
else:
X_train = np.array(X_train)
X_test = np.array(X_test)
y_test = np.array(y_test)
y_train = np.array(y_train)
data = {'X_train': X_train, 'X_test': X_test, 'y_train': y_train,
'y_test': y_test}
return data
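# A minimal usage sketch (a hedged illustration, not part of the original
# example): 'regression' loads Boston Housing, 'classification' loads the
# vectorized 20 Newsgroups, each with roughly an 80/20 train/test split.
# data = generate_data('regression')
# X_train, y_train = data['X_train'], data['y_train']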
def benchmark_influence(conf):
"""
Benchmark influence of :changing_param: on both MSE and latency.
"""
prediction_times = []
prediction_powers = []
complexities = []
for param_value in conf['changing_param_values']:
conf['tuned_params'][conf['changing_param']] = param_value
estimator = conf['estimator'](**conf['tuned_params'])
print("Benchmarking %s" % estimator)
estimator.fit(conf['data']['X_train'], conf['data']['y_train'])
conf['postfit_hook'](estimator)
complexity = conf['complexity_computer'](estimator)
complexities.append(complexity)
start_time = time.time()
for _ in range(conf['n_samples']):
y_pred = estimator.predict(conf['data']['X_test'])
elapsed_time = (time.time() - start_time) / float(conf['n_samples'])
prediction_times.append(elapsed_time)
pred_score = conf['prediction_performance_computer'](
conf['data']['y_test'], y_pred)
prediction_powers.append(pred_score)
print("Complexity: %d | %s: %.4f | Pred. Time: %fs\n" % (
complexity, conf['prediction_performance_label'], pred_score,
elapsed_time))
return prediction_powers, prediction_times, complexities
def plot_influence(conf, mse_values, prediction_times, complexities):
"""
Plot influence of model complexity on both accuracy and latency.
"""
plt.figure(figsize=(12, 6))
host = host_subplot(111, axes_class=Axes)
plt.subplots_adjust(right=0.75)
par1 = host.twinx()
host.set_xlabel('Model Complexity (%s)' % conf['complexity_label'])
y1_label = conf['prediction_performance_label']
y2_label = "Time (s)"
host.set_ylabel(y1_label)
par1.set_ylabel(y2_label)
p1, = host.plot(complexities, mse_values, 'b-', label="prediction error")
p2, = par1.plot(complexities, prediction_times, 'r-',
label="latency")
host.legend(loc='upper right')
host.axis["left"].label.set_color(p1.get_color())
par1.axis["right"].label.set_color(p2.get_color())
plt.title('Influence of Model Complexity - %s' % conf['estimator'].__name__)
plt.show()
def _count_nonzero_coefficients(estimator):
a = estimator.coef_.toarray()
return np.count_nonzero(a)
###############################################################################
# main code
regression_data = generate_data('regression')
classification_data = generate_data('classification', sparse=True)
configurations = [
{'estimator': SGDClassifier,
'tuned_params': {'penalty': 'elasticnet', 'alpha': 0.001, 'loss':
'modified_huber', 'fit_intercept': True},
'changing_param': 'l1_ratio',
'changing_param_values': [0.25, 0.5, 0.75, 0.9],
'complexity_label': 'non_zero coefficients',
'complexity_computer': _count_nonzero_coefficients,
'prediction_performance_computer': hamming_loss,
'prediction_performance_label': 'Hamming Loss (Misclassification Ratio)',
'postfit_hook': lambda x: x.sparsify(),
'data': classification_data,
'n_samples': 30},
{'estimator': NuSVR,
'tuned_params': {'C': 1e3, 'gamma': 2 ** -15},
'changing_param': 'nu',
'changing_param_values': [0.1, 0.25, 0.5, 0.75, 0.9],
'complexity_label': 'n_support_vectors',
'complexity_computer': lambda x: len(x.support_vectors_),
'data': regression_data,
'postfit_hook': lambda x: x,
'prediction_performance_computer': mean_squared_error,
'prediction_performance_label': 'MSE',
'n_samples': 30},
{'estimator': GradientBoostingRegressor,
'tuned_params': {'loss': 'ls'},
'changing_param': 'n_estimators',
'changing_param_values': [10, 50, 100, 200, 500],
'complexity_label': 'n_trees',
'complexity_computer': lambda x: x.n_estimators,
'data': regression_data,
'postfit_hook': lambda x: x,
'prediction_performance_computer': mean_squared_error,
'prediction_performance_label': 'MSE',
'n_samples': 30},
]
for conf in configurations:
prediction_performances, prediction_times, complexities = \
benchmark_influence(conf)
plot_influence(conf, prediction_performances, prediction_times,
complexities)
| bsd-3-clause |
shushen/ansible | test/units/TestPlayVarsFiles.py | 10 | 12328 | #!/usr/bin/env python
import os
import shutil
from tempfile import mkstemp
from tempfile import mkdtemp
from ansible.playbook.play import Play
import ansible
import unittest
from nose.plugins.skip import SkipTest
class FakeCallBacks(object):
def __init__(self):
pass
def on_vars_prompt(self):
pass
def on_import_for_host(self, host, filename):
pass
class FakeInventory(object):
def __init__(self):
self.hosts = {}
def basedir(self):
return "."
def src(self):
return "fakeinventory"
def get_variables(self, host, vault_password=None):
if host in self.hosts:
return self.hosts[host]
else:
return {}
class FakePlayBook(object):
def __init__(self):
self.extra_vars = {}
self.remote_user = None
self.remote_port = None
self.sudo = None
self.sudo_user = None
self.su = None
self.su_user = None
self.become = None
self.become_method = None
self.become_user = None
self.transport = None
self.only_tags = None
self.skip_tags = None
self.VARS_CACHE = {}
self.SETUP_CACHE = {}
self.inventory = FakeInventory()
self.callbacks = FakeCallBacks()
self.VARS_CACHE['localhost'] = {}
class TestMe(unittest.TestCase):
########################################
# BASIC FILE LOADING BEHAVIOR TESTS
########################################
def test_play_constructor(self):
# __init__(self, playbook, ds, basedir, vault_password=None)
playbook = FakePlayBook()
ds = { "hosts": "localhost"}
basedir = "."
play = Play(playbook, ds, basedir)
def test_vars_file(self):
# make a vars file
fd, temp_path = mkstemp()
f = open(temp_path, "wb")
f.write("foo: bar\n")
f.close()
# create a play with a vars_file
playbook = FakePlayBook()
ds = { "hosts": "localhost",
"vars_files": [temp_path]}
basedir = "."
play = Play(playbook, ds, basedir)
os.remove(temp_path)
# make sure the variable was loaded
assert 'foo' in play.vars_file_vars, "vars_file was not loaded into play.vars_file_vars"
assert play.vars_file_vars['foo'] == 'bar', "foo was not set to bar in play.vars_file_vars"
def test_vars_file_nonlist_error(self):
# make a vars file
fd, temp_path = mkstemp()
f = open(temp_path, "wb")
f.write("foo: bar\n")
f.close()
# create a play with a string for vars_files
playbook = FakePlayBook()
ds = { "hosts": "localhost",
"vars_files": temp_path}
basedir = "."
error_hit = False
try:
play = Play(playbook, ds, basedir)
except:
error_hit = True
os.remove(temp_path)
assert error_hit == True, "no error was thrown when vars_files was not a list"
def test_multiple_vars_files(self):
# make a vars file
fd, temp_path = mkstemp()
f = open(temp_path, "wb")
f.write("foo: bar\n")
f.close()
# make a second vars file
fd, temp_path2 = mkstemp()
f = open(temp_path2, "wb")
f.write("baz: bang\n")
f.close()
# create a play with two vars_files
playbook = FakePlayBook()
ds = { "hosts": "localhost",
"vars_files": [temp_path, temp_path2]}
basedir = "."
play = Play(playbook, ds, basedir)
os.remove(temp_path)
os.remove(temp_path2)
# make sure the variables were loaded
assert 'foo' in play.vars_file_vars, "vars_file was not loaded into play.vars_file_vars"
assert play.vars_file_vars['foo'] == 'bar', "foo was not set to bar in play.vars_file_vars"
assert 'baz' in play.vars_file_vars, "vars_file2 was not loaded into play.vars_file_vars"
assert play.vars_file_vars['baz'] == 'bang', "baz was not set to bang in play.vars_file_vars"
def test_vars_files_first_found(self):
# make a vars file
fd, temp_path = mkstemp()
f = open(temp_path, "wb")
f.write("foo: bar\n")
f.close()
# get a random file path
fd, temp_path2 = mkstemp()
# make sure this file doesn't exist
os.remove(temp_path2)
# create a play
playbook = FakePlayBook()
ds = { "hosts": "localhost",
"vars_files": [[temp_path2, temp_path]]}
basedir = "."
play = Play(playbook, ds, basedir)
os.remove(temp_path)
# make sure the variable was loaded
assert 'foo' in play.vars_file_vars, "vars_file was not loaded into play.vars_file_vars"
assert play.vars_file_vars['foo'] == 'bar', "foo was not set to bar in play.vars_file_vars"
def test_vars_files_multiple_found(self):
# make a vars file
fd, temp_path = mkstemp()
f = open(temp_path, "wb")
f.write("foo: bar\n")
f.close()
# make a second vars file
fd, temp_path2 = mkstemp()
f = open(temp_path2, "wb")
f.write("baz: bang\n")
f.close()
# create a play
playbook = FakePlayBook()
ds = { "hosts": "localhost",
"vars_files": [[temp_path, temp_path2]]}
basedir = "."
play = Play(playbook, ds, basedir)
os.remove(temp_path)
os.remove(temp_path2)
# make sure the variables were loaded
assert 'foo' in play.vars_file_vars, "vars_file was not loaded into play.vars_file_vars"
assert play.vars_file_vars['foo'] == 'bar', "foo was not set to bar in play.vars_file_vars"
assert 'baz' not in play.vars_file_vars, "vars_file2 was loaded after vars_file1 was loaded"
def test_vars_files_assert_all_found(self):
# make a vars file
fd, temp_path = mkstemp()
f = open(temp_path, "wb")
f.write("foo: bar\n")
f.close()
# make a second vars file
fd, temp_path2 = mkstemp()
# make sure it doesn't exist
os.remove(temp_path2)
# create a play
playbook = FakePlayBook()
ds = { "hosts": "localhost",
"vars_files": [temp_path, temp_path2]}
basedir = "."
error_hit = False
error_msg = None
try:
play = Play(playbook, ds, basedir)
except ansible.errors.AnsibleError, e:
error_hit = True
error_msg = e
os.remove(temp_path)
assert error_hit == True, "no error was thrown for missing vars_file"
########################################
# VARIABLE PRECEDENCE TESTS
########################################
# On the first run, vars_files are loaded into play.vars_file_vars with host == None
# * only files with vars from host==None will work here
# On the secondary run(s), a host is given and the vars_files are loaded into VARS_CACHE
# * this only occurs if host is not None, filename2 has vars in the name, and filename3 does not
# filename -- the original string
# filename2 -- filename templated with play vars
# filename3 -- filename2 template with inject (hostvars + setup_cache + vars_cache)
# filename4 -- path_dwim(filename3)
def test_vars_files_for_host(self):
# host != None
# vars in filename2
# no vars in filename3
# make a vars file
fd, temp_path = mkstemp()
f = open(temp_path, "wb")
f.write("foo: bar\n")
f.close()
# build play attributes
playbook = FakePlayBook()
ds = { "hosts": "localhost",
"vars_files": ["{{ temp_path }}"]}
basedir = "."
playbook.VARS_CACHE['localhost']['temp_path'] = temp_path
# create play and do first run
play = Play(playbook, ds, basedir)
# the second run is started by calling update_vars_files
play.update_vars_files(['localhost'])
os.remove(temp_path)
assert 'foo' in play.playbook.VARS_CACHE['localhost'], "vars_file vars were not loaded into vars_cache"
assert play.playbook.VARS_CACHE['localhost']['foo'] == 'bar', "foo does not equal bar"
########################################
# COMPLEX FILENAME TEMPLATING TESTS
########################################
def test_vars_files_two_vars_in_name(self):
# self.vars_file_vars = ds['vars']
# self.vars_file_vars += _get_vars() ... aka extra_vars
# make a temp dir
temp_dir = mkdtemp()
# make a temp file
fd, temp_file = mkstemp(dir=temp_dir)
f = open(temp_file, "wb")
f.write("foo: bar\n")
f.close()
# build play attributes
playbook = FakePlayBook()
ds = { "hosts": "localhost",
"vars": { "temp_dir": os.path.dirname(temp_file),
"temp_file": os.path.basename(temp_file) },
"vars_files": ["{{ temp_dir + '/' + temp_file }}"]}
basedir = "."
# create play and do first run
play = Play(playbook, ds, basedir)
# cleanup
shutil.rmtree(temp_dir)
assert 'foo' in play.vars_file_vars, "double var templated vars_files filename not loaded"
def test_vars_files_two_vars_different_scope(self):
#
# Use a play var and an inventory var to create the filename
#
# self.playbook.inventory.get_variables(host)
# {'group_names': ['ungrouped'], 'inventory_hostname': 'localhost',
# 'ansible_ssh_user': 'root', 'inventory_hostname_short': 'localhost'}
# make a temp dir
temp_dir = mkdtemp()
# make a temp file
fd, temp_file = mkstemp(dir=temp_dir)
f = open(temp_file, "wb")
f.write("foo: bar\n")
f.close()
# build play attributes
playbook = FakePlayBook()
playbook.inventory.hosts['localhost'] = {'inventory_hostname': os.path.basename(temp_file)}
ds = { "hosts": "localhost",
"vars": { "temp_dir": os.path.dirname(temp_file)},
"vars_files": ["{{ temp_dir + '/' + inventory_hostname }}"]}
basedir = "."
# create play and do first run
play = Play(playbook, ds, basedir)
# do the host run
play.update_vars_files(['localhost'])
# cleanup
shutil.rmtree(temp_dir)
assert 'foo' not in play.vars_file_vars, \
"mixed scope vars_file loaded into play vars"
assert 'foo' in play.playbook.VARS_CACHE['localhost'], \
"differently scoped templated vars_files filename not loaded"
assert play.playbook.VARS_CACHE['localhost']['foo'] == 'bar', \
"foo is not bar"
def test_vars_files_two_vars_different_scope_first_found(self):
#
# Use a play var and an inventory var to create the filename
#
# make a temp dir
temp_dir = mkdtemp()
# make a temp file
fd, temp_file = mkstemp(dir=temp_dir)
f = open(temp_file, "wb")
f.write("foo: bar\n")
f.close()
# build play attributes
playbook = FakePlayBook()
playbook.inventory.hosts['localhost'] = {'inventory_hostname': os.path.basename(temp_file)}
ds = { "hosts": "localhost",
"vars": { "temp_dir": os.path.dirname(temp_file)},
"vars_files": [["{{ temp_dir + '/' + inventory_hostname }}"]]}
basedir = "."
# create play and do first run
play = Play(playbook, ds, basedir)
# do the host run
play.update_vars_files(['localhost'])
# cleanup
shutil.rmtree(temp_dir)
assert 'foo' not in play.vars_file_vars, \
"mixed scope vars_file loaded into play vars"
assert 'foo' in play.playbook.VARS_CACHE['localhost'], \
"differently scoped templated vars_files filename not loaded"
assert play.playbook.VARS_CACHE['localhost']['foo'] == 'bar', \
"foo is not bar"
| gpl-3.0 |
palladius/gcloud | packages/gsutil/gslib/addlhelp/anon.py | 51 | 2114 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from gslib.help_provider import HELP_NAME
from gslib.help_provider import HELP_NAME_ALIASES
from gslib.help_provider import HELP_ONE_LINE_SUMMARY
from gslib.help_provider import HelpProvider
from gslib.help_provider import HELP_TEXT
from gslib.help_provider import HelpType
from gslib.help_provider import HELP_TYPE
_detailed_help_text = ("""
<B>OVERVIEW</B>
gsutil users can access publicly readable data without obtaining
credentials. For example, the gs://uspto-pair bucket contains a number
of publicly readable objects, so any user can run the following command
without first obtaining credentials:
gsutil ls gs://uspto-pair/applications/0800401*
Users can similarly download objects they find via the above gsutil ls
command.
If a user without credentials attempts to access protected data using gsutil,
they will be prompted to run "gsutil config" to obtain credentials.
See "gsutil help acls" for more details about data protection.
""")
class CommandOptions(HelpProvider):
"""Additional help about Access Control Lists."""
help_spec = {
# Name of command or auxiliary help info for which this help applies.
HELP_NAME : 'anon',
# List of help name aliases.
HELP_NAME_ALIASES : ['anonymous', 'public'],
# Type of help:
HELP_TYPE : HelpType.ADDITIONAL_HELP,
# One line summary of this help.
HELP_ONE_LINE_SUMMARY :
'Accessing public data without obtaining credentials',
# The full help text.
HELP_TEXT : _detailed_help_text,
}
| gpl-3.0 |
ppanczyk/ansible | lib/ansible/modules/network/aci/aci_taboo_contract.py | 22 | 4081 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_taboo_contract
short_description: Manage taboo contracts on Cisco ACI fabrics (vz:BrCP)
description:
- Manage taboo contracts on Cisco ACI fabrics.
- More information from the internal APIC class
I(vz:BrCP) at U(https://developer.cisco.com/media/mim-ref/MO-vzBrCP.html).
author:
- Swetha Chunduri (@schunduri)
- Dag Wieers (@dagwieers)
- Jacob McGill (@jmcgill298)
version_added: '2.4'
requirements:
- ACI Fabric 1.0(3f)+
notes:
- The C(tenant) used must exist before using this module in your playbook.
The M(aci_tenant) module can be used for this.
options:
taboo_contract:
description:
- The name of the Taboo Contract.
required: yes
aliases: [ name ]
description:
description:
- The description for the Taboo Contract.
aliases: [ descr ]
tenant:
description:
- The name of the tenant.
required: yes
aliases: [ tenant_name ]
scope:
description:
- The scope of a service contract.
- The APIC defaults new Taboo Contracts to C(context).
choices: [ application-profile, context, global, tenant ]
default: context
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
'''
# FIXME: Add more, better examples
EXAMPLES = r'''
- aci_taboo_contract:
hostname: '{{ inventory_hostname }}'
username: '{{ username }}'
password: '{{ password }}'
taboo_contract: '{{ taboo_contract }}'
description: '{{ descr }}'
tenant: '{{ tenant }}'
'''
RETURN = r'''
#
'''
from ansible.module_utils.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
argument_spec = aci_argument_spec
argument_spec.update(
taboo_contract=dict(type='str', required=False, aliases=['name']), # Not required for querying all contracts
tenant=dict(type='str', required=False, aliases=['tenant_name']), # Not required for querying all contracts
scope=dict(type='str', choices=['application-profile', 'context', 'global', 'tenant']),
description=dict(type='str', aliases=['descr']),
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
method=dict(type='str', choices=['delete', 'get', 'post'], aliases=['action'], removed_in_version='2.6'), # Deprecated starting from v2.6
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['tenant', 'taboo_contract']],
['state', 'present', ['tenant', 'taboo_contract']],
],
)
taboo_contract = module.params['taboo_contract']
description = module.params['description']
scope = module.params['scope']
state = module.params['state']
aci = ACIModule(module)
aci.construct_url(root_class='tenant', subclass_1='taboo_contract')
aci.get_existing()
if state == 'present':
# Filter out module parameters with null values
aci.payload(
aci_class='vzTaboo',
class_config=dict(
name=taboo_contract,
descr=description, scope=scope,
),
)
# Generate config diff which will be used as POST request body
aci.get_diff(aci_class='vzTaboo')
# Submit changes if module not in check_mode and the proposed is different than existing
aci.post_config()
elif state == 'absent':
aci.delete_config()
module.exit_json(**aci.result)
if __name__ == "__main__":
main()
| gpl-3.0 |
steveb/tablib | tablib/packages/xlwt3/Utils.py | 46 | 6663 | # pyXLWriter: A library for generating Excel Spreadsheets
# Copyright (c) 2004 Evgeny Filatov <fufff@users.sourceforge.net>
# Copyright (c) 2002-2004 John McNamara (Perl Spreadsheet::WriteExcel)
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
# General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#----------------------------------------------------------------------------
# This module was written/ported from PERL Spreadsheet::WriteExcel module
# The author of the PERL Spreadsheet::WriteExcel module is John McNamara
# <jmcnamara@cpan.org>
#----------------------------------------------------------------------------
# See the README.txt distributed with pyXLWriter for more details.
# Portions are (C) Roman V. Kiseliov, 2005
# Utilities for work with reference to cells and with sheetnames
__rev_id__ = """$Id: Utils.py 3844 2009-05-20 01:02:54Z sjmachin $"""
import re
from struct import pack
from .ExcelMagic import MAX_ROW, MAX_COL
_re_cell_ex = re.compile(r"(\$?)([A-I]?[A-Z])(\$?)(\d+)", re.IGNORECASE)
_re_row_range = re.compile(r"\$?(\d+):\$?(\d+)")
_re_col_range = re.compile(r"\$?([A-I]?[A-Z]):\$?([A-I]?[A-Z])", re.IGNORECASE)
_re_cell_range = re.compile(r"\$?([A-I]?[A-Z]\$?\d+):\$?([A-I]?[A-Z]\$?\d+)", re.IGNORECASE)
_re_cell_ref = re.compile(r"\$?([A-I]?[A-Z]\$?\d+)", re.IGNORECASE)
def col_by_name(colname):
"""
"""
col = 0
pow = 1
for i in range(len(colname)-1, -1, -1):
ch = colname[i]
col += (ord(ch) - ord('A') + 1) * pow
pow *= 26
return col - 1
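# Illustrative values implied by the function above (shown as a sketch):
# >>> col_by_name("A")
# 0
# >>> col_by_name("AA")
# 26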
def cell_to_rowcol(cell):
"""Convert an Excel cell reference string in A1 notation
to numeric row/col notation.
Returns: row, col, row_abs, col_abs
"""
m = _re_cell_ex.match(cell)
if not m:
raise Exception("Ill-formed single_cell reference: %s" % cell)
col_abs, col, row_abs, row = m.groups()
row_abs = bool(row_abs)
col_abs = bool(col_abs)
row = int(row) - 1
col = col_by_name(col.upper())
return row, col, row_abs, col_abs
def cell_to_rowcol2(cell):
"""Convert an Excel cell reference string in A1 notation
to numeric row/col notation.
Returns: row, col
"""
m = _re_cell_ex.match(cell)
if not m:
raise Exception("Error in cell format")
col_abs, col, row_abs, row = m.groups()
# Convert base26 column string to number
# All your Base are belong to us.
row = int(row) - 1
col = col_by_name(col.upper())
return row, col
def rowcol_to_cell(row, col, row_abs=False, col_abs=False):
"""Convert numeric row/col notation to an Excel cell reference string in
A1 notation.
"""
assert 0 <= row < MAX_ROW # MAX_ROW counts from 1
assert 0 <= col < MAX_COL # MAX_COL counts from 1
d = col // 26
m = col % 26
chr1 = "" # Most significant character in AA1
if row_abs:
row_abs = '$'
else:
row_abs = ''
if col_abs:
col_abs = '$'
else:
col_abs = ''
if d > 0:
chr1 = chr(ord('A') + d - 1)
chr2 = chr(ord('A') + m)
# Zero index to 1-index
return col_abs + chr1 + chr2 + row_abs + str(row + 1)
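# Illustrative conversions implied by the function above (row/col are
# 0-based):
# >>> rowcol_to_cell(0, 0)
# 'A1'
# >>> rowcol_to_cell(2, 27, row_abs=True)
# 'AB$3'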
def rowcol_pair_to_cellrange(row1, col1, row2, col2,
row1_abs=False, col1_abs=False, row2_abs=False, col2_abs=False):
"""Convert two (row,column) pairs
into a cell range string in A1:B2 notation.
Returns: cell range string
"""
assert row1 <= row2
assert col1 <= col2
return (
rowcol_to_cell(row1, col1, row1_abs, col1_abs)
+ ":"
+ rowcol_to_cell(row2, col2, row2_abs, col2_abs)
)
def cellrange_to_rowcol_pair(cellrange):
"""Convert cell range string in A1 notation to numeric row/col
pair.
Returns: row1, col1, row2, col2
"""
cellrange = cellrange.upper()
# Convert a row range: '1:3'
res = _re_row_range.match(cellrange)
if res:
row1 = int(res.group(1)) - 1
col1 = 0
row2 = int(res.group(2)) - 1
col2 = -1
return row1, col1, row2, col2
# Convert a column range: 'A:A' or 'B:G'.
# A range such as A:A is equivalent to A1:A16384, so add rows as required
res = _re_col_range.match(cellrange)
if res:
col1 = col_by_name(res.group(1).upper())
row1 = 0
col2 = col_by_name(res.group(2).upper())
row2 = -1
return row1, col1, row2, col2
# Convert a cell range: 'A1:B7'
res = _re_cell_range.match(cellrange)
if res:
row1, col1 = cell_to_rowcol2(res.group(1))
row2, col2 = cell_to_rowcol2(res.group(2))
return row1, col1, row2, col2
# Convert a cell reference: 'A1' or 'AD2000'
res = _re_cell_ref.match(cellrange)
if res:
row1, col1 = cell_to_rowcol2(res.group(1))
return row1, col1, row1, col1
raise Exception("Unknown cell reference %s" % (cell))
def cell_to_packed_rowcol(cell):
""" pack row and column into the required 4 byte format """
row, col, row_abs, col_abs = cell_to_rowcol(cell)
if col >= MAX_COL:
raise Exception("Column %s greater than IV in formula" % cell)
if row >= MAX_ROW: # this for BIFF8. for BIFF7 available 2^14
raise Exception("Row %s greater than %d in formula" % (cell, MAX_ROW))
col |= int(not row_abs) << 15
col |= int(not col_abs) << 14
return row, col
# === sheetname functions ===
def valid_sheet_name(sheet_name):
if sheet_name == "" or sheet_name[0] == "'" or len(sheet_name) > 31:
return False
for c in sheet_name:
if c in "[]:\\?/*\x00":
return False
return True
def quote_sheet_name(unquoted_sheet_name):
if not valid_sheet_name(unquoted_sheet_name):
raise Exception(
'attempt to quote an invalid worksheet name %r' % unquoted_sheet_name)
return "'" + unquoted_sheet_name.replace("'", "''") + "'"
| mit |
Zephor5/zspider | zspider/crawler.py | 1 | 5759 | # coding=utf-8
import json
import logging
from queue import Queue
from pooled_pika import PooledConn
from scrapy.crawler import CrawlerProcess
from scrapy.settings import Settings
from scrapy.utils.log import log_scrapy_info
from scrapy.utils.ossignal import install_shutdown_handlers
from twisted.internet import defer
from twisted.internet.error import ConnectionDone
from zspider.confs.conf import AMQP_PARAM
from zspider.confs.conf import EXCHANGE_PARAMS
from zspider.confs.conf import TASK_BIND_PARAMS
from zspider.confs.conf import TASK_Q_PARAMS
__author__ = "zephor"
logger = logging.getLogger("crawler")
class TestCrawler(CrawlerProcess):
def __init__(self):
from zspider.confs import crawl_conf as p_settings
settings = Settings()
settings.setmodule(p_settings)
super(CrawlerProcess, self).__init__(settings)
self.task_q = defer.DeferredQueue()
self.res_q = Queue()
self.task_q.get().addCallback(self.crawl)
def crawl(self, kwargs):
spider_name = kwargs.pop("spider_name", "")
crawler = self._create_crawler(spider_name)
self.crawlers.add(crawler)
d = crawler.crawl(**kwargs)
self._active.add(d)
def _done(_):
self.crawlers.discard(crawler)
self._active.discard(d)
try:
result = crawler.spider.test_result
del crawler.spider.test_result
except AttributeError:
result = None # spider may be None in case of a Failure
self.res_q.put(result)
return _
d.addBoth(_done)
d.addErrback(lambda _: logger.error(_))
d.addCallback(lambda _: self.task_q.get().addCallback(self.crawl))
return d
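# Hypothetical driver sketch (the names fed into task_q are assumptions,
# not part of this module):
# crawler = TestCrawler()
# crawler.task_q.put({'spider_name': 'some_spider', 'parser': some_parser})
# test_result = crawler.res_q.get() # blocks until the crawl reports back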
def debug(_=None):
"""
for debug use
"""
import objgraph
# with open('logs/test', 'w') as f:
# objs = objgraph.get_leaking_objects()
# for o in objs:
# f.write('%s\n' % o.encode('utf-8') if isinstance(o, unicode) else str(o))
leak_ref = objgraph.by_type("Newspaper")
objgraph.show_backrefs(leak_ref, max_depth=10, filename="my_leak.png")
class CrawlerDaemon(CrawlerProcess):
def __init__(self):
from zspider.confs import crawl_conf as p_settings
settings = Settings()
settings.setmodule(p_settings)
super(CrawlerProcess, self).__init__(
settings
) # skip CrawlerProcess's initial logging configuration; init.py handles it
install_shutdown_handlers(self._signal_shutdown)
log_scrapy_info(self.settings)
self.__task_queue = None
self._pconn = PooledConn(AMQP_PARAM)
self._set_up()
def _set_up(self, _=None):
d = self._pconn.acquire()
d.addCallbacks(self._on_conn, self._on_err_conn)
d.addErrback(self._on_err)
@defer.inlineCallbacks
def _on_conn(self, conn):
# in case the connection is lost; mostly closed by the mq server
conn.ready.addErrback(self.__clear)
conn.ready.addCallback(self._set_up)
self._conn = conn
channel = self._channel = yield conn.channel()
# do some setup
yield channel.exchange_declare(**EXCHANGE_PARAMS)
yield channel.queue_declare(**TASK_Q_PARAMS)
yield channel.queue_bind(**TASK_BIND_PARAMS)
self.__task_queue, consumer_tag = yield channel.basic_consume(
queue=TASK_Q_PARAMS["queue"], auto_ack=False
)
yield self._on_get()
@staticmethod
def _on_err_conn(err):
logger.fatal(err)
@staticmethod
def _on_err(err):
if err.type is ConnectionDone:
logger.info("connection lost when waiting, handled..")
else:
logger.error(err)
@defer.inlineCallbacks
def _on_get(self):
ch, method, properties, body = yield self.__task_queue.get()
d = self._on_msg(body)
yield ch.basic_ack(delivery_tag=method.delivery_tag)
if isinstance(d, defer.Deferred):
self._channel.close()
self._pconn.release(self._conn)
d.addCallback(self._set_up)
else:
d = self._on_get()
yield d
def _on_msg(self, body):
logger.info("_on_msg %s" % body)
try:
msg = json.loads(body)
self.settings.set("COOKIES_ENABLED", msg["is_login"], "spider")
d = self.crawl(
msg["spider"],
parser=msg["parser"],
task_id=msg["id"],
task_name=msg["name"],
)
# d.addCallback(lambda som: reactor.callLater(2, debug))
d.addErrback(lambda err: logger.error(err))
except Exception as e:
logger.error(repr(e))
if len(self._active) > 1:
return self.join()
def __clear(self, _=None):
if self.__task_queue is not None:
self.__task_queue.close(ConnectionDone("done"))
def crawl(self, spider_name, *args, **kwargs):
crawler = self._create_crawler(spider_name)
self.crawlers.add(crawler)
d = crawler.crawl(*args, **kwargs)
self._active.add(d)
def _done(result):
self.crawlers.discard(crawler)
self._active.discard(d)
# parser may hold large memory, release it manually
try:
del crawler.spider.parser
except AttributeError:
pass # spider may be None in case of a Failure
return result
return d.addBoth(_done)
def main():
from zspider import init
init.init("crawler")
if init.done:
p = CrawlerDaemon()
p.start(stop_after_crawl=False)
if __name__ == "__main__":
main()
| mit |
andymckay/zamboni | mkt/inapp/views.py | 1 | 3169 | import json
from django.db import transaction
from django.shortcuts import get_object_or_404
from rest_framework.permissions import AllowAny
from rest_framework.viewsets import ModelViewSet
import commonware.log
from mkt.api.authentication import (RestAnonymousAuthentication,
RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.authorization import AllowAuthor, ByHttpMethod
from mkt.api.base import CORSMixin, MarketplaceView
from mkt.inapp.models import InAppProduct
from mkt.inapp.serializers import InAppProductSerializer
from mkt.prices.models import Price
from mkt.webapps.models import Webapp
log = commonware.log.getLogger('z.inapp')
class InAppProductViewSet(CORSMixin, MarketplaceView, ModelViewSet):
serializer_class = InAppProductSerializer
cors_allowed_methods = ('get', 'post', 'put', 'patch', 'delete')
lookup_field = 'guid'
permission_classes = [ByHttpMethod({
'options': AllowAny, # Needed for CORS.
'get': AllowAny,
'post': AllowAuthor,
'put': AllowAuthor,
'patch': AllowAuthor,
})]
authentication_classes = [RestOAuthAuthentication,
RestSharedSecretAuthentication,
RestAnonymousAuthentication]
def destroy(self, request, *args, **kwargs):
raise NotImplementedError('destroy is not allowed')
def pre_save(self, in_app_product):
in_app_product.webapp = self.get_app()
def get_queryset(self):
return InAppProduct.objects.filter(webapp=self.get_app())
def get_app(self):
if not hasattr(self, 'app'):
self.app = get_object_or_404(Webapp,
app_domain=self.kwargs['origin'])
return self.app
def get_authors(self):
return self.get_app().authors.all()
class StubInAppProductViewSet(CORSMixin, MarketplaceView, ModelViewSet):
serializer_class = InAppProductSerializer
lookup_field = 'guid'
cors_allowed_methods = ('get',)
allowed_methods = ('GET',)
permission_classes = [AllowAny]
authentication_classes = []
def _queryset(self):
return InAppProduct.objects.filter(stub=True)
def get_queryset(self):
qs = self._queryset()
# Since caching count() is unreliable, this optimizes for the case of
# having already created stub products.
if not len(qs):
with transaction.atomic():
self._create_stub_products()
qs = self._queryset()
return qs
def _create_stub_products(self):
for name, amount in (('Kiwi', '0.99'),
('Unicorn', '1.99')):
log.info('Creating stub in-app product {n} {p}'
.format(n=name, p=amount))
# TODO: make this adjustable.
simulate = json.dumps({'result': 'postback'})
InAppProduct.objects.create(stub=True,
simulate=simulate,
name=name,
price=Price.objects.get(price=amount))
| bsd-3-clause |
datenbetrieb/odoo | openerp/report/render/html2html/html2html.py | 443 | 4238 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.report.render.rml2pdf import utils
import copy
import base64
import cStringIO
import re
from reportlab.lib.utils import ImageReader
_regex = re.compile('\[\[(.+?)\]\]')
utils._regex = re.compile('\[\[\s*(.+?)\s*\]\]',re.DOTALL)
class html2html(object):
def __init__(self, html, localcontext):
self.localcontext = localcontext
self.etree = html
self._node = None
def render(self):
def process_text(node,new_node):
if new_node.tag in ['story','tr','section']:
new_node.attrib.clear()
for child in utils._child_get(node, self):
new_child = copy.deepcopy(child)
new_node.append(new_child)
if len(child):
for n in new_child:
new_child.text = utils._process_text(self, child.text)
new_child.tail = utils._process_text(self, child.tail)
new_child.remove(n)
process_text(child, new_child)
else:
if new_child.tag=='img' and new_child.get('name'):
if _regex.findall(new_child.get('name')) :
src = utils._process_text(self, new_child.get('name'))
if src :
new_child.set('src','data:image/gif;base64,%s'%src)
output = cStringIO.StringIO(base64.decodestring(src))
img = ImageReader(output)
(width,height) = img.getSize()
if not new_child.get('width'):
new_child.set('width',str(width))
if not new_child.get('height') :
new_child.set('height',str(height))
else :
new_child.getparent().remove(new_child)
new_child.text = utils._process_text(self, child.text)
new_child.tail = utils._process_text(self, child.tail)
self._node = copy.deepcopy(self.etree)
for n in self._node:
self._node.remove(n)
process_text(self.etree, self._node)
return self._node
def url_modify(self,root):
for n in root:
if (n.text.find('<a ')>=0 or n.text.find('<a')>=0) and n.text.find('href')>=0 and n.text.find('style')<=0 :
node = (n.tag=='span' and n.getparent().tag=='u') and n.getparent().getparent() or ((n.tag=='span') and n.getparent()) or n
style = node.get('color') and "style='color:%s; text-decoration: none;'"%node.get('color') or ''
if n.text.find('<a ')>=0:
t = '<a '
else :
t = '<a'
href = n.text.split(t)[-1]
n.text = ' '.join([t,style,href])
self.url_modify(n)
return root
def parseString(node, localcontext = {}):
r = html2html(node, localcontext)
root = r.render()
root = r.url_modify(root)
return root
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
betoesquivel/fil2014 | build/django/django/template/smartif.py | 239 | 6269 | """
Parser and utilities for the smart 'if' tag
"""
# Using a simple top down parser, as described here:
# http://effbot.org/zone/simple-top-down-parsing.htm.
# 'led' = left denotation
# 'nud' = null denotation
# 'bp' = binding power (left = lbp, right = rbp)
class TokenBase(object):
"""
Base class for operators and literals, mainly for debugging and for throwing
syntax errors.
"""
id = None # node/token type name
value = None # used by literals
first = second = None # used by tree nodes
def nud(self, parser):
# Null denotation - called in prefix context
raise parser.error_class(
"Not expecting '%s' in this position in if tag." % self.id
)
def led(self, left, parser):
# Left denotation - called in infix context
raise parser.error_class(
"Not expecting '%s' as infix operator in if tag." % self.id
)
def display(self):
"""
Returns what to display in error messages for this node
"""
return self.id
def __repr__(self):
out = [str(x) for x in [self.id, self.first, self.second] if x is not None]
return "(" + " ".join(out) + ")"
def infix(bp, func):
"""
Creates an infix operator, given a binding power and a function that
evaluates the node
"""
class Operator(TokenBase):
lbp = bp
def led(self, left, parser):
self.first = left
self.second = parser.expression(bp)
return self
def eval(self, context):
try:
return func(context, self.first, self.second)
except Exception:
# Templates shouldn't throw exceptions when rendering. We are
# most likely to get exceptions for things like {% if foo in bar
# %} where 'bar' does not support 'in', so default to False
return False
return Operator
def prefix(bp, func):
"""
Creates a prefix operator, given a binding power and a function that
evaluates the node.
"""
class Operator(TokenBase):
lbp = bp
def nud(self, parser):
self.first = parser.expression(bp)
self.second = None
return self
def eval(self, context):
try:
return func(context, self.first)
except Exception:
return False
return Operator
# Operator precedence follows Python.
# NB - we can get slightly more accurate syntax error messages by not using the
# same object for '==' and '='.
# We defer variable evaluation to the lambda to ensure that terms are
# lazily evaluated using Python's boolean parsing logic.
OPERATORS = {
'or': infix(6, lambda context, x, y: x.eval(context) or y.eval(context)),
'and': infix(7, lambda context, x, y: x.eval(context) and y.eval(context)),
'not': prefix(8, lambda context, x: not x.eval(context)),
'in': infix(9, lambda context, x, y: x.eval(context) in y.eval(context)),
'not in': infix(9, lambda context, x, y: x.eval(context) not in y.eval(context)),
'=': infix(10, lambda context, x, y: x.eval(context) == y.eval(context)),
'==': infix(10, lambda context, x, y: x.eval(context) == y.eval(context)),
'!=': infix(10, lambda context, x, y: x.eval(context) != y.eval(context)),
'>': infix(10, lambda context, x, y: x.eval(context) > y.eval(context)),
'>=': infix(10, lambda context, x, y: x.eval(context) >= y.eval(context)),
'<': infix(10, lambda context, x, y: x.eval(context) < y.eval(context)),
'<=': infix(10, lambda context, x, y: x.eval(context) <= y.eval(context)),
}
# Assign 'id' to each:
for key, op in OPERATORS.items():
op.id = key
class Literal(TokenBase):
"""
A basic self-resolvable object similar to a Django template variable.
"""
# IfParser uses Literal in create_var, but TemplateIfParser overrides
# create_var so that a proper implementation that actually resolves
# variables, filters etc is used.
id = "literal"
lbp = 0
def __init__(self, value):
self.value = value
def display(self):
return repr(self.value)
def nud(self, parser):
return self
def eval(self, context):
return self.value
def __repr__(self):
return "(%s %r)" % (self.id, self.value)
class EndToken(TokenBase):
lbp = 0
def nud(self, parser):
raise parser.error_class("Unexpected end of expression in if tag.")
EndToken = EndToken()
class IfParser(object):
error_class = ValueError
def __init__(self, tokens):
# pre-pass necessary to turn 'not','in' into single token
l = len(tokens)
mapped_tokens = []
i = 0
while i < l:
token = tokens[i]
if token == "not" and i + 1 < l and tokens[i+1] == "in":
token = "not in"
i += 1 # skip 'in'
mapped_tokens.append(self.translate_token(token))
i += 1
self.tokens = mapped_tokens
self.pos = 0
self.current_token = self.next_token()
def translate_token(self, token):
try:
op = OPERATORS[token]
except (KeyError, TypeError):
return self.create_var(token)
else:
return op()
def next_token(self):
if self.pos >= len(self.tokens):
return EndToken
else:
retval = self.tokens[self.pos]
self.pos += 1
return retval
def parse(self):
retval = self.expression()
# Check that we have exhausted all the tokens
if self.current_token is not EndToken:
raise self.error_class("Unused '%s' at end of if expression." %
self.current_token.display())
return retval
def expression(self, rbp=0):
t = self.current_token
self.current_token = self.next_token()
left = t.nud(self)
while rbp < self.current_token.lbp:
t = self.current_token
self.current_token = self.next_token()
left = t.led(left, self)
return left
def create_var(self, value):
return Literal(value)
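# A minimal sketch of driving the parser directly (tokens are assumed to
# be pre-split; TemplateIfParser overrides create_var so that tokens
# resolve as template variables rather than raw literals):
# >>> IfParser([True, 'and', False]).parse().eval({})
# False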
| mit |
do-mpc/do-mpc | documentation/source/release_overview.py | 1 | 1138 | import requests
import os
def get_overview():
# Use the GitHub REST API to get releases:
release_dict = requests.get('https://api.github.com/repos/do-mpc/do-mpc/releases').json()
text = ''
text += '# Release notes'
text += '\n'
text += 'This content is autogenerated from our Github [release notes](https://github.com/do-mpc/do-mpc/releases).'
text += '\n'
for release_i in release_dict:
name_i = release_i['name']
body_i = release_i['body']
body_i = body_i.replace('# ', '### ')
print(name_i)
text += '## {}'.format(name_i)
text += '\n'
text += body_i
text += '\n'
try:
if release_i['assets']:
text += '### Example files'.format(name_i)
text += '\n'
text += 'Please download the example files for release {} [here]({}).'.format(name_i, release_i['assets'][0]['browser_download_url'])
text += '\n'
except:
print("Couldn't provide a download link for the example files.")
with open('release_notes.md', 'w') as f:
f.write(text)
| lgpl-3.0 |
hieukypc/ERP | openerp/addons/website_portal/controllers/main.py | 3 | 4201 | # -*- coding: utf-8 -*-
from openerp import http
from openerp.http import request
from openerp import tools
from openerp.tools.translate import _
class website_account(http.Controller):
@http.route(['/my', '/my/home'], type='http', auth="public", website=True)
def account(self, **kw):
partner = request.env.user.partner_id
# get customer sales rep
if partner.user_id:
sales_rep = partner.user_id
else:
sales_rep = False
values = {
'sales_rep': sales_rep,
'company': request.website.company_id,
'user': request.env.user
}
return request.website.render("website_portal.account", values)
@http.route(['/my/account'], type='http', auth='user', website=True)
def details(self, redirect=None, **post):
partner = request.env['res.users'].browse(request.uid).partner_id
values = {
'error': {},
'error_message': []
}
if post:
error, error_message = self.details_form_validate(post)
values.update({'error': error, 'error_message': error_message})
values.update(post)
if not error:
post.update({'zip': post.pop('zipcode', '')})
if partner.type == "contact":
address_fields = {
'city': post.pop('city'),
'street': post.pop('street'),
'street2': post.pop('street2'),
'zip': post.pop('zip'),
'country_id': post.pop('country_id'),
'state_id': post.pop('state_id')
}
partner.commercial_partner_id.write(address_fields)
partner.sudo().write(post)
if redirect:
return request.redirect(redirect)
return request.redirect('/my/home')
countries = request.env['res.country'].sudo().search([])
states = request.env['res.country.state'].sudo().search([])
values.update({
'partner': partner,
'countries': countries,
'states': states,
'has_check_vat': hasattr(request.env['res.partner'], 'check_vat'),
'redirect': redirect,
})
return request.website.render("website_portal.details", values)
def details_form_validate(self, data):
error = dict()
error_message = []
mandatory_billing_fields = ["name", "phone", "email", "street", "city", "country_id"]
optional_billing_fields = ["zipcode", "state_id", "vat", "street2"]
# Validation
for field_name in mandatory_billing_fields:
if not data.get(field_name):
error[field_name] = 'missing'
# email validation
if data.get('email') and not tools.single_email_re.match(data.get('email')):
error["email"] = 'error'
error_message.append(_('Invalid Email! Please enter a valid email address.'))
# vat validation
if data.get("vat") and hasattr(request.env["res.partner"], "check_vat"):
if request.website.company_id.vat_check_vies:
# force full VIES online check
check_func = request.env["res.partner"].vies_vat_check
else:
# quick and partial off-line checksum validation
check_func = request.env["res.partner"].simple_vat_check
vat_country, vat_number = request.env["res.partner"]._split_vat(data.get("vat"))
if not check_func(vat_country, vat_number): # simple_vat_check
error["vat"] = 'error'
# error message for empty required fields
if [err for err in error.values() if err == 'missing']:
error_message.append(_('Some required fields are empty.'))
unknown = [k for k in data.iterkeys() if k not in mandatory_billing_fields + optional_billing_fields]
if unknown:
error['common'] = 'Unknown field'
error_message.append("Unknown field '%s'" % ','.join(unknown))
return error, error_message
| gpl-3.0 |
ess/dd-agent | tests/checks/mock/test_mesos_slave.py | 45 | 1565 | # stdlib
import json
# 3p
from mock import patch
from nose.plugins.attrib import attr
# project
from checks import AgentCheck
from tests.checks.common import AgentCheckTest, Fixtures, get_check_class
def _mocked_get_state(*args, **kwargs):
state = json.loads(Fixtures.read_file('state.json'))
return state
def _mocked_get_stats(*args, **kwargs):
stats = json.loads(Fixtures.read_file('stats.json'))
return stats
@attr(requires='mesos_slave')
class TestMesosSlave(AgentCheckTest):
CHECK_NAME = 'mesos_slave'
def test_checks(self):
config = {
'init_config': {},
'instances': [
{
'url': 'http://localhost:5050',
'tasks': ['hello']
}
]
}
klass = get_check_class('mesos_slave')
with patch.object(klass, '_get_state', _mocked_get_state):
with patch.object(klass, '_get_stats', _mocked_get_stats):
check = klass('mesos_slave', {}, {})
self.run_check_twice(config)
metrics = {}
for d in (check.SLAVE_TASKS_METRICS, check.SYSTEM_METRICS, check.SLAVE_RESOURCE_METRICS,
check.SLAVE_EXECUTORS_METRICS, check.STATS_METRICS):
metrics.update(d)
[self.assertMetric(v[0]) for k, v in check.TASK_METRICS.iteritems()]
[self.assertMetric(v[0]) for k, v in metrics.iteritems()]
self.assertServiceCheck('hello.ok', count=1, status=AgentCheck.OK)
| bsd-3-clause |
sauloal/cufflinksviewer | venvwin/Lib/encodings/raw_unicode_escape.py | 103 | 1253 | """ Python 'raw-unicode-escape' Codec
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
# Note: Binding these as C functions will result in the class not
# converting them to methods. This is intended.
encode = codecs.raw_unicode_escape_encode
decode = codecs.raw_unicode_escape_decode
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.raw_unicode_escape_encode(input, self.errors)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.raw_unicode_escape_decode(input, self.errors)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='raw-unicode-escape',
encode=Codec.encode,
decode=Codec.decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamwriter=StreamWriter,
streamreader=StreamReader,
)
| mit |
andrewleech/WeasyPrint | weasyprint/layout/min_max.py | 5 | 1773 | # coding: utf8
"""
weasyprint.layout.min_max
-------------------------
:copyright: Copyright 2011-2014 Simon Sapin and contributors, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import division, unicode_literals
import functools
def handle_min_max_width(function):
"""Decorate a function that sets the used width of a box to handle
{min,max}-width.
"""
@functools.wraps(function)
def wrapper(box, *args):
computed_margins = box.margin_left, box.margin_right
result = function(box, *args)
if box.width > box.max_width:
box.width = box.max_width
box.margin_left, box.margin_right = computed_margins
result = function(box, *args)
if box.width < box.min_width:
box.width = box.min_width
box.margin_left, box.margin_right = computed_margins
result = function(box, *args)
return result
wrapper.without_min_max = function
return wrapper
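# Hypothetical usage sketch (the decorated function is assumed to set
# box.width and the horizontal margins):
# @handle_min_max_width
# def block_level_width(box, containing_block):
#     ...
# If the first pass yields a width above max-width (or below min-width),
# the width is pinned to that bound, the computed margins are restored,
# and the function runs once more.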
def handle_min_max_height(function):
"""Decorate a function that sets the used height of a box to handle
{min,max}-height.
"""
@functools.wraps(function)
def wrapper(box, *args):
computed_margins = box.margin_top, box.margin_bottom
result = function(box, *args)
if box.height > box.max_height:
box.height = box.max_height
box.margin_top, box.margin_bottom = computed_margins
result = function(box, *args)
if box.height < box.min_height:
box.height = box.min_height
box.margin_top, box.margin_bottom = computed_margins
result = function(box, *args)
return result
wrapper.without_min_max = function
return wrapper
| bsd-3-clause |
PetterS/easy-IP | examples/run_nurses.py | 1 | 1028 | #!/usr/bin/env python3
from glob import glob
import os
# Set this to the location of NSPLib.
nsplib = r"C:\Users\Petter\Dropbox\Datasets\NSPLib"
def run_solver(data_set, case):
case_file = os.path.join(nsplib, "Cases", str(case) + ".gen")
log_file = data_set + "." + str(case) + ".output.log"
files = glob(os.path.join(nsplib, data_set, "*.nsp"))
names = [f.split(".")[0] for f in files]
names = [n.split(os.path.sep)[-1] for n in names]
nums = sorted([int(n) for n in names])
files = [os.path.join(nsplib, data_set, str(n) + ".nsp") for n in nums]
try:
os.unlink(log_file)
except FileNotFoundError:
pass
for f in files:
print(case_file)
print(f)
print(log_file)
# This may need to change depending on shell.
os.system("nurses " + f + " " + case_file + " >> " + log_file)
for data_set in ["N25", "N50", "N75", "N100"]:
for case in [1, 2, 3, 4, 5, 6, 7, 8]:
run_solver(data_set, case)
for data_set in ["N30", "N60"]:
for case in [9, 10, 11, 12, 13, 14, 15, 16]:
run_solver(data_set, case)
| bsd-2-clause |
silentfuzzle/calibre | src/calibre/gui2/store/stores/ebooksgratuits_plugin.py | 22 | 1107 |
# -*- coding: utf-8 -*-
from __future__ import (unicode_literals, division, absolute_import, print_function)
store_version = 1 # Needed for dynamic plugin loading
__license__ = 'GPL 3'
__copyright__ = '2012, Florent FAYOLLE <florent.fayolle69@gmail.com>'
__docformat__ = 'restructuredtext en'
from calibre.gui2.store.basic_config import BasicStoreConfig
from calibre.gui2.store.opensearch_store import OpenSearchOPDSStore
from calibre.gui2.store.search_result import SearchResult
from calibre.utils.filenames import ascii_text
class EbooksGratuitsStore(BasicStoreConfig, OpenSearchOPDSStore):
open_search_url = 'http://www.ebooksgratuits.com/opds/opensearch.xml'
web_url = 'http://www.ebooksgratuits.com/'
def strip_accents(self, s):
return ascii_text(s)
def search(self, query, max_results=10, timeout=60):
query = self.strip_accents(unicode(query))
for s in OpenSearchOPDSStore.search(self, query, max_results, timeout):
if s.downloads:
s.drm = SearchResult.DRM_UNLOCKED
s.price = '$0.00'
yield s
| gpl-3.0 |
mxOBS/deb-pkg_trusty_chromium-browser | native_client/src/trusted/validator_ragel/proof_tools.py | 1 | 15125 | # Copyright (c) 2014 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tools and utilities for creating proofs about tries."""
import itertools
import multiprocessing
import optparse
import spec
import trie
import validator
class Operands(object):
"""Contains parts of the disassembly of a single instruction.
Also holds the implied restriction state.
input_rr means that register must have the MSB 32 bits 0 before the
instruction executes. Such a register can be used by this instruction
as the index register for a memory operation in x86_64. There can
only be one memory operand per instruction. Some AVX instructions allow
a vector register to be used as an index register, impling multiple
index values. However, we currently have no way to sandbox such instructions.
output_rr means that the instruction produces a restricted register, i.e
zeroes out the top 32 bits of a register.
Can also hold partial information about an instruction while incrementally
building up a full instruction.
e.g. vaddpd 0x0(%r15,%r11,8),%ymm3,%ymm2 in ATT syntax is represented as:
-> disasms: ('vaddpd', '0x0(%r15,%r11,8)', '%ymm3', '%ymm2')
-> input_rr: r11 (for x86_64) (or None for x86_32)
-> output_rr: None
When building up partial state, could be:
e.g. just (disasms: ('0x0(%r15,%r11,8)', '%ymm3'),
input_rr: '%r11', output_rr: None) from example above.
"""
__slots__ = ('disasms', 'input_rr', 'output_rr')
def __init__(self, disasms=(), input_rr=None, output_rr=None):
assert isinstance(disasms, tuple), disasms
self.disasms = disasms
self.input_rr = input_rr
self.output_rr = output_rr
def __repr__(self):
return str((self.disasms, self.input_rr, self.output_rr))
def __eq__(self, other):
return (self.disasms == other.disasms and
self.input_rr == other.input_rr and
self.output_rr == other.output_rr)
def __hash__(self):
return hash((self.disasms,
self.input_rr,
self.output_rr))
def MergeOperands(ops1, ops2):
"""Combine two different Operands (disassembly parts and implications)."""
assert ops1.input_rr is None or ops2.input_rr is None
assert ops1.output_rr is None or ops2.output_rr is None
return Operands(ops1.disasms + ops2.disasms,
ops1.input_rr if ops1.input_rr else ops2.input_rr,
ops1.output_rr if ops1.output_rr else ops2.output_rr)
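# Illustrative merge reusing the Operands docstring example (a sketch):
# MergeOperands(Operands(disasms=('vaddpd',)),
#               Operands(disasms=('0x0(%r15,%r11,8)',), input_rr='%r11'))
# == Operands(('vaddpd', '0x0(%r15,%r11,8)'), input_rr='%r11')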
def AllXMMOperands(bitness):
"""Returns the set of all XMM registers as individual Operands objects."""
assert bitness in (32, 64), bitness
return set([Operands(disasms=('%xmm{}'.format(i),))
for i in xrange(8 if bitness == 32 else 16)])
def AllYMMOperands(bitness):
"""Returns the set of all YMM registers as individual Operands objects."""
assert bitness in (32, 64), bitness
return set([Operands(disasms=('%ymm{}'.format(i),))
for i in xrange(8 if bitness == 32 else 16)])
def GprOperands(bitness, operand_size, is_write_for_64_bit=True,
can_restrict=False):
"""Returns all gpr operands as an operand set.
Args:
bitness: architecture bitness to distinguish x86_32/x86_64: (32, 64)
operand_size: size of register to be used in write.
is_write_for_64_bit: if bitness == 64 and operand_size == 64, exclude
the special registers rsp, rbp and r15 for
sandboxing reasons. If bitness == 64 and
operand_size == 32, exclude esp, ebp and r15d
unless can_restrict is set, in which case only
r15d is excluded.
can_restrict: if true and bitness == 64, and operand_size == 32, and
is_write_for_64_bit == True, disallow r15 write, and
produce restricted register.
"""
regs = []
operand_to_restriction_map = {
'%eax': '%rax', '%ebx' : '%rbx', '%ecx' : '%rcx', '%edx': '%rdx',
'%ebp': '%rbp', '%edi': '%rdi', '%esi': '%rsi', '%esp': '%rsp',
'%r8d': '%r8', '%r9d': '%r9', '%r10d' : '%r10', '%r11d': '%r11',
'%r12d': '%r12', '%r13d': '%r13', '%r14d' : '%r14',
}
restricts = False
if operand_size == 16 and bitness == 32:
regs = ['%ax', '%bx', '%cx', '%dx', '%bp', '%sp', '%di', '%si']
elif operand_size == 32 and bitness == 32:
regs = ['%eax', '%ebp', '%ebx', '%ecx', '%edi', '%edx', '%esi', '%esp']
elif bitness == 64 and operand_size == 32:
regs = ['%eax', '%ebx', '%ecx', '%edi', '%edx', '%esi',
'%r8d', '%r9d', '%r10d', '%r11d', '%r12d', '%r13d', '%r14d']
# Don't include '%ebp', '%esp', '%r15d' in allowed registers when
# is_write_for_64_bit == True.
if is_write_for_64_bit == False:
regs += ['%esp', '%ebp', '%r15d']
elif can_restrict == True:
regs += ['%esp', '%ebp']
restricts = True
elif bitness == 64 and operand_size == 64:
regs = ['%rax', '%rbx', '%rcx', '%rdi', '%rdx', '%rsi',
'%r8', '%r9', '%r10', '%r11', '%r12', '%r13', '%r14']
# Don't include '%ebp', '%esp', '%r15d' in allowed registers when
# is_write_for_64_bit == True.
if is_write_for_64_bit == False:
regs += ['%rsp', '%rbp', '%r15']
else:
raise AssertionError("Unimplemented")
if restricts:
return set([
Operands(disasms=(reg,), output_rr=operand_to_restriction_map[reg])
for reg in regs])
else:
return set([Operands(disasms=(reg,)) for reg in regs])
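# Illustrative calls (a sketch): GprOperands(32, 32) yields the eight
# 32-bit registers with no restriction info, while
# GprOperands(64, 32, can_restrict=True) additionally maps each writable
# 32-bit register to the restricted 64-bit register in output_rr.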
def MnemonicOp(name):
"""Returns the mnemonic as an operand set."""
assert isinstance(name, str)
return set([Operands(disasms=(name,))])
def ImmOp():
"""Returns an immediate as an operand set."""
# When walking the DFA, immediates are currently returned as 0x0.
return set([Operands(disasms=('$0x0',))])
def LockPrefix():
"""Returns the lock prefix as an operand set."""
return set([Operands(disasms=('lock',))])
def MemoryOperandsTemplate(disp, base, index, scale, bitness):
"""Returns all the possible different memory operands using given parameters.
Returns list of Operands instances.
e.g. for disp='0x0', base='%eax', index='%ebx', scale=2
[ '(%eax)', # Base Register Only.
'0x0', # Displacement Only.
'(%eax,%ebx,2)', # Base Register + Index Register * scale.
'0x0(,%ebx,2)', # Displacement + Index Register * scale.
'0x0(%eax)', # Displacement + Base Register.
'0x0(%eax,%ebx,2)', # Displacement + Base Register + Index Register * scale.
]
Note that a base register must always be used for x86_64.
Within the returned Operands objects, the input RR is set to the
index register if the index is used for x86_64.
Args:
disp: displacement to use in memory operand.
base: string register name to use for base register in addressing.
index: string register name to use for index register in addressing.
scale: integer scale to use to multiply index register by in addressing.
bitness: 32 or 64
Returns:
list of Operands instances representing all ways to use the parameters.
"""
assert bitness in (32, 64), bitness
input_rr = None
# Note: %riz is a fake register that always reads 0. It is allowed as an
# index register (though it is redundant). However, because it is always
# 0, we don't encode that it needs to be restricted.
if bitness == 64 and index != '%riz':
input_rr = index
base_only_encoding = []
# There is no way to encode base without displacement with ebp/rbp.
# Have to use 0x0+%ebp.
if base not in ('%ebp', '%rbp'):
base_only_encoding = [Operands(disasms=('({})'.format(base),))]
base_plus_index_scale_encoding = []
# There is no way to encode base without displacement with ebp/rbp.
# Have to use 0x0+%ebp.
if base not in ('%ebp', '%rbp'):
base_plus_index_scale_encoding = [
Operands(disasms=('({},{},{})'.format(base, index, scale),),
input_rr=input_rr)]
disp_only_encoding = [Operands(disasms=(disp,))]
disp_plus_index_scale_encoding = [
Operands(disasms=('{}(,{},{})'.format(disp, index, scale),),
input_rr=input_rr)]
disp_plus_base_encoding = [
Operands(disasms=('{}({})'.format(disp, base),))]
disp_plus_base_plus_index_scale_encoding = [
Operands(
disasms=('{}({},{},{})'.format(disp, base, index, scale),),
input_rr=input_rr)]
# Redundant %eiz/%riz encoding isn't available with scale == 1.
if (base in ('%esp', '%rsp') and
index in ('%eiz', '%riz') and
scale == 1):
return []
if bitness == 32:
return (base_only_encoding +
disp_only_encoding +
base_plus_index_scale_encoding +
disp_plus_index_scale_encoding +
disp_plus_base_encoding +
disp_plus_base_plus_index_scale_encoding)
else:
# Note: x86_64 allows rip-relative addressing (x86_32 doesn't).
# However, not all of the different addressing modes are available
# for rip relative addressing (only disp + rip). This is
# MOD==b'00, RM==b'101
if base == '%rip':
return disp_plus_base_encoding
else:
# x86_64 memory disasms must always include base register, so the
# Disp() and DispPlusIndexScale() options available for x86_32 aren't
# permitted.
return (base_only_encoding +
disp_plus_base_encoding +
base_plus_index_scale_encoding +
disp_plus_base_plus_index_scale_encoding)
def AllMemoryOperands(bitness):
"""The set of all possible memory operands as individual Operands objects."""
assert bitness in (32, 64), bitness
displacements = ['0x0']
scales = [1, 2, 4, 8]
if bitness == 32:
bases = set(['%eax', '%ebp', '%ebx', '%ecx',
'%edi', '%edx', '%esi', '%esp'])
indexes = (bases | set(['%eiz'])) - set(['%esp'])
elif bitness == 64:
indexes = set(['%rax', '%rbx', '%rcx', '%rdi', '%rdx',
'%rsi', '%r8', '%r9', '%r10', '%r11',
'%r12', '%r13', '%r14', '%r15', '%riz'])
bases = set(['%rsp', '%rbp', '%r15', '%rip'])
result = set()
for (d, b, i, s) in itertools.product(displacements, bases, indexes, scales):
result.update(MemoryOperandsTemplate(disp=d, base=b, index=i, scale=s,
bitness=bitness))
return result
def OpsProd(*args):
"""A version of itertools.product that builds Operands.
e.g.
XMM = (Operands(disasms=('%xmm1',)), Operands(disasms=('%xmm2',)))
REG = (Operands(disasms=('%rax',)), Operands(disasms=('%rbx',)))
OpsProd(XMM, REG) ->
set([Operands(disasms=('%xmm1', '%rax')),
Operands(disasms=('%xmm1', '%rbx')),
Operands(disasms=('%xmm2', '%rax')),
Operands(disasms=('%xmm2', '%rbx'))])
Args:
*args: each input is a collection of Operands.
Returns:
set of Operands instances, where each instance is a merge of Operands
objects, one taken from each input iterator.
"""
result = set([Operands()])
for pool in args:
result = set([MergeOperands(x, y) for (x,y) in
itertools.product(result, pool)])
return result
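# Illustrative sketch (an assumed example, not code from the original file):
# the helpers compose into whole-instruction operand sets. In AT&T order
# (mnemonic, then source, then destination), something like
#   addl_set = OpsProd(MnemonicOp('addl'), ImmOp(),
#                      GprOperands(bitness=32, operand_size=32))
# yields one Operands instance per "addl $0x0, <reg>" form, with the
# disassembly fragments and register implications merged by MergeOperands.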
def GetRRInfoFromTrie(trie_state, bitness):
"""Convert rr info from trie to format suitable for Operands instance."""
input_rr = trie_state.input_rr
output_rr = trie_state.output_rr
if bitness == 32:
assert not input_rr, input_rr
assert not output_rr, output_rr
if input_rr == 'any_nonspecial' or not input_rr:
input_rr = None
if output_rr == 'None' or not output_rr:
output_rr = None
return input_rr, output_rr
def Disassemble((bitness, (byte_tuple, accept_info1, accept_info2))):
"""Disassembles byte sequence and returns it in old or new trie."""
global the_validator
old_trie_set = set()
new_trie_set = set()
disassembly = the_validator.DisassembleChunk(
''.join([chr(int(x)) for x in byte_tuple]),
bitness=bitness)
assert len(disassembly) == 1
prefixes, mnemonic, operands = (spec.ParseInstruction(disassembly[0]))
full_operands = tuple(prefixes + [mnemonic] + operands)
if accept_info1 is not None:
input_rr, output_rr = GetRRInfoFromTrie(accept_info1, bitness)
old_trie_set.add(Operands(disasms=full_operands,
input_rr=input_rr,
output_rr=output_rr))
if accept_info2 is not None:
input_rr, output_rr = GetRRInfoFromTrie(accept_info2, bitness)
new_trie_set.add(Operands(disasms=full_operands,
input_rr=input_rr,
output_rr=output_rr))
return old_trie_set, new_trie_set
def ParseStandardOpts():
"""Parses a standard set of options for validator proofs from command line."""
parser = optparse.OptionParser(
usage='%prog --bitness=[32,64] --old=path1 --new=path2')
parser.add_option('--old', help='Path of the old trie')
parser.add_option('--new', help='Path of the new trie')
parser.add_option('--bitness', choices=['32', '64'])
parser.add_option('--validator_dll', help='Path of the validator library')
parser.add_option('--decoder_dll', help='Path of the decoder library')
options, _ = parser.parse_args()
return options
def RunProof(standard_opts, proof_func):
"""Validates that trie diffs conform to to a proof.
Args:
standard_opts: command line options describing the two tries to be diffed,
arch type, etc. (as returned by ParseStandardOpts)
proof_func: Callback of (TrieDiffSet, bitness) to run to prove the diff.
Returns:
None
"""
# The validator itself must be passed to the other processes as a global,
# as it is a C object that must be shared via forking rather than passed as
# an argument (which would require the validator to support being pickled).
global the_validator
the_validator = validator.Validator(
validator_dll=standard_opts.validator_dll,
decoder_dll=standard_opts.decoder_dll)
bitness = int(standard_opts.bitness)
adds = set()
removes = set()
tasks = itertools.izip(itertools.repeat(bitness),
trie.DiffTrieFiles(standard_opts.new,
standard_opts.old))
pool = multiprocessing.Pool()
results = pool.imap_unordered(Disassemble, tasks, chunksize=10000)
for new, old in results:
adds |= new
removes |= old
proof_func((adds, removes), bitness)
def AssertDiffSetEquals((adds, removes),
expected_adds, expected_removes):
"""Assert that diffs is composed of expected_adds and expected_removes."""
if adds != expected_adds:
raise AssertionError('falsely added instructions: ',
adds - expected_adds,
'unadded instructions: ',
expected_adds - adds)
if removes != expected_removes:
raise AssertionError('falsely removed instructions: ',
removes - expected_removes,
'missing instructions: ',
expected_removes - removes)
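# A minimal proof sketch built from the helpers above (illustrative only:
# the function name and the empty expectations are assumptions, not part of
# the original tool set). A proof that two tries decode identically would be:
def _ExampleNoDiffProof(trie_diffs, bitness):
  """Example proof callback: assert the old and new tries are identical."""
  AssertDiffSetEquals(trie_diffs, expected_adds=set(), expected_removes=set())
# A real proof script would wire it up via:
#   RunProof(ParseStandardOpts(), _ExampleNoDiffProof)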
| bsd-3-clause |
maestrano/odoo | addons/account_analytic_analysis/res_config.py | 426 | 1408 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class sale_configuration(osv.osv_memory):
_inherit = 'sale.config.settings'
_columns = {
'group_template_required': fields.boolean("Mandatory use of templates.",
implied_group='account_analytic_analysis.group_template_required',
help="Allows you to set the template field as required when creating an analytic account or a contract."),
}
| agpl-3.0 |
yurac/python-docx | docx/opc/pkgwriter.py | 18 | 4524 | # encoding: utf-8
"""
Provides a low-level, write-only API to a serialized Open Packaging
Convention (OPC) package, essentially an implementation of OpcPackage.save()
"""
from __future__ import absolute_import
from .constants import CONTENT_TYPE as CT
from .oxml import CT_Types, serialize_part_xml
from .packuri import CONTENT_TYPES_URI, PACKAGE_URI
from .phys_pkg import PhysPkgWriter
from .shared import CaseInsensitiveDict
from .spec import default_content_types
class PackageWriter(object):
"""
Writes a zip-format OPC package to *pkg_file*, where *pkg_file* can be
either a path to a zip file (a string) or a file-like object. Its single
API method, :meth:`write`, is static, so this class is not intended to
be instantiated.
"""
@staticmethod
def write(pkg_file, pkg_rels, parts):
"""
Write a physical package (e.g. a .docx file) to *pkg_file* containing
*pkg_rels* and *parts* and a content types stream based on the
content types of the parts.
"""
phys_writer = PhysPkgWriter(pkg_file)
PackageWriter._write_content_types_stream(phys_writer, parts)
PackageWriter._write_pkg_rels(phys_writer, pkg_rels)
PackageWriter._write_parts(phys_writer, parts)
phys_writer.close()
@staticmethod
def _write_content_types_stream(phys_writer, parts):
"""
Write ``[Content_Types].xml`` part to the physical package with an
appropriate content type lookup target for each part in *parts*.
"""
cti = _ContentTypesItem.from_parts(parts)
phys_writer.write(CONTENT_TYPES_URI, cti.blob)
@staticmethod
def _write_parts(phys_writer, parts):
"""
Write the blob of each part in *parts* to the package, along with a
rels item for its relationships if and only if it has any.
"""
for part in parts:
phys_writer.write(part.partname, part.blob)
if len(part._rels):
phys_writer.write(part.partname.rels_uri, part._rels.xml)
@staticmethod
def _write_pkg_rels(phys_writer, pkg_rels):
"""
Write the XML rels item for *pkg_rels* ('/_rels/.rels') to the
package.
"""
phys_writer.write(PACKAGE_URI.rels_uri, pkg_rels.xml)
class _ContentTypesItem(object):
"""
Service class that composes a content types item ([Content_Types].xml)
based on a list of parts. Not meant to be instantiated directly, its
single interface method is xml_for(), e.g.
``_ContentTypesItem.xml_for(parts)``.
"""
def __init__(self):
self._defaults = CaseInsensitiveDict()
self._overrides = dict()
@property
def blob(self):
"""
Return XML form of this content types item, suitable for storage as
``[Content_Types].xml`` in an OPC package.
"""
return serialize_part_xml(self._element)
@classmethod
def from_parts(cls, parts):
"""
Return content types XML mapping each part in *parts* to the
appropriate content type and suitable for storage as
``[Content_Types].xml`` in an OPC package.
"""
cti = cls()
cti._defaults['rels'] = CT.OPC_RELATIONSHIPS
cti._defaults['xml'] = CT.XML
for part in parts:
cti._add_content_type(part.partname, part.content_type)
return cti
def _add_content_type(self, partname, content_type):
"""
Add a content type for the part with *partname* and *content_type*,
using a default or override as appropriate.
"""
ext = partname.ext
if (ext.lower(), content_type) in default_content_types:
self._defaults[ext] = content_type
else:
self._overrides[partname] = content_type
@property
def _element(self):
"""
Return XML form of this content types item, suitable for storage as
``[Content_Types].xml`` in an OPC package. Although the sequence of
elements is not strictly significant, as an aid to testing and
readability Default elements are sorted by extension and Override
elements are sorted by partname.
"""
_types_elm = CT_Types.new()
for ext in sorted(self._defaults.keys()):
_types_elm.add_default(ext, self._defaults[ext])
for partname in sorted(self._overrides.keys()):
_types_elm.add_override(partname, self._overrides[partname])
return _types_elm
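# Illustrative sketch (the local names are assumptions; this is not part of
# the module): a package save reduces to a single static call,
#   PackageWriter.write('out.docx', pkg_rels, parts)
# where each part in *parts* supplies .partname, .blob, .content_type and
# ._rels, and *pkg_rels* provides the package-level relationships via .xml.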
| mit |
ikool/metact06 | lib/werkzeug/utils.py | 145 | 22826 | # -*- coding: utf-8 -*-
"""
werkzeug.utils
~~~~~~~~~~~~~~
This module implements various utilities for WSGI applications. Most of
them are used by the request and response wrappers but especially for
middleware development it makes sense to use them without the wrappers.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
import os
import sys
import pkgutil
try:
from html.entities import name2codepoint
except ImportError:
from htmlentitydefs import name2codepoint
from werkzeug._compat import unichr, text_type, string_types, iteritems, \
reraise, PY2
from werkzeug._internal import _DictAccessorProperty, \
_parse_signature, _missing
_format_re = re.compile(r'\$(?:(%s)|\{(%s)\})' % (('[a-zA-Z_][a-zA-Z0-9_]*',) * 2))
_entity_re = re.compile(r'&([^;]+);')
_filename_ascii_strip_re = re.compile(r'[^A-Za-z0-9_.-]')
_windows_device_files = ('CON', 'AUX', 'COM1', 'COM2', 'COM3', 'COM4', 'LPT1',
'LPT2', 'LPT3', 'PRN', 'NUL')
class cached_property(object):
"""A decorator that converts a function into a lazy property. The
function wrapped is called the first time to retrieve the result
and then that calculated result is used the next time you access
the value::
class Foo(object):
@cached_property
def foo(self):
# calculate something important here
return 42
The class has to have a `__dict__` in order for this property to
work.
"""
# implementation detail: this property is implemented as non-data
# descriptor. non-data descriptors are only invoked if there is
# no entry with the same name in the instance's __dict__.
# this allows us to completely get rid of the access function call
# overhead. If one chooses to invoke __get__ by hand the property
# will still work as expected because the lookup logic is replicated
# in __get__ for manual invocation.
def __init__(self, func, name=None, doc=None):
self.__name__ = name or func.__name__
self.__module__ = func.__module__
self.__doc__ = doc or func.__doc__
self.func = func
def __get__(self, obj, type=None):
if obj is None:
return self
value = obj.__dict__.get(self.__name__, _missing)
if value is _missing:
value = self.func(obj)
obj.__dict__[self.__name__] = value
return value
class environ_property(_DictAccessorProperty):
"""Maps request attributes to environment variables. This works not only
for the Werkzeug request object, but also any other class with an
environ attribute:
>>> class Test(object):
... environ = {'key': 'value'}
... test = environ_property('key')
>>> var = Test()
>>> var.test
'value'
If you pass it a second value it's used as default if the key does not
exist, the third one can be a converter that takes a value and converts
it. If it raises :exc:`ValueError` or :exc:`TypeError` the default value
is used. If no default value is provided `None` is used.
Per default the property is read only. You have to explicitly enable it
by passing ``read_only=False`` to the constructor.
"""
read_only = True
def lookup(self, obj):
return obj.environ
class header_property(_DictAccessorProperty):
"""Like `environ_property` but for headers."""
def lookup(self, obj):
return obj.headers
class HTMLBuilder(object):
"""Helper object for HTML generation.
Per default there are two instances of that class. The `html` one, and
the `xhtml` one for those two dialects. The class uses keyword parameters
and positional parameters to generate small snippets of HTML.
Keyword parameters are converted to XML/SGML attributes, positional
arguments are used as children. Because Python accepts positional
arguments before keyword arguments it's a good idea to use a list with the
star-syntax for some children:
>>> html.p(class_='foo', *[html.a('foo', href='foo.html'), ' ',
... html.a('bar', href='bar.html')])
u'<p class="foo"><a href="foo.html">foo</a> <a href="bar.html">bar</a></p>'
This class works around some browser limitations and can not be used for
arbitrary SGML/XML generation. For that purpose lxml and similar
libraries exist.
Calling the builder escapes the string passed:
>>> html.p(html("<foo>"))
u'&lt;p&gt;&amp;lt;foo&amp;gt;&lt;/p&gt;'
"""
_entity_re = re.compile(r'&([^;]+);')
_entities = name2codepoint.copy()
_entities['apos'] = 39
_empty_elements = set([
'area', 'base', 'basefont', 'br', 'col', 'command', 'embed', 'frame',
'hr', 'img', 'input', 'keygen', 'isindex', 'link', 'meta', 'param',
'source', 'wbr'
])
_boolean_attributes = set([
'selected', 'checked', 'compact', 'declare', 'defer', 'disabled',
'ismap', 'multiple', 'nohref', 'noresize', 'noshade', 'nowrap'
])
_plaintext_elements = set(['textarea'])
_c_like_cdata = set(['script', 'style'])
def __init__(self, dialect):
self._dialect = dialect
def __call__(self, s):
return escape(s)
def __getattr__(self, tag):
if tag[:2] == '__':
raise AttributeError(tag)
def proxy(*children, **arguments):
buffer = '<' + tag
for key, value in iteritems(arguments):
if value is None:
continue
if key[-1] == '_':
key = key[:-1]
if key in self._boolean_attributes:
if not value:
continue
if self._dialect == 'xhtml':
value = '="' + key + '"'
else:
value = ''
else:
value = '="' + escape(value) + '"'
buffer += ' ' + key + value
if not children and tag in self._empty_elements:
if self._dialect == 'xhtml':
buffer += ' />'
else:
buffer += '>'
return buffer
buffer += '>'
children_as_string = ''.join([text_type(x) for x in children
if x is not None])
if children_as_string:
if tag in self._plaintext_elements:
children_as_string = escape(children_as_string)
elif tag in self._c_like_cdata and self._dialect == 'xhtml':
children_as_string = '/*<![CDATA[*/' + \
children_as_string + '/*]]>*/'
buffer += children_as_string + '</' + tag + '>'
return buffer
return proxy
def __repr__(self):
return '<%s for %r>' % (
self.__class__.__name__,
self._dialect
)
html = HTMLBuilder('html')
xhtml = HTMLBuilder('xhtml')
def get_content_type(mimetype, charset):
"""Return the full content type string with charset for a mimetype.
If the mimetype represents text the charset will be appended as charset
parameter, otherwise the mimetype is returned unchanged.
:param mimetype: the mimetype to be used as content type.
:param charset: the charset to be appended in case it was a text mimetype.
:return: the content type.
"""
if mimetype.startswith('text/') or \
mimetype == 'application/xml' or \
(mimetype.startswith('application/') and
mimetype.endswith('+xml')):
mimetype += '; charset=' + charset
return mimetype
def format_string(string, context):
"""String-template format a string:
>>> format_string('$foo and ${foo}s', dict(foo=42))
'42 and 42s'
This does not do any attribute lookup etc. For more advanced string
formatting, have a look at the `werkzeug.template` module.
:param string: the format string.
:param context: a dict with the variables to insert.
"""
def lookup_arg(match):
x = context[match.group(1) or match.group(2)]
if not isinstance(x, string_types):
x = type(string)(x)
return x
return _format_re.sub(lookup_arg, string)
def secure_filename(filename):
r"""Pass it a filename and it will return a secure version of it. This
filename can then safely be stored on a regular file system and passed
to :func:`os.path.join`. The filename returned is an ASCII only string
for maximum portability.
On Windows systems the function also makes sure that the file is not
named after one of the special device files.
>>> secure_filename("My cool movie.mov")
'My_cool_movie.mov'
>>> secure_filename("../../../etc/passwd")
'etc_passwd'
>>> secure_filename(u'i contain cool \xfcml\xe4uts.txt')
'i_contain_cool_umlauts.txt'
The function might return an empty filename. It's your responsibility
to ensure that the filename is unique and to generate a random
filename if the function returned an empty one.
.. versionadded:: 0.5
:param filename: the filename to secure
"""
if isinstance(filename, text_type):
from unicodedata import normalize
filename = normalize('NFKD', filename).encode('ascii', 'ignore')
if not PY2:
filename = filename.decode('ascii')
for sep in os.path.sep, os.path.altsep:
if sep:
filename = filename.replace(sep, ' ')
filename = str(_filename_ascii_strip_re.sub('', '_'.join(
filename.split()))).strip('._')
# on nt a couple of special files are present in each folder. We
# have to ensure that the target file is not such a filename. In
# this case we prepend an underline
if os.name == 'nt' and filename and \
filename.split('.')[0].upper() in _windows_device_files:
filename = '_' + filename
return filename
def escape(s, quote=None):
"""Replace special characters "&", "<", ">" and (") to HTML-safe sequences.
There is a special handling for `None` which escapes to an empty string.
.. versionchanged:: 0.9
`quote` is now implicitly on.
:param s: the string to escape.
:param quote: ignored.
"""
if s is None:
return ''
elif hasattr(s, '__html__'):
return text_type(s.__html__())
elif not isinstance(s, string_types):
s = text_type(s)
if quote is not None:
from warnings import warn
warn(DeprecationWarning('quote parameter is implicit now'), stacklevel=2)
s = s.replace('&', '&amp;').replace('<', '&lt;') \
.replace('>', '&gt;').replace('"', '&quot;')
return s
def unescape(s):
"""The reverse function of `escape`. This unescapes all the HTML
entities, not only the XML entities inserted by `escape`.
:param s: the string to unescape.
"""
def handle_match(m):
name = m.group(1)
if name in HTMLBuilder._entities:
return unichr(HTMLBuilder._entities[name])
try:
if name[:2] in ('#x', '#X'):
return unichr(int(name[2:], 16))
elif name.startswith('#'):
return unichr(int(name[1:]))
except ValueError:
pass
return u''
return _entity_re.sub(handle_match, s)
def redirect(location, code=302):
"""Return a response object (a WSGI application) that, if called,
redirects the client to the target location. Supported codes are 301,
302, 303, 305, and 307. 300 is not supported because it's not a real
redirect and 304 because it's the answer for a request with defined
If-Modified-Since headers.
.. versionadded:: 0.6
The location can now be a unicode string that is encoded using
the :func:`iri_to_uri` function.
:param location: the location the response should redirect to.
:param code: the redirect status code. defaults to 302.
"""
from werkzeug.wrappers import Response
display_location = escape(location)
if isinstance(location, text_type):
# Safe conversion is necessary here as we might redirect
# to a broken URI scheme (for instance itms-services).
from werkzeug.urls import iri_to_uri
location = iri_to_uri(location, safe_conversion=True)
response = Response(
'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n'
'<title>Redirecting...</title>\n'
'<h1>Redirecting...</h1>\n'
'<p>You should be redirected automatically to target URL: '
'<a href="%s">%s</a>. If not click the link.' %
(escape(location), display_location), code, mimetype='text/html')
response.headers['Location'] = location
return response
def append_slash_redirect(environ, code=301):
"""Redirect to the same URL but with a slash appended. The behavior
of this function is undefined if the path ends with a slash already.
:param environ: the WSGI environment for the request that triggers
the redirect.
:param code: the status code for the redirect.
"""
new_path = environ['PATH_INFO'].strip('/') + '/'
query_string = environ.get('QUERY_STRING')
if query_string:
new_path += '?' + query_string
return redirect(new_path, code)
def import_string(import_name, silent=False):
"""Imports an object based on a string. This is useful if you want to
use import paths as endpoints or something similar. An import path can
be specified either in dotted notation (``xml.sax.saxutils.escape``)
or with a colon as object delimiter (``xml.sax.saxutils:escape``).
If `silent` is True the return value will be `None` if the import fails.
:param import_name: the dotted name for the object to import.
:param silent: if set to `True` import errors are ignored and
`None` is returned instead.
:return: imported object
"""
#XXX: py3 review needed
assert isinstance(import_name, string_types)
# force the import name to automatically convert to strings
import_name = str(import_name)
try:
if ':' in import_name:
module, obj = import_name.split(':', 1)
elif '.' in import_name:
module, obj = import_name.rsplit('.', 1)
else:
return __import__(import_name)
# __import__ is not able to handle unicode strings in the fromlist
# if the module is a package
if PY2 and isinstance(obj, unicode):
obj = obj.encode('utf-8')
try:
return getattr(__import__(module, None, None, [obj]), obj)
except (ImportError, AttributeError):
# support importing modules not yet set up by the parent module
# (or package for that matter)
modname = module + '.' + obj
__import__(modname)
return sys.modules[modname]
except ImportError as e:
if not silent:
reraise(
ImportStringError,
ImportStringError(import_name, e),
sys.exc_info()[2])
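# Illustrative usage (a sketch; the import path is the one used in the
# docstring above):
#   escape = import_string('xml.sax.saxutils:escape')
# behaves like ``from xml.sax.saxutils import escape``, while passing
# silent=True makes a failed import return None instead of raising
# ImportStringError.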
def find_modules(import_path, include_packages=False, recursive=False):
"""Find all the modules below a package. This can be useful to
automatically import all views / controllers so that their metaclasses /
function decorators have a chance to register themselves on the
application.
Packages are not returned unless `include_packages` is `True`. This can
also recursively list modules but in that case it will import all the
packages to get the correct load path of that module.
:param import_name: the dotted name for the package to find child modules.
:param include_packages: set to `True` if packages should be returned, too.
:param recursive: set to `True` if recursion should happen.
:return: generator
"""
module = import_string(import_path)
path = getattr(module, '__path__', None)
if path is None:
raise ValueError('%r is not a package' % import_path)
basename = module.__name__ + '.'
for importer, modname, ispkg in pkgutil.iter_modules(path):
modname = basename + modname
if ispkg:
if include_packages:
yield modname
if recursive:
for item in find_modules(modname, include_packages, True):
yield item
else:
yield modname
def validate_arguments(func, args, kwargs, drop_extra=True):
"""Check if the function accepts the arguments and keyword arguments.
Returns a new ``(args, kwargs)`` tuple that can safely be passed to
the function without causing a `TypeError` because the function signature
is incompatible. If `drop_extra` is set to `True` (which is the default)
any extra positional or keyword arguments are dropped automatically.
The exception raised provides three attributes:
`missing`
A set of argument names that the function expected but were
missing.
`extra`
A dict of keyword arguments that the function cannot handle but
were provided.
`extra_positional`
A list of values that were given by positional argument but the
function cannot accept.
This can be useful for decorators that forward user submitted data to
a view function::
from werkzeug.utils import ArgumentValidationError, validate_arguments
def sanitize(f):
def proxy(request):
data = request.values.to_dict()
try:
args, kwargs = validate_arguments(f, (request,), data)
except ArgumentValidationError:
raise BadRequest('The browser failed to transmit all '
'the data expected.')
return f(*args, **kwargs)
return proxy
:param func: the function the validation is performed against.
:param args: a tuple of positional arguments.
:param kwargs: a dict of keyword arguments.
:param drop_extra: set to `False` if you don't want extra arguments
to be silently dropped.
:return: tuple in the form ``(args, kwargs)``.
"""
parser = _parse_signature(func)
args, kwargs, missing, extra, extra_positional = parser(args, kwargs)[:5]
if missing:
raise ArgumentValidationError(tuple(missing))
elif (extra or extra_positional) and not drop_extra:
raise ArgumentValidationError(None, extra, extra_positional)
return tuple(args), kwargs
def bind_arguments(func, args, kwargs):
"""Bind the arguments provided into a dict. When passed a function,
a tuple of arguments and a dict of keyword arguments `bind_arguments`
returns a dict of names as the function would see it. This can be useful
to implement a cache decorator that uses the function arguments to build
the cache key based on the values of the arguments.
:param func: the function the arguments should be bound for.
:param args: tuple of positional arguments.
:param kwargs: a dict of keyword arguments.
:return: a :class:`dict` of bound keyword arguments.
"""
args, kwargs, missing, extra, extra_positional, \
arg_spec, vararg_var, kwarg_var = _parse_signature(func)(args, kwargs)
values = {}
for (name, has_default, default), value in zip(arg_spec, args):
values[name] = value
if vararg_var is not None:
values[vararg_var] = tuple(extra_positional)
elif extra_positional:
raise TypeError('too many positional arguments')
if kwarg_var is not None:
multikw = set(extra) & set([x[0] for x in arg_spec])
if multikw:
raise TypeError('got multiple values for keyword argument ' +
repr(next(iter(multikw))))
values[kwarg_var] = extra
elif extra:
raise TypeError('got unexpected keyword argument ' +
repr(next(iter(extra))))
return values
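# Illustrative sketch of the cache-decorator use case mentioned in the
# docstring (the helper name is an assumption, not part of this module):
#   def _cache_key(func, args, kwargs):
#       bound = bind_arguments(func, args, kwargs)
#       return tuple(sorted(bound.items()))
# Calls that bind to the same argument values then yield equal keys, whether
# the values were passed positionally or by keyword.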
class ArgumentValidationError(ValueError):
"""Raised if :func:`validate_arguments` fails to validate"""
def __init__(self, missing=None, extra=None, extra_positional=None):
self.missing = set(missing or ())
self.extra = extra or {}
self.extra_positional = extra_positional or []
ValueError.__init__(self, 'function arguments invalid. ('
'%d missing, %d additional)' % (
len(self.missing),
len(self.extra) + len(self.extra_positional)
))
class ImportStringError(ImportError):
"""Provides information about a failed :func:`import_string` attempt."""
#: String in dotted notation that failed to be imported.
import_name = None
#: Wrapped exception.
exception = None
def __init__(self, import_name, exception):
self.import_name = import_name
self.exception = exception
msg = (
'import_string() failed for %r. Possible reasons are:\n\n'
'- missing __init__.py in a package;\n'
'- package or module path not included in sys.path;\n'
'- duplicated package or module name taking precedence in '
'sys.path;\n'
'- missing module, class, function or variable;\n\n'
'Debugged import:\n\n%s\n\n'
'Original exception:\n\n%s: %s')
name = ''
tracked = []
for part in import_name.replace(':', '.').split('.'):
name += (name and '.') + part
imported = import_string(name, silent=True)
if imported:
tracked.append((name, getattr(imported, '__file__', None)))
else:
track = ['- %r found in %r.' % (n, i) for n, i in tracked]
track.append('- %r not found.' % name)
msg = msg % (import_name, '\n'.join(track),
exception.__class__.__name__, str(exception))
break
ImportError.__init__(self, msg)
def __repr__(self):
return '<%s(%r, %r)>' % (self.__class__.__name__, self.import_name,
self.exception)
# circular dependencies
from werkzeug.http import quote_header_value, unquote_header_value, \
cookie_date
# DEPRECATED
# these objects were previously in this module as well. we import
# them here for backwards compatibility with old pickles.
from werkzeug.datastructures import MultiDict, CombinedMultiDict, \
Headers, EnvironHeaders
from werkzeug.http import parse_cookie, dump_cookie
| apache-2.0 |
NicoSantangelo/sublime-gulp | status_bar.py | 1 | 1362 | import sublime
is_sublime_text_3 = int(sublime.version()) >= 3000
if is_sublime_text_3:
from .settings import Settings
from .caches import ProcessCache
from .timeout import defer_sync
else:
from settings import Settings
from caches import ProcessCache
from timeout import defer_sync
class StatusBar():
def __init__(self, window):
self.window = window
self.settings = Settings()
def update(self):
if ProcessCache.empty():
return self.erase()
status_bar_tasks = self.settings.get('status_bar_tasks', False)
if status_bar_tasks:
task_names = set([process.get_task_name() for process in ProcessCache.get()])
if status_bar_tasks != True:
if not isinstance(status_bar_tasks, list):
status_bar_tasks = [status_bar_tasks]
task_names = task_names.intersection(set(status_bar_tasks))
if task_names:
defer_sync(lambda: self.set(', '.join(task_names)))
def set(self, text):
text_format = self.settings.get('status_bar_format', '{task_name}')
status = text_format.format(task_name=text)
self.window.active_view().set_status(Settings.PACKAGE_NAME, status)
def erase(self):
self.window.active_view().erase_status(Settings.PACKAGE_NAME)
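# Illustrative settings sketch (the key names come from the code above; the
# concrete values are assumptions). In the package settings,
#   "status_bar_tasks": ["watch", "serve"],
#   "status_bar_format": "gulp: {task_name}"
# shows only those running tasks, rendered e.g. as "gulp: watch".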
| mit |
wfxiang08/changes | changes/api/jobstep_details.py | 2 | 6870 | from __future__ import absolute_import
from datetime import datetime
from flask import current_app
from flask_restful.reqparse import RequestParser
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.validators.datetime import ISODatetime
from changes.config import db
from changes.constants import Result, Status
from changes.db.utils import get_or_create
from changes.jobs.sync_job import sync_job
from changes.models import (
Command, FailureReason, JobPhase, JobPlan, JobStep, Node, Plan, Snapshot, SnapshotImage,
)
RESULT_CHOICES = ('failed', 'passed', 'aborted', 'skipped', 'infra_failed')
STATUS_CHOICES = ('queued', 'in_progress', 'finished')
# Choices should map to Result/Status names. We don't just use
# the Enum names directly to make it harder to unintentionally
# broaden the public API and because not all options necessarily
# make sense for this interface.
assert set(RESULT_CHOICES) <= set(Result.__members__.keys())
assert set(STATUS_CHOICES) <= set(Status.__members__.keys())
class JobStepDetailsAPIView(APIView):
post_parser = RequestParser()
post_parser.add_argument('date', type=ISODatetime())
post_parser.add_argument('status', choices=STATUS_CHOICES)
post_parser.add_argument('result', choices=RESULT_CHOICES)
post_parser.add_argument('node')
post_parser.add_argument('heartbeat', type=bool)
@classmethod
def get_snapshot_image(self, current_snapshot_id, plan_id):
"""
Get the snapshot image that should be used for a given plan.
If the plan uses a dependent snapshot (that is, it uses a snapshot
created by a different plan for its own build) then it returns
the snapshot image associated with the plan it depends on, else
it returns the snapshot image associated with the plan itself.
"""
snapshot_plan_id = Plan.query.get(plan_id).snapshot_plan_id
if snapshot_plan_id is None:
snapshot_plan_id = plan_id
return SnapshotImage.query.filter(
SnapshotImage.snapshot_id == current_snapshot_id,
SnapshotImage.plan_id == snapshot_plan_id,
).scalar()
def _is_final_jobphase(self, jobphase):
return not db.session.query(
JobPhase.query.filter(
JobPhase.date_created > jobphase.date_created,
).exists(),
).scalar()
def get(self, step_id):
jobstep = JobStep.query.options(
joinedload('project', innerjoin=True),
).get(step_id)
if jobstep is None:
return '', 404
jobplan = JobPlan.query.filter(
JobPlan.job_id == jobstep.job_id,
).first()
# determine if there's an expected snapshot outcome
expected_image = SnapshotImage.query.filter(
SnapshotImage.job_id == jobstep.job_id,
).first()
current_image = None
# we only send a current snapshot if we're not expecting to build
# a new image
if not expected_image:
current_snapshot = Snapshot.get_current(jobstep.project_id)
if current_snapshot and jobplan:
current_image = self.get_snapshot_image(current_snapshot.id, jobplan.plan_id)
elif current_app.config['DEFAULT_SNAPSHOT']:
current_image = {
'id': current_app.config['DEFAULT_SNAPSHOT'],
}
context = self.serialize(jobstep)
context['commands'] = self.serialize(list(jobstep.commands))
context['snapshot'] = self.serialize(current_image)
context['expectedSnapshot'] = self.serialize(expected_image)
context['project'] = self.serialize(jobstep.project)
return self.respond(context, serialize=False)
def post(self, step_id):
jobstep = JobStep.query.options(
joinedload('project', innerjoin=True),
).get(step_id)
if jobstep is None:
return '', 404
args = self.post_parser.parse_args()
current_datetime = args.date or datetime.utcnow()
if args.result:
jobstep.result = Result[args.result]
if args.status:
jobstep.status = Status[args.status]
# if we've finished this job, lets ensure we have set date_finished
if jobstep.status == Status.finished and jobstep.date_finished is None:
jobstep.date_finished = current_datetime
elif jobstep.status != Status.finished and jobstep.date_finished:
jobstep.date_finished = None
if jobstep.status != Status.queued and jobstep.date_started is None:
jobstep.date_started = current_datetime
elif jobstep.status == Status.queued and jobstep.date_started:
jobstep.date_started = None
if args.node:
node, _ = get_or_create(Node, where={
'label': args.node,
})
jobstep.node_id = node.id
# we want to guarantee that even if the jobstep seems to succeed, we
# accurately reflect what we internally would consider a success state
if jobstep.result == Result.passed and jobstep.status == Status.finished:
last_command = Command.query.filter(
Command.jobstep_id == jobstep.id,
).order_by(Command.order.desc()).first()
if not last_command:
pass
elif last_command.status != Status.finished:
jobstep.result = Result.failed
elif last_command.return_code != 0:
jobstep.result = Result.failed
# are we missing an expansion step? it must happen before reporting
# the result, and would falsely give us a success metric
elif last_command.type.is_collector() and self._is_final_jobphase(jobstep.phase):
jobstep.result = Result.failed
job = jobstep.job
# TODO(dcramer): we should add a better failure reason
db.session.add(FailureReason(
step_id=jobstep.id,
job_id=job.id,
build_id=job.build_id,
project_id=job.project_id,
reason='missing_artifact',
))
db.session.add(jobstep)
if db.session.is_modified(jobstep):
db.session.commit()
# TODO(dcramer): this is a little bit hacky, but until we can entirely
# move to push APIs we need a good way to handle the existing sync
job = jobstep.job
sync_job.delay_if_needed(
task_id=job.id.hex,
parent_task_id=job.id.hex,
job_id=job.build_id.hex,
)
return self.respond(jobstep)
| apache-2.0 |
alikins/subscription-manager | src/subscription_manager/installedproductslib.py | 3 | 1634 | #
# Copyright (c) 2011 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
from subscription_manager import injection as inj
from subscription_manager import certlib
class InstalledProductsActionInvoker(certlib.BaseActionInvoker):
"""Used by rhsmcertd to update the installed
products on this system periodically.
"""
def _do_update(self):
action = InstalledProductsActionCommand()
return action.perform()
class InstalledProductsActionCommand(object):
"""Update the consumers installed product list to RHSM API.
Returns a InstalledProductsActionReport.
"""
def __init__(self):
self.report = InstalledProductsActionReport()
self.cp_provider = inj.require(inj.CP_PROVIDER)
self.uep = self.cp_provider.get_consumer_auth_cp()
def perform(self):
mgr = inj.require(inj.INSTALLED_PRODUCTS_MANAGER)
consumer_identity = inj.require(inj.IDENTITY)
ret = mgr.update_check(self.uep, consumer_identity.uuid)
self.report._status = ret
return self.report
class InstalledProductsActionReport(certlib.ActionReport):
name = "Installed Products"
| gpl-2.0 |
mrtequino/JSW | nodejs/asincronia1/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py | 60 | 65994 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module contains classes that help to emulate xcodebuild behavior on top of
other build systems, such as make and ninja.
"""
import copy
import gyp.common
import os
import os.path
import re
import shlex
import subprocess
import sys
import tempfile
from gyp.common import GypError
# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when
# "xcodebuild" is called too quickly (it has been found to return incorrect
# version number).
XCODE_VERSION_CACHE = None
# Populated lazily by GetXcodeArchsDefault, to an |XcodeArchsDefault| instance
# corresponding to the installed version of Xcode.
XCODE_ARCHS_DEFAULT_CACHE = None
def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
"""Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable,
and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT)."""
mapping = {'$(ARCHS_STANDARD)': archs}
if archs_including_64_bit:
mapping['$(ARCHS_STANDARD_INCLUDING_64_BIT)'] = archs_including_64_bit
return mapping
class XcodeArchsDefault(object):
"""A class to resolve ARCHS variable from xcode_settings, resolving Xcode
macros and implementing filtering by VALID_ARCHS. The expansion of macros
depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
on the version of Xcode.
"""
# Match variable like $(ARCHS_STANDARD).
variable_pattern = re.compile(r'\$\([a-zA-Z_][a-zA-Z0-9_]*\)$')
def __init__(self, default, mac, iphonesimulator, iphoneos):
self._default = (default,)
self._archs = {'mac': mac, 'ios': iphoneos, 'iossim': iphonesimulator}
def _VariableMapping(self, sdkroot):
"""Returns the dictionary of variable mapping depending on the SDKROOT."""
sdkroot = sdkroot.lower()
if 'iphoneos' in sdkroot:
return self._archs['ios']
elif 'iphonesimulator' in sdkroot:
return self._archs['iossim']
else:
return self._archs['mac']
def _ExpandArchs(self, archs, sdkroot):
"""Expands variables references in ARCHS, and remove duplicates."""
variable_mapping = self._VariableMapping(sdkroot)
expanded_archs = []
for arch in archs:
if self.variable_pattern.match(arch):
variable = arch
try:
variable_expansion = variable_mapping[variable]
for arch in variable_expansion:
if arch not in expanded_archs:
expanded_archs.append(arch)
except KeyError as e:
print 'Warning: Ignoring unsupported variable "%s".' % variable
elif arch not in expanded_archs:
expanded_archs.append(arch)
return expanded_archs
def ActiveArchs(self, archs, valid_archs, sdkroot):
"""Expands variables references in ARCHS, and filter by VALID_ARCHS if it
is defined (if not set, Xcode accept any value in ARCHS, otherwise, only
values present in VALID_ARCHS are kept)."""
expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or '')
if valid_archs:
filtered_archs = []
for arch in expanded_archs:
if arch in valid_archs:
filtered_archs.append(arch)
expanded_archs = filtered_archs
return expanded_archs
def GetXcodeArchsDefault():
"""Returns the |XcodeArchsDefault| object to use to expand ARCHS for the
installed version of Xcode. The default values used by Xcode for ARCHS
and the expansion of the variables depends on the version of Xcode used.
For all version anterior to Xcode 5.0 or posterior to Xcode 5.1 included
uses $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 uses
$(ARCHS_STANDARD_INCLUDING_64_BIT). This variable was added to Xcode 5.0
and deprecated with Xcode 5.1.
For "macosx" SDKROOT, all version starting with Xcode 5.0 includes 64-bit
architecture as part of $(ARCHS_STANDARD) and default to only building it.
For "iphoneos" and "iphonesimulator" SDKROOT, 64-bit architectures are part
of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1, they
are also part of $(ARCHS_STANDARD).
All those rules are coded in the construction of the |XcodeArchsDefault|
object to use depending on the version of Xcode detected. The object is
cached for performance reasons."""
global XCODE_ARCHS_DEFAULT_CACHE
if XCODE_ARCHS_DEFAULT_CACHE:
return XCODE_ARCHS_DEFAULT_CACHE
xcode_version, _ = XcodeVersion()
if xcode_version < '0500':
XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
'$(ARCHS_STANDARD)',
XcodeArchsVariableMapping(['i386']),
XcodeArchsVariableMapping(['i386']),
XcodeArchsVariableMapping(['armv7']))
elif xcode_version < '0510':
XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
'$(ARCHS_STANDARD_INCLUDING_64_BIT)',
XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
XcodeArchsVariableMapping(['i386'], ['i386', 'x86_64']),
XcodeArchsVariableMapping(
['armv7', 'armv7s'],
['armv7', 'armv7s', 'arm64']))
else:
XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
'$(ARCHS_STANDARD)',
XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
XcodeArchsVariableMapping(['i386', 'x86_64'], ['i386', 'x86_64']),
XcodeArchsVariableMapping(
['armv7', 'armv7s', 'arm64'],
['armv7', 'armv7s', 'arm64']))
return XCODE_ARCHS_DEFAULT_CACHE
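# Illustrative expansion (a concrete example derived from the mappings
# above, assuming Xcode >= 5.1): with SDKROOT 'iphoneos',
#   GetXcodeArchsDefault().ActiveArchs(['$(ARCHS_STANDARD)'], None, 'iphoneos')
# returns ['armv7', 'armv7s', 'arm64'], and VALID_ARCHS such as ['arm64']
# would filter that down to ['arm64'].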
class XcodeSettings(object):
"""A class that understands the gyp 'xcode_settings' object."""
# Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
# at class-level for efficiency.
_sdk_path_cache = {}
_sdk_root_cache = {}
# Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
# cached at class-level for efficiency.
_plist_cache = {}
# Populated lazily by GetIOSPostbuilds. Shared by all XcodeSettings, so
# cached at class-level for efficiency.
_codesigning_key_cache = {}
def __init__(self, spec):
self.spec = spec
self.isIOS = False
# Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
# This means self.xcode_settings[config] always contains all settings
# for that config -- the per-target settings as well. Settings that are
# the same for all configs are implicitly per-target settings.
self.xcode_settings = {}
configs = spec['configurations']
for configname, config in configs.iteritems():
self.xcode_settings[configname] = config.get('xcode_settings', {})
self._ConvertConditionalKeys(configname)
if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
None):
self.isIOS = True
# This is only non-None temporarily during the execution of some methods.
self.configname = None
# Used by _AdjustLibrary to match .a and .dylib entries in libraries.
self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$')
def _ConvertConditionalKeys(self, configname):
"""Converts or warns on conditional keys. Xcode supports conditional keys,
such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. This is a partial implementation
with some keys converted while the rest force a warning."""
settings = self.xcode_settings[configname]
conditional_keys = [key for key in settings if key.endswith(']')]
for key in conditional_keys:
# If you need more, speak up at http://crbug.com/122592
if key.endswith("[sdk=iphoneos*]"):
if configname.endswith("iphoneos"):
new_key = key.split("[")[0]
settings[new_key] = settings[key]
else:
print 'Warning: Conditional keys not implemented, ignoring:', \
' '.join(conditional_keys)
del settings[key]
def _Settings(self):
assert self.configname
return self.xcode_settings[self.configname]
def _Test(self, test_key, cond_key, default):
return self._Settings().get(test_key, default) == cond_key
def _Appendf(self, lst, test_key, format_str, default=None):
if test_key in self._Settings():
lst.append(format_str % str(self._Settings()[test_key]))
elif default:
lst.append(format_str % str(default))
def _WarnUnimplemented(self, test_key):
if test_key in self._Settings():
print 'Warning: Ignoring not yet implemented key "%s".' % test_key
def IsBinaryOutputFormat(self, configname):
default = "binary" if self.isIOS else "xml"
format = self.xcode_settings[configname].get('INFOPLIST_OUTPUT_FORMAT',
default)
return format == "binary"
def _IsBundle(self):
return int(self.spec.get('mac_bundle', 0)) != 0
def _IsIosAppExtension(self):
return int(self.spec.get('ios_app_extension', 0)) != 0
def _IsIosWatchKitExtension(self):
return int(self.spec.get('ios_watchkit_extension', 0)) != 0
def _IsIosWatchApp(self):
return int(self.spec.get('ios_watch_app', 0)) != 0
def _IsXCTest(self):
return int(self.spec.get('mac_xctest_bundle', 0)) != 0
def GetFrameworkVersion(self):
"""Returns the framework version of the current target. Only valid for
bundles."""
assert self._IsBundle()
return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A')
def GetWrapperExtension(self):
"""Returns the bundle extension (.app, .framework, .plugin, etc). Only
valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('loadable_module', 'shared_library'):
default_wrapper_extension = {
'loadable_module': 'bundle',
'shared_library': 'framework',
}[self.spec['type']]
wrapper_extension = self.GetPerTargetSetting(
'WRAPPER_EXTENSION', default=default_wrapper_extension)
return '.' + self.spec.get('product_extension', wrapper_extension)
elif self.spec['type'] == 'executable':
if self._IsIosAppExtension() or self._IsIosWatchKitExtension():
return '.' + self.spec.get('product_extension', 'appex')
else:
return '.' + self.spec.get('product_extension', 'app')
else:
assert False, "Don't know extension for '%s', target '%s'" % (
self.spec['type'], self.spec['target_name'])
def GetProductName(self):
"""Returns PRODUCT_NAME."""
return self.spec.get('product_name', self.spec['target_name'])
def GetFullProductName(self):
"""Returns FULL_PRODUCT_NAME."""
if self._IsBundle():
return self.GetWrapperName()
else:
return self._GetStandaloneBinaryPath()
def GetWrapperName(self):
"""Returns the directory name of the bundle represented by this target.
Only valid for bundles."""
assert self._IsBundle()
return self.GetProductName() + self.GetWrapperExtension()
def GetBundleContentsFolderPath(self):
"""Returns the qualified path to the bundle's contents folder. E.g.
Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
if self.isIOS:
return self.GetWrapperName()
assert self._IsBundle()
if self.spec['type'] == 'shared_library':
return os.path.join(
self.GetWrapperName(), 'Versions', self.GetFrameworkVersion())
else:
# loadable_modules have a 'Contents' folder like executables.
return os.path.join(self.GetWrapperName(), 'Contents')
def GetBundleResourceFolder(self):
"""Returns the qualified path to the bundle's resource folder. E.g.
Chromium.app/Contents/Resources. Only valid for bundles."""
assert self._IsBundle()
if self.isIOS:
return self.GetBundleContentsFolderPath()
return os.path.join(self.GetBundleContentsFolderPath(), 'Resources')
def GetBundlePlistPath(self):
"""Returns the qualified path to the bundle's plist file. E.g.
Chromium.app/Contents/Info.plist. Only valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('executable', 'loadable_module'):
return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist')
else:
return os.path.join(self.GetBundleContentsFolderPath(),
'Resources', 'Info.plist')
def GetProductType(self):
"""Returns the PRODUCT_TYPE of this target."""
if self._IsIosAppExtension():
assert self._IsBundle(), ('ios_app_extension flag requires mac_bundle '
'(target %s)' % self.spec['target_name'])
return 'com.apple.product-type.app-extension'
if self._IsIosWatchKitExtension():
assert self._IsBundle(), ('ios_watchkit_extension flag requires '
'mac_bundle (target %s)' % self.spec['target_name'])
return 'com.apple.product-type.watchkit-extension'
if self._IsIosWatchApp():
assert self._IsBundle(), ('ios_watch_app flag requires mac_bundle '
'(target %s)' % self.spec['target_name'])
return 'com.apple.product-type.application.watchapp'
if self._IsBundle():
return {
'executable': 'com.apple.product-type.application',
'loadable_module': 'com.apple.product-type.bundle',
'shared_library': 'com.apple.product-type.framework',
}[self.spec['type']]
else:
return {
'executable': 'com.apple.product-type.tool',
'loadable_module': 'com.apple.product-type.library.dynamic',
'shared_library': 'com.apple.product-type.library.dynamic',
'static_library': 'com.apple.product-type.library.static',
}[self.spec['type']]
def GetMachOType(self):
"""Returns the MACH_O_TYPE of this target."""
# Weird, but matches Xcode.
if not self._IsBundle() and self.spec['type'] == 'executable':
return ''
return {
'executable': 'mh_execute',
'static_library': 'staticlib',
'shared_library': 'mh_dylib',
'loadable_module': 'mh_bundle',
}[self.spec['type']]
def _GetBundleBinaryPath(self):
"""Returns the name of the bundle binary of by this target.
E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
assert self._IsBundle()
if self.spec['type'] == 'shared_library' or self.isIOS:
path = self.GetBundleContentsFolderPath()
elif self.spec['type'] in ('executable', 'loadable_module'):
path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
return os.path.join(path, self.GetExecutableName())
def _GetStandaloneExecutableSuffix(self):
if 'product_extension' in self.spec:
return '.' + self.spec['product_extension']
return {
'executable': '',
'static_library': '.a',
'shared_library': '.dylib',
'loadable_module': '.so',
}[self.spec['type']]
def _GetStandaloneExecutablePrefix(self):
return self.spec.get('product_prefix', {
'executable': '',
'static_library': 'lib',
'shared_library': 'lib',
# Non-bundled loadable_modules are called foo.so for some reason
# (that is, .so and no prefix) with the xcode build -- match that.
'loadable_module': '',
}[self.spec['type']])
def _GetStandaloneBinaryPath(self):
"""Returns the name of the non-bundle binary represented by this target.
E.g. hello_world. Only valid for non-bundles."""
assert not self._IsBundle()
assert self.spec['type'] in (
'executable', 'shared_library', 'static_library', 'loadable_module'), (
'Unexpected type %s' % self.spec['type'])
target = self.spec['target_name']
if self.spec['type'] == 'static_library':
if target[:3] == 'lib':
target = target[3:]
elif self.spec['type'] in ('loadable_module', 'shared_library'):
if target[:3] == 'lib':
target = target[3:]
target_prefix = self._GetStandaloneExecutablePrefix()
target = self.spec.get('product_name', target)
target_ext = self._GetStandaloneExecutableSuffix()
return target_prefix + target + target_ext
def GetExecutableName(self):
"""Returns the executable name of the bundle represented by this target.
E.g. Chromium."""
if self._IsBundle():
return self.spec.get('product_name', self.spec['target_name'])
else:
return self._GetStandaloneBinaryPath()
def GetExecutablePath(self):
"""Returns the directory name of the bundle represented by this target. E.g.
Chromium.app/Contents/MacOS/Chromium."""
if self._IsBundle():
return self._GetBundleBinaryPath()
else:
return self._GetStandaloneBinaryPath()
def GetActiveArchs(self, configname):
"""Returns the architectures this target should be built for."""
config_settings = self.xcode_settings[configname]
xcode_archs_default = GetXcodeArchsDefault()
return xcode_archs_default.ActiveArchs(
config_settings.get('ARCHS'),
config_settings.get('VALID_ARCHS'),
config_settings.get('SDKROOT'))
def _GetSdkVersionInfoItem(self, sdk, infoitem):
# xcodebuild requires Xcode and can't run on Command Line Tools-only
# systems from 10.7 onward.
# Since the CLT has no SDK paths anyway, returning None is the
# most sensible route and should still do the right thing.
try:
return GetStdoutQuiet(['xcodebuild', '-version', '-sdk', sdk, infoitem])
except:
pass
def _SdkRoot(self, configname):
if configname is None:
configname = self.configname
return self.GetPerConfigSetting('SDKROOT', configname, default='')
def _SdkPath(self, configname=None):
sdk_root = self._SdkRoot(configname)
if sdk_root.startswith('/'):
return sdk_root
return self._XcodeSdkPath(sdk_root)
def _XcodeSdkPath(self, sdk_root):
if sdk_root not in XcodeSettings._sdk_path_cache:
sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path')
XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
if sdk_root:
XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
return XcodeSettings._sdk_path_cache[sdk_root]
def _AppendPlatformVersionMinFlags(self, lst):
self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings():
# TODO: Implement this better?
sdk_path_basename = os.path.basename(self._SdkPath())
if sdk_path_basename.lower().startswith('iphonesimulator'):
self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
'-mios-simulator-version-min=%s')
else:
self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
'-miphoneos-version-min=%s')
def GetCflags(self, configname, arch=None):
"""Returns flags that need to be added to .c, .cc, .m, and .mm
compilations."""
# This function (and the similar ones below) does not offer complete
# emulation of all xcode_settings keys. They're implemented on demand.
self.configname = configname
cflags = []
sdk_root = self._SdkPath()
if 'SDKROOT' in self._Settings() and sdk_root:
cflags.append('-isysroot %s' % sdk_root)
if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
cflags.append('-Wconstant-conversion')
if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'):
cflags.append('-funsigned-char')
if self._Test('GCC_CW_ASM_SYNTAX', 'YES', default='YES'):
cflags.append('-fasm-blocks')
if 'GCC_DYNAMIC_NO_PIC' in self._Settings():
if self._Settings()['GCC_DYNAMIC_NO_PIC'] == 'YES':
cflags.append('-mdynamic-no-pic')
else:
pass
# TODO: In this case, it depends on the target. xcode passes
# -mdynamic-no-pic by default for executables and possibly static libs
# according to mento.
if self._Test('GCC_ENABLE_PASCAL_STRINGS', 'YES', default='YES'):
cflags.append('-mpascal-strings')
self._Appendf(cflags, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s')
if self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES'):
dbg_format = self._Settings().get('DEBUG_INFORMATION_FORMAT', 'dwarf')
if dbg_format == 'dwarf':
cflags.append('-gdwarf-2')
elif dbg_format == 'stabs':
raise NotImplementedError('stabs debug format is not supported yet.')
elif dbg_format == 'dwarf-with-dsym':
cflags.append('-gdwarf-2')
else:
raise NotImplementedError('Unknown debug format %s' % dbg_format)
if self._Settings().get('GCC_STRICT_ALIASING') == 'YES':
cflags.append('-fstrict-aliasing')
elif self._Settings().get('GCC_STRICT_ALIASING') == 'NO':
cflags.append('-fno-strict-aliasing')
if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'):
cflags.append('-fvisibility=hidden')
if self._Test('GCC_TREAT_WARNINGS_AS_ERRORS', 'YES', default='NO'):
cflags.append('-Werror')
if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'):
cflags.append('-Wnewline-eof')
# In Xcode, this is only activated when GCC_COMPILER_VERSION is clang or
# llvm-gcc. It also requires a fairly recent libtool, and
# if the system clang isn't used, DYLD_LIBRARY_PATH needs to contain the
# path to the libLTO.dylib that matches the used clang.
if self._Test('LLVM_LTO', 'YES', default='NO'):
cflags.append('-flto')
self._AppendPlatformVersionMinFlags(cflags)
# TODO:
if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'):
self._WarnUnimplemented('COPY_PHASE_STRIP')
self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS')
self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS')
# TODO: This is exported correctly, but assigning to it is not supported.
self._WarnUnimplemented('MACH_O_TYPE')
self._WarnUnimplemented('PRODUCT_TYPE')
if arch is not None:
archs = [arch]
else:
assert self.configname
archs = self.GetActiveArchs(self.configname)
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
archs = ['i386']
cflags.append('-arch ' + archs[0])
if archs[0] in ('i386', 'x86_64'):
if self._Test('GCC_ENABLE_SSE3_EXTENSIONS', 'YES', default='NO'):
cflags.append('-msse3')
if self._Test('GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS', 'YES',
default='NO'):
cflags.append('-mssse3') # Note 3rd 's'.
if self._Test('GCC_ENABLE_SSE41_EXTENSIONS', 'YES', default='NO'):
cflags.append('-msse4.1')
if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'):
cflags.append('-msse4.2')
cflags += self._Settings().get('WARNING_CFLAGS', [])
if self._IsXCTest():
platform_root = self._XcodePlatformPath(configname)
if platform_root:
cflags.append('-F' + platform_root + '/Developer/Library/Frameworks/')
if sdk_root:
framework_root = sdk_root
else:
framework_root = ''
config = self.spec['configurations'][self.configname]
framework_dirs = config.get('mac_framework_dirs', [])
for directory in framework_dirs:
cflags.append('-F' + directory.replace('$(SDKROOT)', framework_root))
self.configname = None
return cflags
def GetCflagsC(self, configname):
"""Returns flags that need to be added to .c, and .m compilations."""
self.configname = configname
cflags_c = []
if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi':
cflags_c.append('-ansi')
else:
self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
cflags_c += self._Settings().get('OTHER_CFLAGS', [])
self.configname = None
return cflags_c
def GetCflagsCC(self, configname):
"""Returns flags that need to be added to .cc, and .mm compilations."""
self.configname = configname
cflags_cc = []
clang_cxx_language_standard = self._Settings().get(
'CLANG_CXX_LANGUAGE_STANDARD')
# Note: Don't map c++0x to c++11, so that c++0x can still be used with
# older clangs that don't understand c++11 yet (like Xcode 4.2's).
if clang_cxx_language_standard:
cflags_cc.append('-std=%s' % clang_cxx_language_standard)
self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'):
cflags_cc.append('-fno-rtti')
if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'):
cflags_cc.append('-fno-exceptions')
if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'):
cflags_cc.append('-fvisibility-inlines-hidden')
if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
cflags_cc.append('-fno-threadsafe-statics')
# Note: This flag is a no-op for clang, it only has an effect for gcc.
if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'):
cflags_cc.append('-Wno-invalid-offsetof')
other_ccflags = []
for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']):
# TODO: More general variable expansion. Missing in many other places too.
if flag in ('$inherited', '$(inherited)', '${inherited}'):
flag = '$OTHER_CFLAGS'
if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'):
other_ccflags += self._Settings().get('OTHER_CFLAGS', [])
else:
other_ccflags.append(flag)
cflags_cc += other_ccflags
self.configname = None
return cflags_cc
def _AddObjectiveCGarbageCollectionFlags(self, flags):
gc_policy = self._Settings().get('GCC_ENABLE_OBJC_GC', 'unsupported')
if gc_policy == 'supported':
flags.append('-fobjc-gc')
elif gc_policy == 'required':
flags.append('-fobjc-gc-only')
def _AddObjectiveCARCFlags(self, flags):
if self._Test('CLANG_ENABLE_OBJC_ARC', 'YES', default='NO'):
flags.append('-fobjc-arc')
def _AddObjectiveCMissingPropertySynthesisFlags(self, flags):
if self._Test('CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS',
'YES', default='NO'):
flags.append('-Wobjc-missing-property-synthesis')
def GetCflagsObjC(self, configname):
"""Returns flags that need to be added to .m compilations."""
self.configname = configname
cflags_objc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objc)
self._AddObjectiveCARCFlags(cflags_objc)
self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc)
self.configname = None
return cflags_objc
def GetCflagsObjCC(self, configname):
"""Returns flags that need to be added to .mm compilations."""
self.configname = configname
cflags_objcc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objcc)
self._AddObjectiveCARCFlags(cflags_objcc)
self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc)
if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
cflags_objcc.append('-fobjc-call-cxx-cdtors')
self.configname = None
return cflags_objcc
def GetInstallNameBase(self):
"""Return DYLIB_INSTALL_NAME_BASE for this target."""
# Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
if (self.spec['type'] != 'shared_library' and
(self.spec['type'] != 'loadable_module' or self._IsBundle())):
return None
install_base = self.GetPerTargetSetting(
'DYLIB_INSTALL_NAME_BASE',
default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib')
return install_base
def _StandardizePath(self, path):
"""Do :standardizepath processing for path."""
# I'm not quite sure what :standardizepath does. Just call normpath(),
# but don't let @executable_path/../foo collapse to foo.
if '/' in path:
prefix, rest = '', path
if path.startswith('@'):
prefix, rest = path.split('/', 1)
rest = os.path.normpath(rest) # :standardizepath
path = os.path.join(prefix, rest)
return path
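# Illustrative example (hedged; exact output depends on os.path.normpath):
# _StandardizePath('@executable_path/../Frameworks/./Foo')
# -> '@executable_path/../Frameworks/Foo'
# i.e. normpath() runs only on the part after the '@' prefix, so the
# '@executable_path/..' segment is not collapsed away.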
def GetInstallName(self):
"""Return LD_DYLIB_INSTALL_NAME for this target."""
# Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
if (self.spec['type'] != 'shared_library' and
(self.spec['type'] != 'loadable_module' or self._IsBundle())):
return None
default_install_name = \
'$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)'
install_name = self.GetPerTargetSetting(
'LD_DYLIB_INSTALL_NAME', default=default_install_name)
# Hardcode support for the variables used in chromium for now, to
# unblock people using the make build.
if '$' in install_name:
assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
'$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), (
'Variables in LD_DYLIB_INSTALL_NAME are not generally supported '
'yet in target \'%s\' (got \'%s\')' %
(self.spec['target_name'], install_name))
install_name = install_name.replace(
'$(DYLIB_INSTALL_NAME_BASE:standardizepath)',
self._StandardizePath(self.GetInstallNameBase()))
if self._IsBundle():
# These are only valid for bundles, hence the |if|.
install_name = install_name.replace(
'$(WRAPPER_NAME)', self.GetWrapperName())
install_name = install_name.replace(
'$(PRODUCT_NAME)', self.GetProductName())
else:
assert '$(WRAPPER_NAME)' not in install_name
assert '$(PRODUCT_NAME)' not in install_name
install_name = install_name.replace(
'$(EXECUTABLE_PATH)', self.GetExecutablePath())
return install_name
def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
"""Checks if ldflag contains a filename and if so remaps it from
gyp-directory-relative to build-directory-relative."""
# This list is expanded on demand.
# They get matched as:
# -exported_symbols_list file
# -Wl,exported_symbols_list file
# -Wl,exported_symbols_list,file
LINKER_FILE = r'(\S+)'
WORD = r'\S+'
linker_flags = [
['-exported_symbols_list', LINKER_FILE], # Needed for NaCl.
['-unexported_symbols_list', LINKER_FILE],
['-reexported_symbols_list', LINKER_FILE],
['-sectcreate', WORD, WORD, LINKER_FILE], # Needed for remoting.
]
for flag_pattern in linker_flags:
regex = re.compile('(?:-Wl,)?' + '[ ,]'.join(flag_pattern))
m = regex.match(ldflag)
if m:
ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \
ldflag[m.end(1):]
# Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS,
# TODO(thakis): Update ffmpeg.gyp):
if ldflag.startswith('-L'):
ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):])
return ldflag
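# Illustrative example (the path and mapping are hypothetical):
# _MapLinkerFlagFilename('-Wl,-exported_symbols_list,syms.list',
# gyp_to_build_path)
# -> '-Wl,-exported_symbols_list,<build>/syms.list'
# Only the filename group is remapped; the flag text is left untouched.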
def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
"""Returns flags that need to be passed to the linker.
Args:
configname: The name of the configuration to get ld flags for.
product_dir: The directory where products such as static and dynamic
libraries are placed. This is added to the library search path.
gyp_to_build_path: A function that converts paths relative to the
current gyp file to paths relative to the build directory.
"""
self.configname = configname
ldflags = []
# The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS
# can contain entries that depend on this. Explicitly absolutify these.
for ldflag in self._Settings().get('OTHER_LDFLAGS', []):
ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path))
if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'):
ldflags.append('-Wl,-dead_strip')
if self._Test('PREBINDING', 'YES', default='NO'):
ldflags.append('-Wl,-prebind')
self._Appendf(
ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s')
self._Appendf(
ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s')
self._AppendPlatformVersionMinFlags(ldflags)
if 'SDKROOT' in self._Settings() and self._SdkPath():
ldflags.append('-isysroot ' + self._SdkPath())
for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []):
ldflags.append('-L' + gyp_to_build_path(library_path))
if 'ORDER_FILE' in self._Settings():
ldflags.append('-Wl,-order_file ' +
'-Wl,' + gyp_to_build_path(
self._Settings()['ORDER_FILE']))
if arch is not None:
archs = [arch]
else:
assert self.configname
archs = self.GetActiveArchs(self.configname)
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
archs = ['i386']
ldflags.append('-arch ' + archs[0])
# Xcode adds the product directory by default.
ldflags.append('-L' + product_dir)
install_name = self.GetInstallName()
if install_name and self.spec['type'] != 'loadable_module':
ldflags.append('-install_name ' + install_name.replace(' ', r'\ '))
for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
ldflags.append('-Wl,-rpath,' + rpath)
sdk_root = self._SdkPath()
if not sdk_root:
sdk_root = ''
config = self.spec['configurations'][self.configname]
framework_dirs = config.get('mac_framework_dirs', [])
for directory in framework_dirs:
ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
if self._IsXCTest():
platform_root = self._XcodePlatformPath(configname)
if platform_root:
ldflags.append('-F' + platform_root + '/Developer/Library/Frameworks/')
is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension()
if sdk_root and is_extension:
# Adds the link flags for extensions. These flags are common for all
# extensions and provide the loader and main function.
# These flags reflect the compilation options used by xcode to compile
# extensions.
ldflags.append('-lpkstart')
if XcodeVersion() < '0900':
ldflags.append(sdk_root +
'/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit')
ldflags.append('-fapplication-extension')
ldflags.append('-Xlinker -rpath '
'-Xlinker @executable_path/../../Frameworks')
self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
self.configname = None
return ldflags
def GetLibtoolflags(self, configname):
"""Returns flags that need to be passed to the static linker.
Args:
configname: The name of the configuration to get libtool flags for.
"""
self.configname = configname
libtoolflags = []
for libtoolflag in self._Settings().get('OTHER_LDFLAGS', []):
libtoolflags.append(libtoolflag)
# TODO(thakis): ARCHS?
self.configname = None
return libtoolflags
def GetPerTargetSettings(self):
"""Gets a list of all the per-target settings. This will only fetch keys
whose values are the same across all configurations."""
first_pass = True
result = {}
for configname in sorted(self.xcode_settings.keys()):
if first_pass:
result = dict(self.xcode_settings[configname])
first_pass = False
else:
for key, value in self.xcode_settings[configname].iteritems():
if key not in result:
continue
elif result[key] != value:
del result[key]
return result
def GetPerConfigSetting(self, setting, configname, default=None):
if configname in self.xcode_settings:
return self.xcode_settings[configname].get(setting, default)
else:
return self.GetPerTargetSetting(setting, default)
def GetPerTargetSetting(self, setting, default=None):
"""Tries to get xcode_settings.setting from spec. Assumes that the setting
has the same value in all configurations and throws otherwise."""
is_first_pass = True
result = None
for configname in sorted(self.xcode_settings.keys()):
if is_first_pass:
result = self.xcode_settings[configname].get(setting, None)
is_first_pass = False
else:
assert result == self.xcode_settings[configname].get(setting, None), (
"Expected per-target setting for '%s', got per-config setting "
"(target %s)" % (setting, self.spec['target_name']))
if result is None:
return default
return result
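# Illustrative example: if every configuration sets
# 'GCC_PREFIX_HEADER': 'foo.h', then
# GetPerTargetSetting('GCC_PREFIX_HEADER') -> 'foo.h';
# if the configurations disagree, the assert above fires instead.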
def _GetStripPostbuilds(self, configname, output_binary, quiet):
"""Returns a list of shell commands that contain the shell commands
neccessary to strip this target's binary. These should be run as postbuilds
before the actual postbuilds run."""
self.configname = configname
result = []
if (self._Test('DEPLOYMENT_POSTPROCESSING', 'YES', default='NO') and
self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
default_strip_style = 'debugging'
if self.spec['type'] == 'loadable_module' and self._IsBundle():
default_strip_style = 'non-global'
elif self.spec['type'] == 'executable':
default_strip_style = 'all'
strip_style = self._Settings().get('STRIP_STYLE', default_strip_style)
strip_flags = {
'all': '',
'non-global': '-x',
'debugging': '-S',
}[strip_style]
explicit_strip_flags = self._Settings().get('STRIPFLAGS', '')
if explicit_strip_flags:
strip_flags += ' ' + _NormalizeEnvVarReferences(explicit_strip_flags)
if not quiet:
result.append('echo STRIP\\(%s\\)' % self.spec['target_name'])
result.append('strip %s %s' % (strip_flags, output_binary))
self.configname = None
return result
def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
"""Returns a list of shell commands that contain the shell commands
neccessary to massage this target's debug information. These should be run
as postbuilds before the actual postbuilds run."""
self.configname = configname
# For static libraries, no dSYMs are created.
result = []
if (self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES') and
self._Test(
'DEBUG_INFORMATION_FORMAT', 'dwarf-with-dsym', default='dwarf') and
self.spec['type'] != 'static_library'):
if not quiet:
result.append('echo DSYMUTIL\\(%s\\)' % self.spec['target_name'])
result.append('dsymutil %s -o %s' % (output_binary, output + '.dSYM'))
self.configname = None
return result
def _GetTargetPostbuilds(self, configname, output, output_binary,
quiet=False):
"""Returns a list of shell commands that contain the shell commands
to run as postbuilds for this target, before the actual postbuilds."""
# dSYMs need to build before stripping happens.
return (
self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) +
self._GetStripPostbuilds(configname, output_binary, quiet))
def _GetIOSPostbuilds(self, configname, output_binary):
"""Return a shell command to codesign the iOS output binary so it can
be deployed to a device. This should be run as the very last step of the
build."""
if not (self.isIOS and self.spec['type'] == 'executable'):
return []
settings = self.xcode_settings[configname]
key = self._GetIOSCodeSignIdentityKey(settings)
if not key:
return []
# Warn for any unimplemented signing xcode keys.
unimpl = ['OTHER_CODE_SIGN_FLAGS']
unimpl = set(unimpl) & set(self.xcode_settings[configname].keys())
if unimpl:
print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
', '.join(sorted(unimpl)))
return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % (
os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''),
settings.get('CODE_SIGN_ENTITLEMENTS', ''),
settings.get('PROVISIONING_PROFILE', ''))
]
def _GetIOSCodeSignIdentityKey(self, settings):
identity = settings.get('CODE_SIGN_IDENTITY')
if not identity:
return None
if identity not in XcodeSettings._codesigning_key_cache:
output = subprocess.check_output(
['security', 'find-identity', '-p', 'codesigning', '-v'])
for line in output.splitlines():
if identity in line:
fingerprint = line.split()[1]
cache = XcodeSettings._codesigning_key_cache
assert identity not in cache or fingerprint == cache[identity], (
"Multiple codesigning fingerprints for identity: %s" % identity)
XcodeSettings._codesigning_key_cache[identity] = fingerprint
return XcodeSettings._codesigning_key_cache.get(identity, '')
def AddImplicitPostbuilds(self, configname, output, output_binary,
postbuilds=[], quiet=False):
"""Returns a list of shell commands that should run before and after
|postbuilds|."""
assert output_binary is not None
pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
post = self._GetIOSPostbuilds(configname, output_binary)
return pre + postbuilds + post
def _AdjustLibrary(self, library, config_name=None):
if library.endswith('.framework'):
l = '-framework ' + os.path.splitext(os.path.basename(library))[0]
else:
m = self.library_re.match(library)
if m:
l = '-l' + m.group(1)
else:
l = library
sdk_root = self._SdkPath(config_name)
if not sdk_root:
sdk_root = ''
# Xcode 7 started shipping with ".tbd" (text based stub) files instead of
# ".dylib" files, without providing real support for them. What it does for
# "/usr/lib" libraries is emit "-L/usr/lib -lname", which is dependent on
# the library order and causes collisions when building Chrome.
#
# Instead, substitute ".dylib" with ".tbd" in the generated project when the
# following conditions are both true:
# - the library is referenced in the gyp file as "$(SDKROOT)/**/*.dylib",
# - the ".dylib" file does not exist but a ".tbd" file does.
library = l.replace('$(SDKROOT)', sdk_root)
if l.startswith('$(SDKROOT)'):
basename, ext = os.path.splitext(library)
if ext == '.dylib' and not os.path.exists(library):
tbd_library = basename + '.tbd'
if os.path.exists(tbd_library):
library = tbd_library
return library
def AdjustLibraries(self, libraries, config_name=None):
"""Transforms entries like 'Cocoa.framework' in libraries into entries like
'-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
"""
libraries = [self._AdjustLibrary(library, config_name)
for library in libraries]
return libraries
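# Illustrative examples (no $(SDKROOT) substitution needed here):
# AdjustLibraries(['Cocoa.framework', 'libcrypto.dylib', '-lm'])
# -> ['-framework Cocoa', '-lcrypto', '-lm']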
def _BuildMachineOSBuild(self):
return GetStdout(['sw_vers', '-buildVersion'])
def _XcodeIOSDeviceFamily(self, configname):
family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
return [int(x) for x in family.split(',')]
def GetExtraPlistItems(self, configname=None):
"""Returns a dictionary with extra items to insert into Info.plist."""
if configname not in XcodeSettings._plist_cache:
cache = {}
cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
xcode, xcode_build = XcodeVersion()
cache['DTXcode'] = xcode
cache['DTXcodeBuild'] = xcode_build
sdk_root = self._SdkRoot(configname)
if not sdk_root:
sdk_root = self._DefaultSdkRoot()
cache['DTSDKName'] = sdk_root
if xcode >= '0430':
cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
sdk_root, 'ProductBuildVersion')
else:
cache['DTSDKBuild'] = cache['BuildMachineOSBuild']
if self.isIOS:
cache['DTPlatformName'] = cache['DTSDKName']
if configname.endswith("iphoneos"):
cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem(
sdk_root, 'ProductVersion')
cache['CFBundleSupportedPlatforms'] = ['iPhoneOS']
else:
cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator']
XcodeSettings._plist_cache[configname] = cache
# Include extra plist items that are per-target, not per global
# XcodeSettings.
items = dict(XcodeSettings._plist_cache[configname])
if self.isIOS:
items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname)
return items
def _DefaultSdkRoot(self):
"""Returns the default SDKROOT to use.
Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
project, then the environment variable was empty. Starting with this
version, Xcode uses the name of the newest SDK installed.
"""
xcode_version, xcode_build = XcodeVersion()
if xcode_version < '0500':
return ''
default_sdk_path = self._XcodeSdkPath('')
default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
if default_sdk_root:
return default_sdk_root
try:
all_sdks = GetStdout(['xcodebuild', '-showsdks'])
except:
# If xcodebuild fails, there will be no valid SDKs
return ''
for line in all_sdks.splitlines():
items = line.split()
if len(items) >= 3 and items[-2] == '-sdk':
sdk_root = items[-1]
sdk_path = self._XcodeSdkPath(sdk_root)
if sdk_path == default_sdk_path:
return sdk_root
return ''
class MacPrefixHeader(object):
"""A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
This feature consists of several pieces:
* If GCC_PREFIX_HEADER is present, all compilations in that project get an
additional |-include path_to_prefix_header| cflag.
* If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is
instead compiled, and all other compilations in the project get an
additional |-include path_to_compiled_header| instead.
+ Compiled prefix headers have the extension gch. There is one gch file for
every language used in the project (c, cc, m, mm), since gch files for
different languages aren't compatible.
+ gch files themselves are built with the target's normal cflags, but they
obviously don't get the |-include| flag. Instead, they need a -x flag that
describes their language.
+ All o files in the target need to depend on the gch file, to make sure
it's built before any o file is built.
This class helps with some of these tasks, but it needs help from the build
system for writing dependencies to the gch files, for writing build commands
for the gch files, and for figuring out the location of the gch files.
"""
def __init__(self, xcode_settings,
gyp_path_to_build_path, gyp_path_to_build_output):
"""If xcode_settings is None, all methods on this class are no-ops.
Args:
gyp_path_to_build_path: A function that takes a gyp-relative path,
and returns a path relative to the build directory.
gyp_path_to_build_output: A function that takes a gyp-relative path and
a language code ('c', 'cc', 'm', or 'mm'), and that returns a path
to where the output of precompiling that path for that language
should be placed (without the trailing '.gch').
"""
# This doesn't support per-configuration prefix headers. Good enough
# for now.
self.header = None
self.compile_headers = False
if xcode_settings:
self.header = xcode_settings.GetPerTargetSetting('GCC_PREFIX_HEADER')
self.compile_headers = xcode_settings.GetPerTargetSetting(
'GCC_PRECOMPILE_PREFIX_HEADER', default='NO') != 'NO'
self.compiled_headers = {}
if self.header:
if self.compile_headers:
for lang in ['c', 'cc', 'm', 'mm']:
self.compiled_headers[lang] = gyp_path_to_build_output(
self.header, lang)
self.header = gyp_path_to_build_path(self.header)
def _CompiledHeader(self, lang, arch):
assert self.compile_headers
h = self.compiled_headers[lang]
if arch:
h += '.' + arch
return h
def GetInclude(self, lang, arch=None):
"""Gets the cflags to include the prefix header for language |lang|."""
if self.compile_headers and lang in self.compiled_headers:
return '-include %s' % self._CompiledHeader(lang, arch)
elif self.header:
return '-include %s' % self.header
else:
return ''
def _Gch(self, lang, arch):
"""Returns the actual file name of the prefix header for language |lang|."""
assert self.compile_headers
return self._CompiledHeader(lang, arch) + '.gch'
def GetObjDependencies(self, sources, objs, arch=None):
"""Given a list of source files and the corresponding object files, returns
a list of (source, object, gch) tuples, where |gch| is the build-directory
relative path to the gch file each object file depends on. |sources[i]|
has to be the source file belonging to |objs[i]|."""
if not self.header or not self.compile_headers:
return []
result = []
for source, obj in zip(sources, objs):
ext = os.path.splitext(source)[1]
lang = {
'.c': 'c',
'.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc',
'.m': 'm',
'.mm': 'mm',
}.get(ext, None)
if lang:
result.append((source, obj, self._Gch(lang, arch)))
return result
def GetPchBuildCommands(self, arch=None):
"""Returns [(path_to_gch, language_flag, language, header)].
|path_to_gch| and |header| are relative to the build directory.
"""
if not self.header or not self.compile_headers:
return []
return [
(self._Gch('c', arch), '-x c-header', 'c', self.header),
(self._Gch('cc', arch), '-x c++-header', 'cc', self.header),
(self._Gch('m', arch), '-x objective-c-header', 'm', self.header),
(self._Gch('mm', arch), '-x objective-c++-header', 'mm', self.header),
]
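# Illustrative sketch of how a generator might consume MacPrefixHeader
# (build_path and build_output are hypothetical callables):
# prefix = MacPrefixHeader(xcode_settings, build_path, build_output)
# for gch, lang_flag, lang, header in prefix.GetPchBuildCommands():
# ... emit a rule compiling |header| with |lang_flag| into |gch| ...
# cflags_c.append(prefix.GetInclude('c'))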
def XcodeVersion():
"""Returns a tuple of version and build version of installed Xcode."""
# `xcodebuild -version` output looks like
# Xcode 4.6.3
# Build version 4H1503
# or like
# Xcode 3.2.6
# Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
# BuildVersion: 10M2518
# Convert that to '0463', '4H1503'.
global XCODE_VERSION_CACHE
if XCODE_VERSION_CACHE:
return XCODE_VERSION_CACHE
try:
version_list = GetStdoutQuiet(['xcodebuild', '-version']).splitlines()
# In some circumstances xcodebuild exits 0 but doesn't return
# the right results; for example, a user on 10.7 or 10.8 with
# a bogus path set via xcode-select.
# In that case this may be a CLT-only install so fall back to
# checking that version.
if len(version_list) < 2:
raise GypError("xcodebuild returned unexpected results")
except:
version = CLTVersion()
if version:
version = re.match(r'(\d+\.\d+\.?\d*)', version).groups()[0]
else:
raise GypError("No Xcode or CLT version detected!")
# The CLT has no build information, so we return an empty string.
version_list = [version, '']
version = version_list[0]
build = version_list[-1]
# Be careful to convert "4.2" to "0420":
version = version.split()[-1].replace('.', '')
version = (version + '0' * (3 - len(version))).zfill(4)
if build:
build = build.split()[-1]
XCODE_VERSION_CACHE = (version, build)
return XCODE_VERSION_CACHE
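# Illustrative conversions: 'Xcode 4.6.3' / 'Build version 4H1503'
# -> ('0463', '4H1503'); a CLT-only '5.0.1' -> ('0501', '').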
# This function ported from the logic in Homebrew's CLT version check
def CLTVersion():
"""Returns the version of command-line tools from pkgutil."""
# pkgutil output looks like
# package-id: com.apple.pkg.CLTools_Executables
# version: 5.0.1.0.1.1382131676
# volume: /
# location: /
# install-time: 1382544035
# groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group
STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
regex = re.compile('version: (?P<version>.+)')
for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
try:
output = GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key])
return re.search(regex, output).groupdict()['version']
except:
continue
def GetStdoutQuiet(cmdlist):
"""Returns the content of standard output returned by invoking |cmdlist|.
Ignores the stderr.
Raises |GypError| if the command returns with a non-zero return code."""
job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out = job.communicate()[0]
if job.returncode != 0:
raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
return out.rstrip('\n')
def GetStdout(cmdlist):
"""Returns the content of standard output returned by invoking |cmdlist|.
Raises |GypError| if the command returns with a non-zero return code."""
job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
out = job.communicate()[0]
if job.returncode != 0:
sys.stderr.write(out + '\n')
raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
return out.rstrip('\n')
def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
"""Merges the global xcode_settings dictionary into each configuration of the
target represented by spec. For keys that are both in the global and the local
xcode_settings dict, the local key gets precedence.
"""
# The xcode generator special-cases global xcode_settings and does something
# that amounts to merging in the global xcode_settings into each local
# xcode_settings dict.
global_xcode_settings = global_dict.get('xcode_settings', {})
for config in spec['configurations'].values():
if 'xcode_settings' in config:
new_settings = global_xcode_settings.copy()
new_settings.update(config['xcode_settings'])
config['xcode_settings'] = new_settings
def IsMacBundle(flavor, spec):
"""Returns if |spec| should be treated as a bundle.
Bundles are directories with a certain subdirectory structure, instead of
just a single file. Bundle rules do not produce a binary but also package
resources into that directory."""
is_mac_bundle = (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac')
if is_mac_bundle:
assert spec['type'] != 'none', (
'mac_bundle targets cannot have type none (target "%s")' %
spec['target_name'])
return is_mac_bundle
def GetMacBundleResources(product_dir, xcode_settings, resources):
"""Yields (output, resource) pairs for every resource in |resources|.
Only call this for mac bundle targets.
Args:
product_dir: Path to the directory containing the output bundle,
relative to the build directory.
xcode_settings: The XcodeSettings of the current target.
resources: A list of bundle resources, relative to the build directory.
"""
dest = os.path.join(product_dir,
xcode_settings.GetBundleResourceFolder())
for res in resources:
output = dest
# The make generator doesn't support it, so forbid it everywhere
# to keep the generators more interchangeable.
assert ' ' not in res, (
"Spaces in resource filenames not supported (%s)" % res)
# Split into (path,file).
res_parts = os.path.split(res)
# Now split the path into (prefix,maybe.lproj).
lproj_parts = os.path.split(res_parts[0])
# If the resource lives in a .lproj bundle, add that to the destination.
if lproj_parts[1].endswith('.lproj'):
output = os.path.join(output, lproj_parts[1])
output = os.path.join(output, res_parts[1])
# Compiled XIB files are referred to by .nib.
if output.endswith('.xib'):
output = os.path.splitext(output)[0] + '.nib'
# Compiled storyboard files are referred to by .storyboardc.
if output.endswith('.storyboard'):
output = os.path.splitext(output)[0] + '.storyboardc'
yield output, res
def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
"""Returns (info_plist, dest_plist, defines, extra_env), where:
* |info_plist| is the source plist path, relative to the
build directory,
* |dest_plist| is the destination plist path, relative to the
build directory,
* |defines| is a list of preprocessor defines (empty if the plist
shouldn't be preprocessed),
* |extra_env| is a dict of env variables that should be exported when
invoking |mac_tool copy-info-plist|.
Only call this for mac bundle targets.
Args:
product_dir: Path to the directory containing the output bundle,
relative to the build directory.
xcode_settings: The XcodeSettings of the current target.
gyp_path_to_build_path: A function that converts paths relative to the
current gyp file to paths relative to the build directory.
"""
info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE')
if not info_plist:
return None, None, [], {}
# The make generator doesn't support it, so forbid it everywhere
# to keep the generators more interchangeable.
assert ' ' not in info_plist, (
"Spaces in Info.plist filenames not supported (%s)" % info_plist)
info_plist = gyp_path_to_build_path(info_plist)
# If explicitly set to preprocess the plist, invoke the C preprocessor and
# specify any defines as -D flags.
if xcode_settings.GetPerTargetSetting(
'INFOPLIST_PREPROCESS', default='NO') == 'YES':
# Create an intermediate file based on the path.
defines = shlex.split(xcode_settings.GetPerTargetSetting(
'INFOPLIST_PREPROCESSOR_DEFINITIONS', default=''))
else:
defines = []
dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath())
extra_env = xcode_settings.GetPerTargetSettings()
return info_plist, dest_plist, defines, extra_env
def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
additional_settings=None):
"""Return the environment variables that Xcode would set. See
http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
for a full list.
Args:
xcode_settings: An XcodeSettings object. If this is None, this function
returns an empty dict.
built_products_dir: Absolute path to the built products dir.
srcroot: Absolute path to the source root.
configuration: The build configuration name.
additional_settings: An optional dict with more values to add to the
result.
"""
if not xcode_settings: return {}
# This function is considered a friend of XcodeSettings, so let it reach into
# its implementation details.
spec = xcode_settings.spec
# These are filled in on an as-needed basis.
env = {
'BUILT_FRAMEWORKS_DIR' : built_products_dir,
'BUILT_PRODUCTS_DIR' : built_products_dir,
'CONFIGURATION' : configuration,
'PRODUCT_NAME' : xcode_settings.GetProductName(),
# See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME
'SRCROOT' : srcroot,
'SOURCE_ROOT': '${SRCROOT}',
# This is not true for static libraries, but currently the env is only
# written for bundles:
'TARGET_BUILD_DIR' : built_products_dir,
'TEMP_DIR' : '${TMPDIR}',
}
if xcode_settings.GetPerConfigSetting('SDKROOT', configuration):
env['SDKROOT'] = xcode_settings._SdkPath(configuration)
else:
env['SDKROOT'] = ''
if spec['type'] in (
'executable', 'static_library', 'shared_library', 'loadable_module'):
env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName()
env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath()
env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName()
mach_o_type = xcode_settings.GetMachOType()
if mach_o_type:
env['MACH_O_TYPE'] = mach_o_type
env['PRODUCT_TYPE'] = xcode_settings.GetProductType()
if xcode_settings._IsBundle():
env['CONTENTS_FOLDER_PATH'] = \
xcode_settings.GetBundleContentsFolderPath()
env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \
xcode_settings.GetBundleResourceFolder()
env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath()
env['WRAPPER_NAME'] = xcode_settings.GetWrapperName()
install_name = xcode_settings.GetInstallName()
if install_name:
env['LD_DYLIB_INSTALL_NAME'] = install_name
install_name_base = xcode_settings.GetInstallNameBase()
if install_name_base:
env['DYLIB_INSTALL_NAME_BASE'] = install_name_base
if XcodeVersion() >= '0500' and not env.get('SDKROOT'):
sdk_root = xcode_settings._SdkRoot(configuration)
if not sdk_root:
sdk_root = xcode_settings._XcodeSdkPath('')
if sdk_root is None:
sdk_root = ''
env['SDKROOT'] = sdk_root
if not additional_settings:
additional_settings = {}
else:
# Flatten lists to strings.
for k in additional_settings:
if not isinstance(additional_settings[k], str):
additional_settings[k] = ' '.join(additional_settings[k])
additional_settings.update(env)
for k in additional_settings:
additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k])
return additional_settings
def _NormalizeEnvVarReferences(str):
"""Takes a string containing variable references in the form ${FOO}, $(FOO),
or $FOO, and returns a string with all variable references in the form ${FOO}.
"""
# $FOO -> ${FOO}
str = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', str)
# $(FOO) -> ${FOO}
matches = re.findall(r'(\$\(([a-zA-Z0-9\-_]+)\))', str)
for match in matches:
to_replace, variable = match
assert '$(' not in match, '$($(FOO)) variables not supported: ' + match
str = str.replace(to_replace, '${' + variable + '}')
return str
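# Illustrative example:
# _NormalizeEnvVarReferences('$FOO $(BAR) ${BAZ}') -> '${FOO} ${BAR} ${BAZ}'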
def ExpandEnvVars(string, expansions):
"""Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the
expansions list. If the variable expands to something that references
another variable, this variable is expanded as well if it's in env --
until no variables present in env are left."""
for k, v in reversed(expansions):
string = string.replace('${' + k + '}', v)
string = string.replace('$(' + k + ')', v)
string = string.replace('$' + k, v)
return string
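# Illustrative example: with expansions = [('A', '1'), ('B', '$(A)2')]
# (as produced by GetSortedXcodeEnv), ExpandEnvVars('x$(B)', expansions)
# walks the list in reverse, so '$(B)' -> '$(A)2' -> '12', giving 'x12'.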
def _TopologicallySortedEnvVarKeys(env):
"""Takes a dict |env| whose values are strings that can refer to other keys,
for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
env such that key2 is after key1 in L if env[key2] refers to env[key1].
Throws an Exception in case of dependency cycles.
"""
# Since environment variables can refer to other variables, the evaluation
# order is important. Below is the logic to compute the dependency graph
# and sort it.
regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}')
def GetEdges(node):
# Use a definition of edges such that user_of_variable -> used_variable.
# This happens to be easier in this case, since a variable's
# definition contains all variables it references in a single string.
# We can then reverse the result of the topological sort at the end.
# Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
matches = set([v for v in regex.findall(env[node]) if v in env])
for dependee in matches:
assert '${' not in dependee, 'Nested variables not supported: ' + dependee
return matches
try:
# Topologically sort, and then reverse, because we used an edge definition
# that's inverted from the expected result of this function (see comment
# above).
order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
order.reverse()
return order
except gyp.common.CycleError, e:
raise GypError(
'Xcode environment variables are cyclically dependent: ' + str(e.nodes))
def GetSortedXcodeEnv(xcode_settings, built_products_dir, srcroot,
configuration, additional_settings=None):
env = _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
additional_settings)
return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)]
def GetSpecPostbuildCommands(spec, quiet=False):
"""Returns the list of postbuilds explicitly defined on |spec|, in a form
executable by a shell."""
postbuilds = []
for postbuild in spec.get('postbuilds', []):
if not quiet:
postbuilds.append('echo POSTBUILD\\(%s\\) %s' % (
spec['target_name'], postbuild['postbuild_name']))
postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
return postbuilds
def _HasIOSTarget(targets):
"""Returns true if any target contains the iOS specific key
IPHONEOS_DEPLOYMENT_TARGET."""
for target_dict in targets.values():
for config in target_dict['configurations'].values():
if config.get('xcode_settings', {}).get('IPHONEOS_DEPLOYMENT_TARGET'):
return True
return False
def _AddIOSDeviceConfigurations(targets):
"""Clone all targets and append -iphoneos to the name. Configure these targets
to build for iOS devices and use correct architectures for those builds."""
for target_dict in targets.itervalues():
toolset = target_dict['toolset']
configs = target_dict['configurations']
for config_name, config_dict in dict(configs).iteritems():
iphoneos_config_dict = copy.deepcopy(config_dict)
configs[config_name + '-iphoneos'] = iphoneos_config_dict
configs[config_name + '-iphonesimulator'] = config_dict
if toolset == 'target':
iphoneos_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
return targets
def CloneConfigurationForDeviceAndEmulator(target_dicts):
"""If |target_dicts| contains any iOS targets, automatically create -iphoneos
targets for iOS device builds."""
if _HasIOSTarget(target_dicts):
return _AddIOSDeviceConfigurations(target_dicts)
return target_dicts
| apache-2.0 |
liu-jc/reinforcement-learning | lib/plotting.py | 4 | 3457 | import matplotlib
import numpy as np
import pandas as pd
from collections import namedtuple
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
EpisodeStats = namedtuple("Stats",["episode_lengths", "episode_rewards"])
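# Illustrative usage: pre-allocate stats for num_episodes episodes, e.g.
# stats = EpisodeStats(episode_lengths=np.zeros(num_episodes),
# episode_rewards=np.zeros(num_episodes))
# and later pass the filled-in tuple to plot_episode_stats(stats).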
def plot_cost_to_go_mountain_car(env, estimator, num_tiles=20):
x = np.linspace(env.observation_space.low[0], env.observation_space.high[0], num=num_tiles)
y = np.linspace(env.observation_space.low[1], env.observation_space.high[1], num=num_tiles)
X, Y = np.meshgrid(x, y)
Z = np.apply_along_axis(lambda _: -np.max(estimator.predict(_)), 2, np.dstack([X, Y]))
fig = plt.figure(figsize=(10, 5))
ax = fig.add_subplot(111, projection='3d')
surf = ax.plot_surface(X, Y, Z, rstride=1, cstride=1,
cmap=matplotlib.cm.coolwarm, vmin=-1.0, vmax=1.0)
ax.set_xlabel('Position')
ax.set_ylabel('Velocity')
ax.set_zlabel('Value')
ax.set_title("Mountain \"Cost To Go\" Function")
fig.colorbar(surf)
plt.show()
def plot_value_function(V, title="Value Function"):
"""
Plots the value function as a surface plot.
"""
min_x = min(k[0] for k in V.keys())
max_x = max(k[0] for k in V.keys())
min_y = min(k[1] for k in V.keys())
max_y = max(k[1] for k in V.keys())
x_range = np.arange(min_x, max_x + 1)
y_range = np.arange(min_y, max_y + 1)
X, Y = np.meshgrid(x_range, y_range)
# Find value for all (x, y) coordinates
Z_noace = np.apply_along_axis(lambda _: V[(_[0], _[1], False)], 2, np.dstack([X, Y]))
Z_ace = np.apply_along_axis(lambda _: V[(_[0], _[1], True)], 2, np.dstack([X, Y]))
def plot_surface(X, Y, Z, title):
fig = plt.figure(figsize=(20, 10))
ax = fig.add_subplot(111, projection='3d')
surf = ax.plot_surface(X, Y, Z, rstride=1, cstride=1,
cmap=matplotlib.cm.coolwarm, vmin=-1.0, vmax=1.0)
ax.set_xlabel('Player Sum')
ax.set_ylabel('Dealer Showing')
ax.set_zlabel('Value')
ax.set_title(title)
ax.view_init(ax.elev, -120)
fig.colorbar(surf)
plt.show()
plot_surface(X, Y, Z_noace, "{} (No Usable Ace)".format(title))
plot_surface(X, Y, Z_ace, "{} (Usable Ace)".format(title))
def plot_episode_stats(stats, smoothing_window=10, noshow=False):
# Plot the episode length over time
fig1 = plt.figure(figsize=(10,5))
plt.plot(stats.episode_lengths)
plt.xlabel("Episode")
plt.ylabel("Episode Length")
plt.title("Episode Length over Time")
if noshow:
plt.close(fig1)
else:
plt.show(fig1)
# Plot the episode reward over time
fig2 = plt.figure(figsize=(10,5))
rewards_smoothed = pd.Series(stats.episode_rewards).rolling(smoothing_window, min_periods=smoothing_window).mean()
plt.plot(rewards_smoothed)
plt.xlabel("Episode")
plt.ylabel("Episode Reward (Smoothed)")
plt.title("Episode Reward over Time (Smoothed over window size {})".format(smoothing_window))
if noshow:
plt.close(fig2)
else:
plt.show(fig2)
# Plot time steps and episode number
fig3 = plt.figure(figsize=(10,5))
plt.plot(np.cumsum(stats.episode_lengths), np.arange(len(stats.episode_lengths)))
plt.xlabel("Time Steps")
plt.ylabel("Episode")
plt.title("Episode per time step")
if noshow:
plt.close(fig3)
else:
plt.show(fig3)
return fig1, fig2, fig3
| mit |
backtou/longlab | gr-uhd/apps/hf_explorer/hfx.py | 11 | 34239 | #!/usr/bin/env python
# generated by wxGlade 0.4 on Tue Mar 14 10:16:06 2006
#
# Copyright 2006,2011 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
#-----------------------------------------------------------------
#
# +-->(fft)
# |
# (src)->(xlate)--+->(audio filter)--+-->(sel_am)-+--------------+
# | | |
# | (pll) |
# | | |
# | (pll_carrier_scale) |
# | | |
# | (pll_carrier_filter) |
# | | |
# | +--(fft2) |
# | | |
# | +--(c2f3)--+ |
# | | | |
# | (phaser1) (phaser2) |
# | | | |
# | +--(f2c)---+ |
# | | V
# V +---------->(am_det)
# (c2f) |
# | (c2f2)
# | |
# +-->(sel_sb)------------>(combine)
# |
# V
# +--------------------------(scale)
# | |
# | |
# | +++
# V | |
# (agc)<--(offset)<--(intr)<---(sqr1)
# |
# V
# (dst)
#
#----------------------------------------------------------------------
#
# Version 2.2.1 adds the loop antenna automatic tuner.
#
# 2.3.1 adds web control, made AM Sync display optional,
# added more comments.
#
# 2.4.1 updates usrp interface to support auto subdev
#
# 2.8.1 changed saved file format from 8-byte complex to
# 4-byte short for obvious storage space savings.
# Web server control disabled by default. Do not enable
# until directory structure and scripts are in place.
WEB_CONTROL = False
# Controls display of AM Sync Carrier - turn off for smaller
# window if not needed
AM_SYNC_DISPLAY = False
import os, wx, sys, math
import wx.lib.evtmgr as em
from gnuradio.wxgui import powermate, fftsink2
from gnuradio import gr, audio, eng_notation
from gnuradio.eng_option import eng_option
from gnuradio import uhd
from optparse import OptionParser
n2s = eng_notation.num_to_str
ID_BUTTON_1 = wx.NewId() # LSB button
ID_BUTTON_2 = wx.NewId() # USB
ID_BUTTON_3 = wx.NewId() # AM
ID_BUTTON_4 = wx.NewId() # CW
ID_BUTTON_5 = wx.NewId() # Powermate controls: Upper audio freq cutoff
ID_BUTTON_6 = wx.NewId() # " Lower audio freq cutoff
ID_BUTTON_7 = wx.NewId() # " Frequency
ID_BUTTON_8 = wx.NewId() # " Volume
ID_BUTTON_9 = wx.NewId() # " Time
ID_BUTTON_10 = wx.NewId() # Time Seek Forwards
ID_BUTTON_11 = wx.NewId() # Time Seek Backwards
ID_BUTTON_12 = wx.NewId() # Automatic Antenna Tune (AT) enable
ID_BUTTON_13 = wx.NewId() # AT Calibrate point
ID_BUTTON_14 = wx.NewId() # AT Reset
ID_TEXT_1 = wx.NewId() # Band Center, USRP ddc Freq
ID_SPIN_1 = wx.NewId() # Frequency display and control
ID_SLIDER_1 = wx.NewId() # Upper audio freq cutoff
ID_SLIDER_2 = wx.NewId() # Lower audio freq cutoff
ID_SLIDER_3 = wx.NewId() # Frequency
ID_SLIDER_4 = wx.NewId() # Volume
ID_SLIDER_5 = wx.NewId() # Programmable Gain Amp, PGA, RF gain
ID_SLIDER_6 = wx.NewId() # AM Sync carrier level
ID_SLIDER_7 = wx.NewId() # AT control voltage output
ID_EXIT = wx.NewId() # Menu Exit
class MyFrame(wx.Frame):
def __init__(self, *args, **kwds):
# begin wxGlade: MyFrame.__init__
kwds["style"] = wx.DEFAULT_FRAME_STYLE
wx.Frame.__init__(self, *args, **kwds)
# Menu Bar
self.frame_1_menubar = wx.MenuBar()
self.SetMenuBar(self.frame_1_menubar)
wxglade_tmp_menu = wx.Menu()
self.Exit = wx.MenuItem(wxglade_tmp_menu, ID_EXIT, "Exit",
"Exit", wx.ITEM_NORMAL)
wxglade_tmp_menu.AppendItem(self.Exit)
self.frame_1_menubar.Append(wxglade_tmp_menu, "File")
# Menu Bar end
self.panel_1 = wx.Panel(self, -1)
self.button_1 = wx.Button(self, ID_BUTTON_1, "LSB")
self.button_2 = wx.Button(self, ID_BUTTON_2, "USB")
self.button_3 = wx.Button(self, ID_BUTTON_3, "AM")
self.button_4 = wx.Button(self, ID_BUTTON_4, "CW")
self.button_5 = wx.ToggleButton(self, ID_BUTTON_5, "Upper")
self.slider_fcutoff_hi = wx.Slider(self, ID_SLIDER_1, 0, -15798, 15799,
style=wx.SL_HORIZONTAL|wx.SL_LABELS)
self.button_6 = wx.ToggleButton(self, ID_BUTTON_6, "Lower")
self.slider_fcutoff_lo = wx.Slider(self, ID_SLIDER_2, 0, -15799, 15798,
style=wx.SL_HORIZONTAL|wx.SL_LABELS)
self.panel_5 = wx.Panel(self, -1)
self.label_1 = wx.StaticText(self, -1, " Band\nCenter")
self.text_ctrl_1 = wx.TextCtrl(self, ID_TEXT_1, "")
self.panel_6 = wx.Panel(self, -1)
self.panel_7 = wx.Panel(self, -1)
self.panel_2 = wx.Panel(self, -1)
self.button_7 = wx.ToggleButton(self, ID_BUTTON_7, "Freq")
self.slider_3 = wx.Slider(self, ID_SLIDER_3, 3000, 0, 6000)
self.spin_ctrl_1 = wx.SpinCtrl(self, ID_SPIN_1, "", min=0, max=100)
self.button_8 = wx.ToggleButton(self, ID_BUTTON_8, "Vol")
self.slider_4 = wx.Slider(self, ID_SLIDER_4, 0, 0, 500)
self.slider_5 = wx.Slider(self, ID_SLIDER_5, 0, 0, 20)
self.button_9 = wx.ToggleButton(self, ID_BUTTON_9, "Time")
self.button_11 = wx.Button(self, ID_BUTTON_11, "Rew")
self.button_10 = wx.Button(self, ID_BUTTON_10, "Fwd")
self.panel_3 = wx.Panel(self, -1)
self.label_2 = wx.StaticText(self, -1, "PGA ")
self.panel_4 = wx.Panel(self, -1)
self.panel_8 = wx.Panel(self, -1)
self.panel_9 = wx.Panel(self, -1)
self.label_3 = wx.StaticText(self, -1, "AM Sync\nCarrier")
self.slider_6 = wx.Slider(self, ID_SLIDER_6, 50, 0, 200,
style=wx.SL_HORIZONTAL|wx.SL_LABELS)
self.label_4 = wx.StaticText(self, -1, "Antenna Tune")
self.slider_7 = wx.Slider(self, ID_SLIDER_7, 1575, 950, 2200,
style=wx.SL_HORIZONTAL|wx.SL_LABELS)
self.panel_10 = wx.Panel(self, -1)
self.button_12 = wx.ToggleButton(self, ID_BUTTON_12, "Auto Tune")
self.button_13 = wx.Button(self, ID_BUTTON_13, "Calibrate")
self.button_14 = wx.Button(self, ID_BUTTON_14, "Reset")
self.panel_11 = wx.Panel(self, -1)
self.panel_12 = wx.Panel(self, -1)
self.__set_properties()
self.__do_layout()
# end wxGlade
parser = OptionParser (option_class=eng_option)
parser.add_option("", "--address", type="string", default="addr=192.168.10.2",
help="Address of UHD device, [default=%default]")
parser.add_option ("-c", "--ddc-freq", type="eng_float", default=3.9e6,
help="set Rx DDC frequency to FREQ", metavar="FREQ")
parser.add_option ("-s", "--samp-rate", type="eng_float", default=256e3,
help="set sample rate (bandwidth) [default=%default]")
parser.add_option ("-a", "--audio_file", default="",
help="audio output file", metavar="FILE")
parser.add_option ("-r", "--radio_file", default="",
help="radio output file", metavar="FILE")
parser.add_option ("-i", "--input_file", default="",
help="radio input file", metavar="FILE")
parser.add_option ("-O", "--audio-output", type="string", default="",
help="audio output device name. E.g., hw:0,0, /dev/dsp, or pulse")
(options, args) = parser.parse_args ()
self.usrp_center = options.ddc_freq
input_rate = options.samp_rate
self.slider_range = input_rate * 0.9375
self.f_lo = self.usrp_center - (self.slider_range/2)
self.f_hi = self.usrp_center + (self.slider_range/2)
self.af_sample_rate = 32000
fir_decim = long (input_rate / self.af_sample_rate)
# data point arrays for antenna tuner
self.xdata = []
self.ydata = []
self.tb = gr.top_block()
# radio variables, initial conditions
self.frequency = self.usrp_center
# these map the frequency slider (0-6000) to the actual range
self.f_slider_offset = self.f_lo
self.f_slider_scale = 10000
self.spin_ctrl_1.SetRange(self.f_lo,self.f_hi)
self.text_ctrl_1.SetValue(str(int(self.usrp_center)))
self.slider_5.SetValue(0)
self.AM_mode = False
self.slider_3.SetValue((self.frequency-self.f_slider_offset)/self.f_slider_scale)
self.spin_ctrl_1.SetValue(int(self.frequency))
POWERMATE = True
try:
self.pm = powermate.powermate(self)
except:
sys.stderr.write("Unable to find PowerMate or Contour Shuttle\n")
POWERMATE = False
if POWERMATE:
powermate.EVT_POWERMATE_ROTATE(self, self.on_rotate)
powermate.EVT_POWERMATE_BUTTON(self, self.on_pmButton)
self.active_button = 7
# command line options
if options.audio_file == "": SAVE_AUDIO_TO_FILE = False
else: SAVE_AUDIO_TO_FILE = True
if options.radio_file == "": SAVE_RADIO_TO_FILE = False
else: SAVE_RADIO_TO_FILE = True
if options.input_file == "": self.PLAY_FROM_USRP = True
else: self.PLAY_FROM_USRP = False
if self.PLAY_FROM_USRP:
self.src = uhd.usrp_source(device_addr=options.address,
io_type=uhd.io_type.COMPLEX_FLOAT32,
num_channels=1)
self.src.set_samp_rate(input_rate)
input_rate = self.src.get_samp_rate()
self.src.set_center_freq(self.usrp_center, 0)
self.tune_offset = 0
else:
self.src = gr.file_source (gr.sizeof_short,options.input_file)
self.tune_offset = 2200 # 2200 works for 3.5-4Mhz band
# convert rf data in interleaved short int form to complex
s2ss = gr.stream_to_streams(gr.sizeof_short,2)
s2f1 = gr.short_to_float()
s2f2 = gr.short_to_float()
src_f2c = gr.float_to_complex()
self.tb.connect(self.src,s2ss)
self.tb.connect((s2ss,0),s2f1)
self.tb.connect((s2ss,1),s2f2)
self.tb.connect(s2f1,(src_f2c,0))
self.tb.connect(s2f2,(src_f2c,1))
# save radio data to a file
if SAVE_RADIO_TO_FILE:
radio_file = gr.file_sink(gr.sizeof_short, options.radio_file)
self.tb.connect (self.src, radio_file)
# 2nd DDC
xlate_taps = gr.firdes.low_pass ( \
1.0, input_rate, 16e3, 4e3, gr.firdes.WIN_HAMMING )
self.xlate = gr.freq_xlating_fir_filter_ccf ( \
fir_decim, xlate_taps, self.tune_offset, input_rate )
# Complex Audio filter
audio_coeffs = gr.firdes.complex_band_pass (
1.0, # gain
self.af_sample_rate, # sample rate
-3000, # low cutoff
0, # high cutoff
100, # transition
gr.firdes.WIN_HAMMING) # window
self.slider_fcutoff_hi.SetValue(0)
self.slider_fcutoff_lo.SetValue(-3000)
self.audio_filter = gr.fir_filter_ccc(1, audio_coeffs)
# Main +/- 16Khz spectrum display
self.fft = fftsink2.fft_sink_c(self.panel_2, fft_size=512,
sample_rate=self.af_sample_rate,
average=True, size=(640,240))
# AM Sync carrier
if AM_SYNC_DISPLAY:
self.fft2 = fftsink2.fft_sink_c(self.panel_9,
y_per_div=20, fft_size=512,
sample_rate=self.af_sample_rate,
average=True, size=(640,240))
c2f = gr.complex_to_float()
# AM branch
self.sel_am = gr.multiply_const_cc(0)
# the following frequencies turn out to be in radians/sample
# gr.pll_refout_cc(alpha,beta,min_freq,max_freq)
# suggested alpha = X, beta = .25 * X * X
pll = gr.pll_refout_cc(.5,.0625,(2.*math.pi*7.5e3/self.af_sample_rate),
(2.*math.pi*6.5e3/self.af_sample_rate))
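# Worked numbers for the suggested relation beta = .25 * alpha * alpha:
# with alpha = .5, beta = .25 * .5 * .5 = .0625 (the values used above);
# the 7.5e3 and 6.5e3 Hz bounds map to 2*pi*f/fs radians/sample at
# fs = self.af_sample_rate = 32 kHz.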
self.pll_carrier_scale = gr.multiply_const_cc(complex(10,0))
am_det = gr.multiply_cc()
# these are for converting +7.5kHz to -7.5kHz
# for some reason gr.conjugate_cc() adds noise ??
c2f2 = gr.complex_to_float()
c2f3 = gr.complex_to_float()
f2c = gr.float_to_complex()
phaser1 = gr.multiply_const_ff(1)
phaser2 = gr.multiply_const_ff(-1)
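# conjugation by hand: negating only the imaginary path maps
# z = re + j*im to re - j*im, so the +7.5 kHz tone becomes -7.5 kHz.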
# filter for pll generated carrier
pll_carrier_coeffs = gr.firdes.complex_band_pass (
2.0, # gain
self.af_sample_rate, # sample rate
7400, # low cutoff
7600, # high cutoff
100, # transition
gr.firdes.WIN_HAMMING) # window
self.pll_carrier_filter = gr.fir_filter_ccc (1, pll_carrier_coeffs)
self.sel_sb = gr.multiply_const_ff(1)
combine = gr.add_ff()
#AGC
sqr1 = gr.multiply_ff()
intr = gr.iir_filter_ffd ( [.004, 0], [0, .999] )
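# GNU Radio's iir_filter_ffd applies feedback taps from index 1, so the
# tap pair above is effectively y[n] = .004*x[n] + .999*y[n-1]: a leaky
# integrator tracking the average power of the squared signal for the AGC.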
offset = gr.add_const_ff(1)
agc = gr.divide_ff()
self.scale = gr.multiply_const_ff(0.00001)
dst = audio.sink(long(self.af_sample_rate),
options.audio_output)
if self.PLAY_FROM_USRP:
self.tb.connect(self.src, self.xlate, self.fft)
else:
self.tb.connect(src_f2c, self.xlate, self.fft)
self.tb.connect(self.xlate,self.audio_filter,self.sel_am,(am_det,0))
self.tb.connect(self.sel_am,pll,self.pll_carrier_scale,
self.pll_carrier_filter,c2f3)
self.tb.connect((c2f3,0),phaser1,(f2c,0))
self.tb.connect((c2f3,1),phaser2,(f2c,1))
self.tb.connect(f2c,(am_det,1))
self.tb.connect(am_det,c2f2,(combine,0))
self.tb.connect(self.audio_filter,c2f,
self.sel_sb,(combine,1))
if AM_SYNC_DISPLAY:
self.tb.connect(self.pll_carrier_filter,self.fft2)
self.tb.connect(combine,self.scale)
self.tb.connect(self.scale,(sqr1,0))
self.tb.connect(self.scale,(sqr1,1))
self.tb.connect(sqr1, intr, offset, (agc, 1))
self.tb.connect(self.scale,(agc, 0))
self.tb.connect(agc,dst)
if SAVE_AUDIO_TO_FILE:
f_out = gr.file_sink(gr.sizeof_short,options.audio_file)
sc1 = gr.multiply_const_ff(64000)
f2s1 = gr.float_to_short()
self.tb.connect(agc,sc1,f2s1,f_out)
self.tb.start()
# for mouse position reporting on fft display
self.fft.win.Bind(wx.EVT_LEFT_UP, self.Mouse)
# and left click to re-tune
self.fft.win.Bind(wx.EVT_LEFT_DOWN, self.Click)
# start a timer to check for web commands
if WEB_CONTROL:
self.timer = UpdateTimer(self, 1000) # poll every 1000 ms (1 s)
wx.EVT_BUTTON(self,ID_BUTTON_1,self.set_lsb)
wx.EVT_BUTTON(self,ID_BUTTON_2,self.set_usb)
wx.EVT_BUTTON(self,ID_BUTTON_3,self.set_am)
wx.EVT_BUTTON(self,ID_BUTTON_4,self.set_cw)
wx.EVT_BUTTON(self,ID_BUTTON_10,self.fwd)
wx.EVT_BUTTON(self,ID_BUTTON_11,self.rew)
wx.EVT_BUTTON(self, ID_BUTTON_13, self.AT_calibrate)
wx.EVT_BUTTON(self, ID_BUTTON_14, self.AT_reset)
wx.EVT_TOGGLEBUTTON(self,ID_BUTTON_5,self.on_button)
wx.EVT_TOGGLEBUTTON(self,ID_BUTTON_6,self.on_button)
wx.EVT_TOGGLEBUTTON(self,ID_BUTTON_7,self.on_button)
wx.EVT_TOGGLEBUTTON(self,ID_BUTTON_8,self.on_button)
wx.EVT_TOGGLEBUTTON(self,ID_BUTTON_9,self.on_button)
wx.EVT_SLIDER(self,ID_SLIDER_1,self.set_filter)
wx.EVT_SLIDER(self,ID_SLIDER_2,self.set_filter)
wx.EVT_SLIDER(self,ID_SLIDER_3,self.slide_tune)
wx.EVT_SLIDER(self,ID_SLIDER_4,self.set_volume)
wx.EVT_SLIDER(self,ID_SLIDER_5,self.set_pga)
wx.EVT_SLIDER(self,ID_SLIDER_6,self.am_carrier)
wx.EVT_SLIDER(self,ID_SLIDER_7,self.antenna_tune)
wx.EVT_SPINCTRL(self,ID_SPIN_1,self.spin_tune)
wx.EVT_MENU(self, ID_EXIT, self.TimeToQuit)
def __set_properties(self):
# begin wxGlade: MyFrame.__set_properties
self.SetTitle("HF Explorer")
self.slider_fcutoff_hi.SetMinSize((450, 38))
self.slider_fcutoff_lo.SetMinSize((450, 38))
self.panel_2.SetMinSize((640, 240))
self.button_7.SetValue(1)
self.slider_3.SetMinSize((450, 19))
self.slider_4.SetMinSize((275, 19))
self.slider_5.SetMinSize((275, 19))
if AM_SYNC_DISPLAY:
self.panel_9.SetMinSize((640, 240))
self.slider_6.SetMinSize((300, 38))
self.slider_7.SetMinSize((400, 38))
# end wxGlade
def __do_layout(self):
# begin wxGlade: MyFrame.__do_layout
sizer_1 = wx.BoxSizer(wx.VERTICAL)
grid_sizer_1 = wx.FlexGridSizer(11, 2, 0, 0)
sizer_7 = wx.BoxSizer(wx.HORIZONTAL)
sizer_5 = wx.BoxSizer(wx.HORIZONTAL)
sizer_4 = wx.BoxSizer(wx.HORIZONTAL)
sizer_3 = wx.BoxSizer(wx.HORIZONTAL)
sizer_6 = wx.BoxSizer(wx.VERTICAL)
sizer_2 = wx.BoxSizer(wx.HORIZONTAL)
grid_sizer_1.Add(self.panel_1, 1, wx.EXPAND, 0)
sizer_2.Add(self.button_1, 0, wx.ADJUST_MINSIZE, 0)
sizer_2.Add(self.button_2, 0, wx.ADJUST_MINSIZE, 0)
sizer_2.Add(self.button_3, 0, wx.ADJUST_MINSIZE, 0)
sizer_2.Add(self.button_4, 0, wx.ADJUST_MINSIZE, 0)
grid_sizer_1.Add(sizer_2, 1, wx.EXPAND, 0)
grid_sizer_1.Add(self.button_5, 0, wx.ADJUST_MINSIZE, 0)
grid_sizer_1.Add(self.slider_fcutoff_hi, 0,
wx.ALIGN_CENTER_HORIZONTAL|wx.ADJUST_MINSIZE, 0)
grid_sizer_1.Add(self.button_6, 0, wx.ADJUST_MINSIZE, 0)
grid_sizer_1.Add(self.slider_fcutoff_lo, 0,
wx.ALIGN_CENTER_HORIZONTAL|wx.ADJUST_MINSIZE, 0)
sizer_6.Add(self.panel_5, 1, wx.EXPAND, 0)
sizer_6.Add(self.label_1, 0,
wx.ALIGN_CENTER_HORIZONTAL|wx.ADJUST_MINSIZE, 0)
sizer_6.Add(self.text_ctrl_1, 0, wx.ADJUST_MINSIZE, 0)
sizer_6.Add(self.panel_6, 1, wx.EXPAND, 0)
sizer_6.Add(self.panel_7, 1, wx.EXPAND, 0)
grid_sizer_1.Add(sizer_6, 1, wx.EXPAND, 0)
grid_sizer_1.Add(self.panel_2, 1, wx.EXPAND, 0)
grid_sizer_1.Add(self.button_7, 0, wx.ADJUST_MINSIZE, 0)
sizer_3.Add(self.slider_3, 0, wx.ADJUST_MINSIZE, 0)
sizer_3.Add(self.spin_ctrl_1, 0, wx.ADJUST_MINSIZE, 0)
grid_sizer_1.Add(sizer_3, 1, wx.EXPAND, 0)
grid_sizer_1.Add(self.button_8, 0, wx.ADJUST_MINSIZE, 0)
sizer_4.Add(self.slider_4, 0, wx.ADJUST_MINSIZE, 0)
sizer_4.Add(self.slider_5, 0, wx.ADJUST_MINSIZE, 0)
grid_sizer_1.Add(sizer_4, 1, wx.EXPAND, 0)
grid_sizer_1.Add(self.button_9, 0, wx.ADJUST_MINSIZE, 0)
sizer_5.Add(self.button_11, 0, wx.ADJUST_MINSIZE, 0)
sizer_5.Add(self.button_10, 0, wx.ADJUST_MINSIZE, 0)
sizer_5.Add(self.panel_3, 1, wx.EXPAND, 0)
sizer_5.Add(self.label_2, 0, wx.ADJUST_MINSIZE, 0)
sizer_5.Add(self.panel_4, 1, wx.EXPAND, 0)
grid_sizer_1.Add(sizer_5, 1, wx.EXPAND, 0)
grid_sizer_1.Add(self.panel_8, 1, wx.EXPAND, 0)
grid_sizer_1.Add(self.panel_9, 1, wx.EXPAND, 0)
grid_sizer_1.Add(self.label_3, 0,
wx.ALIGN_CENTER_HORIZONTAL|wx.ALIGN_CENTER_VERTICAL|wx.ADJUST_MINSIZE, 0)
grid_sizer_1.Add(self.slider_6, 0, wx.ADJUST_MINSIZE, 0)
grid_sizer_1.Add(self.label_4, 0,
wx.ALIGN_BOTTOM|wx.ADJUST_MINSIZE, 0)
grid_sizer_1.Add(self.slider_7, 0, wx.ADJUST_MINSIZE, 0)
grid_sizer_1.Add(self.panel_10, 1, wx.EXPAND, 0)
sizer_7.Add(self.button_12, 0, wx.ADJUST_MINSIZE, 0)
sizer_7.Add(self.button_13, 0, wx.ADJUST_MINSIZE, 0)
sizer_7.Add(self.button_14, 0, wx.ADJUST_MINSIZE, 0)
sizer_7.Add(self.panel_11, 1, wx.EXPAND, 0)
sizer_7.Add(self.panel_12, 1, wx.EXPAND, 0)
grid_sizer_1.Add(sizer_7, 1, wx.EXPAND, 0)
sizer_1.Add(grid_sizer_1, 1, wx.EXPAND, 0)
self.SetAutoLayout(True)
self.SetSizer(sizer_1)
sizer_1.Fit(self)
sizer_1.SetSizeHints(self)
self.Layout()
# end wxGlade
# Menu exit
def TimeToQuit(self, event):
self.tb.stop()
self.Close(True)
# Powermate being turned
def on_rotate(self, event):
if self.active_button == 5:
self.slider_fcutoff_hi.SetValue(self.slider_fcutoff_hi.GetValue()+event.delta)
if self.slider_fcutoff_lo.GetValue() > (self.slider_fcutoff_hi.GetValue() - 200) :
self.slider_fcutoff_lo.SetValue(self.slider_fcutoff_hi.GetValue() - 200)
self.filter()
if self.active_button == 6:
self.slider_fcutoff_lo.SetValue(self.slider_fcutoff_lo.GetValue()+event.delta)
if self.slider_fcutoff_hi.GetValue() < (self.slider_fcutoff_lo.GetValue() + 200) :
self.slider_fcutoff_hi.SetValue(self.slider_fcutoff_lo.GetValue() + 200)
self.filter()
if self.active_button == 7:
new = max(0, min(6000, self.slider_3.GetValue() + event.delta))
self.slider_3.SetValue(new)
self.frequency = (self.f_slider_scale * new) + self.f_slider_offset
self.spin_ctrl_1.SetValue(self.frequency)
if self.AM_mode == False:
self.xlate.set_center_freq( self.usrp_center - (self.frequency - self.tune_offset))
else:
self.xlate.set_center_freq( self.usrp_center - (self.frequency - self.tune_offset - 7.5e3))
if self.button_12.GetValue():
self.auto_antenna_tune()
if self.active_button == 8:
new = max(0, min(500, self.slider_4.GetValue() + event.delta))
self.slider_4.SetValue(new)
self.scale.set_k(math.pow(10.,((self.slider_4.GetValue()-500.)/100.)))
if self.active_button == 9:
if self.PLAY_FROM_USRP == False:
if event.delta == -1:
self.src.seek(-1000000,gr.SEEK_CUR)
elif event.delta == 1:
self.src.seek(1000000,gr.SEEK_CUR)
# Powermate pressed to switch controlled function
def on_pmButton(self, event):
if event.value == 0:
if self.active_button == 5:
self.active_button = 6
self.button_5.SetValue(False)
self.button_6.SetValue(True)
elif self.active_button == 6:
self.active_button = 7
self.button_6.SetValue(False)
self.button_7.SetValue(True)
elif self.active_button == 7:
self.active_button = 8
self.button_7.SetValue(False)
self.button_8.SetValue(True)
elif self.active_button == 8:
self.active_button = 9
self.button_8.SetValue(False)
self.button_9.SetValue(True)
elif self.active_button == 9:
self.active_button = 5
self.button_9.SetValue(False)
self.button_5.SetValue(True)
# Clicking one PM control button turns the rest off
def on_button(self, event):
id = event.GetId()
if id == ID_BUTTON_5:
self.active_button = 5
self.button_6.SetValue(False)
self.button_7.SetValue(False)
self.button_8.SetValue(False)
self.button_9.SetValue(False)
if id == ID_BUTTON_6:
self.active_button = 6
self.button_5.SetValue(False)
self.button_7.SetValue(False)
self.button_8.SetValue(False)
self.button_9.SetValue(False)
if id == ID_BUTTON_7:
self.active_button = 7
self.button_5.SetValue(False)
self.button_6.SetValue(False)
self.button_8.SetValue(False)
self.button_9.SetValue(False)
if id == ID_BUTTON_8:
self.active_button = 8
self.button_5.SetValue(False)
self.button_6.SetValue(False)
self.button_7.SetValue(False)
self.button_9.SetValue(False)
if id == ID_BUTTON_9:
self.active_button = 9
self.button_5.SetValue(False)
self.button_6.SetValue(False)
self.button_7.SetValue(False)
self.button_8.SetValue(False)
# Make sure filter settings are legal
def set_filter(self, event):
slider = event.GetId()
slider1 = self.slider_fcutoff_hi.GetValue()
slider2 = self.slider_fcutoff_lo.GetValue()
if slider == ID_SLIDER_1:
if slider2 > (self.slider_fcutoff_hi.GetValue() - 200) :
self.slider_fcutoff_lo.SetValue(slider1 - 200)
elif slider == ID_SLIDER_2:
if slider1 < (self.slider_fcutoff_lo.GetValue() + 200) :
self.slider_fcutoff_hi.SetValue(slider2 + 200)
self.filter()
# Calculate taps and apply
def filter(self):
audio_coeffs = gr.firdes.complex_band_pass (
1.0, # gain
self.af_sample_rate, # sample rate
self.slider_fcutoff_lo.GetValue(), # low cutoff
self.slider_fcutoff_hi.GetValue(), # high cutoff
100, # transition
gr.firdes.WIN_HAMMING) # window
self.audio_filter.set_taps(audio_coeffs)
def set_lsb(self, event):
self.AM_mode = False
self.xlate.set_center_freq( self.usrp_center - (self.frequency - self.tune_offset))
self.sel_sb.set_k(1)
self.sel_am.set_k(0)
self.slider_fcutoff_hi.SetValue(0)
self.slider_fcutoff_lo.SetValue(-3000)
self.filter()
def set_usb(self, event):
self.AM_mode = False
self.xlate.set_center_freq( self.usrp_center - (self.frequency - self.tune_offset))
self.sel_sb.set_k(1)
self.sel_am.set_k(0)
self.slider_fcutoff_hi.SetValue(3000)
self.slider_fcutoff_lo.SetValue(0)
self.filter()
def set_am(self, event):
self.AM_mode = True
self.xlate.set_center_freq( self.usrp_center - (self.frequency - self.tune_offset - 7.5e3))
self.sel_sb.set_k(0)
self.sel_am.set_k(1)
self.slider_fcutoff_hi.SetValue(12500)
self.slider_fcutoff_lo.SetValue(2500)
self.filter()
def set_cw(self, event):
self.AM_mode = False
self.xlate.set_center_freq( self.usrp_center - (self.frequency - self.tune_offset))
self.sel_sb.set_k(1)
self.sel_am.set_k(0)
self.slider_fcutoff_hi.SetValue(-400)
self.slider_fcutoff_lo.SetValue(-800)
self.filter()
def set_volume(self, event):
self.scale.set_k(math.pow(10.,((self.slider_4.GetValue()-500.)/100.)))
def set_pga(self,event):
if self.PLAY_FROM_USRP:
self.src.set_gain(self.slider_5.GetValue())
def slide_tune(self, event):
self.frequency = (self.f_slider_scale * self.slider_3.GetValue()) + self.f_slider_offset
if self.AM_mode == False:
self.xlate.set_center_freq( self.usrp_center - (self.frequency - self.tune_offset))
else:
self.xlate.set_center_freq( self.usrp_center - (self.frequency - self.tune_offset - 7.5e3))
self.spin_ctrl_1.SetValue(self.frequency)
if self.button_12.GetValue():
self.auto_antenna_tune()
def spin_tune(self, event):
self.frequency = self.spin_ctrl_1.GetValue()
if self.AM_mode == False:
self.xlate.set_center_freq( self.usrp_center - (self.frequency - self.tune_offset))
else:
self.xlate.set_center_freq( self.usrp_center - (self.frequency - self.tune_offset - 7.5e3))
self.slider_3.SetValue(int((self.frequency-self.f_slider_offset)/self.f_slider_scale))
if self.button_12.GetValue():
self.auto_antenna_tune()
# Seek forwards in file
def fwd(self, event):
if self.PLAY_FROM_USRP == False:
self.src.seek(10000000,gr.SEEK_CUR)
# Seek backwards in file
def rew(self, event):
if self.PLAY_FROM_USRP == False:
self.src.seek(-10000000,gr.SEEK_CUR)
# Mouse over fft display - show frequency in tooltip
def Mouse(self,event):
if self.AM_mode:
fRel = ( event.GetX() - 330. ) / 14.266666 - 7.5
else:
fRel = ( event.GetX() - 330. ) / 14.266666
self.fft.win.SetToolTip(wx.ToolTip(eng_notation.num_to_str(self.frequency + (fRel*1e3))))
# Mouse clicked on fft display - change frequency
def Click(self,event):
fRel = ( event.GetX() - 330. ) / 14.266666
if self.AM_mode == False:
self.frequency = self.frequency + (fRel*1e3)
else:
self.frequency = self.frequency + (fRel*1e3) - 7.5e3
self.spin_ctrl_1.SetValue(int(self.frequency))
self.slider_3.SetValue(int((self.frequency-self.f_slider_offset)/self.f_slider_scale))
if self.AM_mode == False:
self.xlate.set_center_freq ( self.usrp_center - ( self.frequency - self.tune_offset ))
else:
self.xlate.set_center_freq( self.usrp_center - (self.frequency - self.tune_offset - 7.5e3))
# Set power of AM sync carrier
def am_carrier(self,event):
scale = math.pow(10,(self.slider_6.GetValue())/50.)
self.pll_carrier_scale.set_k(complex(scale,0))
# Reset AT data and start calibrate over
def AT_reset(self, event):
self.xdata = []
self.ydata = []
# Save AT setting for a particular frequency
def AT_calibrate(self, event):
self.xdata.append(float(self.frequency))
self.ydata.append(self.slider_7.GetValue())
if len(self.xdata) > 1:
self.m = []
self.b = []
for i in range(0,len(self.xdata)-1):
self.m.append( (self.ydata[i+1] - self.ydata[i]) / (self.xdata[i+1] - self.xdata[i]) )
self.b.append( self.ydata[i] - self.m[i] * self.xdata[i] )
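# worked example (values illustrative): calibrating (7.0e6 Hz, 120) and
# (7.3e6 Hz, 180) stores m[0] = (180-120)/(7.3e6-7.0e6) = 2e-4 and
# b[0] = 120 - m[0]*7.0e6; auto_antenna_tune() then evaluates m[0]*f + b[0]
# for frequencies between the two calibration points.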
# Lookup calibrated points and calculate interpolated antenna tune voltage.
# This is to automatically tune a narrowband loop antenna when the freq
# is changed, to keep signals peaked.
def auto_antenna_tune(self):
for i in range(0,len(self.xdata)-1):
if (self.frequency > self.xdata[i]) and (self.frequency < self.xdata[i+1]):
self.slider_7.SetValue(self.m[i]*self.frequency + self.b[i])
self.antenna_tune(0)
# Slider to set loop antenna capacitance
def antenna_tune(self, evt):
if self.PLAY_FROM_USRP:
dev = self.src.get_dboard_iface()
dev.write_aux_dac(uhd.dboard_iface.UNIT_RX,
uhd.dboard_iface.AUX_DAC_C,
float(self.slider_7.GetValue()))
# Timer events - check for web commands
def OnUpdate(self):
cmds = os.listdir("/var/www/cgi-bin/commands/")
if cmds!=[]:
if cmds[0]=='chfreq':
fd=open("/var/www/cgi-bin/commands/chfreq","r")
new=fd.readline()
fd.close()
if new!='':
os.unlink("/var/www/cgi-bin/commands/chfreq")
if ( int(new) >= self.f_lo ) and ( int(new) <= self.f_hi ):
self.frequency = int(new)
self.slider_3.SetValue(( self.frequency - self.f_slider_offset) / self.f_slider_scale )
self.spin_ctrl_1.SetValue(self.frequency)
if self.button_12.GetValue():
self.auto_antenna_tune()
if self.AM_mode:
self.xlate.set_center_freq ( self.usrp_center - ( self.frequency - self.tune_offset - 7.5e3 ))
else:
self.xlate.set_center_freq ( self.usrp_center - ( self.frequency - self.tune_offset ))
if cmds[0]=='chvolume':
fd=open("/var/www/cgi-bin/commands/chvolume","r")
new=fd.readline()
fd.close()
if new!='':
os.unlink("/var/www/cgi-bin/commands/chvolume")
if ( int(new) >= 0 ) and ( int(new) <= 500 ):
self.volume = int(new)
self.slider_4.SetValue(self.volume)
self.scale.set_k(math.pow(10.,((self.slider_4.GetValue()-500.)/100.)))
else: # no new web commands, update state
fh = open("/var/www/cgi-bin/state/freq","w")
fh.write(str(int(self.frequency))+'\n')
fh.close()
fh = open("/var/www/cgi-bin/state/volume","w")
fh.write(str(self.slider_4.GetValue())+'\n')
fh.close()
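# protocol sketch: a web CGI script drops a file such as
# /var/www/cgi-bin/commands/chfreq containing e.g. "7050000\n"; the next
# timer tick applies the value (if it lies within [f_lo, f_hi]), deletes
# the command file, and otherwise mirrors the current frequency/volume
# into /var/www/cgi-bin/state/ for the web UI to read.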
# end of class MyFrame
# wx.Timer to check for web updates
class UpdateTimer(wx.Timer):
def __init__(self, target, dur=1000):
wx.Timer.__init__(self)
self.target = target
self.Start(dur)
def Notify(self):
"""Called every timer interval"""
if self.target:
self.target.OnUpdate()
class MyApp(wx.App):
def OnInit(self):
frame = MyFrame(None, -1, "HF Explorer")
frame.Show(True)
self.SetTopWindow(frame)
return True
if __name__ == "__main__":
app = MyApp(0)
app.MainLoop()
| gpl-3.0 |
alaunay/bigtop | bigtop-packages/src/charm/giraph/layer-giraph/tests/01-basic-deployment.py | 7 | 1194 | #!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import amulet
class TestDeploy(unittest.TestCase):
"""
Trivial deployment test for Giraph.
"""
@classmethod
def setUpClass(cls):
cls.d = amulet.Deployment(series='xenial')
cls.d.add('giraph', 'cs:xenial/giraph')
cls.d.setup(timeout=900)
cls.d.sentry.wait(timeout=1800)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
qma/pants | tests/python/pants_test/backend/python/tasks/test_python_repl.py | 3 | 6237 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import sys
from contextlib import contextmanager
from textwrap import dedent
from pants.backend.core.tasks.repl_task_mixin import ReplTaskMixin
from pants.backend.python.tasks.python_repl import PythonRepl
from pants.base.exceptions import TaskError
from pants.build_graph.address import Address
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.build_graph.target import Target
from pants.util.contextutil import temporary_dir
from pants_test.backend.python.tasks.python_task_test_base import PythonTaskTestBase
class PythonReplTest(PythonTaskTestBase):
@classmethod
def task_type(cls):
return PythonRepl
class JvmTarget(Target):
pass
@property
def alias_groups(self):
return super(PythonReplTest, self).alias_groups.merge(
BuildFileAliases(targets={'jvm_target': self.JvmTarget}))
def create_non_python_target(self, relpath, name):
self.create_file(relpath=self.build_path(relpath), contents=dedent("""
jvm_target(
name='{name}',
)
""").format(name=name))
return self.target(Address(relpath, name).spec)
def setUp(self):
super(PythonReplTest, self).setUp()
self.six = self.create_python_requirement_library('3rdparty/python/six', 'six',
requirements=['six==1.9.0'])
self.requests = self.create_python_requirement_library('3rdparty/python/requests', 'requests',
requirements=['requests==2.6.0'])
self.library = self.create_python_library('src/python/lib', 'lib', {'lib.py': dedent("""
import six
def go():
six.print_('go', 'go', 'go!', sep='')
""")}, dependencies=['//3rdparty/python/six'])
self.binary = self.create_python_binary('src/python/bin', 'bin', 'lib.go',
dependencies=['//src/python/lib'])
self.non_python_target = self.create_non_python_target('src/python/java', 'java')
def tearDown(self):
super(PythonReplTest, self).tearDown()
ReplTaskMixin.reset_implementations()
@contextmanager
def new_io(self, input):
orig_stdin, orig_stdout, orig_stderr = sys.stdin, sys.stdout, sys.stderr
with temporary_dir() as iodir:
stdin = os.path.join(iodir, 'stdin')
stdout = os.path.join(iodir, 'stdout')
stderr = os.path.join(iodir, 'stderr')
with open(stdin, 'w') as fp:
fp.write(input)
with open(stdin, 'rb') as inp, open(stdout, 'wb') as out, open(stderr, 'wb') as err:
sys.stdin, sys.stdout, sys.stderr = inp, out, err
try:
yield inp, out, err
finally:
sys.stdin, sys.stdout, sys.stderr = orig_stdin, orig_stdout, orig_stderr
def do_test_repl(self, code, expected, targets, options=None):
if options:
self.set_options(**options)
class JvmRepl(ReplTaskMixin):
options_scope = 'test_scope_jvm_repl'
@classmethod
def select_targets(cls, target):
return isinstance(target, self.JvmTarget)
def setup_repl_session(_, targets):
raise AssertionError()
def launch_repl(_, session_setup):
raise AssertionError()
# Add a competing REPL impl.
JvmRepl.prepare(self.options, round_manager=None)
python_repl = self.create_task(self.context(target_roots=targets))
original_launcher = python_repl.launch_repl
with self.new_io('\n'.join(code)) as (inp, out, err):
def custom_io_patched_launcher(pex):
return original_launcher(pex, stdin=inp, stdout=out, stderr=err)
python_repl.launch_repl = custom_io_patched_launcher
python_repl.execute()
with open(out.name) as fp:
lines = fp.read()
if not expected:
self.assertEqual('', lines)
else:
for expectation in expected:
self.assertIn(expectation, lines)
def do_test_library(self, *targets):
self.do_test_repl(code=['from lib.lib import go',
'go()'],
expected=['gogogo!'],
targets=targets)
def test_library(self):
self.do_test_library(self.library)
def test_binary(self):
self.do_test_library(self.binary)
def test_requirement(self):
self.do_test_repl(code=['import six',
'print("python 2?:{}".format(six.PY2))'],
expected=['python 2?:True'],
targets=[self.six])
def test_mixed_python(self):
self.do_test_repl(code=['import requests',
'import six',
'from lib.lib import go',
'print("teapot response code is: {}".format(requests.codes.teapot))',
'go()',
'print("python 2?:{}".format(six.PY2))'],
expected=['teapot response code is: 418',
'gogogo!',
'python 2?:True'],
targets=[self.requests, self.binary])
def test_disallowed_mix(self):
with self.assertRaises(TaskError):
self.do_test_repl(code=['print("unreachable")'],
expected=[],
targets=[self.library, self.non_python_target])
def test_non_python_targets(self):
self.do_test_repl(code=['import java.lang.unreachable'],
expected=[''],
targets=[self.non_python_target])
def test_ipython(self):
# IPython supports shelling out with a leading !, so indirectly test its presence by reading
# the head of this very file.
with open(__file__) as fp:
me = fp.readline()
self.do_test_repl(code=['!head -1 {}'.format(__file__)],
expected=[me],
targets=[self.six], # Just to get the repl to pop up.
options={'ipython': True})
| apache-2.0 |
gangadharkadam/vlinkfrappe | frappe/build.py | 12 | 5396 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
from frappe.utils.minify import JavascriptMinify
"""
Build the `public` folders and setup languages
"""
import os, frappe, json, shutil, re
# from cssmin import cssmin
app_paths = None
def setup():
global app_paths
pymodules = []
for app in frappe.get_all_apps(True):
try:
pymodules.append(frappe.get_module(app))
except ImportError: pass
app_paths = [os.path.dirname(pymodule.__file__) for pymodule in pymodules]
def bundle(no_compress, make_copy=False, verbose=False):
"""concat / minify js files"""
# build js files
setup()
make_asset_dirs(make_copy=make_copy)
build(no_compress, verbose)
def watch(no_compress):
"""watch and rebuild if necessary"""
setup()
import time
compile_less()
build(no_compress=True)
while True:
compile_less()
if files_dirty():
build(no_compress=True)
time.sleep(3)
def make_asset_dirs(make_copy=False):
assets_path = os.path.join(frappe.local.sites_path, "assets")
for dir_path in [
os.path.join(assets_path, 'js'),
os.path.join(assets_path, 'css')]:
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# symlink app/public > assets/app
for app_name in frappe.get_all_apps(True):
pymodule = frappe.get_module(app_name)
source = os.path.join(os.path.abspath(os.path.dirname(pymodule.__file__)), 'public')
target = os.path.join(assets_path, app_name)
if not os.path.exists(target) and os.path.exists(source):
if make_copy:
shutil.copytree(os.path.abspath(source), target)
else:
os.symlink(os.path.abspath(source), target)
def build(no_compress=False, verbose=False):
assets_path = os.path.join(frappe.local.sites_path, "assets")
for target, sources in get_build_maps().iteritems():
pack(os.path.join(assets_path, target), sources, no_compress, verbose)
shutil.copy(os.path.join(os.path.dirname(os.path.abspath(frappe.__file__)), 'data', 'languages.txt'), frappe.local.sites_path)
# reset_app_html()
def get_build_maps():
"""get all build.jsons with absolute paths"""
# framework js and css files
build_maps = {}
for app_path in app_paths:
path = os.path.join(app_path, 'public', 'build.json')
if os.path.exists(path):
with open(path) as f:
try:
for target, sources in json.loads(f.read()).iteritems():
# update app path
source_paths = []
for source in sources:
if isinstance(source, list):
s = frappe.get_pymodule_path(source[0], *source[1].split("/"))
else:
s = os.path.join(app_path, source)
source_paths.append(s)
build_maps[target] = source_paths
except Exception:
print path
raise
return build_maps
timestamps = {}
def pack(target, sources, no_compress, verbose):
from cStringIO import StringIO
outtype, outtxt = target.split(".")[-1], ''
jsm = JavascriptMinify()
for f in sources:
suffix = None
if ':' in f: f, suffix = f.split(':')
if not os.path.exists(f) or os.path.isdir(f):
print "did not find " + f
continue
timestamps[f] = os.path.getmtime(f)
try:
with open(f, 'r') as sourcefile:
data = unicode(sourcefile.read(), 'utf-8', errors='ignore')
extn = f.rsplit(".", 1)[1]
if outtype=="js" and extn=="js" and (not no_compress) and suffix!="concat" and (".min." not in f):
tmpin, tmpout = StringIO(data.encode('utf-8')), StringIO()
jsm.minify(tmpin, tmpout)
minified = tmpout.getvalue()
outtxt += unicode(minified or '', 'utf-8').strip('\n') + ';'
if verbose:
print "{0}: {1}k".format(f, int(len(minified) / 1024))
elif outtype=="js" and extn=="html":
# add to frappe.templates
outtxt += html_to_js_template(f, data)
else:
outtxt += ('\n/*\n *\t%s\n */' % f)
outtxt += '\n' + data + '\n'
except Exception:
print "--Error in:" + f + "--"
print frappe.get_traceback()
if not no_compress and outtype == 'css':
pass
#outtxt = cssmin(outtxt)
with open(target, 'w') as f:
f.write(outtxt.encode("utf-8"))
print "Wrote %s - %sk" % (target, str(int(os.path.getsize(target)/1024)))
def html_to_js_template(path, content):
# collapse runs of whitespace to a single space
content = re.sub(r"\s+", " ", content)
# strip HTML comments
content = re.sub(r"(<!--.*?-->)", "", content)
return """frappe.templates["{key}"] = '{content}';\n""".format(\
key=path.rsplit("/", 1)[-1][:-5], content=content.replace("'", "\\'"))
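# illustrative example (hypothetical file name and content):
# html_to_js_template("templates/item.html", "<div>{{ name }}</div>")
# returns: frappe.templates["item"] = '<div>{{ name }}</div>';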
def files_dirty():
for target, sources in get_build_maps().iteritems():
for f in sources:
if ':' in f: f, suffix = f.split(':')
if not os.path.exists(f) or os.path.isdir(f): continue
if os.path.getmtime(f) != timestamps.get(f):
print f + ' dirty'
return True
else:
return False
def compile_less():
for path in app_paths:
less_path = os.path.join(path, "public", "less")
if os.path.exists(less_path):
for fname in os.listdir(less_path):
if fname.endswith(".less") and fname != "variables.less":
fpath = os.path.join(less_path, fname)
mtime = os.path.getmtime(fpath)
if fpath in timestamps and mtime == timestamps[fpath]:
continue
timestamps[fpath] = mtime
print "compiling {0}".format(fpath)
css_path = os.path.join(path, "public", "css", fname.rsplit(".", 1)[0] + ".css")
os.system("which lessc && lessc {0} > {1}".format(fpath, css_path))
| mit |
materialsproject/pymatgen | pymatgen/apps/battery/tests/test_insertion_battery.py | 5 | 7072 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import json
import os
import unittest
from monty.json import MontyDecoder, MontyEncoder
from pymatgen.apps.battery.insertion_battery import InsertionElectrode, InsertionVoltagePair
from pymatgen.entries.computed_entries import ComputedEntry
from pymatgen.util.testing import PymatgenTest
class InsertionElectrodeTest(unittest.TestCase):
def setUp(self):
self.entry_Li = ComputedEntry("Li", -1.90753119)
self.entry_Ca = ComputedEntry("Ca", -1.99689568)
with open(os.path.join(PymatgenTest.TEST_FILES_DIR, "LiTiO2_batt.json"), "r") as f:
self.entries_LTO = json.load(f, cls=MontyDecoder)
with open(os.path.join(PymatgenTest.TEST_FILES_DIR, "MgVO_batt.json"), "r") as file:
self.entries_MVO = json.load(file, cls=MontyDecoder)
with open(os.path.join(PymatgenTest.TEST_FILES_DIR, "Mg_batt.json"), "r") as file:
self.entry_Mg = json.load(file, cls=MontyDecoder)
with open(os.path.join(PymatgenTest.TEST_FILES_DIR, "CaMoO2_batt.json"), "r") as f:
self.entries_CMO = json.load(f, cls=MontyDecoder)
self.ie_LTO = InsertionElectrode.from_entries(self.entries_LTO, self.entry_Li)
self.ie_MVO = InsertionElectrode.from_entries(self.entries_MVO, self.entry_Mg)
self.ie_CMO = InsertionElectrode.from_entries(self.entries_CMO, self.entry_Ca)
def test_voltage(self):
# test basic voltage
self.assertAlmostEqual(self.ie_LTO.max_voltage, 2.78583901, 3)
self.assertAlmostEqual(self.ie_LTO.min_voltage, 0.89702381, 3)
self.assertAlmostEqual(self.ie_LTO.get_average_voltage(), 1.84143141, 3)
# test voltage range selectors
self.assertAlmostEqual(self.ie_LTO.get_average_voltage(0, 1), 0.89702381, 3)
self.assertAlmostEqual(self.ie_LTO.get_average_voltage(2, 3), 2.78583901, 3)
# test non-existing voltage range
self.assertAlmostEqual(self.ie_LTO.get_average_voltage(0, 0.1), 0, 3)
self.assertAlmostEqual(self.ie_LTO.get_average_voltage(4, 5), 0, 3)
self.assertAlmostEqual(self.ie_MVO.get_average_voltage(), 2.513767, 3)
def test_capacities(self):
# test basic capacity
self.assertAlmostEqual(self.ie_LTO.get_capacity_grav(), 308.74865045, 3)
self.assertAlmostEqual(self.ie_LTO.get_capacity_vol(), 1205.99391136, 3)
# test capacity selector
self.assertAlmostEqual(self.ie_LTO.get_capacity_grav(1, 3), 154.374325225, 3)
# test alternate normalization option
self.assertAlmostEqual(self.ie_LTO.get_capacity_grav(1, 3, False), 160.803169506, 3)
self.assertIsNotNone(self.ie_LTO.get_summary_dict(True))
self.assertAlmostEqual(self.ie_MVO.get_capacity_grav(), 281.845548242, 3)
self.assertAlmostEqual(self.ie_MVO.get_capacity_vol(), 1145.80087994, 3)
def test_get_instability(self):
self.assertIsNone(self.ie_LTO.get_max_instability())
self.assertAlmostEqual(self.ie_MVO.get_max_instability(), 0.7233711650000014)
self.assertAlmostEqual(self.ie_MVO.get_min_instability(), 0.4913575099999994)
def test_get_muO2(self):
self.assertIsNone(self.ie_LTO.get_max_muO2())
self.assertAlmostEqual(self.ie_MVO.get_max_muO2(), -4.93552791875)
self.assertAlmostEqual(self.ie_MVO.get_min_muO2(), -11.06599657)
def test_entries(self):
# test that the proper number of sub-electrodes are returned
self.assertEqual(len(self.ie_LTO.get_sub_electrodes(False, True)), 3)
self.assertEqual(len(self.ie_LTO.get_sub_electrodes(True, True)), 2)
def test_get_all_entries(self):
self.ie_LTO.get_all_entries()
def test_to_from_dict(self):
d = self.ie_LTO.as_dict()
ie = InsertionElectrode.from_dict(d)
self.assertAlmostEqual(ie.max_voltage, 2.78583901, 3)
self.assertAlmostEqual(ie.min_voltage, 0.89702381, 3)
self.assertAlmostEqual(ie.get_average_voltage(), 1.84143141, 3)
# Just to make sure json string works.
json_str = json.dumps(self.ie_LTO, cls=MontyEncoder)
ie = json.loads(json_str, cls=MontyDecoder)
self.assertAlmostEqual(ie.max_voltage, 2.78583901, 3)
self.assertAlmostEqual(ie.min_voltage, 0.89702381, 3)
self.assertAlmostEqual(ie.get_average_voltage(), 1.84143141, 3)
def test_voltage_pair(self):
vpair = self.ie_LTO[0]
self.assertAlmostEqual(vpair.voltage, 2.78583901)
self.assertAlmostEqual(vpair.mAh, 13400.7411749, 2)
self.assertAlmostEqual(vpair.mass_charge, 79.8658)
self.assertAlmostEqual(vpair.mass_discharge, 83.3363)
self.assertAlmostEqual(vpair.vol_charge, 37.553684467)
self.assertAlmostEqual(vpair.vol_discharge, 37.917719932)
self.assertAlmostEqual(vpair.frac_charge, 0.0)
self.assertAlmostEqual(vpair.frac_discharge, 0.14285714285714285)
self.assertAlmostEqual(vpair.x_charge, 0.0)
self.assertAlmostEqual(vpair.x_discharge, 0.5)
# reconstruct the voltage pair
dd = vpair.as_dict()
vv = InsertionVoltagePair.from_dict(dd)
self.assertAlmostEqual(vv.entry_charge.energy, -105.53608265)
self.assertAlmostEqual(vv.voltage, 2.78583901)
def test_get_summary_dict(self):
d = self.ie_CMO.get_summary_dict()
self.assertAlmostEqual(d["stability_charge"], 0.2346574583333325)
self.assertAlmostEqual(d["stability_discharge"], 0.33379544031249786)
self.assertAlmostEqual(d["muO2_data"]["mp-714969"][0]["chempot"], -4.93552791875)
self.assertAlmostEqual(d["adj_pairs"][0]["muO2_data"]["mp-714969"][0]["chempot"], -4.93552791875)
self.assertAlmostEqual(d["framework_formula"], "MoO2")
self.assertAlmostEqual(d["adj_pairs"][1]["framework_formula"], "MoO2")
def test_init_no_structure(self):
def remove_structure(entries):
ents = []
for ient in entries:
dd = ient.as_dict()
ent = ComputedEntry.from_dict(dd)
ent.data["volume"] = ient.structure.volume
ents.append(ent)
return ents
ie_CMO_no_struct = InsertionElectrode.from_entries(remove_structure(self.entries_CMO), self.entry_Ca)
d = ie_CMO_no_struct.get_summary_dict()
self.assertAlmostEqual(d["stability_charge"], 0.2346574583333325)
self.assertAlmostEqual(d["stability_discharge"], 0.33379544031249786)
self.assertAlmostEqual(d["muO2_data"]["mp-714969"][0]["chempot"], -4.93552791875)
ie_LTO_no_struct = InsertionElectrode.from_entries(self.entries_LTO, self.entry_Li, strip_structures=True)
vols_no_struct = [ient.data["volume"] for ient in ie_LTO_no_struct.get_all_entries()]
vols_struct = [ient.structure.volume for ient in self.ie_LTO.get_all_entries()]
self.assertAlmostEqual(vols_no_struct, vols_struct)
if __name__ == "__main__":
unittest.main()
| mit |
r-mart/scikit-learn | sklearn/feature_extraction/text.py | 110 | 50157 | # -*- coding: utf-8 -*-
# Authors: Olivier Grisel <olivier.grisel@ensta.org>
# Mathieu Blondel <mathieu@mblondel.org>
# Lars Buitinck <L.J.Buitinck@uva.nl>
# Robert Layton <robertlayton@gmail.com>
# Jochen Wersdörfer <jochen@wersdoerfer.de>
# Roman Sinayev <roman.sinayev@gmail.com>
#
# License: BSD 3 clause
"""
The :mod:`sklearn.feature_extraction.text` submodule gathers utilities to
build feature vectors from text documents.
"""
from __future__ import unicode_literals
import array
from collections import Mapping, defaultdict
import numbers
from operator import itemgetter
import re
import unicodedata
import numpy as np
import scipy.sparse as sp
from ..base import BaseEstimator, TransformerMixin
from ..externals import six
from ..externals.six.moves import xrange
from ..preprocessing import normalize
from .hashing import FeatureHasher
from .stop_words import ENGLISH_STOP_WORDS
from ..utils import deprecated
from ..utils.fixes import frombuffer_empty, bincount
from ..utils.validation import check_is_fitted
__all__ = ['CountVectorizer',
'ENGLISH_STOP_WORDS',
'TfidfTransformer',
'TfidfVectorizer',
'strip_accents_ascii',
'strip_accents_unicode',
'strip_tags']
def strip_accents_unicode(s):
"""Transform accentuated unicode symbols into their simple counterpart
Warning: the python-level loop and join operations make this
implementation 20 times slower than the strip_accents_ascii basic
normalization.
See also
--------
strip_accents_ascii
Remove accentuated char for any unicode symbol that has a direct
ASCII equivalent.
"""
return ''.join([c for c in unicodedata.normalize('NFKD', s)
if not unicodedata.combining(c)])
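# e.g. strip_accents_unicode(u'\xe9t\xe9') == u'ete': NFKD splits each
# accented character into a base character plus a combining mark, and the
# combining marks are then dropped.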
def strip_accents_ascii(s):
"""Transform accentuated unicode symbols into ascii or nothing
Warning: this solution is only suited for languages that have a direct
transliteration to ASCII symbols.
See also
--------
strip_accents_unicode
Remove accentuated char for any unicode symbol.
"""
nkfd_form = unicodedata.normalize('NFKD', s)
return nkfd_form.encode('ASCII', 'ignore').decode('ASCII')
def strip_tags(s):
"""Basic regexp based HTML / XML tag stripper function
For serious HTML/XML preprocessing you should rather use an external
library such as lxml or BeautifulSoup.
"""
return re.compile(r"<([^>]+)>", flags=re.UNICODE).sub(" ", s)
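# e.g. strip_tags('<p>Hello</p>') == ' Hello ': each tag is replaced by a
# single space, so the result may need further whitespace normalization.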
def _check_stop_list(stop):
if stop == "english":
return ENGLISH_STOP_WORDS
elif isinstance(stop, six.string_types):
raise ValueError("not a built-in stop list: %s" % stop)
elif stop is None:
return None
else: # assume it's a collection
return frozenset(stop)
class VectorizerMixin(object):
"""Provides common code for text vectorizers (tokenization logic)."""
_white_spaces = re.compile(r"\s\s+")
def decode(self, doc):
"""Decode the input into a string of unicode symbols
The decoding strategy depends on the vectorizer parameters.
"""
if self.input == 'filename':
with open(doc, 'rb') as fh:
doc = fh.read()
elif self.input == 'file':
doc = doc.read()
if isinstance(doc, bytes):
doc = doc.decode(self.encoding, self.decode_error)
if doc is np.nan:
raise ValueError("np.nan is an invalid document, expected byte or "
"unicode string.")
return doc
def _word_ngrams(self, tokens, stop_words=None):
"""Turn tokens into a sequence of n-grams after stop words filtering"""
# handle stop words
if stop_words is not None:
tokens = [w for w in tokens if w not in stop_words]
# handle token n-grams
min_n, max_n = self.ngram_range
if max_n != 1:
original_tokens = tokens
tokens = []
n_original_tokens = len(original_tokens)
for n in xrange(min_n,
min(max_n + 1, n_original_tokens + 1)):
for i in xrange(n_original_tokens - n + 1):
tokens.append(" ".join(original_tokens[i: i + n]))
return tokens
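# e.g. with ngram_range=(1, 2), tokens ['please', 'call', 'me'] yield
# ['please', 'call', 'me', 'please call', 'call me'].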
def _char_ngrams(self, text_document):
"""Tokenize text_document into a sequence of character n-grams"""
# normalize white spaces
text_document = self._white_spaces.sub(" ", text_document)
text_len = len(text_document)
ngrams = []
min_n, max_n = self.ngram_range
for n in xrange(min_n, min(max_n + 1, text_len + 1)):
for i in xrange(text_len - n + 1):
ngrams.append(text_document[i: i + n])
return ngrams
def _char_wb_ngrams(self, text_document):
"""Whitespace sensitive char-n-gram tokenization.
Tokenize text_document into a sequence of character n-grams
excluding any whitespace (operating only inside word boundaries)"""
# normalize white spaces
text_document = self._white_spaces.sub(" ", text_document)
min_n, max_n = self.ngram_range
ngrams = []
for w in text_document.split():
w = ' ' + w + ' '
w_len = len(w)
for n in xrange(min_n, max_n + 1):
offset = 0
ngrams.append(w[offset:offset + n])
while offset + n < w_len:
offset += 1
ngrams.append(w[offset:offset + n])
if offset == 0: # count a short word (w_len < n) only once
break
return ngrams
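# e.g. with ngram_range=(2, 2), 'hi' is padded to ' hi ' and yields
# [' h', 'hi', 'i ']; n-grams never straddle a word boundary.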
def build_preprocessor(self):
"""Return a function to preprocess the text before tokenization"""
if self.preprocessor is not None:
return self.preprocessor
# unfortunately python functools package does not have an efficient
# `compose` function that would have allowed us to chain a dynamic
# number of functions. However the cost of a lambda call is a few
# hundreds of nanoseconds which is negligible when compared to the
# cost of tokenizing a string of 1000 chars for instance.
noop = lambda x: x
# accent stripping
if not self.strip_accents:
strip_accents = noop
elif callable(self.strip_accents):
strip_accents = self.strip_accents
elif self.strip_accents == 'ascii':
strip_accents = strip_accents_ascii
elif self.strip_accents == 'unicode':
strip_accents = strip_accents_unicode
else:
raise ValueError('Invalid value for "strip_accents": %s' %
self.strip_accents)
if self.lowercase:
return lambda x: strip_accents(x.lower())
else:
return strip_accents
def build_tokenizer(self):
"""Return a function that splits a string into a sequence of tokens"""
if self.tokenizer is not None:
return self.tokenizer
token_pattern = re.compile(self.token_pattern)
return lambda doc: token_pattern.findall(doc)
def get_stop_words(self):
"""Build or fetch the effective stop words list"""
return _check_stop_list(self.stop_words)
def build_analyzer(self):
"""Return a callable that handles preprocessing and tokenization"""
if callable(self.analyzer):
return self.analyzer
preprocess = self.build_preprocessor()
if self.analyzer == 'char':
return lambda doc: self._char_ngrams(preprocess(self.decode(doc)))
elif self.analyzer == 'char_wb':
return lambda doc: self._char_wb_ngrams(
preprocess(self.decode(doc)))
elif self.analyzer == 'word':
stop_words = self.get_stop_words()
tokenize = self.build_tokenizer()
return lambda doc: self._word_ngrams(
tokenize(preprocess(self.decode(doc))), stop_words)
else:
raise ValueError('%s is not a valid tokenization scheme/analyzer' %
self.analyzer)
def _validate_vocabulary(self):
vocabulary = self.vocabulary
if vocabulary is not None:
if not isinstance(vocabulary, Mapping):
vocab = {}
for i, t in enumerate(vocabulary):
if vocab.setdefault(t, i) != i:
msg = "Duplicate term in vocabulary: %r" % t
raise ValueError(msg)
vocabulary = vocab
else:
indices = set(six.itervalues(vocabulary))
if len(indices) != len(vocabulary):
raise ValueError("Vocabulary contains repeated indices.")
for i in xrange(len(vocabulary)):
if i not in indices:
msg = ("Vocabulary of size %d doesn't contain index "
"%d." % (len(vocabulary), i))
raise ValueError(msg)
if not vocabulary:
raise ValueError("empty vocabulary passed to fit")
self.fixed_vocabulary_ = True
self.vocabulary_ = dict(vocabulary)
else:
self.fixed_vocabulary_ = False
def _check_vocabulary(self):
"""Check if vocabulary is empty or missing (not fit-ed)"""
msg = "%(name)s - Vocabulary wasn't fitted."
check_is_fitted(self, 'vocabulary_', msg=msg)
if len(self.vocabulary_) == 0:
raise ValueError("Vocabulary is empty")
@property
@deprecated("The `fixed_vocabulary` attribute is deprecated and will be "
"removed in 0.18. Please use `fixed_vocabulary_` instead.")
def fixed_vocabulary(self):
return self.fixed_vocabulary_
class HashingVectorizer(BaseEstimator, VectorizerMixin):
"""Convert a collection of text documents to a matrix of token occurrences
It turns a collection of text documents into a scipy.sparse matrix holding
token occurrence counts (or binary occurrence information), possibly
normalized as token frequencies if norm='l1' or projected on the euclidean
unit sphere if norm='l2'.
This text vectorizer implementation uses the hashing trick to find the
token string name to feature integer index mapping.
This strategy has several advantages:
- it is very low memory scalable to large datasets as there is no need to
store a vocabulary dictionary in memory
- it is fast to pickle and un-pickle as it holds no state besides the
constructor parameters
- it can be used in a streaming (partial fit) or parallel pipeline as there
is no state computed during fit.
There are also a couple of cons (vs using a CountVectorizer with an
in-memory vocabulary):
- there is no way to compute the inverse transform (from feature indices to
string feature names) which can be a problem when trying to introspect
which features are most important to a model.
- there can be collisions: distinct tokens can be mapped to the same
feature index. However in practice this is rarely an issue if n_features
is large enough (e.g. 2 ** 18 for text classification problems).
- no IDF weighting as this would render the transformer stateful.
The hash function employed is the signed 32-bit version of Murmurhash3.
Read more in the :ref:`User Guide <text_feature_extraction>`.
Parameters
----------
input : string {'filename', 'file', 'content'}
If 'filename', the sequence passed as an argument to fit is
expected to be a list of filenames that need reading to fetch
the raw content to analyze.
If 'file', the sequence items must have a 'read' method (file-like
object) that is called to fetch the bytes in memory.
Otherwise the input is expected to be the sequence strings or
bytes items are expected to be analyzed directly.
encoding : string, default='utf-8'
If bytes or files are given to analyze, this encoding is used to
decode.
decode_error : {'strict', 'ignore', 'replace'}
Instruction on what to do if a byte sequence is given to analyze that
contains characters not of the given `encoding`. By default, it is
'strict', meaning that a UnicodeDecodeError will be raised. Other
values are 'ignore' and 'replace'.
strip_accents : {'ascii', 'unicode', None}
Remove accents during the preprocessing step.
'ascii' is a fast method that only works on characters that have
a direct ASCII mapping.
'unicode' is a slightly slower method that works on any characters.
None (default) does nothing.
analyzer : string, {'word', 'char', 'char_wb'} or callable
Whether the feature should be made of word or character n-grams.
Option 'char_wb' creates character n-grams only from text inside
word boundaries.
If a callable is passed it is used to extract the sequence of features
out of the raw, unprocessed input.
preprocessor : callable or None (default)
Override the preprocessing (string transformation) stage while
preserving the tokenizing and n-grams generation steps.
tokenizer : callable or None (default)
Override the string tokenization step while preserving the
preprocessing and n-grams generation steps.
Only applies if ``analyzer == 'word'``.
ngram_range : tuple (min_n, max_n), default=(1, 1)
The lower and upper boundary of the range of n-values for different
n-grams to be extracted. All values of n such that min_n <= n <= max_n
will be used.
stop_words : string {'english'}, list, or None (default)
If 'english', a built-in stop word list for English is used.
If a list, that list is assumed to contain stop words, all of which
will be removed from the resulting tokens.
Only applies if ``analyzer == 'word'``.
lowercase : boolean, default=True
Convert all characters to lowercase before tokenizing.
token_pattern : string
Regular expression denoting what constitutes a "token", only used
if ``analyzer == 'word'``. The default regexp selects tokens of 2
or more alphanumeric characters (punctuation is completely ignored
and always treated as a token separator).
n_features : integer, default=(2 ** 20)
The number of features (columns) in the output matrices. Small numbers
of features are likely to cause hash collisions, but large numbers
will cause larger coefficient dimensions in linear learners.
norm : 'l1', 'l2' or None, optional
Norm used to normalize term vectors. None for no normalization.
binary: boolean, default=False.
If True, all non zero counts are set to 1. This is useful for discrete
probabilistic models that model binary events rather than integer
counts.
dtype: type, optional
Type of the matrix returned by fit_transform() or transform().
non_negative : boolean, default=False
Whether output matrices should contain non-negative values only;
effectively calls abs on the matrix prior to returning it.
When True, output values can be interpreted as frequencies.
When False, output values will have expected value zero.
See also
--------
CountVectorizer, TfidfVectorizer
"""
def __init__(self, input='content', encoding='utf-8',
decode_error='strict', strip_accents=None,
lowercase=True, preprocessor=None, tokenizer=None,
stop_words=None, token_pattern=r"(?u)\b\w\w+\b",
ngram_range=(1, 1), analyzer='word', n_features=(2 ** 20),
binary=False, norm='l2', non_negative=False,
dtype=np.float64):
self.input = input
self.encoding = encoding
self.decode_error = decode_error
self.strip_accents = strip_accents
self.preprocessor = preprocessor
self.tokenizer = tokenizer
self.analyzer = analyzer
self.lowercase = lowercase
self.token_pattern = token_pattern
self.stop_words = stop_words
self.n_features = n_features
self.ngram_range = ngram_range
self.binary = binary
self.norm = norm
self.non_negative = non_negative
self.dtype = dtype
def partial_fit(self, X, y=None):
"""Does nothing: this transformer is stateless.
This method is just there to mark the fact that this transformer
can work in a streaming setup.
"""
return self
def fit(self, X, y=None):
"""Does nothing: this transformer is stateless."""
# triggers a parameter validation
self._get_hasher().fit(X, y=y)
return self
def transform(self, X, y=None):
"""Transform a sequence of documents to a document-term matrix.
Parameters
----------
X : iterable over raw text documents, length = n_samples
Samples. Each sample must be a text document (either bytes or
unicode strings, file name or file object depending on the
constructor argument) which will be tokenized and hashed.
y : (ignored)
Returns
-------
X : scipy.sparse matrix, shape = (n_samples, self.n_features)
Document-term matrix.
"""
analyzer = self.build_analyzer()
X = self._get_hasher().transform(analyzer(doc) for doc in X)
if self.binary:
X.data.fill(1)
if self.norm is not None:
X = normalize(X, norm=self.norm, copy=False)
return X
# Alias transform to fit_transform for convenience
fit_transform = transform
def _get_hasher(self):
return FeatureHasher(n_features=self.n_features,
input_type='string', dtype=self.dtype,
non_negative=self.non_negative)
def _document_frequency(X):
"""Count the number of non-zero values for each feature in sparse X."""
if sp.isspmatrix_csr(X):
return bincount(X.indices, minlength=X.shape[1])
else:
return np.diff(sp.csc_matrix(X, copy=False).indptr)
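# e.g. X = [[1, 0], [2, 3]] (as CSR) gives _document_frequency(X) == [2, 1]:
# term 0 appears in both documents, term 1 in only one.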
class CountVectorizer(BaseEstimator, VectorizerMixin):
"""Convert a collection of text documents to a matrix of token counts
This implementation produces a sparse representation of the counts using
scipy.sparse.coo_matrix.
If you do not provide an a-priori dictionary and you do not use an analyzer
that does some kind of feature selection then the number of features will
be equal to the vocabulary size found by analyzing the data.
Read more in the :ref:`User Guide <text_feature_extraction>`.
Parameters
----------
input : string {'filename', 'file', 'content'}
If 'filename', the sequence passed as an argument to fit is
expected to be a list of filenames that need reading to fetch
the raw content to analyze.
If 'file', the sequence items must have a 'read' method (file-like
object) that is called to fetch the bytes in memory.
Otherwise the input is expected to be the sequence strings or
bytes items are expected to be analyzed directly.
encoding : string, 'utf-8' by default.
If bytes or files are given to analyze, this encoding is used to
decode.
decode_error : {'strict', 'ignore', 'replace'}
Instruction on what to do if a byte sequence is given to analyze that
contains characters not of the given `encoding`. By default, it is
'strict', meaning that a UnicodeDecodeError will be raised. Other
values are 'ignore' and 'replace'.
strip_accents : {'ascii', 'unicode', None}
Remove accents during the preprocessing step.
'ascii' is a fast method that only works on characters that have
a direct ASCII mapping.
'unicode' is a slightly slower method that works on any characters.
None (default) does nothing.
analyzer : string, {'word', 'char', 'char_wb'} or callable
Whether the feature should be made of word or character n-grams.
Option 'char_wb' creates character n-grams only from text inside
word boundaries.
If a callable is passed it is used to extract the sequence of features
out of the raw, unprocessed input.
Only applies if ``analyzer == 'word'``.
preprocessor : callable or None (default)
Override the preprocessing (string transformation) stage while
preserving the tokenizing and n-grams generation steps.
tokenizer : callable or None (default)
Override the string tokenization step while preserving the
preprocessing and n-grams generation steps.
Only applies if ``analyzer == 'word'``.
ngram_range : tuple (min_n, max_n)
The lower and upper boundary of the range of n-values for different
n-grams to be extracted. All values of n such that min_n <= n <= max_n
will be used.
stop_words : string {'english'}, list, or None (default)
If 'english', a built-in stop word list for English is used.
If a list, that list is assumed to contain stop words, all of which
will be removed from the resulting tokens.
Only applies if ``analyzer == 'word'``.
If None, no stop words will be used. max_df can be set to a value
in the range [0.7, 1.0) to automatically detect and filter stop
words based on intra corpus document frequency of terms.
lowercase : boolean, True by default
Convert all characters to lowercase before tokenizing.
token_pattern : string
Regular expression denoting what constitutes a "token", only used
if ``analyzer == 'word'``. The default regexp selects tokens of 2
or more alphanumeric characters (punctuation is completely ignored
and always treated as a token separator).
max_df : float in range [0.0, 1.0] or int, default=1.0
When building the vocabulary ignore terms that have a document
frequency strictly higher than the given threshold (corpus-specific
stop words).
If float, the parameter represents a proportion of documents, integer
absolute counts.
This parameter is ignored if vocabulary is not None.
min_df : float in range [0.0, 1.0] or int, default=1
When building the vocabulary ignore terms that have a document
frequency strictly lower than the given threshold. This value is also
called cut-off in the literature.
If float, the parameter represents a proportion of documents, integer
absolute counts.
This parameter is ignored if vocabulary is not None.
max_features : int or None, default=None
If not None, build a vocabulary that only consider the top
max_features ordered by term frequency across the corpus.
This parameter is ignored if vocabulary is not None.
vocabulary : Mapping or iterable, optional
Either a Mapping (e.g., a dict) where keys are terms and values are
indices in the feature matrix, or an iterable over terms. If not
given, a vocabulary is determined from the input documents. Indices
in the mapping should not be repeated and should not have any gap
between 0 and the largest index.
binary : boolean, default=False
If True, all non zero counts are set to 1. This is useful for discrete
probabilistic models that model binary events rather than integer
counts.
dtype : type, optional
Type of the matrix returned by fit_transform() or transform().
Attributes
----------
vocabulary_ : dict
A mapping of terms to feature indices.
stop_words_ : set
Terms that were ignored because they either:
- occurred in too many documents (`max_df`)
- occurred in too few documents (`min_df`)
- were cut off by feature selection (`max_features`).
This is only available if no vocabulary was given.
See also
--------
HashingVectorizer, TfidfVectorizer
Notes
-----
The ``stop_words_`` attribute can get large and increase the model size
when pickling. This attribute is provided only for introspection and can
be safely removed using delattr or set to None before pickling.
"""
def __init__(self, input='content', encoding='utf-8',
decode_error='strict', strip_accents=None,
lowercase=True, preprocessor=None, tokenizer=None,
stop_words=None, token_pattern=r"(?u)\b\w\w+\b",
ngram_range=(1, 1), analyzer='word',
max_df=1.0, min_df=1, max_features=None,
vocabulary=None, binary=False, dtype=np.int64):
self.input = input
self.encoding = encoding
self.decode_error = decode_error
self.strip_accents = strip_accents
self.preprocessor = preprocessor
self.tokenizer = tokenizer
self.analyzer = analyzer
self.lowercase = lowercase
self.token_pattern = token_pattern
self.stop_words = stop_words
self.max_df = max_df
self.min_df = min_df
if max_df < 0 or min_df < 0:
raise ValueError("negative value for max_df of min_df")
self.max_features = max_features
if max_features is not None:
if (not isinstance(max_features, numbers.Integral) or
max_features <= 0):
raise ValueError(
"max_features=%r, neither a positive integer nor None"
% max_features)
self.ngram_range = ngram_range
self.vocabulary = vocabulary
self.binary = binary
self.dtype = dtype
def _sort_features(self, X, vocabulary):
"""Sort features by name
Returns a reordered matrix and modifies the vocabulary in place
"""
sorted_features = sorted(six.iteritems(vocabulary))
map_index = np.empty(len(sorted_features), dtype=np.int32)
for new_val, (term, old_val) in enumerate(sorted_features):
map_index[new_val] = old_val
vocabulary[term] = new_val
return X[:, map_index]
def _limit_features(self, X, vocabulary, high=None, low=None,
limit=None):
"""Remove too rare or too common features.
Prune features that are non zero in more samples than high or less
documents than low, modifying the vocabulary, and restricting it to
at most the limit most frequent.
This does not prune samples with zero features.
"""
if high is None and low is None and limit is None:
return X, set()
# Calculate a mask based on document frequencies
dfs = _document_frequency(X)
tfs = np.asarray(X.sum(axis=0)).ravel()
mask = np.ones(len(dfs), dtype=bool)
if high is not None:
mask &= dfs <= high
if low is not None:
mask &= dfs >= low
if limit is not None and mask.sum() > limit:
mask_inds = (-tfs[mask]).argsort()[:limit]
new_mask = np.zeros(len(dfs), dtype=bool)
new_mask[np.where(mask)[0][mask_inds]] = True
mask = new_mask
new_indices = np.cumsum(mask) - 1 # maps old indices to new
removed_terms = set()
for term, old_index in list(six.iteritems(vocabulary)):
if mask[old_index]:
vocabulary[term] = new_indices[old_index]
else:
del vocabulary[term]
removed_terms.add(term)
kept_indices = np.where(mask)[0]
if len(kept_indices) == 0:
raise ValueError("After pruning, no terms remain. Try a lower"
" min_df or a higher max_df.")
return X[:, kept_indices], removed_terms
def _count_vocab(self, raw_documents, fixed_vocab):
"""Create sparse feature matrix, and vocabulary where fixed_vocab=False
"""
if fixed_vocab:
vocabulary = self.vocabulary_
else:
# Add a new value when a new vocabulary item is seen
vocabulary = defaultdict()
vocabulary.default_factory = vocabulary.__len__
analyze = self.build_analyzer()
j_indices = _make_int_array()
indptr = _make_int_array()
indptr.append(0)
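# CSR layout being built here: the tokens of document i end up in
# j_indices[indptr[i]:indptr[i + 1]]. Each document appends the column
# index of every analyzed token, then records the running length of
# j_indices as the next indptr entry.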
for doc in raw_documents:
for feature in analyze(doc):
try:
j_indices.append(vocabulary[feature])
except KeyError:
# Ignore out-of-vocabulary items for fixed_vocab=True
continue
indptr.append(len(j_indices))
if not fixed_vocab:
# disable defaultdict behaviour
vocabulary = dict(vocabulary)
if not vocabulary:
raise ValueError("empty vocabulary; perhaps the documents only"
" contain stop words")
j_indices = frombuffer_empty(j_indices, dtype=np.intc)
indptr = np.frombuffer(indptr, dtype=np.intc)
values = np.ones(len(j_indices))
X = sp.csr_matrix((values, j_indices, indptr),
shape=(len(indptr) - 1, len(vocabulary)),
dtype=self.dtype)
X.sum_duplicates()
return vocabulary, X
def fit(self, raw_documents, y=None):
"""Learn a vocabulary dictionary of all tokens in the raw documents.
Parameters
----------
raw_documents : iterable
An iterable which yields either str, unicode or file objects.
Returns
-------
self
"""
self.fit_transform(raw_documents)
return self
def fit_transform(self, raw_documents, y=None):
"""Learn the vocabulary dictionary and return term-document matrix.
This is equivalent to fit followed by transform, but more efficiently
implemented.
Parameters
----------
raw_documents : iterable
An iterable which yields either str, unicode or file objects.
Returns
-------
X : sparse matrix, [n_samples, n_features]
Document-term matrix.
"""
# We intentionally don't call the transform method to make
# fit_transform overridable without unwanted side effects in
# TfidfVectorizer.
self._validate_vocabulary()
max_df = self.max_df
min_df = self.min_df
max_features = self.max_features
vocabulary, X = self._count_vocab(raw_documents,
self.fixed_vocabulary_)
if self.binary:
X.data.fill(1)
if not self.fixed_vocabulary_:
X = self._sort_features(X, vocabulary)
n_doc = X.shape[0]
max_doc_count = (max_df
if isinstance(max_df, numbers.Integral)
else max_df * n_doc)
min_doc_count = (min_df
if isinstance(min_df, numbers.Integral)
else min_df * n_doc)
if max_doc_count < min_doc_count:
raise ValueError(
"max_df corresponds to < documents than min_df")
X, self.stop_words_ = self._limit_features(X, vocabulary,
max_doc_count,
min_doc_count,
max_features)
self.vocabulary_ = vocabulary
return X
def transform(self, raw_documents):
"""Transform documents to document-term matrix.
Extract token counts out of raw text documents using the vocabulary
fitted with fit or the one provided to the constructor.
Parameters
----------
raw_documents : iterable
An iterable which yields either str, unicode or file objects.
Returns
-------
X : sparse matrix, [n_samples, n_features]
Document-term matrix.
"""
if not hasattr(self, 'vocabulary_'):
self._validate_vocabulary()
self._check_vocabulary()
# use the same matrix-building strategy as fit_transform
_, X = self._count_vocab(raw_documents, fixed_vocab=True)
if self.binary:
X.data.fill(1)
return X
def inverse_transform(self, X):
"""Return terms per document with nonzero entries in X.
Parameters
----------
X : {array, sparse matrix}, shape = [n_samples, n_features]
Returns
-------
X_inv : list of arrays, len = n_samples
List of arrays of terms.
"""
self._check_vocabulary()
if sp.issparse(X):
# We need CSR format for fast row manipulations.
X = X.tocsr()
else:
# We need to convert X to a matrix, so that the indexing
# returns 2D objects
X = np.asmatrix(X)
n_samples = X.shape[0]
terms = np.array(list(self.vocabulary_.keys()))
indices = np.array(list(self.vocabulary_.values()))
inverse_vocabulary = terms[np.argsort(indices)]
return [inverse_vocabulary[X[i, :].nonzero()[1]].ravel()
for i in range(n_samples)]
def get_feature_names(self):
"""Array mapping from feature integer indices to feature name"""
self._check_vocabulary()
return [t for t, i in sorted(six.iteritems(self.vocabulary_),
key=itemgetter(1))]
def _make_int_array():
"""Construct an array.array of a type suitable for scipy.sparse indices."""
return array.array(str("i"))
class TfidfTransformer(BaseEstimator, TransformerMixin):
"""Transform a count matrix to a normalized tf or tf-idf representation
Tf means term-frequency while tf-idf means term-frequency times inverse
document-frequency. This is a common term weighting scheme in information
retrieval, that has also found good use in document classification.
The goal of using tf-idf instead of the raw frequencies of occurrence of a
token in a given document is to scale down the impact of tokens that occur
very frequently in a given corpus and that are hence empirically less
informative than features that occur in a small fraction of the training
corpus.
The actual formula used for tf-idf is tf * (idf + 1) = tf + tf * idf,
instead of tf * idf. The effect of this is that terms with zero idf, i.e.
that occur in all documents of a training set, will not be entirely
ignored. The formulas used to compute tf and idf depend on parameter
settings that correspond to the SMART notation used in IR, as follows:
Tf is "n" (natural) by default, "l" (logarithmic) when sublinear_tf=True.
Idf is "t" when use_idf is given, "n" (none) otherwise.
Normalization is "c" (cosine) when norm='l2', "n" (none) when norm=None.
Read more in the :ref:`User Guide <text_feature_extraction>`.
Parameters
----------
norm : 'l1', 'l2' or None, optional
Norm used to normalize term vectors. None for no normalization.
use_idf : boolean, default=True
Enable inverse-document-frequency reweighting.
smooth_idf : boolean, default=True
Smooth idf weights by adding one to document frequencies, as if an
extra document was seen containing every term in the collection
exactly once. Prevents zero divisions.
sublinear_tf : boolean, default=False
Apply sublinear tf scaling, i.e. replace tf with 1 + log(tf).
References
----------
.. [Yates2011] `R. Baeza-Yates and B. Ribeiro-Neto (2011). Modern
Information Retrieval. Addison Wesley, pp. 68-74.`
.. [MRS2008] `C.D. Manning, P. Raghavan and H. Schuetze (2008).
Introduction to Information Retrieval. Cambridge University
Press, pp. 118-120.`
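Examples
--------
A minimal sketch of the smoothed-idf arithmetic described above; the
count matrix is illustrative. With smooth_idf=True, a term present in
all three documents gets idf = log((3 + 1) / (3 + 1)) + 1 = 1.0, so it
is damped to a constant weight rather than zeroed out.
>>> counts = [[3, 0, 1],
...           [2, 0, 0],
...           [3, 2, 0]]
>>> transformer = TfidfTransformer()
>>> tfidf = transformer.fit_transform(counts)
>>> tfidf.shape
(3, 3)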
"""
def __init__(self, norm='l2', use_idf=True, smooth_idf=True,
sublinear_tf=False):
self.norm = norm
self.use_idf = use_idf
self.smooth_idf = smooth_idf
self.sublinear_tf = sublinear_tf
def fit(self, X, y=None):
"""Learn the idf vector (global term weights)
Parameters
----------
X : sparse matrix, [n_samples, n_features]
a matrix of term/token counts
"""
if not sp.issparse(X):
X = sp.csc_matrix(X)
if self.use_idf:
n_samples, n_features = X.shape
df = _document_frequency(X)
# perform idf smoothing if required
df += int(self.smooth_idf)
n_samples += int(self.smooth_idf)
# log+1 instead of log makes sure terms with zero idf don't get
# suppressed entirely.
idf = np.log(float(n_samples) / df) + 1.0
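# e.g. with smooth_idf and a term occurring in all 4 training
# documents: df = 4 + 1 and n_samples = 4 + 1, so
# idf = log(5. / 5) + 1.0 = 1.0; the ubiquitous term keeps a
# constant weight instead of being dropped.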
self._idf_diag = sp.spdiags(idf,
diags=0, m=n_features, n=n_features)
return self
def transform(self, X, copy=True):
"""Transform a count matrix to a tf or tf-idf representation
Parameters
----------
X : sparse matrix, [n_samples, n_features]
a matrix of term/token counts
copy : boolean, default True
Whether to copy X and operate on the copy or perform in-place
operations.
Returns
-------
vectors : sparse matrix, [n_samples, n_features]
"""
if hasattr(X, 'dtype') and np.issubdtype(X.dtype, np.float):
# preserve float family dtype
X = sp.csr_matrix(X, copy=copy)
else:
# convert counts or binary occurrences to floats
X = sp.csr_matrix(X, dtype=np.float64, copy=copy)
n_samples, n_features = X.shape
if self.sublinear_tf:
np.log(X.data, X.data)
X.data += 1
if self.use_idf:
check_is_fitted(self, '_idf_diag', 'idf vector is not fitted')
expected_n_features = self._idf_diag.shape[0]
if n_features != expected_n_features:
raise ValueError("Input has n_features=%d while the model"
" has been trained with n_features=%d" % (
n_features, expected_n_features))
# in-place *= is not supported for this sparse product, so assign
X = X * self._idf_diag
if self.norm:
X = normalize(X, norm=self.norm, copy=False)
return X
@property
def idf_(self):
if hasattr(self, "_idf_diag"):
return np.ravel(self._idf_diag.sum(axis=0))
else:
return None
class TfidfVectorizer(CountVectorizer):
"""Convert a collection of raw documents to a matrix of TF-IDF features.
Equivalent to CountVectorizer followed by TfidfTransformer.
Read more in the :ref:`User Guide <text_feature_extraction>`.
Parameters
----------
input : string {'filename', 'file', 'content'}
If 'filename', the sequence passed as an argument to fit is
expected to be a list of filenames that need reading to fetch
the raw content to analyze.
If 'file', the sequence items must have a 'read' method (file-like
object) that is called to fetch the bytes in memory.
Otherwise the input is expected to be a sequence of items that
can be analyzed directly, i.e. strings or bytes.
encoding : string, 'utf-8' by default.
If bytes or files are given to analyze, this encoding is used to
decode.
decode_error : {'strict', 'ignore', 'replace'}
Instruction on what to do if a byte sequence is given to analyze that
contains characters not of the given `encoding`. By default, it is
'strict', meaning that a UnicodeDecodeError will be raised. Other
values are 'ignore' and 'replace'.
strip_accents : {'ascii', 'unicode', None}
Remove accents during the preprocessing step.
'ascii' is a fast method that only works on characters that have
a direct ASCII mapping.
'unicode' is a slightly slower method that works on any characters.
None (default) does nothing.
analyzer : string, {'word', 'char'} or callable
Whether the feature should be made of word or character n-grams.
If a callable is passed it is used to extract the sequence of features
out of the raw, unprocessed input.
preprocessor : callable or None (default)
Override the preprocessing (string transformation) stage while
preserving the tokenizing and n-grams generation steps.
tokenizer : callable or None (default)
Override the string tokenization step while preserving the
preprocessing and n-grams generation steps.
Only applies if ``analyzer == 'word'``.
ngram_range : tuple (min_n, max_n)
The lower and upper boundary of the range of n-values for different
n-grams to be extracted. All values of n such that min_n <= n <= max_n
will be used.
stop_words : string {'english'}, list, or None (default)
If a string, it is passed to _check_stop_list and the appropriate stop
list is returned. 'english' is currently the only supported string
value.
If a list, that list is assumed to contain stop words, all of which
will be removed from the resulting tokens.
Only applies if ``analyzer == 'word'``.
If None, no stop words will be used. max_df can be set to a value
in the range [0.7, 1.0) to automatically detect and filter stop
words based on intra corpus document frequency of terms.
lowercase : boolean, default True
Convert all characters to lowercase before tokenizing.
token_pattern : string
Regular expression denoting what constitutes a "token", only used
if ``analyzer == 'word'``. The default regexp selects tokens of 2
or more alphanumeric characters (punctuation is completely ignored
and always treated as a token separator).
max_df : float in range [0.0, 1.0] or int, default=1.0
When building the vocabulary ignore terms that have a document
frequency strictly higher than the given threshold (corpus-specific
stop words).
If float, the parameter represents a proportion of documents; if
integer, absolute counts.
This parameter is ignored if vocabulary is not None.
min_df : float in range [0.0, 1.0] or int, default=1
When building the vocabulary ignore terms that have a document
frequency strictly lower than the given threshold. This value is also
called cut-off in the literature.
If float, the parameter represents a proportion of documents; if
integer, absolute counts.
This parameter is ignored if vocabulary is not None.
max_features : int or None, default=None
If not None, build a vocabulary that only considers the top
max_features ordered by term frequency across the corpus.
This parameter is ignored if vocabulary is not None.
vocabulary : Mapping or iterable, optional
Either a Mapping (e.g., a dict) where keys are terms and values are
indices in the feature matrix, or an iterable over terms. If not
given, a vocabulary is determined from the input documents.
binary : boolean, default=False
If True, all non-zero term counts are set to 1. This does not mean
outputs will have only 0/1 values, only that the tf term in tf-idf
is binary. (Set idf and normalization to False to get 0/1 outputs.)
dtype : type, optional
Type of the matrix returned by fit_transform() or transform().
norm : 'l1', 'l2' or None, optional
Norm used to normalize term vectors. None for no normalization.
use_idf : boolean, default=True
Enable inverse-document-frequency reweighting.
smooth_idf : boolean, default=True
Smooth idf weights by adding one to document frequencies, as if an
extra document was seen containing every term in the collection
exactly once. Prevents zero divisions.
sublinear_tf : boolean, default=False
Apply sublinear tf scaling, i.e. replace tf with 1 + log(tf).
Attributes
----------
idf_ : array, shape = [n_features], or None
The learned idf vector (global term weights)
when ``use_idf`` is set to True, None otherwise.
stop_words_ : set
Terms that were ignored because they either:
- occurred in too many documents (`max_df`)
- occurred in too few documents (`min_df`)
- were cut off by feature selection (`max_features`).
This is only available if no vocabulary was given.
See also
--------
CountVectorizer
Tokenize the documents and count the occurrences of tokens and return
them as a sparse matrix
TfidfTransformer
Apply Term Frequency Inverse Document Frequency normalization to a
sparse matrix of occurrence counts.
Notes
-----
The ``stop_words_`` attribute can get large and increase the model size
when pickling. This attribute is provided only for introspection and can
be safely removed using delattr or set to None before pickling.
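Examples
--------
A minimal sketch; the toy corpus is illustrative.
>>> corpus = [
...     'This is the first document.',
...     'This is the second second document.',
...     'And the third one.',
...     'Is this the first document?',
... ]
>>> vectorizer = TfidfVectorizer()
>>> X = vectorizer.fit_transform(corpus)
>>> X.shape
(4, 9)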
"""
def __init__(self, input='content', encoding='utf-8',
decode_error='strict', strip_accents=None, lowercase=True,
preprocessor=None, tokenizer=None, analyzer='word',
stop_words=None, token_pattern=r"(?u)\b\w\w+\b",
ngram_range=(1, 1), max_df=1.0, min_df=1,
max_features=None, vocabulary=None, binary=False,
dtype=np.int64, norm='l2', use_idf=True, smooth_idf=True,
sublinear_tf=False):
super(TfidfVectorizer, self).__init__(
input=input, encoding=encoding, decode_error=decode_error,
strip_accents=strip_accents, lowercase=lowercase,
preprocessor=preprocessor, tokenizer=tokenizer, analyzer=analyzer,
stop_words=stop_words, token_pattern=token_pattern,
ngram_range=ngram_range, max_df=max_df, min_df=min_df,
max_features=max_features, vocabulary=vocabulary, binary=binary,
dtype=dtype)
self._tfidf = TfidfTransformer(norm=norm, use_idf=use_idf,
smooth_idf=smooth_idf,
sublinear_tf=sublinear_tf)
# Broadcast the TF-IDF parameters to the underlying transformer instance
# for easy grid search and repr
@property
def norm(self):
return self._tfidf.norm
@norm.setter
def norm(self, value):
self._tfidf.norm = value
@property
def use_idf(self):
return self._tfidf.use_idf
@use_idf.setter
def use_idf(self, value):
self._tfidf.use_idf = value
@property
def smooth_idf(self):
return self._tfidf.smooth_idf
@smooth_idf.setter
def smooth_idf(self, value):
self._tfidf.smooth_idf = value
@property
def sublinear_tf(self):
return self._tfidf.sublinear_tf
@sublinear_tf.setter
def sublinear_tf(self, value):
self._tfidf.sublinear_tf = value
@property
def idf_(self):
return self._tfidf.idf_
def fit(self, raw_documents, y=None):
"""Learn vocabulary and idf from training set.
Parameters
----------
raw_documents : iterable
an iterable which yields either str, unicode or file objects
Returns
-------
self : TfidfVectorizer
"""
X = super(TfidfVectorizer, self).fit_transform(raw_documents)
self._tfidf.fit(X)
return self
def fit_transform(self, raw_documents, y=None):
"""Learn vocabulary and idf, return term-document matrix.
This is equivalent to fit followed by transform, but more efficiently
implemented.
Parameters
----------
raw_documents : iterable
an iterable which yields either str, unicode or file objects
Returns
-------
X : sparse matrix, [n_samples, n_features]
Tf-idf-weighted document-term matrix.
"""
X = super(TfidfVectorizer, self).fit_transform(raw_documents)
self._tfidf.fit(X)
# X is already a transformed view of raw_documents so
# we set copy to False
return self._tfidf.transform(X, copy=False)
def transform(self, raw_documents, copy=True):
"""Transform documents to document-term matrix.
Uses the vocabulary and document frequencies (df) learned by fit (or
fit_transform).
Parameters
----------
raw_documents : iterable
an iterable which yields either str, unicode or file objects
copy : boolean, default True
Whether to copy X and operate on the copy or perform in-place
operations.
Returns
-------
X : sparse matrix, [n_samples, n_features]
Tf-idf-weighted document-term matrix.
"""
check_is_fitted(self, '_tfidf', 'The tfidf vector is not fitted')
X = super(TfidfVectorizer, self).transform(raw_documents)
return self._tfidf.transform(X, copy=False)
| bsd-3-clause |
jkonecki/autorest | AutoRest/Generators/Python/Python.Tests/Expected/AcceptanceTests/BodyByte/autorestswaggerbatbyteservice/models/error.py | 50 | 1295 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
from msrest.exceptions import HttpOperationError
class Error(Model):
"""Error
:param status:
:type status: int
:param message:
:type message: str
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'int'},
'message': {'key': 'message', 'type': 'str'},
}
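# Illustrative mapping (payload values are hypothetical): a response body
# {"status": 404, "message": "not found"} deserializes into an Error with
# error.status == 404 and error.message == 'not found'.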
def __init__(self, status=None, message=None, **kwargs):
self.status = status
self.message = message
class ErrorException(HttpOperationError):
"""Server responsed with exception of type: 'Error'.
:param deserialize: A deserializer
:param response: Server response to be deserialized.
"""
def __init__(self, deserialize, response, *args):
super(ErrorException, self).__init__(deserialize, response, 'Error', *args)
| mit |
markovg/nest-simulator | pynest/nest/tests/test_errors.py | 18 | 2194 | # -*- coding: utf-8 -*-
#
# test_errors.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Tests for error handling
"""
import unittest
import nest
@nest.check_stack
class ErrorTestCase(unittest.TestCase):
"""Tests if errors are handled correctly"""
def test_Raise(self):
"""Error raising"""
def raise_custom_exception(exc, msg):
raise exc(msg)
message = "test"
exception = nest.NESTError
self.assertRaisesRegex(
exception, message, raise_custom_exception, exception, message)
def test_StackUnderFlow(self):
"""Stack underflow"""
nest.ResetKernel()
self.assertRaisesRegex(
nest.NESTError, "StackUnderflow", nest.sli_run, 'clear ;')
def test_DivisionByZero(self):
"""Division by zero"""
nest.ResetKernel()
self.assertRaisesRegex(
nest.NESTError, "DivisionByZero", nest.sli_run, '1 0 div')
def test_UnknownNode(self):
"""Unknown node"""
nest.ResetKernel()
self.assertRaisesRegex(
nest.NESTError, "UnknownNode", nest.Connect, (99, ), (99, ))
def test_UnknownModel(self):
"""Unknown model name"""
nest.ResetKernel()
self.assertRaisesRegex(
nest.NESTError, "UnknownModelName", nest.Create, -1)
def suite():
suite = unittest.makeSuite(ErrorTestCase, 'test')
return suite
def run():
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite())
if __name__ == "__main__":
run()
| gpl-2.0 |
duramato/SickRage | lib/hachoir_core/field/integer.py | 73 | 1848 | """
Integer field classes:
- UInt8, UInt16, UInt24, UInt32, UInt64: unsigned integers of 8, 16, 24, 32, 64 bits;
- Int8, Int16, Int24, Int32, Int64: signed integers of 8, 16, 24, 32, 64 bits.
"""
from hachoir_core.field import Bits, FieldError
class GenericInteger(Bits):
"""
Generic integer class used to generate other classes.
"""
def __init__(self, parent, name, signed, size, description=None):
if not (8 <= size <= 16384):
raise FieldError("Invalid integer size (%s): have to be in 8..16384" % size)
Bits.__init__(self, parent, name, size, description)
self.signed = signed
def createValue(self):
return self._parent.stream.readInteger(
self.absolute_address, self.signed, self._size, self._parent.endian)
def integerFactory(name, is_signed, size, doc):
class Integer(GenericInteger):
__doc__ = doc
static_size = size
def __init__(self, parent, name, description=None):
GenericInteger.__init__(self, parent, name, is_signed, size, description)
cls = Integer
cls.__name__ = name
return cls
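# Hypothetical usage sketch of the generated classes below, assuming a
# parent field set and stream from hachoir's parser framework:
#
#     width = UInt16(parent, "width", "image width in pixels")
#     print(width.value)  # decoded via parent.stream.readInteger(...)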
UInt8 = integerFactory("UInt8", False, 8, "Unsigned integer of 8 bits")
UInt16 = integerFactory("UInt16", False, 16, "Unsigned integer of 16 bits")
UInt24 = integerFactory("UInt24", False, 24, "Unsigned integer of 24 bits")
UInt32 = integerFactory("UInt32", False, 32, "Unsigned integer of 32 bits")
UInt64 = integerFactory("UInt64", False, 64, "Unsigned integer of 64 bits")
Int8 = integerFactory("Int8", True, 8, "Signed integer of 8 bits")
Int16 = integerFactory("Int16", True, 16, "Signed integer of 16 bits")
Int24 = integerFactory("Int24", True, 24, "Signed integer of 24 bits")
Int32 = integerFactory("Int32", True, 32, "Signed integer of 32 bits")
Int64 = integerFactory("Int64", True, 64, "Signed integer of 64 bits")
| gpl-3.0 |
fedora-conary/rbuild | plugins/buildpackages.py | 1 | 4531 | #
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from rbuild import errors
from rbuild import pluginapi
from rbuild.pluginapi import command
from rbuild_plugins.build import packages
from rbuild_plugins.build import refresh
class BuildPackagesCommand(command.BaseCommand):
"""
Builds or rebuilds specified packages, or all checked-out packages
if none are specified.
Additionally, rebuilds any other packages in the product group that
depend on the built packages.
"""
help = 'Build edited packages for this stage'
paramHelp = '[package]*'
docs = {'refresh' : 'refreshes the source of specified packages, or all '
'checked-out packages if none are specified',
'message' : 'message describing why the commit was performed',
'no-watch' : 'do not watch the job after starting the build',
'no-commit' : 'do not automatically commit successful builds',
'no-recurse' : 'default behavior left for backwards compatibility',
'recurse' : 'build every package listed on the '
'command line plus all of its dependencies',
}
def addLocalParameters(self, argDef):
argDef['no-watch'] = command.NO_PARAM
argDef['no-commit'] = command.NO_PARAM
argDef['no-recurse'] = command.NO_PARAM
argDef['recurse'] = command.NO_PARAM
argDef['refresh'] = command.NO_PARAM
argDef['message'] = '-m', command.ONE_PARAM
#pylint: disable-msg=R0201,R0903
# could be a function, and too few public methods
def runCommand(self, handle, argSet, args):
watch = not argSet.pop('no-watch', False)
commit = not argSet.pop('no-commit', False)
recurse = argSet.pop('recurse', False)
argSet.pop('no-recurse', False) # ignored, now the default
refreshArg = argSet.pop('refresh', False)
message = argSet.pop('message', None)
success = True
_, packageList, = self.requireParameters(args, allowExtra=True)
if not packageList:
if refreshArg:
handle.BuildPackages.refreshAllPackages()
jobId = handle.BuildPackages.buildAllPackages()
else:
if refreshArg:
handle.BuildPackages.refreshPackages(packageList)
jobId = handle.BuildPackages.buildPackages(packageList, recurse)
if watch and commit:
success = handle.Build.watchAndCommitJob(jobId, message)
elif watch:
success = handle.Build.watchJob(jobId)
if not success:
raise errors.PluginError('Package build failed')
class BuildPackages(pluginapi.Plugin):
def initialize(self):
self.handle.Commands.getCommandClass('build').registerSubCommand(
'packages', BuildPackagesCommand,
aliases=['package', ])
def buildAllPackages(self):
self.handle.Build.warnIfOldProductDefinition('building all packages')
job = self.createJobForAllPackages()
jobId = self.handle.facade.rmake.buildJob(job)
self.handle.productStore.setPackageJobId(jobId)
return jobId
def buildPackages(self, packageList, recurse=True):
self.handle.Build.warnIfOldProductDefinition('building packages')
job = self.createJobForPackages(packageList, recurse)
jobId = self.handle.facade.rmake.buildJob(job)
self.handle.productStore.setPackageJobId(jobId)
return jobId
def createJobForAllPackages(self):
return packages.createRmakeJobForAllPackages(self.handle)
def createJobForPackages(self, packageList, recurse=True):
return packages.createRmakeJobForPackages(self.handle, packageList,
recurse)
def refreshPackages(self, packageList=None):
return refresh.refreshPackages(self.handle, packageList)
def refreshAllPackages(self):
return refresh.refreshAllPackages(self.handle)
| apache-2.0 |
KeepSafe/aiohttp | examples/legacy/crawl.py | 5 | 3129 | #!/usr/bin/env python3
import asyncio
import logging
import re
import signal
import sys
import urllib.parse
import aiohttp
class Crawler:
def __init__(self, rooturl, loop, maxtasks=100):
self.rooturl = rooturl
self.loop = loop
self.todo = set()
self.busy = set()
self.done = {}
self.tasks = set()
self.sem = asyncio.Semaphore(maxtasks, loop=loop)
# connector stores cookies between requests and uses connection pool
self.session = aiohttp.ClientSession(loop=loop)
async def run(self):
t = asyncio.ensure_future(self.addurls([(self.rooturl, '')]),
loop=self.loop)
await asyncio.sleep(1, loop=self.loop)
while self.busy:
await asyncio.sleep(1, loop=self.loop)
await t
await self.session.close()
self.loop.stop()
async def addurls(self, urls):
for url, parenturl in urls:
url = urllib.parse.urljoin(parenturl, url)
url, frag = urllib.parse.urldefrag(url)
if (url.startswith(self.rooturl) and
url not in self.busy and
url not in self.done and
url not in self.todo):
self.todo.add(url)
await self.sem.acquire()
task = asyncio.ensure_future(self.process(url), loop=self.loop)
task.add_done_callback(lambda t: self.sem.release())
task.add_done_callback(self.tasks.remove)
self.tasks.add(task)
async def process(self, url):
print('processing:', url)
self.todo.remove(url)
self.busy.add(url)
try:
resp = await self.session.get(url)
except Exception as exc:
print('...', url, 'has error', repr(str(exc)))
self.done[url] = False
else:
if (resp.status == 200 and
('text/html' in resp.headers.get('content-type', ''))):
data = (await resp.read()).decode('utf-8', 'replace')
urls = re.findall(r'(?i)href=["\']?([^\s"\'<>]+)', data)
asyncio.Task(self.addurls([(u, url) for u in urls]))
resp.close()
self.done[url] = True
self.busy.remove(url)
print(len(self.done), 'completed tasks,', len(self.tasks),
'still pending, todo', len(self.todo))
def main():
loop = asyncio.get_event_loop()
c = Crawler(sys.argv[1], loop)
asyncio.ensure_future(c.run(), loop=loop)
try:
loop.add_signal_handler(signal.SIGINT, loop.stop)
except RuntimeError:
pass
loop.run_forever()
print('todo:', len(c.todo))
print('busy:', len(c.busy))
print('done:', len(c.done), '; ok:', sum(c.done.values()))
print('tasks:', len(c.tasks))
if __name__ == '__main__':
if '--iocp' in sys.argv:
from asyncio import events, windows_events
sys.argv.remove('--iocp')
logging.info('using iocp')
el = windows_events.ProactorEventLoop()
events.set_event_loop(el)
main()
| apache-2.0 |
nicholasbs/zulip | zerver/management/commands/gravatar_to_user_avatar.py | 124 | 2043 | from __future__ import absolute_import
import requests
from zerver.models import get_user_profile_by_email, UserProfile
from zerver.lib.avatar import gravatar_hash
from zerver.lib.upload import upload_avatar_image
from django.core.management.base import BaseCommand, CommandError
from django.core.files.uploadedfile import SimpleUploadedFile
class Command(BaseCommand):
help = """Migrate the specified user's Gravatar over to an avatar that we serve. If two
email addresses are specified, use the Gravatar for the first and upload the image
for both email addresses."""
def add_arguments(self, parser):
parser.add_argument('old_email', metavar='<old email>', type=str,
help="user whose Gravatar should be migrated")
parser.add_argument('new_email', metavar='<new email>', type=str, nargs='?', default=None,
help="user to copy the Gravatar to")
def handle(self, *args, **options):
old_email = options['old_email']
if options['new_email']:
new_email = options['new_email']
else:
new_email = old_email
gravatar_url = "https://secure.gravatar.com/avatar/%s?d=identicon" % (gravatar_hash(old_email),)
gravatar_data = requests.get(gravatar_url).content
gravatar_file = SimpleUploadedFile('gravatar.jpg', gravatar_data, 'image/jpeg')
try:
user_profile = get_user_profile_by_email(old_email)
except UserProfile.DoesNotExist:
try:
user_profile = get_user_profile_by_email(new_email)
except UserProfile.DoesNotExist:
raise CommandError("Could not find specified user")
upload_avatar_image(gravatar_file, user_profile, old_email)
if old_email != new_email:
gravatar_file.seek(0)
upload_avatar_image(gravatar_file, user_profile, new_email)
user_profile.avatar_source = UserProfile.AVATAR_FROM_USER
user_profile.save(update_fields=['avatar_source'])
| apache-2.0 |
Autodesk/molecular-design-toolkit | moldesign/helpers/qmmm.py | 1 | 3277 | # Copyright 2016 Autodesk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import moldesign as mdt
LINKBONDRATIO = 0.709 # fixed ratio of C-C to C-H bond length for link atoms
def create_link_atoms(mol, qmatoms):
""" Create hydrogen caps for bonds between QM and MM regions.
Each link atom will have ``metadata.mmatom``, ``metadata.mmpartner`` attributes to identify the
atom it replaces and the atom it's bonded to in the MM system.
Raises:
ValueError: if any MM/QM atom is bonded to more than one QM/MM atom, or the bond
order is not one
Returns:
List[mdt.Atom]: list of link atoms
"""
linkatoms = []
qmset = set(qmatoms)
for qmatom in qmatoms:
mmatom = _get_mm_nbr(mol, qmatom, qmset)
if mmatom is None:
continue
la = mdt.Atom(atnum=1, name='HL%d' % len(linkatoms),
metadata={'mmatom': mmatom, 'mmpartner': qmatom})
linkatoms.append(la)
set_link_atom_positions(linkatoms)
return linkatoms
def _get_mm_nbr(mol, qmatom, qmset):
mm_nbrs = [nbr for nbr in qmatom.bonded_atoms
if nbr not in qmset]
if len(mm_nbrs) == 0:
return None
# everything below is sanity checks
mmatom = mm_nbrs[0]
if len(mm_nbrs) != 1:
raise ValueError('QM atom %s is bonded to more than one MM atom' % qmatom)
if mol.bond_graph[qmatom][mmatom] != 1:
raise ValueError('Bond crossing QM/MM boundary (%s - %s) does not have order 1'
% (qmatom, mmatom))
if qmatom.atnum != 6 or mmatom.atnum != 6:
print ('WARNING: QM/MM bond involving non-carbon atoms: %s - %s' %
(qmatom, mmatom))
mm_qm_nbrs = [qmnbr for qmnbr in mmatom.bonded_atoms
if qmnbr in qmset]
if len(mm_qm_nbrs) != 1:
raise ValueError('MM atom %s is bonded to more than one QM atom'%mmatom)
return mmatom
def set_link_atom_positions(linkatoms):
"""
Set link atom positions using a fixed ratio of MM bond length to QM bond length
Warnings:
- This is only valid for carbon-carbon bonds capped by a hydrogen link
atom, since LINKBONDRATIO hard-codes the C-C to C-H bond-length ratio.
- Presumably, the most "correct" way to do this is to place the hydrogen in order to
match the force exterted on the QM atom by the MM atom. This is not currently supported.
Args:
linkatoms (List[mdt.Atom]): list of link atoms to set positions for
References:
http://www.nwchem-sw.org/index.php/Qmmm_link_atoms
"""
for atom in linkatoms:
nbr = atom.metadata.mmpartner
proxy = atom.metadata.mmatom
dist = LINKBONDRATIO * nbr.distance(proxy)
atom.position = (nbr.position +
dist * mdt.mathutils.normalized(proxy.position - nbr.position))
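# Worked example of the fixed-ratio rule (numbers are illustrative): for a
# QM carbon whose MM carbon neighbor sits 1.54 Angstrom away, the hydrogen
# link atom is placed along the same bond vector at
# 0.709 * 1.54 ~= 1.09 Angstrom from the QM atom, a typical C-H bond length.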
| apache-2.0 |
masierra/ardupilot | mk/PX4/Tools/genmsg/scripts/genmsg_check_deps.py | 216 | 2999 | #!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2014, Open Source Robotics Foundation, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Open Source Robotics Foundation, Inc. nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import print_function
import os
import sys
from genmsg import EXT_MSG, EXT_SRV, MsgContext
from genmsg.gentools import compute_full_type_name
from genmsg.msg_loader import load_msg_from_file, load_srv_from_file
from genmsg.msgs import bare_msg_type, is_builtin, resolve_type
pkg_name = sys.argv[1]
msg_file = sys.argv[2]
deps = sys.argv[3].split(':') if len(sys.argv) > 3 else []
msg_context = MsgContext.create_default()
full_type_name = compute_full_type_name(pkg_name, os.path.basename(msg_file))
if msg_file.endswith(EXT_MSG):
spec = load_msg_from_file(msg_context, msg_file, full_type_name)
unresolved_types = spec.types
elif msg_file.endswith(EXT_SRV):
spec = load_srv_from_file(msg_context, msg_file, full_type_name)
unresolved_types = spec.request.types + spec.response.types
else:
print("Processing file: '%s' - unknown file extension" % msg_file, file=sys.stderr)
sys.exit(1)
package_context = spec.package
for unresolved_type in unresolved_types:
bare_type = bare_msg_type(unresolved_type)
resolved_type = resolve_type(bare_type, package_context)
if not is_builtin(resolved_type) and resolved_type not in deps:
print("The dependencies of the message/service '%s' have changed. Please rerun cmake." % spec.full_name, file=sys.stderr)
sys.exit(1)
| gpl-3.0 |
gifford-lab/bcbio-nextgen | bcbio/ngsalign/hisat2.py | 3 | 3097 | import os
from bcbio.utils import file_exists
import bcbio.pipeline.datadict as dd
from bcbio.distributed.transaction import file_transaction
from bcbio.pipeline import config_utils
from bcbio.ngsalign import postalign
from bcbio.provenance import do
def align(fastq_file, pair_file, ref_file, names, align_dir, data):
paired = True if pair_file else False
hisat2 = config_utils.get_program("hisat2", data)
num_cores = dd.get_num_cores(data)
quality_flag = _get_quality_flag(data)
stranded_flag = _get_stranded_flag(data, paired)
rg_flags = _get_rg_flags(names)
out_file = os.path.join(align_dir, dd.get_lane(data)) + ".bam"
if file_exists(out_file):
data = dd.set_work_bam(data, out_file)
return data
cmd = ("{hisat2} -x {ref_file} -p {num_cores} {quality_flag} {stranded_flag} "
"{rg_flags} ")
if paired:
cmd += "-1 {fastq_file} -2 {pair_file} "
else:
cmd += "-U {fastq_file} "
if dd.get_analysis(data).lower() == "smallrna-seq":
cmd += "-k 1000 "
# if assembling transcripts, set flags that cufflinks/stringtie can use
if dd.get_transcript_assembler(data):
cmd += "--dta-cufflinks "
if dd.get_analysis(data).lower() == "rna-seq":
gtf_file = dd.get_gtf_file(data)
splicesites = os.path.join(os.path.dirname(gtf_file),
"ref-transcripts-splicesites.txt")
cmd += "--known-splicesite-infile {splicesites} "
message = "Aligning %s and %s with hisat2." %(fastq_file, pair_file)
with file_transaction(out_file) as tx_out_file:
cmd += " | " + postalign.sam_to_sortbam_cl(data, tx_out_file)
do.run(cmd.format(**locals()), message)
data = dd.set_work_bam(data, out_file)
return data
def _get_quality_flag(data):
qual_format = dd.get_quality_format(data)
if qual_format.lower() == "illumina":
return "--phred64"
elif qual_format.lower() == "solexa":
return "--solexa-quals"
else:
return "--phred33"
def _get_stranded_flag(data, paired):
strandedness = dd.get_strandedness(data)
base = "--rna-strandness "
if paired:
if strandedness == "firststrand":
return base + "RF"
elif strandedness == "secondstrand":
return base + "FR"
else:
return ""
else:
if strandedness == "firstrand":
return base + "R"
elif strandedness == "secondstrand":
return base + "F"
else:
return ""
def _get_rg_flags(names):
rg_id = names["rg"]
rg_sample = names["sample"]
rg_library = names["pl"]
rg_platform_unit = names["pu"]
rg_lb = ("--rg LB:%s " % names.get("lb")) if names.get("lb") else ""
flags = ("--rg-id {rg_id} --rg PL:{rg_library} --rg PU:{rg_platform_unit} "
"--rg SM:{rg_sample} {rg_lb}")
return flags.format(**locals())
def remap_index_fn(ref_file):
"""Map sequence references to equivalent hisat2 indexes
"""
return os.path.splitext(ref_file)[0].replace("/seq/", "/hisat2/")
| mit |
kpcyrd/cjdns | node_build/dependencies/libuv/build/gyp/test/generator-output/gyptest-depth.py | 232 | 1561 | #!/usr/bin/env python
# Copyright 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies building a project hierarchy created when the --generator-output=
and --depth= options are used to put the build configuration files in a separate
directory tree.
"""
import TestGyp
import os
# This is a regression test for the make generator only.
test = TestGyp.TestGyp(formats=['make'])
test.writable(test.workpath('src'), False)
toplevel_dir = os.path.basename(test.workpath())
test.run_gyp(os.path.join(toplevel_dir, 'src', 'prog1.gyp'),
'-Dset_symroot=1',
'--generator-output=gypfiles',
depth=toplevel_dir,
chdir='..')
test.writable(test.workpath('src/build'), True)
test.writable(test.workpath('src/subdir2/build'), True)
test.writable(test.workpath('src/subdir3/build'), True)
test.build('prog1.gyp', test.ALL, chdir='gypfiles')
chdir = 'gypfiles'
expect = """\
Hello from %s
Hello from inc.h
Hello from inc1/include1.h
Hello from inc2/include2.h
Hello from inc3/include3.h
Hello from subdir2/deeper/deeper.h
"""
if test.format == 'xcode':
chdir = 'src'
test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
if test.format == 'xcode':
chdir = 'src/subdir2'
test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
if test.format == 'xcode':
chdir = 'src/subdir3'
test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
test.pass_test()
| gpl-3.0 |
yasoob/youtube-dl-GUI | youtube_dl/extractor/voicerepublic.py | 11 | 2302 | from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
ExtractorError,
determine_ext,
int_or_none,
urljoin,
)
class VoiceRepublicIE(InfoExtractor):
_VALID_URL = r'https?://voicerepublic\.com/(?:talks|embed)/(?P<id>[0-9a-z-]+)'
_TESTS = [{
'url': 'http://voicerepublic.com/talks/watching-the-watchers-building-a-sousveillance-state',
'md5': 'b9174d651323f17783000876347116e3',
'info_dict': {
'id': '2296',
'display_id': 'watching-the-watchers-building-a-sousveillance-state',
'ext': 'm4a',
'title': 'Watching the Watchers: Building a Sousveillance State',
'description': 'Secret surveillance programs have metadata too. The people and companies that operate secret surveillance programs can be surveilled.',
'duration': 1556,
'view_count': int,
}
}, {
'url': 'http://voicerepublic.com/embed/watching-the-watchers-building-a-sousveillance-state',
'only_matching': True,
}]
def _real_extract(self, url):
display_id = self._match_id(url)
webpage = self._download_webpage(url, display_id)
if '>Queued for processing, please stand by...<' in webpage:
raise ExtractorError(
'Audio is still queued for processing', expected=True)
talk = self._parse_json(self._search_regex(
r'initialSnapshot\s*=\s*({.+?});',
webpage, 'talk'), display_id)['talk']
title = talk['title']
formats = [{
'url': urljoin(url, talk_url),
'format_id': format_id,
'ext': determine_ext(talk_url) or format_id,
'vcodec': 'none',
} for format_id, talk_url in talk['media_links'].items()]
self._sort_formats(formats)
return {
'id': compat_str(talk.get('id') or display_id),
'display_id': display_id,
'title': title,
'description': talk.get('teaser'),
'thumbnail': talk.get('image_url'),
'duration': int_or_none(talk.get('archived_duration')),
'view_count': int_or_none(talk.get('play_count')),
'formats': formats,
}
| mit |
USGSDenverPychron/pychron | pychron/hardware/fusions/fusions_motor_configurer.py | 1 | 1639 | # ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
'''
@author: Jake Ross
@copyright: 2009
@license: Educational Community License 1.0
'''
# =============enthought library imports=======================
from traits.api import HasTraits, List
from traitsui.api import View, Item, Group
# =============standard library imports ========================
# =============local library imports ==========================
class FusionsMotorConfigurer(HasTraits):
'''
G{classtree}
'''
motors = List
def traits_view(self):
'''
'''
motorgroup = Group(layout='tabbed')
for m in self.motors:
n = m.name
self.add_trait(n, m)
i = Item(n, style='custom', show_label=False)
motorgroup.content.append(i)
return View(motorgroup, resizable=True, title='Configure Motors',
buttons=['OK', 'Cancel', 'Revert'],
)
| apache-2.0 |
marratj/ansible | lib/ansible/plugins/callback/context_demo.py | 25 | 1791 | # (C) 2012, Michael DeHaan, <michael.dehaan@gmail.com>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
callback: context_demo
type: aggregate
short_description: demo callback that adds play/task context
description:
- Displays some play and task context along with normal output
- This is mostly for demo purposes
version_added: "2.1"
requirements:
- whitelist in configuration
'''
from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase):
"""
This is a very trivial example of how any callback function can get at play and task objects.
play will be 'None' for runner invocations, and task will be None for 'setup' invocations.
"""
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'aggregate'
CALLBACK_NAME = 'context_demo'
CALLBACK_NEEDS_WHITELIST = True
def __init__(self, *args, **kwargs):
super(CallbackModule, self).__init__(*args, **kwargs)
self.task = None
self.play = None
def v2_on_any(self, *args, **kwargs):
self._display.display("--- play: {} task: {} ---".format(getattr(self.play, 'name', None), self.task))
self._display.display(" --- ARGS ")
for i, a in enumerate(args):
self._display.display(' %s: %s' % (i, a))
self._display.display(" --- KWARGS ")
for k in kwargs:
self._display.display(' %s: %s' % (k, kwargs[k]))
def v2_playbook_on_play_start(self, play):
self.play = play
def v2_playbook_on_task_start(self, task, is_conditional):
self.task = task
| gpl-3.0 |
bccp/nbodykit | nbodykit/source/catalog/subvolumes.py | 1 | 2079 | from nbodykit.base.catalog import CatalogSource
from pmesh.domain import GridND
from nbodykit.utils import split_size_3d
import numpy
class SubVolumesCatalog(CatalogSource):
""" A catalog that distributes the particles spatially into subvolumes per
MPI rank.
Attributes
----------
domain : :class:`pmesh.domain.GridND`;
The domain objects for decomposition. If None, generate
a domain to decompose the catalog into a 3d grid.
layout : A large object that holds which particle belongs to which rank.
source : the original source object
Parameters
----------
columns: list
a list of columns to already exchange
"""
def __init__(self, source, domain=None, position='Position', columns=None):
comm = source.comm
if domain is None:
# determine processor division for domain decomposition
np = split_size_3d(comm.size)
if comm.rank == 0:
self.logger.info("using cpu grid decomposition: %s" %str(np))
grid = [
numpy.linspace(0, source.attrs['BoxSize'][0], np[0] + 1, endpoint=True),
numpy.linspace(0, source.attrs['BoxSize'][1], np[1] + 1, endpoint=True),
numpy.linspace(0, source.attrs['BoxSize'][2], np[2] + 1, endpoint=True),
]
domain = GridND(grid, comm=comm)
self.domain = domain
self.source = source
layout = domain.decompose(source[position].compute())
self._size = layout.recvlength
CatalogSource.__init__(self, comm=comm)
self.attrs.update(source.attrs)
self._frozen = {}
if columns is None: columns = source.columns
for column in columns:
data = source[column].compute()
self._frozen[column] = self.make_column(layout.exchange(data))
@property
def hardcolumns(self):
return sorted(list(self._frozen.keys()))
def get_hardcolumn(self, col):
return self._frozen[col]
| gpl-3.0 |
DANS-KNAW/dariah-contribute | dariah_static_data/migrations/0003_auto__del_field_country_iso3166_2__del_field_country_uri__add_field_co.py | 1 | 4571 | # -*- coding: utf-8 -*-
"""
DARIAH Contribute - DARIAH-EU Contribute: edit your DARIAH contributions.
Copyright 2014 Data Archiving and Networked Services
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Country.iso3166_2'
db.delete_column(u'dariah_static_data_country', 'iso3166_2')
# Deleting field 'Country.uri'
db.delete_column(u'dariah_static_data_country', 'uri')
# Adding field 'Country.geonameid'
db.add_column(u'dariah_static_data_country', 'geonameid',
self.gf('django.db.models.fields.PositiveIntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Adding field 'Country.iso3166_2'
db.add_column(u'dariah_static_data_country', 'iso3166_2',
self.gf('django.db.models.fields.CharField')(default='', max_length=2),
keep_default=False)
# Adding field 'Country.uri'
db.add_column(u'dariah_static_data_country', 'uri',
self.gf('django.db.models.fields.URLField')(default='', max_length=200),
keep_default=False)
# Deleting field 'Country.geonameid'
db.delete_column(u'dariah_static_data_country', 'geonameid')
models = {
u'dariah_static_data.activitygroupname': {
'Meta': {'object_name': 'ActivityGroupName'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'dariah_static_data.country': {
'Meta': {'object_name': 'Country'},
'geonameid': ('django.db.models.fields.PositiveIntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'dariah_static_data.tadirahactivity': {
'Meta': {'object_name': 'TADIRAHActivity'},
'activity_group_name': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tadirah_activities'", 'to': u"orm['dariah_static_data.ActivityGroupName']"}),
'activity_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'uri': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
u'dariah_static_data.tadirahobject': {
'Meta': {'object_name': 'TADIRAHObject'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'uri': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
u'dariah_static_data.tadirahtechnique': {
'Meta': {'object_name': 'TADIRAHTechnique'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'uri': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
u'dariah_static_data.vcc': {
'Meta': {'object_name': 'VCC'},
'description': ('django.db.models.fields.TextField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'uri': ('django.db.models.fields.URLField', [], {'max_length': '200'})
}
}
complete_apps = ['dariah_static_data'] | apache-2.0 |
gorjuce/odoo | addons/website_mail/__openerp__.py | 379 | 1623 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Website Mail',
'category': 'Hidden',
'summary': 'Website Module for Mail',
'version': '0.1',
'description': """Glue module holding mail improvements for website.""",
'author': 'OpenERP SA',
'depends': ['website', 'mail', 'email_template'],
'data': [
'views/snippets.xml',
'views/website_mail.xml',
'views/website_email_designer.xml',
'views/email_template_view.xml',
'data/mail_groups.xml',
'security/website_mail.xml',
],
'qweb': [
'static/src/xml/website_mail.xml'
],
'installable': True,
'auto_install': True,
}
| agpl-3.0 |
lacrazyboy/scrapy | scrapy/utils/benchserver.py | 130 | 1312 | import random
from six.moves.urllib.parse import urlencode
from twisted.web.server import Site
from twisted.web.resource import Resource
from twisted.internet import reactor
class Root(Resource):
isLeaf = True
def getChild(self, name, request):
return self
def render(self, request):
total = _getarg(request, 'total', 100, int)
show = _getarg(request, 'show', 10, int)
nlist = [random.randint(1, total) for _ in range(show)]
request.write("<html><head></head><body>")
args = request.args.copy()
for nl in nlist:
args['n'] = nl
argstr = urlencode(args, doseq=True)
request.write("<a href='/follow?{0}'>follow {1}</a><br>"
.format(argstr, nl))
request.write("</body></html>")
return ''
def _getarg(request, name, default=None, type=str):
return type(request.args[name][0]) \
if name in request.args else default
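# For example, a request to /follow?total=50 makes
# _getarg(request, 'total', 100, int) return 50; without the query
# parameter it falls back to the default of 100.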
if __name__ == '__main__':
root = Root()
factory = Site(root)
httpPort = reactor.listenTCP(8998, Site(root))
def _print_listening():
httpHost = httpPort.getHost()
print("Bench server at http://{}:{}".format(httpHost.host, httpHost.port))
reactor.callWhenRunning(_print_listening)
reactor.run()
| bsd-3-clause |
cylc/cylc | tests/unit/main_loop/main_loop.py | 2 | 6730 | import asyncio
from collections import deque
from functools import partial
import logging
from time import sleep
import pytest
from cylc.flow import CYLC_LOG
from cylc.flow.exceptions import CylcError
from cylc.flow.main_loop import (
CoroTypes,
MainLoopPluginException,
_wrapper,
get_runners,
load,
)
from cylc.flow.main_loop.health_check import health_check as hc_during
def test_load_plugins_blank():
"""Test that log_plugins works when no plugins are requested."""
conf = {
'plugins': []
}
assert load(conf) == {
'config': conf,
'state': {},
'timings': {}
}
def test_load_plugins():
"""Test the loading of a built-in plugin."""
conf = {
'plugins': ['health check'],
'health check': {
'interval': 1234
}
}
assert load(conf) == {
CoroTypes.Periodic: {
('health check', 'health_check'): hc_during
},
'state': {
'health check': {
}
},
'config': conf,
'timings': {
('health check', 'health_check'): deque([], maxlen=1)
}
}
def test_wrapper_calls_function():
"""Ensure the wrapper calls coroutines."""
flag = False
async def test_coro(arg1, arg2):
assert arg1 == 'arg1'
assert arg2 == 'arg2'
nonlocal flag
flag = True
coro = _wrapper(
test_coro,
'arg1',
'arg2'
)
asyncio.run(coro)
assert flag
def test_wrapper_logging(caplog):
"""Ensure the wrapper logs each coroutine call."""
async def test_coro(*_):
pass
coro = _wrapper(
test_coro,
None,
None
)
with caplog.at_level(logging.DEBUG, logger=CYLC_LOG):
asyncio.run(coro)
assert len(caplog.record_tuples) == 2
(
(run_log, run_level, run_msg),
(end_log, end_level, end_msg)
) = caplog.record_tuples
# we should have two messages, one sent before and one after
# the function
assert 'run' in run_msg
assert 'end' in end_msg
# both should contain the name of the function
assert 'test_coro' in run_msg
assert 'test_coro' in end_msg
# and should be sent to the cylc logger at the debug level
assert run_log == end_log == CYLC_LOG
assert run_level == end_level == logging.DEBUG
def test_wrapper_catches_exceptions(caplog):
"""Ensure the wrapper catches Exception instances and logs them."""
async def test_coro(*_):
raise Exception('foo')
coro = _wrapper(
test_coro,
None,
None
)
with caplog.at_level(logging.DEBUG, logger=CYLC_LOG):
asyncio.run(coro)
assert len(caplog.record_tuples) == 4
run, error, traceback, completed = caplog.record_tuples
assert 'run' in run[2]
assert error[1] == logging.ERROR
assert traceback[1] == logging.ERROR
assert 'foo' in traceback[2]
assert completed[1] == logging.DEBUG
def test_wrapper_passes_cylc_error():
"""Ensure the wrapper does not catch CylcError instances."""
async def test_coro(*_):
raise CylcError('foo')
coro = _wrapper(
test_coro,
None,
None
)
with pytest.raises(MainLoopPluginException):
asyncio.run(coro)
@pytest.fixture
def basic_plugins():
calls = []
def capture(*args):
nonlocal calls
calls.append(args)
plugins = {
'config': {
'periodic plugin': {
'interval': 10
}
},
'timings': {
('periodic plugin', 'periodic_coro'): [],
('startup plugin', 'startup_coro'): [],
},
'state': {
'periodic plugin': {
'a': 1
},
'startup plugin': {
'b': 2
}
},
CoroTypes.Periodic: {
('periodic plugin', 'periodic_coro'): capture
},
CoroTypes.StartUp: {
('startup plugin', 'startup_coro'): capture
}
}
return (plugins, calls, capture)
def test_get_runners_startup(basic_plugins):
"""IT should return runners for startup functions."""
plugins, calls, capture = basic_plugins
runners = get_runners(
plugins,
CoroTypes.StartUp,
'scheduler object'
)
assert len(runners) == 1
asyncio.run(runners[0])
assert calls == [('scheduler object', {'b': 2})]
def test_get_runners_periodic(basic_plugins):
"""It should return runners for periodic functions."""
plugins, calls, capture = basic_plugins
runners = get_runners(
plugins,
CoroTypes.Periodic,
'scheduler object'
)
assert len(runners) == 1
asyncio.run(runners[0])
assert calls == [('scheduler object', {'a': 1})]
def test_get_runners_periodic_debounce(basic_plugins):
"""It should run periodic functions based on the configured interval."""
plugins, calls, capture = basic_plugins
# we should start with a blank timings object
assert len(plugins['timings'][('periodic plugin', 'periodic_coro')]) == 0
runners = get_runners(
plugins,
CoroTypes.Periodic,
'scheduler object'
)
assert len(runners) == 1
asyncio.run(runners[0])
assert calls == [('scheduler object', {'a': 1})]
# the timings object should now contain the previous run
assert len(plugins['timings'][('periodic plugin', 'periodic_coro')]) == 1
# the next run should be skipped because of the interval
runners = get_runners(
plugins,
CoroTypes.Periodic,
'scheduler object'
)
assert len(runners) == 0
# if we remove the interval the next run will not get skipped
plugins['config']['periodic plugin']['interval'] = 0
runners = get_runners(
plugins,
CoroTypes.Periodic,
'scheduler object'
)
assert len(runners) == 1
assert calls[-1] == ('scheduler object', {'a': 1})
def test_state(basic_plugins):
"""It should pass the same state object with each function call.
* Run the same plugin function twice.
* Ensure that the state object received by each call is the same object.
"""
plugins, calls, capture = basic_plugins
runners = get_runners(
plugins,
CoroTypes.StartUp,
'scheduler object'
)
assert len(runners) == 1
asyncio.run(*runners)
assert len(calls) == 1
runners = get_runners(
plugins,
CoroTypes.StartUp,
'scheduler object'
)
assert len(runners) == 1
asyncio.run(*runners)
assert len(calls) == 2
(_, state1), (_, state2) = calls
assert id(state1) == id(state2)
| gpl-3.0 |
rwillmer/django | django/db/backends/postgresql_psycopg2/schema.py | 84 | 3891 | import psycopg2
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s USING %(column)s::%(type)s"
sql_create_sequence = "CREATE SEQUENCE %(sequence)s"
sql_delete_sequence = "DROP SEQUENCE IF EXISTS %(sequence)s CASCADE"
sql_set_sequence_max = "SELECT setval('%(sequence)s', MAX(%(column)s)) FROM %(table)s"
sql_create_varchar_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s varchar_pattern_ops)%(extra)s"
sql_create_text_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s text_pattern_ops)%(extra)s"
def quote_value(self, value):
return psycopg2.extensions.adapt(value)
def _model_indexes_sql(self, model):
output = super(DatabaseSchemaEditor, self)._model_indexes_sql(model)
if not model._meta.managed or model._meta.proxy or model._meta.swapped:
return output
for field in model._meta.local_fields:
db_type = field.db_type(connection=self.connection)
if db_type is not None and (field.db_index or field.unique):
# Fields with database column types of `varchar` and `text` need
# a second index that specifies their operator class, which is
# needed when performing correct LIKE queries outside the
# C locale. See #12234.
if db_type.startswith('varchar'):
output.append(self._create_index_sql(
model, [field], suffix='_like', sql=self.sql_create_varchar_index))
elif db_type.startswith('text'):
output.append(self._create_index_sql(
model, [field], suffix='_like', sql=self.sql_create_text_index))
return output
def _alter_column_type_sql(self, table, old_field, new_field, new_type):
"""
Makes ALTER TYPE with SERIAL make sense.
"""
if new_type.lower() == "serial":
column = new_field.column
sequence_name = "%s_%s_seq" % (table, column)
return (
(
self.sql_alter_column_type % {
"column": self.quote_name(column),
"type": "integer",
},
[],
),
[
(
self.sql_delete_sequence % {
"sequence": self.quote_name(sequence_name),
},
[],
),
(
self.sql_create_sequence % {
"sequence": self.quote_name(sequence_name),
},
[],
),
(
self.sql_alter_column % {
"table": self.quote_name(table),
"changes": self.sql_alter_column_default % {
"column": self.quote_name(column),
"default": "nextval('%s')" % self.quote_name(sequence_name),
}
},
[],
),
(
self.sql_set_sequence_max % {
"table": self.quote_name(table),
"column": self.quote_name(column),
"sequence": self.quote_name(sequence_name),
},
[],
),
],
)
else:
return super(DatabaseSchemaEditor, self)._alter_column_type_sql(
table, old_field, new_field, new_type
)
| bsd-3-clause |
mcardillo55/django | django/http/request.py | 50 | 19501 | from __future__ import unicode_literals
import copy
import re
import sys
from io import BytesIO
from itertools import chain
from django.conf import settings
from django.core import signing
from django.core.exceptions import DisallowedHost, ImproperlyConfigured
from django.core.files import uploadhandler
from django.http.multipartparser import MultiPartParser, MultiPartParserError
from django.utils import six
from django.utils.datastructures import ImmutableList, MultiValueDict
from django.utils.encoding import (
escape_uri_path, force_bytes, force_str, force_text, iri_to_uri,
)
from django.utils.six.moves.urllib.parse import (
parse_qsl, quote, urlencode, urljoin, urlsplit,
)
RAISE_ERROR = object()
host_validation_re = re.compile(r"^([a-z0-9.-]+|\[[a-f0-9]*:[a-f0-9:]+\])(:\d+)?$")
class UnreadablePostError(IOError):
pass
class RawPostDataException(Exception):
"""
You cannot access raw_post_data from a request that has
multipart/* POST data if it has been accessed via POST,
FILES, etc.
"""
pass
class HttpRequest(object):
"""A basic HTTP request."""
# The encoding used in GET/POST dicts. None means use default setting.
_encoding = None
_upload_handlers = []
def __init__(self):
# WARNING: The `WSGIRequest` subclass doesn't call `super`.
# Any variable assignment made here should also happen in
# `WSGIRequest.__init__()`.
self.GET = QueryDict(mutable=True)
self.POST = QueryDict(mutable=True)
self.COOKIES = {}
self.META = {}
self.FILES = MultiValueDict()
self.path = ''
self.path_info = ''
self.method = None
self.resolver_match = None
self._post_parse_error = False
def __repr__(self):
if self.method is None or not self.get_full_path():
return force_str('<%s>' % self.__class__.__name__)
return force_str(
'<%s: %s %r>' % (self.__class__.__name__, self.method, force_str(self.get_full_path()))
)
def get_host(self):
"""Returns the HTTP host using the environment or request headers."""
# We try three options, in order of decreasing preference.
if settings.USE_X_FORWARDED_HOST and (
'HTTP_X_FORWARDED_HOST' in self.META):
host = self.META['HTTP_X_FORWARDED_HOST']
elif 'HTTP_HOST' in self.META:
host = self.META['HTTP_HOST']
else:
# Reconstruct the host using the algorithm from PEP 333.
host = self.META['SERVER_NAME']
server_port = str(self.META['SERVER_PORT'])
if server_port != ('443' if self.is_secure() else '80'):
host = '%s:%s' % (host, server_port)
# There is no hostname validation when DEBUG=True
if settings.DEBUG:
return host
domain, port = split_domain_port(host)
if domain and validate_host(domain, settings.ALLOWED_HOSTS):
return host
else:
msg = "Invalid HTTP_HOST header: %r." % host
if domain:
msg += " You may need to add %r to ALLOWED_HOSTS." % domain
else:
msg += " The domain name provided is not valid according to RFC 1034/1035."
raise DisallowedHost(msg)
def get_full_path(self, force_append_slash=False):
# RFC 3986 requires query string arguments to be in the ASCII range.
# Rather than crash if this doesn't happen, we encode defensively.
return '%s%s%s' % (
escape_uri_path(self.path),
'/' if force_append_slash and not self.path.endswith('/') else '',
('?' + iri_to_uri(self.META.get('QUERY_STRING', ''))) if self.META.get('QUERY_STRING', '') else ''
)
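# Illustrative values (not from the original source): a path of '/café/' with
# QUERY_STRING 'q=1' yields '/caf%C3%A9/?q=1'; escape_uri_path() and
# iri_to_uri() perform the percent-encoding.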
def get_signed_cookie(self, key, default=RAISE_ERROR, salt='', max_age=None):
"""
Attempts to return a signed cookie. If the signature fails or the
cookie has expired, raises an exception... unless you provide the
default argument in which case that value will be returned instead.
"""
try:
cookie_value = self.COOKIES[key]
except KeyError:
if default is not RAISE_ERROR:
return default
else:
raise
try:
value = signing.get_cookie_signer(salt=key + salt).unsign(
cookie_value, max_age=max_age)
except signing.BadSignature:
if default is not RAISE_ERROR:
return default
else:
raise
return value
def build_absolute_uri(self, location=None):
"""
Builds an absolute URI from the location and the variables available in
this request. If no ``location`` is specified, the absolute URI is
built on ``request.get_full_path()``. Anyway, if the location is
absolute, it is simply converted to an RFC 3987 compliant URI and
returned and if location is relative or is scheme-relative (i.e.,
``//example.com/``), it is urljoined to a base URL constructed from the
request variables.
"""
if location is None:
# Make it an absolute url (but schemeless and domainless) for the
# edge case that the path starts with '//'.
location = '//%s' % self.get_full_path()
bits = urlsplit(location)
if not (bits.scheme and bits.netloc):
current_uri = '{scheme}://{host}{path}'.format(scheme=self.scheme,
host=self.get_host(),
path=self.path)
# Join the constructed URL with the provided location, which will
# allow the provided ``location`` to apply query strings to the
# base path as well as override the host, if it begins with //
location = urljoin(current_uri, location)
return iri_to_uri(location)
def _get_scheme(self):
"""
Hook for subclasses like WSGIRequest to implement. Returns 'http' by
default.
"""
return 'http'
@property
def scheme(self):
if settings.SECURE_PROXY_SSL_HEADER:
try:
header, value = settings.SECURE_PROXY_SSL_HEADER
except ValueError:
raise ImproperlyConfigured(
'The SECURE_PROXY_SSL_HEADER setting must be a tuple containing two values.'
)
if self.META.get(header) == value:
return 'https'
return self._get_scheme()
def is_secure(self):
return self.scheme == 'https'
def is_ajax(self):
return self.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest'
@property
def encoding(self):
return self._encoding
@encoding.setter
def encoding(self, val):
"""
Sets the encoding used for GET/POST accesses. If the GET or POST
dictionary has already been created, it is removed and recreated on the
next access (so that it is decoded correctly).
"""
self._encoding = val
if hasattr(self, '_get'):
del self._get
if hasattr(self, '_post'):
del self._post
def _initialize_handlers(self):
self._upload_handlers = [uploadhandler.load_handler(handler, self)
for handler in settings.FILE_UPLOAD_HANDLERS]
@property
def upload_handlers(self):
if not self._upload_handlers:
# If there are no upload handlers defined, initialize them from settings.
self._initialize_handlers()
return self._upload_handlers
@upload_handlers.setter
def upload_handlers(self, upload_handlers):
if hasattr(self, '_files'):
raise AttributeError("You cannot set the upload handlers after the upload has been processed.")
self._upload_handlers = upload_handlers
def parse_file_upload(self, META, post_data):
"""Returns a tuple of (POST QueryDict, FILES MultiValueDict)."""
self.upload_handlers = ImmutableList(
self.upload_handlers,
warning="You cannot alter upload handlers after the upload has been processed."
)
parser = MultiPartParser(META, post_data, self.upload_handlers, self.encoding)
return parser.parse()
@property
def body(self):
if not hasattr(self, '_body'):
if self._read_started:
raise RawPostDataException("You cannot access body after reading from request's data stream")
try:
self._body = self.read()
except IOError as e:
six.reraise(UnreadablePostError, UnreadablePostError(*e.args), sys.exc_info()[2])
self._stream = BytesIO(self._body)
return self._body
def _mark_post_parse_error(self):
self._post = QueryDict('')
self._files = MultiValueDict()
self._post_parse_error = True
def _load_post_and_files(self):
"""Populate self._post and self._files if the content-type is a form type"""
if self.method != 'POST':
self._post, self._files = QueryDict('', encoding=self._encoding), MultiValueDict()
return
if self._read_started and not hasattr(self, '_body'):
self._mark_post_parse_error()
return
if self.META.get('CONTENT_TYPE', '').startswith('multipart/form-data'):
if hasattr(self, '_body'):
# Use already read data
data = BytesIO(self._body)
else:
data = self
try:
self._post, self._files = self.parse_file_upload(self.META, data)
except MultiPartParserError:
# An error occurred while parsing POST data. Since when
# formatting the error the request handler might access
# self.POST, set self._post and self._file to prevent
# attempts to parse POST data again.
# Mark that an error occurred. This allows self.__repr__ to
# be explicit about it instead of simply representing an
# empty POST
self._mark_post_parse_error()
raise
elif self.META.get('CONTENT_TYPE', '').startswith('application/x-www-form-urlencoded'):
self._post, self._files = QueryDict(self.body, encoding=self._encoding), MultiValueDict()
else:
self._post, self._files = QueryDict('', encoding=self._encoding), MultiValueDict()
def close(self):
if hasattr(self, '_files'):
for f in chain.from_iterable(l[1] for l in self._files.lists()):
f.close()
# File-like and iterator interface.
#
# Expects self._stream to be set to an appropriate source of bytes by
# a corresponding request subclass (e.g. WSGIRequest).
# Also when request data has already been read by request.POST or
# request.body, self._stream points to a BytesIO instance
# containing that data.
def read(self, *args, **kwargs):
self._read_started = True
try:
return self._stream.read(*args, **kwargs)
except IOError as e:
six.reraise(UnreadablePostError, UnreadablePostError(*e.args), sys.exc_info()[2])
def readline(self, *args, **kwargs):
self._read_started = True
try:
return self._stream.readline(*args, **kwargs)
except IOError as e:
six.reraise(UnreadablePostError, UnreadablePostError(*e.args), sys.exc_info()[2])
def xreadlines(self):
while True:
buf = self.readline()
if not buf:
break
yield buf
__iter__ = xreadlines
def readlines(self):
return list(iter(self))
class QueryDict(MultiValueDict):
"""
A specialized MultiValueDict which represents a query string.
A QueryDict can be used to represent GET or POST data. It subclasses
MultiValueDict since keys in such data can be repeated, for instance
in the data from a form with a <select multiple> field.
By default QueryDicts are immutable, though the copy() method
will always return a mutable copy.
Both keys and values set on this class are converted from the given encoding
(DEFAULT_CHARSET by default) to unicode.
"""
# These are both reset in __init__, but are specified here at the class
# level so that unpickling will have valid values
_mutable = True
_encoding = None
def __init__(self, query_string=None, mutable=False, encoding=None):
super(QueryDict, self).__init__()
if not encoding:
encoding = settings.DEFAULT_CHARSET
self.encoding = encoding
if six.PY3:
if isinstance(query_string, bytes):
# query_string normally contains URL-encoded data, a subset of ASCII.
try:
query_string = query_string.decode(encoding)
except UnicodeDecodeError:
# ... but some user agents are misbehaving :-(
query_string = query_string.decode('iso-8859-1')
for key, value in parse_qsl(query_string or '',
keep_blank_values=True,
encoding=encoding):
self.appendlist(key, value)
else:
for key, value in parse_qsl(query_string or '',
keep_blank_values=True):
try:
value = value.decode(encoding)
except UnicodeDecodeError:
value = value.decode('iso-8859-1')
self.appendlist(force_text(key, encoding, errors='replace'),
value)
self._mutable = mutable
@property
def encoding(self):
if self._encoding is None:
self._encoding = settings.DEFAULT_CHARSET
return self._encoding
@encoding.setter
def encoding(self, value):
self._encoding = value
def _assert_mutable(self):
if not self._mutable:
raise AttributeError("This QueryDict instance is immutable")
def __setitem__(self, key, value):
self._assert_mutable()
key = bytes_to_text(key, self.encoding)
value = bytes_to_text(value, self.encoding)
super(QueryDict, self).__setitem__(key, value)
def __delitem__(self, key):
self._assert_mutable()
super(QueryDict, self).__delitem__(key)
def __copy__(self):
result = self.__class__('', mutable=True, encoding=self.encoding)
for key, value in six.iterlists(self):
result.setlist(key, value)
return result
def __deepcopy__(self, memo):
result = self.__class__('', mutable=True, encoding=self.encoding)
memo[id(self)] = result
for key, value in six.iterlists(self):
result.setlist(copy.deepcopy(key, memo), copy.deepcopy(value, memo))
return result
def setlist(self, key, list_):
self._assert_mutable()
key = bytes_to_text(key, self.encoding)
list_ = [bytes_to_text(elt, self.encoding) for elt in list_]
super(QueryDict, self).setlist(key, list_)
def setlistdefault(self, key, default_list=None):
self._assert_mutable()
return super(QueryDict, self).setlistdefault(key, default_list)
def appendlist(self, key, value):
self._assert_mutable()
key = bytes_to_text(key, self.encoding)
value = bytes_to_text(value, self.encoding)
super(QueryDict, self).appendlist(key, value)
def pop(self, key, *args):
self._assert_mutable()
return super(QueryDict, self).pop(key, *args)
def popitem(self):
self._assert_mutable()
return super(QueryDict, self).popitem()
def clear(self):
self._assert_mutable()
super(QueryDict, self).clear()
def setdefault(self, key, default=None):
self._assert_mutable()
key = bytes_to_text(key, self.encoding)
default = bytes_to_text(default, self.encoding)
return super(QueryDict, self).setdefault(key, default)
def copy(self):
"""Returns a mutable copy of this object."""
return self.__deepcopy__({})
def urlencode(self, safe=None):
"""
Returns an encoded string of all query string arguments.
:arg safe: Used to specify characters which do not require quoting, for
example::
>>> q = QueryDict('', mutable=True)
>>> q['next'] = '/a&b/'
>>> q.urlencode()
'next=%2Fa%26b%2F'
>>> q.urlencode(safe='/')
'next=/a%26b/'
"""
output = []
if safe:
safe = force_bytes(safe, self.encoding)
encode = lambda k, v: '%s=%s' % ((quote(k, safe), quote(v, safe)))
else:
encode = lambda k, v: urlencode({k: v})
for k, list_ in self.lists():
k = force_bytes(k, self.encoding)
output.extend(encode(k, force_bytes(v, self.encoding))
for v in list_)
return '&'.join(output)
# It's neither necessary nor appropriate to use
# django.utils.encoding.smart_text for parsing URLs and form inputs. Thus,
# this slightly more restricted function, used by QueryDict.
def bytes_to_text(s, encoding):
"""
Converts basestring objects to unicode, using the given encoding. Illegally
encoded input characters are replaced with Unicode "unknown" codepoint
(\ufffd).
Returns any non-basestring objects without change.
"""
if isinstance(s, bytes):
return six.text_type(s, encoding, 'replace')
else:
return s
def split_domain_port(host):
"""
Return a (domain, port) tuple from a given host.
Returned domain is lower-cased. If the host is invalid, the domain will be
empty.
"""
host = host.lower()
if not host_validation_re.match(host):
return '', ''
if host[-1] == ']':
# It's an IPv6 address without a port.
return host, ''
bits = host.rsplit(':', 1)
if len(bits) == 2:
return tuple(bits)
return bits[0], ''
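# Illustrative behaviour: split_domain_port('example.com:8000') returns
# ('example.com', '8000'); split_domain_port('[2001:db8::1]') returns
# ('[2001:db8::1]', ''); anything failing host_validation_re returns ('', '').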
def validate_host(host, allowed_hosts):
"""
Validate the given host for this site.
Check that the host looks valid and matches a host or host pattern in the
given list of ``allowed_hosts``. Any pattern beginning with a period
matches a domain and all its subdomains (e.g. ``.example.com`` matches
``example.com`` and any subdomain), ``*`` matches anything, and anything
else must match exactly.
Note: This function assumes that the given host is lower-cased and has
already had the port, if any, stripped off.
Return ``True`` for a valid host, ``False`` otherwise.
"""
host = host[:-1] if host.endswith('.') else host
for pattern in allowed_hosts:
pattern = pattern.lower()
match = (
pattern == '*' or
pattern.startswith('.') and (
host.endswith(pattern) or host == pattern[1:]
) or
pattern == host
)
if match:
return True
return False
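# Illustrative behaviour: validate_host('sub.example.com', ['.example.com']) and
# validate_host('example.com', ['.example.com']) both return True;
# validate_host('evil.com', ['.example.com']) returns False, and a pattern list
# of ['*'] accepts any host.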
| bsd-3-clause |
ifuding/Kaggle | ADDC/Code/BarisKanber.py | 3 | 14070 | """
A non-blending LightGBM model that incorporates portions and ideas from various public kernels.
This kernel gives LB 0.977 when the parameter 'debug' below is set to 0, but this implementation requires a machine with ~32 GB of memory.
"""
import pandas as pd
import time
import numpy as np
from sklearn.cross_validation import train_test_split
import lightgbm as lgb
import gc
import matplotlib.pyplot as plt
import os
debug=1
if debug:
print('*** debug parameter set: this is a test run for debugging purposes ***')
def lgb_modelfit_nocv(params, dtrain, dvalid, predictors, target='target', objective='binary', metrics='auc',
feval=None, early_stopping_rounds=20, num_boost_round=3000, verbose_eval=10, categorical_features=None):
lgb_params = {
'boosting_type': 'gbdt',
'objective': objective,
'metric':metrics,
'learning_rate': 0.2,
#'is_unbalance': 'true', # because training data is unbalanced (replaced with scale_pos_weight)
'num_leaves': 31, # we should let it be smaller than 2^(max_depth)
'max_depth': -1, # -1 means no limit
'min_child_samples': 20, # Minimum number of data need in a child(min_data_in_leaf)
'max_bin': 255, # Number of bucketed bin for feature values
'subsample': 0.6, # Subsample ratio of the training instance.
'subsample_freq': 0, # frequence of subsample, <=0 means no enable
'colsample_bytree': 0.3, # Subsample ratio of columns when constructing each tree.
'min_child_weight': 5, # Minimum sum of instance weight(hessian) needed in a child(leaf)
'subsample_for_bin': 200000, # Number of samples for constructing bin
'min_split_gain': 0, # lambda_l1, lambda_l2 and min_gain_to_split to regularization
'reg_alpha': 0, # L1 regularization term on weights
'reg_lambda': 0, # L2 regularization term on weights
'nthread': 4,
'verbose': 0,
}
lgb_params.update(params)
print("preparing validation datasets")
xgtrain = lgb.Dataset(dtrain[predictors].values, label=dtrain[target].values,
feature_name=predictors,
categorical_feature=categorical_features
)
xgvalid = lgb.Dataset(dvalid[predictors].values, label=dvalid[target].values,
feature_name=predictors,
categorical_feature=categorical_features
)
evals_results = {}
bst1 = lgb.train(lgb_params,
xgtrain,
valid_sets=[xgtrain, xgvalid],
valid_names=['train','valid'],
evals_result=evals_results,
num_boost_round=num_boost_round,
early_stopping_rounds=early_stopping_rounds,
verbose_eval=10,
feval=feval)
print("\nModel Report")
print("bst1.best_iteration: ", bst1.best_iteration)
print(metrics+":", evals_results['valid'][metrics][bst1.best_iteration-1])
return (bst1,bst1.best_iteration)
def DO(frm,to,fileno):
dtypes = {
'ip' : 'uint32',
'app' : 'uint16',
'device' : 'uint16',
'os' : 'uint16',
'channel' : 'uint16',
'is_attributed' : 'uint8',
'click_id' : 'uint32',
}
print('loading train data...',frm,to)
train_df = pd.read_csv("../input/train.csv", parse_dates=['click_time'], skiprows=range(1,frm), nrows=to-frm, dtype=dtypes, usecols=['ip','app','device','os', 'channel', 'click_time', 'is_attributed'])
print('loading test data...')
if debug:
test_df = pd.read_csv("../input/test.csv", nrows=100000, parse_dates=['click_time'], dtype=dtypes, usecols=['ip','app','device','os', 'channel', 'click_time', 'click_id'])
else:
test_df = pd.read_csv("../input/test.csv", parse_dates=['click_time'], dtype=dtypes, usecols=['ip','app','device','os', 'channel', 'click_time', 'click_id'])
len_train = len(train_df)
train_df=train_df.append(test_df)
del test_df
gc.collect()
print('Extracting new features...')
train_df['hour'] = pd.to_datetime(train_df.click_time).dt.hour.astype('uint8')
train_df['day'] = pd.to_datetime(train_df.click_time).dt.day.astype('uint8')
gc.collect()
naddfeat=9
for i in range(0,naddfeat):
if i==0: selcols=['ip', 'channel']; QQ=4;
if i==1: selcols=['ip', 'device', 'os', 'app']; QQ=5;
if i==2: selcols=['ip', 'day', 'hour']; QQ=4;
if i==3: selcols=['ip', 'app']; QQ=4;
if i==4: selcols=['ip', 'app', 'os']; QQ=4;
if i==5: selcols=['ip', 'device']; QQ=4;
if i==6: selcols=['app', 'channel']; QQ=4;
if i==7: selcols=['ip', 'os']; QQ=5;
if i==8: selcols=['ip', 'device', 'os', 'app']; QQ=4;
print('selcols',selcols,'QQ',QQ)
filename='X%d_%d_%d.csv'%(i,frm,to)
if os.path.exists(filename):
if QQ==5:
gp=pd.read_csv(filename,header=None)
train_df['X'+str(i)]=gp
else:
gp=pd.read_csv(filename)
train_df = train_df.merge(gp, on=selcols[0:len(selcols)-1], how='left')
else:
if QQ==0:
gp = train_df[selcols].groupby(by=selcols[0:len(selcols)-1])[selcols[len(selcols)-1]].count().reset_index().\
rename(index=str, columns={selcols[len(selcols)-1]: 'X'+str(i)})
train_df = train_df.merge(gp, on=selcols[0:len(selcols)-1], how='left')
if QQ==1:
gp = train_df[selcols].groupby(by=selcols[0:len(selcols)-1])[selcols[len(selcols)-1]].mean().reset_index().\
rename(index=str, columns={selcols[len(selcols)-1]: 'X'+str(i)})
train_df = train_df.merge(gp, on=selcols[0:len(selcols)-1], how='left')
if QQ==2:
gp = train_df[selcols].groupby(by=selcols[0:len(selcols)-1])[selcols[len(selcols)-1]].var().reset_index().\
rename(index=str, columns={selcols[len(selcols)-1]: 'X'+str(i)})
train_df = train_df.merge(gp, on=selcols[0:len(selcols)-1], how='left')
if QQ==3:
gp = train_df[selcols].groupby(by=selcols[0:len(selcols)-1])[selcols[len(selcols)-1]].skew().reset_index().\
rename(index=str, columns={selcols[len(selcols)-1]: 'X'+str(i)})
train_df = train_df.merge(gp, on=selcols[0:len(selcols)-1], how='left')
if QQ==4:
gp = train_df[selcols].groupby(by=selcols[0:len(selcols)-1])[selcols[len(selcols)-1]].nunique().reset_index().\
rename(index=str, columns={selcols[len(selcols)-1]: 'X'+str(i)})
train_df = train_df.merge(gp, on=selcols[0:len(selcols)-1], how='left')
if QQ==5:
gp = train_df[selcols].groupby(by=selcols[0:len(selcols)-1])[selcols[len(selcols)-1]].cumcount()
train_df['X'+str(i)]=gp.values
if not debug:
gp.to_csv(filename,index=False)
del gp
gc.collect()
print('doing nextClick')
predictors=[]
new_feature = 'nextClick'
filename='nextClick_%d_%d.csv'%(frm,to)
if os.path.exists(filename):
print('loading from save file')
QQ=pd.read_csv(filename).values
else:
D=2**26
train_df['category'] = (train_df['ip'].astype(str) + "_" + train_df['app'].astype(str) + "_" + train_df['device'].astype(str) \
+ "_" + train_df['os'].astype(str)).apply(hash) % D
click_buffer= np.full(D, 3000000000, dtype=np.uint32)
train_df['epochtime']= train_df['click_time'].astype(np.int64) // 10 ** 9
next_clicks= []
for category, t in zip(reversed(train_df['category'].values), reversed(train_df['epochtime'].values)):
next_clicks.append(click_buffer[category]-t)
click_buffer[category]= t
del(click_buffer)
QQ= list(reversed(next_clicks))
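# The loop above walks clicks in reverse chronological order: click_buffer
# holds the epoch of the most recently seen (i.e. next-in-time) click per
# hashed (ip, app, device, os) bucket, so each appended value is the gap
# until that group's next click. The 3000000000 sentinel marks rows with no
# later click, and hash collisions within the 2**26 buckets can mix groups --
# an accepted approximation here.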
if not debug:
print('saving')
pd.DataFrame(QQ).to_csv(filename,index=False)
train_df[new_feature] = QQ
predictors.append(new_feature)
train_df[new_feature+'_shift'] = pd.DataFrame(QQ).shift(+1).values
predictors.append(new_feature+'_shift')
del QQ
gc.collect()
print('grouping by ip-day-hour combination...')
gp = train_df[['ip','day','hour','channel']].groupby(by=['ip','day','hour'])[['channel']].count().reset_index().rename(index=str, columns={'channel': 'ip_tcount'})
train_df = train_df.merge(gp, on=['ip','day','hour'], how='left')
del gp
gc.collect()
print('grouping by ip-app combination...')
gp = train_df[['ip', 'app', 'channel']].groupby(by=['ip', 'app'])[['channel']].count().reset_index().rename(index=str, columns={'channel': 'ip_app_count'})
train_df = train_df.merge(gp, on=['ip','app'], how='left')
del gp
gc.collect()
print('grouping by ip-app-os combination...')
gp = train_df[['ip','app', 'os', 'channel']].groupby(by=['ip', 'app', 'os'])[['channel']].count().reset_index().rename(index=str, columns={'channel': 'ip_app_os_count'})
train_df = train_df.merge(gp, on=['ip','app', 'os'], how='left')
del gp
gc.collect()
# Adding features with var and mean hour (inspired from nuhsikander's script)
print('grouping by : ip_day_chl_var_hour')
gp = train_df[['ip','day','hour','channel']].groupby(by=['ip','day','channel'])[['hour']].var().reset_index().rename(index=str, columns={'hour': 'ip_tchan_count'})
train_df = train_df.merge(gp, on=['ip','day','channel'], how='left')
del gp
gc.collect()
print('grouping by : ip_app_os_var_hour')
gp = train_df[['ip','app', 'os', 'hour']].groupby(by=['ip', 'app', 'os'])[['hour']].var().reset_index().rename(index=str, columns={'hour': 'ip_app_os_var'})
train_df = train_df.merge(gp, on=['ip','app', 'os'], how='left')
del gp
gc.collect()
print('grouping by : ip_app_channel_var_day')
gp = train_df[['ip','app', 'channel', 'day']].groupby(by=['ip', 'app', 'channel'])[['day']].var().reset_index().rename(index=str, columns={'day': 'ip_app_channel_var_day'})
train_df = train_df.merge(gp, on=['ip','app', 'channel'], how='left')
del gp
gc.collect()
print('grouping by : ip_app_chl_mean_hour')
gp = train_df[['ip','app', 'channel','hour']].groupby(by=['ip', 'app', 'channel'])[['hour']].mean().reset_index().rename(index=str, columns={'hour': 'ip_app_channel_mean_hour'})
print("merging...")
train_df = train_df.merge(gp, on=['ip','app', 'channel'], how='left')
del gp
gc.collect()
print("vars and data type: ")
train_df.info()
train_df['ip_tcount'] = train_df['ip_tcount'].astype('uint16')
train_df['ip_app_count'] = train_df['ip_app_count'].astype('uint16')
train_df['ip_app_os_count'] = train_df['ip_app_os_count'].astype('uint16')
target = 'is_attributed'
predictors.extend(['app','device','os', 'channel', 'hour', 'day',
'ip_tcount', 'ip_tchan_count', 'ip_app_count',
'ip_app_os_count', 'ip_app_os_var',
'ip_app_channel_var_day','ip_app_channel_mean_hour'])
categorical = ['app', 'device', 'os', 'channel', 'hour', 'day']
for i in range(0,naddfeat):
predictors.append('X'+str(i))
print('predictors',predictors)
test_df = train_df[len_train:]
val_df = train_df[(len_train-val_size):len_train]
train_df = train_df[:(len_train-val_size)]
print("train size: ", len(train_df))
print("valid size: ", len(val_df))
print("test size : ", len(test_df))
sub = pd.DataFrame()
sub['click_id'] = test_df['click_id'].astype('int')
gc.collect()
print("Training...")
start_time = time.time()
params = {
'learning_rate': 0.20,
#'is_unbalance': 'true', # replaced with scale_pos_weight argument
'num_leaves': 7, # 2^max_depth - 1
'max_depth': 3, # -1 means no limit
'min_child_samples': 100, # Minimum number of data need in a child(min_data_in_leaf)
'max_bin': 100, # Number of bucketed bin for feature values
'subsample': 0.7, # Subsample ratio of the training instance.
'subsample_freq': 1, # frequence of subsample, <=0 means no enable
'colsample_bytree': 0.9, # Subsample ratio of columns when constructing each tree.
'min_child_weight': 0, # Minimum sum of instance weight(hessian) needed in a child(leaf)
'scale_pos_weight':200 # because training data is extremely unbalanced
}
(bst,best_iteration) = lgb_modelfit_nocv(params,
train_df,
val_df,
predictors,
target,
objective='binary',
metrics='auc',
early_stopping_rounds=30,
verbose_eval=True,
num_boost_round=1000,
categorical_features=categorical)
print('[{}]: model training time'.format(time.time() - start_time))
del train_df
del val_df
gc.collect()
print('Plot feature importances...')
ax = lgb.plot_importance(bst, max_num_features=100)
plt.show()
print("Predicting...")
sub['is_attributed'] = bst.predict(test_df[predictors],num_iteration=best_iteration)
if not debug:
print("writing...")
sub.to_csv('sub_it%d.csv.gz'%(fileno),index=False,compression='gzip')
print("done...")
return sub
nrows=184903891-1
nchunk=40000000
val_size=2500000
frm=nrows-75000000
if debug:
frm=0
nchunk=100000
val_size=10000
to=frm+nchunk
sub=DO(frm,to,0)
| apache-2.0 |
DmitryADP/diff_qc750 | external/webkit/Tools/Scripts/webkitpy/tool/steps/applypatch.py | 15 | 2139 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from webkitpy.tool.steps.abstractstep import AbstractStep
from webkitpy.tool.steps.options import Options
from webkitpy.common.system.deprecated_logging import log
class ApplyPatch(AbstractStep):
@classmethod
def options(cls):
return AbstractStep.options() + [
Options.non_interactive,
Options.force_patch,
]
def run(self, state):
log("Processing patch %s from bug %s." % (state["patch"].id(), state["patch"].bug_id()))
self._tool.checkout().apply_patch(state["patch"], force=self._options.non_interactive or self._options.force_patch)
| gpl-2.0 |
alirizakeles/zato | code/zato-web-admin/src/zato/admin/static/brython/_brython/Lib/select.py | 9 | 9708 | """
borrowed from jython
https://bitbucket.org/jython/jython/raw/28a66ba038620292520470a0bb4dc9bb8ac2e403/Lib/select.py
"""
#import java.nio.channels.SelectableChannel
#import java.nio.channels.SelectionKey
#import java.nio.channels.Selector
#from java.nio.channels.SelectionKey import OP_ACCEPT, OP_CONNECT, OP_WRITE, OP_READ
import errno
import os
import queue
import socket
class error(Exception): pass
ALL = None
_exception_map = {
# (<javaexception>, <circumstance>) : lambda: <code that raises the python equivalent>
#(java.nio.channels.ClosedChannelException, ALL) : error(errno.ENOTCONN, 'Socket is not connected'),
#(java.nio.channels.CancelledKeyException, ALL) : error(errno.ENOTCONN, 'Socket is not connected'),
#(java.nio.channels.IllegalBlockingModeException, ALL) : error(errno.ESOCKISBLOCKING, 'socket must be in non-blocking mode'),
}
def _map_exception(exc, circumstance=ALL):
try:
mapped_exception = _exception_map[(exc.__class__, circumstance)]
mapped_exception.java_exception = exc
return mapped_exception
except KeyError:
return error(-1, 'Unmapped java exception: <%s:%s>' % (exc.toString(), circumstance))
POLLIN = 1
POLLOUT = 2
# The following event types are completely ignored on jython
# Java does not support them, AFAICT
# They are declared only to support code compatibility with cpython
POLLPRI = 4
POLLERR = 8
POLLHUP = 16
POLLNVAL = 32
def _getselectable(selectable_object):
try:
channel = selectable_object.getchannel()
except:
try:
channel = selectable_object.fileno().getChannel()
except:
raise TypeError("Object '%s' is not watchable" % selectable_object,
errno.ENOTSOCK)
if channel and not isinstance(channel, java.nio.channels.SelectableChannel):
raise TypeError("Object '%s' is not watchable" % selectable_object,
errno.ENOTSOCK)
return channel
class poll:
def __init__(self):
self.selector = java.nio.channels.Selector.open()
self.chanmap = {}
self.unconnected_sockets = []
def _register_channel(self, socket_object, channel, mask):
jmask = 0
if mask & POLLIN:
# Note that OP_READ is NOT a valid event on server socket channels.
if channel.validOps() & OP_ACCEPT:
jmask = OP_ACCEPT
else:
jmask = OP_READ
if mask & POLLOUT:
if channel.validOps() & OP_WRITE:
jmask |= OP_WRITE
if channel.validOps() & OP_CONNECT:
jmask |= OP_CONNECT
selectionkey = channel.register(self.selector, jmask)
self.chanmap[channel] = (socket_object, selectionkey)
def _check_unconnected_sockets(self):
temp_list = []
for socket_object, mask in self.unconnected_sockets:
channel = _getselectable(socket_object)
if channel is not None:
self._register_channel(socket_object, channel, mask)
else:
temp_list.append( (socket_object, mask) )
self.unconnected_sockets = temp_list
def register(self, socket_object, mask = POLLIN|POLLOUT|POLLPRI):
try:
channel = _getselectable(socket_object)
if channel is None:
# The socket is not yet connected, and thus has no channel
# Add it to a pending list, and return
self.unconnected_sockets.append( (socket_object, mask) )
return
self._register_channel(socket_object, channel, mask)
except BaseException as jlx:
#except java.lang.Exception, jlx:
raise _map_exception(jlx)
def unregister(self, socket_object):
try:
channel = _getselectable(socket_object)
self.chanmap[channel][1].cancel()
del self.chanmap[channel]
except BaseException as jlx:
#except java.lang.Exception, jlx:
raise _map_exception(jlx)
def _dopoll(self, timeout):
if timeout is None or timeout < 0:
self.selector.select()
else:
try:
timeout = int(timeout)
if not timeout:
self.selector.selectNow()
else:
# No multiplication required: both cpython and java use millisecond timeouts
self.selector.select(timeout)
except ValueError as vx:
raise error("poll timeout must be a number of milliseconds or None", errno.EINVAL)
# The returned selectedKeys cannot be used from multiple threads!
return self.selector.selectedKeys()
def poll(self, timeout=None):
try:
self._check_unconnected_sockets()
selectedkeys = self._dopoll(timeout)
results = []
for k in selectedkeys.iterator():
jmask = k.readyOps()
pymask = 0
if jmask & OP_READ: pymask |= POLLIN
if jmask & OP_WRITE: pymask |= POLLOUT
if jmask & OP_ACCEPT: pymask |= POLLIN
if jmask & OP_CONNECT: pymask |= POLLOUT
# Now return the original userobject, and the return event mask
results.append( (self.chanmap[k.channel()][0], pymask) )
return results
except BaseException as jlx:
#except java.lang.Exception, jlx:
raise _map_exception(jlx)
def _deregister_all(self):
try:
for k in self.selector.keys():
k.cancel()
# Keys are not actually removed from the selector until the next select operation.
self.selector.selectNow()
except BaseException as jlx:
#except java.lang.Exception, jlx:
raise _map_exception(jlx)
def close(self):
try:
self._deregister_all()
self.selector.close()
except BaseException as jlx:
#except java.lang.Exception, jlx:
raise _map_exception(jlx)
def _calcselecttimeoutvalue(value):
if value is None:
return None
try:
floatvalue = float(value)
except Exception as x:
raise TypeError("Select timeout value must be a number or None")
if floatvalue < 0:
raise error("Select timeout value cannot be negative", errno.EINVAL)
if floatvalue < 0.000001:
return 0
return int(floatvalue * 1000) # Convert to milliseconds
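# Illustrative conversions: None -> None (block indefinitely), 0.0000005 -> 0
# (poll and return immediately), 2.5 -> 2500 (milliseconds).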
# This cache for poll objects is required because of a bug in java on MS Windows
# http://bugs.jython.org/issue1291
class poll_object_cache:
def __init__(self):
self.is_windows = os.name == 'nt'
if self.is_windows:
self.poll_object_queue = queue.Queue()
import atexit
atexit.register(self.finalize)
def get_poll_object(self):
if not self.is_windows:
return poll()
try:
return self.poll_object_queue.get(False)
except queue.Empty:
return poll()
def release_poll_object(self, pobj):
if self.is_windows:
pobj._deregister_all()
self.poll_object_queue.put(pobj)
else:
pobj.close()
def finalize(self):
if self.is_windows:
while True:
try:
p = self.poll_object_queue.get(False)
p.close()
except queue.Empty:
return
_poll_object_cache = poll_object_cache()
def native_select(read_fd_list, write_fd_list, outofband_fd_list, timeout=None):
timeout = _calcselecttimeoutvalue(timeout)
# First create a poll object to do the actual watching.
pobj = _poll_object_cache.get_poll_object()
try:
registered_for_read = {}
# Check the read list
for fd in read_fd_list:
pobj.register(fd, POLLIN)
registered_for_read[fd] = 1
# And now the write list
for fd in write_fd_list:
if fd in registered_for_read:
# registering a second time overwrites the first
pobj.register(fd, POLLIN|POLLOUT)
else:
pobj.register(fd, POLLOUT)
results = pobj.poll(timeout)
# Now start preparing the results
read_ready_list, write_ready_list, oob_ready_list = [], [], []
for fd, mask in results:
if mask & POLLIN:
read_ready_list.append(fd)
if mask & POLLOUT:
write_ready_list.append(fd)
return read_ready_list, write_ready_list, oob_ready_list
finally:
_poll_object_cache.release_poll_object(pobj)
select = native_select
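# Minimal usage sketch (assumes a connected socket `s` and a Java-backed
# runtime where the channel APIs above are available):
#   p = poll()
#   p.register(s, POLLIN | POLLOUT)
#   for obj, events in p.poll(1000):  # timeout in milliseconds
#       if events & POLLIN:
#           data = obj.recv(4096)
#   p.close()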
def cpython_compatible_select(read_fd_list, write_fd_list, outofband_fd_list, timeout=None):
# First turn all sockets to non-blocking
# keeping track of which ones have changed
modified_channels = []
try:
for socket_list in [read_fd_list, write_fd_list, outofband_fd_list]:
for s in socket_list:
channel = _getselectable(s)
if channel.isBlocking():
modified_channels.append(channel)
channel.configureBlocking(0)
return native_select(read_fd_list, write_fd_list, outofband_fd_list, timeout)
finally:
for channel in modified_channels:
channel.configureBlocking(1)
| gpl-3.0 |
praneethkumarpidugu/matchmaking | lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py | 61 | 2040 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.primitives import hashes
@utils.register_interface(hashes.HashContext)
class _HashContext(object):
def __init__(self, backend, algorithm, ctx=None):
self._algorithm = algorithm
self._backend = backend
if ctx is None:
try:
methods = self._backend._hash_mapping[self.algorithm.name]
except KeyError:
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
ctx = self._backend._ffi.new(methods.ctx)
res = methods.hash_init(ctx)
assert res == 1
self._ctx = ctx
algorithm = utils.read_only_property("_algorithm")
def copy(self):
methods = self._backend._hash_mapping[self.algorithm.name]
new_ctx = self._backend._ffi.new(methods.ctx)
# CommonCrypto has no APIs for copying hashes, so we have to copy the
# underlying struct.
new_ctx[0] = self._ctx[0]
return _HashContext(self._backend, self.algorithm, ctx=new_ctx)
def update(self, data):
methods = self._backend._hash_mapping[self.algorithm.name]
res = methods.hash_update(self._ctx, data, len(data))
assert res == 1
def finalize(self):
methods = self._backend._hash_mapping[self.algorithm.name]
buf = self._backend._ffi.new("unsigned char[]",
self.algorithm.digest_size)
res = methods.hash_final(buf, self._ctx)
assert res == 1
return self._backend._ffi.buffer(buf)[:]
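# Hypothetical usage through a CommonCrypto backend instance (names assumed):
#   ctx = _HashContext(backend, hashes.SHA256())
#   ctx.update(b"data")
#   digest = ctx.finalize()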
| mit |
da1z/intellij-community | python/lib/Lib/site-packages/django/contrib/contenttypes/management.py | 315 | 2458 | from django.contrib.contenttypes.models import ContentType
from django.db.models import get_apps, get_models, signals
from django.utils.encoding import smart_unicode
def update_contenttypes(app, created_models, verbosity=2, **kwargs):
"""
Creates content types for models in the given app, removing any model
entries that no longer have a matching model class.
"""
ContentType.objects.clear_cache()
content_types = list(ContentType.objects.filter(app_label=app.__name__.split('.')[-2]))
app_models = get_models(app)
if not app_models:
return
for klass in app_models:
opts = klass._meta
try:
ct = ContentType.objects.get(app_label=opts.app_label,
model=opts.object_name.lower())
content_types.remove(ct)
except ContentType.DoesNotExist:
ct = ContentType(name=smart_unicode(opts.verbose_name_raw),
app_label=opts.app_label, model=opts.object_name.lower())
ct.save()
if verbosity >= 2:
print "Adding content type '%s | %s'" % (ct.app_label, ct.model)
# The presence of any remaining content types means the supplied app has an
# undefined model. Confirm that the content type is stale before deletion.
if content_types:
if kwargs.get('interactive', False):
content_type_display = '\n'.join([' %s | %s' % (ct.app_label, ct.model) for ct in content_types])
ok_to_delete = raw_input("""The following content types are stale and need to be deleted:
%s
Any objects related to these content types by a foreign key will also
be deleted. Are you sure you want to delete these content types?
If you're unsure, answer 'no'.
Type 'yes' to continue, or 'no' to cancel: """ % content_type_display)
else:
ok_to_delete = False
if ok_to_delete == 'yes':
for ct in content_types:
if verbosity >= 2:
print "Deleting stale content type '%s | %s'" % (ct.app_label, ct.model)
ct.delete()
else:
if verbosity >= 2:
print "Stale content types remain."
def update_all_contenttypes(verbosity=2, **kwargs):
for app in get_apps():
update_contenttypes(app, None, verbosity, **kwargs)
signals.post_syncdb.connect(update_contenttypes)
if __name__ == "__main__":
update_all_contenttypes()
| apache-2.0 |
koniiiik/django | tests/generic_views/views.py | 61 | 8443 | from __future__ import unicode_literals
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator
from django.urls import reverse, reverse_lazy
from django.utils.decorators import method_decorator
from django.views import generic
from .forms import AuthorForm, ContactForm
from .models import Artist, Author, Book, BookSigning, Page
class CustomTemplateView(generic.TemplateView):
template_name = 'generic_views/about.html'
def get_context_data(self, **kwargs):
context = super(CustomTemplateView, self).get_context_data(**kwargs)
context.update({'key': 'value'})
return context
class ObjectDetail(generic.DetailView):
template_name = 'generic_views/detail.html'
def get_object(self):
return {'foo': 'bar'}
class ArtistDetail(generic.DetailView):
queryset = Artist.objects.all()
class AuthorDetail(generic.DetailView):
queryset = Author.objects.all()
class AuthorCustomDetail(generic.DetailView):
template_name = 'generic_views/author_detail.html'
queryset = Author.objects.all()
def get(self, request, *args, **kwargs):
# Ensures get_context_object_name() doesn't reference self.object.
author = self.get_object()
context = {'custom_' + self.get_context_object_name(author): author}
return self.render_to_response(context)
class PageDetail(generic.DetailView):
queryset = Page.objects.all()
template_name_field = 'template'
class DictList(generic.ListView):
"""A ListView that doesn't use a model."""
queryset = [
{'first': 'John', 'last': 'Lennon'},
{'first': 'Yoko', 'last': 'Ono'}
]
template_name = 'generic_views/list.html'
class ArtistList(generic.ListView):
template_name = 'generic_views/list.html'
queryset = Artist.objects.all()
class AuthorList(generic.ListView):
queryset = Author.objects.all()
class BookList(generic.ListView):
model = Book
class CustomPaginator(Paginator):
def __init__(self, queryset, page_size, orphans=0, allow_empty_first_page=True):
super(CustomPaginator, self).__init__(
queryset,
page_size,
orphans=2,
allow_empty_first_page=allow_empty_first_page)
class AuthorListCustomPaginator(AuthorList):
paginate_by = 5
def get_paginator(self, queryset, page_size, orphans=0, allow_empty_first_page=True):
return super(AuthorListCustomPaginator, self).get_paginator(
queryset,
page_size,
orphans=2,
allow_empty_first_page=allow_empty_first_page)
class ContactView(generic.FormView):
form_class = ContactForm
success_url = reverse_lazy('authors_list')
template_name = 'generic_views/form.html'
class ArtistCreate(generic.CreateView):
model = Artist
fields = '__all__'
class NaiveAuthorCreate(generic.CreateView):
queryset = Author.objects.all()
fields = '__all__'
class TemplateResponseWithoutTemplate(generic.detail.SingleObjectTemplateResponseMixin, generic.View):
# we don't define the usual template_name here
def __init__(self):
# Dummy object, but attr is required by get_template_names()
self.object = None
class AuthorCreate(generic.CreateView):
model = Author
success_url = '/list/authors/'
fields = '__all__'
class SpecializedAuthorCreate(generic.CreateView):
model = Author
form_class = AuthorForm
template_name = 'generic_views/form.html'
context_object_name = 'thingy'
def get_success_url(self):
return reverse('author_detail', args=[self.object.id])
class AuthorCreateRestricted(AuthorCreate):
post = method_decorator(login_required)(AuthorCreate.post)
class ArtistUpdate(generic.UpdateView):
model = Artist
fields = '__all__'
class NaiveAuthorUpdate(generic.UpdateView):
queryset = Author.objects.all()
fields = '__all__'
class AuthorUpdate(generic.UpdateView):
get_form_called_count = 0 # Used to ensure get_form() is called once.
model = Author
success_url = '/list/authors/'
fields = '__all__'
def get_form(self, *args, **kwargs):
self.get_form_called_count += 1
return super(AuthorUpdate, self).get_form(*args, **kwargs)
class OneAuthorUpdate(generic.UpdateView):
success_url = '/list/authors/'
fields = '__all__'
def get_object(self):
return Author.objects.get(pk=1)
class SpecializedAuthorUpdate(generic.UpdateView):
model = Author
form_class = AuthorForm
template_name = 'generic_views/form.html'
context_object_name = 'thingy'
def get_success_url(self):
return reverse('author_detail', args=[self.object.id])
class NaiveAuthorDelete(generic.DeleteView):
queryset = Author.objects.all()
class AuthorDelete(generic.DeleteView):
model = Author
success_url = '/list/authors/'
class SpecializedAuthorDelete(generic.DeleteView):
queryset = Author.objects.all()
template_name = 'generic_views/confirm_delete.html'
context_object_name = 'thingy'
success_url = reverse_lazy('authors_list')
class BookConfig(object):
queryset = Book.objects.all()
date_field = 'pubdate'
class BookArchive(BookConfig, generic.ArchiveIndexView):
pass
class BookYearArchive(BookConfig, generic.YearArchiveView):
pass
class BookMonthArchive(BookConfig, generic.MonthArchiveView):
pass
class BookWeekArchive(BookConfig, generic.WeekArchiveView):
pass
class BookDayArchive(BookConfig, generic.DayArchiveView):
pass
class BookTodayArchive(BookConfig, generic.TodayArchiveView):
pass
class BookDetail(BookConfig, generic.DateDetailView):
pass
class AuthorGetQuerySetFormView(generic.edit.ModelFormMixin):
fields = '__all__'
def get_queryset(self):
return Author.objects.all()
class BookDetailGetObjectCustomQueryset(BookDetail):
def get_object(self, queryset=None):
return super(BookDetailGetObjectCustomQueryset, self).get_object(
queryset=Book.objects.filter(pk=self.kwargs['pk']))
class CustomMultipleObjectMixinView(generic.list.MultipleObjectMixin, generic.View):
queryset = [
{'name': 'John'},
{'name': 'Yoko'},
]
def get(self, request):
self.object_list = self.get_queryset()
class CustomContextView(generic.detail.SingleObjectMixin, generic.View):
model = Book
object = Book(name='dummy')
def get_object(self):
return Book(name="dummy")
def get_context_data(self, **kwargs):
context = {'custom_key': 'custom_value'}
context.update(kwargs)
return super(CustomContextView, self).get_context_data(**context)
def get_context_object_name(self, obj):
return "test_name"
class CustomSingleObjectView(generic.detail.SingleObjectMixin, generic.View):
model = Book
object = Book(name="dummy")
class BookSigningConfig(object):
model = BookSigning
date_field = 'event_date'
# use the same templates as for books
def get_template_names(self):
return ['generic_views/book%s.html' % self.template_name_suffix]
class BookSigningArchive(BookSigningConfig, generic.ArchiveIndexView):
pass
class BookSigningYearArchive(BookSigningConfig, generic.YearArchiveView):
pass
class BookSigningMonthArchive(BookSigningConfig, generic.MonthArchiveView):
pass
class BookSigningWeekArchive(BookSigningConfig, generic.WeekArchiveView):
pass
class BookSigningDayArchive(BookSigningConfig, generic.DayArchiveView):
pass
class BookSigningTodayArchive(BookSigningConfig, generic.TodayArchiveView):
pass
class BookSigningDetail(BookSigningConfig, generic.DateDetailView):
context_object_name = 'book'
class NonModel(object):
id = "non_model_1"
_meta = None
class NonModelDetail(generic.DetailView):
template_name = 'generic_views/detail.html'
model = NonModel
def get_object(self, queryset=None):
return NonModel()
class ObjectDoesNotExistDetail(generic.DetailView):
def get_queryset(self):
return Book.does_not_exist.all()
class LateValidationView(generic.FormView):
form_class = ContactForm
success_url = reverse_lazy('authors_list')
template_name = 'generic_views/form.html'
def form_valid(self, form):
form.add_error(None, 'There is an error')
return self.form_invalid(form)
| bsd-3-clause |
sudheesh001/oh-mainline | vendor/packages/mechanize/test/test_pickle.py | 22 | 1042 | import cPickle
import cStringIO as StringIO
import pickle
import mechanize
import mechanize._response
import mechanize._testcase
def pickle_and_unpickle(obj, implementation):
return implementation.loads(implementation.dumps(obj))
def test_pickling(obj, check=lambda unpickled: None):
check(pickle_and_unpickle(obj, cPickle))
check(pickle_and_unpickle(obj, pickle))
class PickleTest(mechanize._testcase.TestCase):
def test_pickle_cookie(self):
cookiejar = mechanize.CookieJar()
url = "http://example.com/"
request = mechanize.Request(url)
response = mechanize._response.test_response(
headers=[("Set-Cookie", "spam=eggs")],
url=url)
[cookie] = cookiejar.make_cookies(response, request)
check_equality = lambda unpickled: self.assertEqual(unpickled, cookie)
test_pickling(cookie, check_equality)
def test_pickle_cookiejar(self):
test_pickling(mechanize.CookieJar())
if __name__ == "__main__":
mechanize._testcase.main()
| agpl-3.0 |
Nicop06/ansible | contrib/inventory/abiquo.py | 117 | 8834 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
External inventory script for Abiquo
====================================
Shamelessly copied from an existing inventory script.
This script generates an inventory that Ansible can understand by making API requests to the Abiquo API.
Requires some Python libraries; ensure they are installed before using this script.
This script has been tested with Abiquo 3.0 but may also work with Abiquo 2.6.
Before using this script you may want to modify the abiquo.ini config file.
This script generates an Ansible hosts file with these host groups:
ABQ_xxx: Defines a host by its Abiquo VM name label
all: Contains all hosts defined in the Abiquo user's enterprise
virtualdatacenter: Creates a host group for each virtualdatacenter containing all hosts defined on it
virtualappliance: Creates a host group for each virtualappliance containing all hosts defined on it
imagetemplate: Creates a host group for each image template containing all hosts using it
'''
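# Illustrative shape of the JSON inventory this script prints (group names
# and the address are made up, not taken from a real Abiquo deployment):
#
# {
#     "all": {"hosts": [], "children": ["vm01"]},
#     "myVDC": {"hosts": [], "children": ["vm01"]},
#     "myVApp": {"hosts": [], "children": ["vm01"]},
#     "myTemplate": {"hosts": [], "children": ["vm01"]},
#     "vm01": ["192.0.2.10"],
#     "_meta": {"hostvars": {"192.0.2.10": {}}}
# }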
# (c) 2014, Daniel Beneyto <daniel.beneyto@abiquo.com>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import time
import ConfigParser
try:
import json
except ImportError:
import simplejson as json
from ansible.module_utils.urls import open_url
def api_get(link, config):
try:
if link is None:
url = config.get('api', 'uri') + config.get('api', 'login_path')
headers = {"Accept": config.get('api', 'login_type')}
else:
url = link['href'] + '?limit=0'
headers = {"Accept": link['type']}
result = open_url(url, headers=headers, url_username=config.get('auth', 'apiuser').replace('\n', ''),
url_password=config.get('auth', 'apipass').replace('\n', ''))
return json.loads(result.read())
    except Exception:
        return None
def save_cache(data, config):
''' saves item to cache '''
dpath = config.get('cache', 'cache_dir')
try:
cache = open('/'.join([dpath, 'inventory']), 'w')
cache.write(json.dumps(data))
cache.close()
except IOError as e:
        pass  # ignore cache write errors; the inventory is regenerated on the next run
def get_cache(cache_item, config):
''' returns cached item '''
dpath = config.get('cache', 'cache_dir')
inv = {}
try:
cache = open('/'.join([dpath, 'inventory']), 'r')
inv = cache.read()
cache.close()
except IOError as e:
        pass  # ignore cache read errors; an empty inventory is returned instead
return inv
def cache_available(config):
''' checks if we have a 'fresh' cache available for item requested '''
if config.has_option('cache', 'cache_dir'):
dpath = config.get('cache', 'cache_dir')
try:
existing = os.stat('/'.join([dpath, 'inventory']))
        except OSError:
# cache doesn't exist or isn't accessible
return False
if config.has_option('cache', 'cache_max_age'):
maxage = config.get('cache', 'cache_max_age')
if (int(time.time()) - int(existing.st_mtime)) <= int(maxage):
return True
return False
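# A minimal abiquo.ini sketch for the cache logic above (the section and
# option names are the ones this script reads; the values are illustrative):
#
# [cache]
# cache_dir = /tmp/ansible-abiquo
# cache_max_age = 600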
def generate_inv_from_api(enterprise_entity, config):
    inventory = {}  # build locally rather than relying on the module-level global
    try:
        inventory['all'] = {}
inventory['all']['children'] = []
inventory['all']['hosts'] = []
inventory['_meta'] = {}
inventory['_meta']['hostvars'] = {}
enterprise = api_get(enterprise_entity, config)
vms_entity = next(link for link in enterprise['links'] if link['rel'] == 'virtualmachines')
vms = api_get(vms_entity, config)
for vmcollection in vms['collection']:
for link in vmcollection['links']:
if link['rel'] == 'virtualappliance':
vm_vapp = link['title'].replace('[', '').replace(']', '').replace(' ', '_')
elif link['rel'] == 'virtualdatacenter':
vm_vdc = link['title'].replace('[', '').replace(']', '').replace(' ', '_')
elif link['rel'] == 'virtualmachinetemplate':
vm_template = link['title'].replace('[', '').replace(']', '').replace(' ', '_')
            # From abiquo.ini: only add VMs with a public IP to the inventory
if config.getboolean('defaults', 'public_ip_only') is True:
for link in vmcollection['links']:
if link['type'] == 'application/vnd.abiquo.publicip+json' and link['rel'] == 'ip':
vm_nic = link['title']
break
else:
vm_nic = None
# Otherwise, assigning defined network interface IP address
else:
for link in vmcollection['links']:
if link['rel'] == config.get('defaults', 'default_net_interface'):
vm_nic = link['title']
break
else:
vm_nic = None
vm_state = True
            # From abiquo.ini: only add deployed VMs to the inventory
if config.getboolean('defaults', 'deployed_only') is True and vmcollection['state'] == 'NOT_ALLOCATED':
vm_state = False
if vm_nic is not None and vm_state:
if vm_vapp not in inventory:
inventory[vm_vapp] = {}
inventory[vm_vapp]['children'] = []
inventory[vm_vapp]['hosts'] = []
if vm_vdc not in inventory:
inventory[vm_vdc] = {}
inventory[vm_vdc]['hosts'] = []
inventory[vm_vdc]['children'] = []
if vm_template not in inventory:
inventory[vm_template] = {}
inventory[vm_template]['children'] = []
inventory[vm_template]['hosts'] = []
if config.getboolean('defaults', 'get_metadata') is True:
meta_entity = next(link for link in vmcollection['links'] if link['rel'] == 'metadata')
try:
metadata = api_get(meta_entity, config)
if (config.getfloat("api", "version") >= 3.0):
vm_metadata = metadata['metadata']
else:
vm_metadata = metadata['metadata']['metadata']
inventory['_meta']['hostvars'][vm_nic] = vm_metadata
except Exception as e:
pass
inventory[vm_vapp]['children'].append(vmcollection['name'])
inventory[vm_vdc]['children'].append(vmcollection['name'])
inventory[vm_template]['children'].append(vmcollection['name'])
inventory['all']['children'].append(vmcollection['name'])
inventory[vmcollection['name']] = []
inventory[vmcollection['name']].append(vm_nic)
return inventory
except Exception as e:
# Return empty hosts output
return {'all': {'hosts': []}, '_meta': {'hostvars': {}}}
def get_inventory(enterprise, config):
''' Reads the inventory from cache or Abiquo api '''
    if cache_available(config):
        # the cache already holds a serialized JSON string, return it as-is
        return get_cache('inventory', config)
    default_group = os.path.splitext(os.path.basename(sys.argv[0]))[0]
    # MAKE ABIQUO API CALLS #
    inv = generate_inv_from_api(enterprise, config)
    save_cache(inv, config)
    return json.dumps(inv)
if __name__ == '__main__':
inventory = {}
enterprise = {}
# Read config
config = ConfigParser.SafeConfigParser()
    for configfilename in [os.path.splitext(os.path.abspath(sys.argv[0]))[0] + '.ini', 'abiquo.ini']:
if os.path.exists(configfilename):
config.read(configfilename)
break
try:
login = api_get(None, config)
enterprise = next(link for link in login['links'] if link['rel'] == 'enterprise')
except Exception as e:
enterprise = None
if cache_available(config):
inventory = get_cache('inventory', config)
else:
inventory = get_inventory(enterprise, config)
# return to ansible
sys.stdout.write(str(inventory))
sys.stdout.flush()
| gpl-3.0 |
evansd/django | tests/httpwrappers/tests.py | 29 | 30359 | import copy
import json
import os
import pickle
import unittest
import uuid
from django.core.exceptions import DisallowedRedirect
from django.core.serializers.json import DjangoJSONEncoder
from django.core.signals import request_finished
from django.db import close_old_connections
from django.http import (
BadHeaderError, HttpResponse, HttpResponseNotAllowed,
HttpResponseNotModified, HttpResponsePermanentRedirect,
HttpResponseRedirect, JsonResponse, QueryDict, SimpleCookie,
StreamingHttpResponse, parse_cookie,
)
from django.test import SimpleTestCase
from django.utils.functional import lazystr
class QueryDictTests(SimpleTestCase):
def test_create_with_no_args(self):
self.assertEqual(QueryDict(), QueryDict(''))
def test_missing_key(self):
q = QueryDict()
with self.assertRaises(KeyError):
q.__getitem__('foo')
def test_immutability(self):
q = QueryDict()
with self.assertRaises(AttributeError):
q.__setitem__('something', 'bar')
with self.assertRaises(AttributeError):
q.setlist('foo', ['bar'])
with self.assertRaises(AttributeError):
q.appendlist('foo', ['bar'])
with self.assertRaises(AttributeError):
q.update({'foo': 'bar'})
with self.assertRaises(AttributeError):
q.pop('foo')
with self.assertRaises(AttributeError):
q.popitem()
with self.assertRaises(AttributeError):
q.clear()
def test_immutable_get_with_default(self):
q = QueryDict()
self.assertEqual(q.get('foo', 'default'), 'default')
def test_immutable_basic_operations(self):
q = QueryDict()
self.assertEqual(q.getlist('foo'), [])
self.assertNotIn('foo', q)
self.assertEqual(list(q), [])
self.assertEqual(list(q.items()), [])
self.assertEqual(list(q.lists()), [])
self.assertEqual(list(q.keys()), [])
self.assertEqual(list(q.values()), [])
self.assertEqual(len(q), 0)
self.assertEqual(q.urlencode(), '')
def test_single_key_value(self):
"""Test QueryDict with one key/value pair"""
q = QueryDict('foo=bar')
self.assertEqual(q['foo'], 'bar')
with self.assertRaises(KeyError):
q.__getitem__('bar')
with self.assertRaises(AttributeError):
q.__setitem__('something', 'bar')
self.assertEqual(q.get('foo', 'default'), 'bar')
self.assertEqual(q.get('bar', 'default'), 'default')
self.assertEqual(q.getlist('foo'), ['bar'])
self.assertEqual(q.getlist('bar'), [])
with self.assertRaises(AttributeError):
q.setlist('foo', ['bar'])
with self.assertRaises(AttributeError):
q.appendlist('foo', ['bar'])
self.assertIn('foo', q)
self.assertNotIn('bar', q)
self.assertEqual(list(q), ['foo'])
self.assertEqual(list(q.items()), [('foo', 'bar')])
self.assertEqual(list(q.lists()), [('foo', ['bar'])])
self.assertEqual(list(q.keys()), ['foo'])
self.assertEqual(list(q.values()), ['bar'])
self.assertEqual(len(q), 1)
with self.assertRaises(AttributeError):
q.update({'foo': 'bar'})
with self.assertRaises(AttributeError):
q.pop('foo')
with self.assertRaises(AttributeError):
q.popitem()
with self.assertRaises(AttributeError):
q.clear()
with self.assertRaises(AttributeError):
q.setdefault('foo', 'bar')
self.assertEqual(q.urlencode(), 'foo=bar')
def test_urlencode(self):
q = QueryDict(mutable=True)
q['next'] = '/a&b/'
self.assertEqual(q.urlencode(), 'next=%2Fa%26b%2F')
self.assertEqual(q.urlencode(safe='/'), 'next=/a%26b/')
q = QueryDict(mutable=True)
q['next'] = '/t\xebst&key/'
self.assertEqual(q.urlencode(), 'next=%2Ft%C3%ABst%26key%2F')
self.assertEqual(q.urlencode(safe='/'), 'next=/t%C3%ABst%26key/')
def test_mutable_copy(self):
"""A copy of a QueryDict is mutable."""
q = QueryDict().copy()
with self.assertRaises(KeyError):
q.__getitem__("foo")
q['name'] = 'john'
self.assertEqual(q['name'], 'john')
def test_mutable_delete(self):
q = QueryDict(mutable=True)
q['name'] = 'john'
del q['name']
self.assertNotIn('name', q)
def test_basic_mutable_operations(self):
q = QueryDict(mutable=True)
q['name'] = 'john'
self.assertEqual(q.get('foo', 'default'), 'default')
self.assertEqual(q.get('name', 'default'), 'john')
self.assertEqual(q.getlist('name'), ['john'])
self.assertEqual(q.getlist('foo'), [])
q.setlist('foo', ['bar', 'baz'])
self.assertEqual(q.get('foo', 'default'), 'baz')
self.assertEqual(q.getlist('foo'), ['bar', 'baz'])
q.appendlist('foo', 'another')
self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another'])
self.assertEqual(q['foo'], 'another')
self.assertIn('foo', q)
self.assertCountEqual(q, ['foo', 'name'])
self.assertCountEqual(q.items(), [('foo', 'another'), ('name', 'john')])
self.assertCountEqual(q.lists(), [('foo', ['bar', 'baz', 'another']), ('name', ['john'])])
self.assertCountEqual(q.keys(), ['foo', 'name'])
self.assertCountEqual(q.values(), ['another', 'john'])
q.update({'foo': 'hello'})
self.assertEqual(q['foo'], 'hello')
self.assertEqual(q.get('foo', 'not available'), 'hello')
self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another', 'hello'])
self.assertEqual(q.pop('foo'), ['bar', 'baz', 'another', 'hello'])
self.assertEqual(q.pop('foo', 'not there'), 'not there')
self.assertEqual(q.get('foo', 'not there'), 'not there')
self.assertEqual(q.setdefault('foo', 'bar'), 'bar')
self.assertEqual(q['foo'], 'bar')
self.assertEqual(q.getlist('foo'), ['bar'])
self.assertIn(q.urlencode(), ['foo=bar&name=john', 'name=john&foo=bar'])
q.clear()
self.assertEqual(len(q), 0)
def test_multiple_keys(self):
"""Test QueryDict with two key/value pairs with same keys."""
q = QueryDict('vote=yes&vote=no')
self.assertEqual(q['vote'], 'no')
with self.assertRaises(AttributeError):
q.__setitem__('something', 'bar')
self.assertEqual(q.get('vote', 'default'), 'no')
self.assertEqual(q.get('foo', 'default'), 'default')
self.assertEqual(q.getlist('vote'), ['yes', 'no'])
self.assertEqual(q.getlist('foo'), [])
with self.assertRaises(AttributeError):
q.setlist('foo', ['bar', 'baz'])
with self.assertRaises(AttributeError):
q.setlist('foo', ['bar', 'baz'])
with self.assertRaises(AttributeError):
q.appendlist('foo', ['bar'])
self.assertIn('vote', q)
self.assertNotIn('foo', q)
self.assertEqual(list(q), ['vote'])
self.assertEqual(list(q.items()), [('vote', 'no')])
self.assertEqual(list(q.lists()), [('vote', ['yes', 'no'])])
self.assertEqual(list(q.keys()), ['vote'])
self.assertEqual(list(q.values()), ['no'])
self.assertEqual(len(q), 1)
with self.assertRaises(AttributeError):
q.update({'foo': 'bar'})
with self.assertRaises(AttributeError):
q.pop('foo')
with self.assertRaises(AttributeError):
q.popitem()
with self.assertRaises(AttributeError):
q.clear()
with self.assertRaises(AttributeError):
q.setdefault('foo', 'bar')
with self.assertRaises(AttributeError):
q.__delitem__('vote')
def test_pickle(self):
q = QueryDict()
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q, q1)
q = QueryDict('a=b&c=d')
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q, q1)
q = QueryDict('a=b&c=d&a=1')
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q, q1)
def test_update_from_querydict(self):
"""Regression test for #8278: QueryDict.update(QueryDict)"""
x = QueryDict("a=1&a=2", mutable=True)
y = QueryDict("a=3&a=4")
x.update(y)
self.assertEqual(x.getlist('a'), ['1', '2', '3', '4'])
def test_non_default_encoding(self):
"""#13572 - QueryDict with a non-default encoding"""
q = QueryDict('cur=%A4', encoding='iso-8859-15')
self.assertEqual(q.encoding, 'iso-8859-15')
self.assertEqual(list(q.items()), [('cur', '€')])
self.assertEqual(q.urlencode(), 'cur=%A4')
q = q.copy()
self.assertEqual(q.encoding, 'iso-8859-15')
self.assertEqual(list(q.items()), [('cur', '€')])
self.assertEqual(q.urlencode(), 'cur=%A4')
self.assertEqual(copy.copy(q).encoding, 'iso-8859-15')
self.assertEqual(copy.deepcopy(q).encoding, 'iso-8859-15')
def test_querydict_fromkeys(self):
self.assertEqual(QueryDict.fromkeys(['key1', 'key2', 'key3']), QueryDict('key1&key2&key3'))
def test_fromkeys_with_nonempty_value(self):
self.assertEqual(
QueryDict.fromkeys(['key1', 'key2', 'key3'], value='val'),
QueryDict('key1=val&key2=val&key3=val')
)
def test_fromkeys_is_immutable_by_default(self):
# Match behavior of __init__() which is also immutable by default.
q = QueryDict.fromkeys(['key1', 'key2', 'key3'])
with self.assertRaisesMessage(AttributeError, 'This QueryDict instance is immutable'):
q['key4'] = 'nope'
def test_fromkeys_mutable_override(self):
q = QueryDict.fromkeys(['key1', 'key2', 'key3'], mutable=True)
q['key4'] = 'yep'
self.assertEqual(q, QueryDict('key1&key2&key3&key4=yep'))
def test_duplicates_in_fromkeys_iterable(self):
self.assertEqual(QueryDict.fromkeys('xyzzy'), QueryDict('x&y&z&z&y'))
def test_fromkeys_with_nondefault_encoding(self):
key_utf16 = b'\xff\xfe\x8e\x02\xdd\x01\x9e\x02'
value_utf16 = b'\xff\xfe\xdd\x01n\x00l\x00P\x02\x8c\x02'
q = QueryDict.fromkeys([key_utf16], value=value_utf16, encoding='utf-16')
expected = QueryDict('', mutable=True)
expected['ʎǝʞ'] = 'ǝnlɐʌ'
self.assertEqual(q, expected)
def test_fromkeys_empty_iterable(self):
self.assertEqual(QueryDict.fromkeys([]), QueryDict(''))
def test_fromkeys_noniterable(self):
with self.assertRaises(TypeError):
QueryDict.fromkeys(0)
class HttpResponseTests(unittest.TestCase):
def test_headers_type(self):
r = HttpResponse()
# ASCII strings or bytes values are converted to strings.
r['key'] = 'test'
self.assertEqual(r['key'], 'test')
r['key'] = 'test'.encode('ascii')
self.assertEqual(r['key'], 'test')
self.assertIn(b'test', r.serialize_headers())
# Non-ASCII values are serialized to Latin-1.
r['key'] = 'café'
self.assertIn('café'.encode('latin-1'), r.serialize_headers())
# Other unicode values are MIME-encoded (there's no way to pass them as bytes).
r['key'] = '†'
self.assertEqual(r['key'], '=?utf-8?b?4oCg?=')
self.assertIn(b'=?utf-8?b?4oCg?=', r.serialize_headers())
        # The response also converts string or bytes keys to strings, but
        # requires them to contain only ASCII characters.
r = HttpResponse()
del r['Content-Type']
r['foo'] = 'bar'
headers = list(r.items())
self.assertEqual(len(headers), 1)
self.assertEqual(headers[0], ('foo', 'bar'))
r = HttpResponse()
del r['Content-Type']
r[b'foo'] = 'bar'
headers = list(r.items())
self.assertEqual(len(headers), 1)
self.assertEqual(headers[0], ('foo', 'bar'))
self.assertIsInstance(headers[0][0], str)
r = HttpResponse()
with self.assertRaises(UnicodeError):
r.__setitem__('føø', 'bar')
with self.assertRaises(UnicodeError):
r.__setitem__('føø'.encode(), 'bar')
def test_long_line(self):
# Bug #20889: long lines trigger newlines to be added to headers
# (which is not allowed due to bug #10188)
h = HttpResponse()
f = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz a\xcc\x88'.encode('latin-1')
f = f.decode('utf-8')
h['Content-Disposition'] = 'attachment; filename="%s"' % f
# This one is triggering http://bugs.python.org/issue20747, that is Python
# will itself insert a newline in the header
h['Content-Disposition'] = 'attachment; filename="EdelRot_Blu\u0308te (3)-0.JPG"'
def test_newlines_in_headers(self):
# Bug #10188: Do not allow newlines in headers (CR or LF)
r = HttpResponse()
with self.assertRaises(BadHeaderError):
r.__setitem__('test\rstr', 'test')
with self.assertRaises(BadHeaderError):
r.__setitem__('test\nstr', 'test')
def test_dict_behavior(self):
"""
Test for bug #14020: Make HttpResponse.get work like dict.get
"""
r = HttpResponse()
self.assertIsNone(r.get('test'))
def test_non_string_content(self):
# Bug 16494: HttpResponse should behave consistently with non-strings
r = HttpResponse(12345)
self.assertEqual(r.content, b'12345')
# test content via property
r = HttpResponse()
r.content = 12345
self.assertEqual(r.content, b'12345')
def test_iter_content(self):
r = HttpResponse(['abc', 'def', 'ghi'])
self.assertEqual(r.content, b'abcdefghi')
# test iter content via property
r = HttpResponse()
r.content = ['idan', 'alex', 'jacob']
self.assertEqual(r.content, b'idanalexjacob')
r = HttpResponse()
r.content = [1, 2, 3]
self.assertEqual(r.content, b'123')
# test odd inputs
r = HttpResponse()
r.content = ['1', '2', 3, '\u079e']
# '\xde\x9e' == unichr(1950).encode()
self.assertEqual(r.content, b'123\xde\x9e')
# .content can safely be accessed multiple times.
r = HttpResponse(iter(['hello', 'world']))
self.assertEqual(r.content, r.content)
self.assertEqual(r.content, b'helloworld')
# __iter__ can safely be called multiple times (#20187).
self.assertEqual(b''.join(r), b'helloworld')
self.assertEqual(b''.join(r), b'helloworld')
# Accessing .content still works.
self.assertEqual(r.content, b'helloworld')
# Accessing .content also works if the response was iterated first.
r = HttpResponse(iter(['hello', 'world']))
self.assertEqual(b''.join(r), b'helloworld')
self.assertEqual(r.content, b'helloworld')
# Additional content can be written to the response.
r = HttpResponse(iter(['hello', 'world']))
self.assertEqual(r.content, b'helloworld')
r.write('!')
self.assertEqual(r.content, b'helloworld!')
def test_iterator_isnt_rewound(self):
# Regression test for #13222
r = HttpResponse('abc')
i = iter(r)
self.assertEqual(list(i), [b'abc'])
self.assertEqual(list(i), [])
def test_lazy_content(self):
r = HttpResponse(lazystr('helloworld'))
self.assertEqual(r.content, b'helloworld')
def test_file_interface(self):
r = HttpResponse()
r.write(b"hello")
self.assertEqual(r.tell(), 5)
r.write("привет")
self.assertEqual(r.tell(), 17)
r = HttpResponse(['abc'])
r.write('def')
self.assertEqual(r.tell(), 6)
self.assertEqual(r.content, b'abcdef')
# with Content-Encoding header
r = HttpResponse()
r['Content-Encoding'] = 'winning'
r.write(b'abc')
r.write(b'def')
self.assertEqual(r.content, b'abcdef')
def test_stream_interface(self):
r = HttpResponse('asdf')
self.assertEqual(r.getvalue(), b'asdf')
r = HttpResponse()
self.assertIs(r.writable(), True)
r.writelines(['foo\n', 'bar\n', 'baz\n'])
self.assertEqual(r.content, b'foo\nbar\nbaz\n')
def test_unsafe_redirect(self):
bad_urls = [
'data:text/html,<script>window.alert("xss")</script>',
'mailto:test@example.com',
'file:///etc/passwd',
]
for url in bad_urls:
with self.assertRaises(DisallowedRedirect):
HttpResponseRedirect(url)
with self.assertRaises(DisallowedRedirect):
HttpResponsePermanentRedirect(url)
class HttpResponseSubclassesTests(SimpleTestCase):
def test_redirect(self):
response = HttpResponseRedirect('/redirected/')
self.assertEqual(response.status_code, 302)
# Standard HttpResponse init args can be used
response = HttpResponseRedirect(
'/redirected/',
content='The resource has temporarily moved',
content_type='text/html',
)
self.assertContains(response, 'The resource has temporarily moved', status_code=302)
self.assertEqual(response.url, response['Location'])
def test_redirect_lazy(self):
"""Make sure HttpResponseRedirect works with lazy strings."""
r = HttpResponseRedirect(lazystr('/redirected/'))
self.assertEqual(r.url, '/redirected/')
def test_redirect_repr(self):
response = HttpResponseRedirect('/redirected/')
expected = '<HttpResponseRedirect status_code=302, "text/html; charset=utf-8", url="/redirected/">'
self.assertEqual(repr(response), expected)
def test_invalid_redirect_repr(self):
"""
If HttpResponseRedirect raises DisallowedRedirect, its __repr__()
should work (in the debug view, for example).
"""
response = HttpResponseRedirect.__new__(HttpResponseRedirect)
with self.assertRaisesMessage(DisallowedRedirect, "Unsafe redirect to URL with protocol 'ssh'"):
HttpResponseRedirect.__init__(response, 'ssh://foo')
expected = '<HttpResponseRedirect status_code=302, "text/html; charset=utf-8", url="ssh://foo">'
self.assertEqual(repr(response), expected)
def test_not_modified(self):
response = HttpResponseNotModified()
self.assertEqual(response.status_code, 304)
# 304 responses should not have content/content-type
with self.assertRaises(AttributeError):
response.content = "Hello dear"
self.assertNotIn('content-type', response)
def test_not_modified_repr(self):
response = HttpResponseNotModified()
self.assertEqual(repr(response), '<HttpResponseNotModified status_code=304>')
def test_not_allowed(self):
response = HttpResponseNotAllowed(['GET'])
self.assertEqual(response.status_code, 405)
# Standard HttpResponse init args can be used
response = HttpResponseNotAllowed(['GET'], content='Only the GET method is allowed', content_type='text/html')
self.assertContains(response, 'Only the GET method is allowed', status_code=405)
def test_not_allowed_repr(self):
response = HttpResponseNotAllowed(['GET', 'OPTIONS'], content_type='text/plain')
expected = '<HttpResponseNotAllowed [GET, OPTIONS] status_code=405, "text/plain">'
self.assertEqual(repr(response), expected)
def test_not_allowed_repr_no_content_type(self):
response = HttpResponseNotAllowed(('GET', 'POST'))
del response['Content-Type']
self.assertEqual(repr(response), '<HttpResponseNotAllowed [GET, POST] status_code=405>')
class JsonResponseTests(SimpleTestCase):
def test_json_response_non_ascii(self):
data = {'key': 'łóżko'}
response = JsonResponse(data)
self.assertEqual(json.loads(response.content.decode()), data)
def test_json_response_raises_type_error_with_default_setting(self):
with self.assertRaisesMessage(
TypeError,
'In order to allow non-dict objects to be serialized set the '
'safe parameter to False'
):
JsonResponse([1, 2, 3])
def test_json_response_text(self):
response = JsonResponse('foobar', safe=False)
self.assertEqual(json.loads(response.content.decode()), 'foobar')
def test_json_response_list(self):
response = JsonResponse(['foo', 'bar'], safe=False)
self.assertEqual(json.loads(response.content.decode()), ['foo', 'bar'])
def test_json_response_uuid(self):
u = uuid.uuid4()
response = JsonResponse(u, safe=False)
self.assertEqual(json.loads(response.content.decode()), str(u))
def test_json_response_custom_encoder(self):
class CustomDjangoJSONEncoder(DjangoJSONEncoder):
def encode(self, o):
return json.dumps({'foo': 'bar'})
response = JsonResponse({}, encoder=CustomDjangoJSONEncoder)
self.assertEqual(json.loads(response.content.decode()), {'foo': 'bar'})
def test_json_response_passing_arguments_to_json_dumps(self):
response = JsonResponse({'foo': 'bar'}, json_dumps_params={'indent': 2})
self.assertEqual(response.content.decode(), '{\n "foo": "bar"\n}')
class StreamingHttpResponseTests(SimpleTestCase):
def test_streaming_response(self):
r = StreamingHttpResponse(iter(['hello', 'world']))
# iterating over the response itself yields bytestring chunks.
chunks = list(r)
self.assertEqual(chunks, [b'hello', b'world'])
for chunk in chunks:
self.assertIsInstance(chunk, bytes)
# and the response can only be iterated once.
self.assertEqual(list(r), [])
# even when a sequence that can be iterated many times, like a list,
# is given as content.
r = StreamingHttpResponse(['abc', 'def'])
self.assertEqual(list(r), [b'abc', b'def'])
self.assertEqual(list(r), [])
# iterating over strings still yields bytestring chunks.
r.streaming_content = iter(['hello', 'café'])
chunks = list(r)
# '\xc3\xa9' == unichr(233).encode()
self.assertEqual(chunks, [b'hello', b'caf\xc3\xa9'])
for chunk in chunks:
self.assertIsInstance(chunk, bytes)
# streaming responses don't have a `content` attribute.
self.assertFalse(hasattr(r, 'content'))
# and you can't accidentally assign to a `content` attribute.
with self.assertRaises(AttributeError):
r.content = 'xyz'
# but they do have a `streaming_content` attribute.
self.assertTrue(hasattr(r, 'streaming_content'))
# that exists so we can check if a response is streaming, and wrap or
# replace the content iterator.
r.streaming_content = iter(['abc', 'def'])
r.streaming_content = (chunk.upper() for chunk in r.streaming_content)
self.assertEqual(list(r), [b'ABC', b'DEF'])
        # coercing a streaming response to bytes doesn't return a complete HTTP
        # message like a regular response does; it only gives us the headers.
r = StreamingHttpResponse(iter(['hello', 'world']))
self.assertEqual(bytes(r), b'Content-Type: text/html; charset=utf-8')
# and this won't consume its content.
self.assertEqual(list(r), [b'hello', b'world'])
# additional content cannot be written to the response.
r = StreamingHttpResponse(iter(['hello', 'world']))
with self.assertRaises(Exception):
r.write('!')
# and we can't tell the current position.
with self.assertRaises(Exception):
r.tell()
r = StreamingHttpResponse(iter(['hello', 'world']))
self.assertEqual(r.getvalue(), b'helloworld')
class FileCloseTests(SimpleTestCase):
def setUp(self):
# Disable the request_finished signal during this test
# to avoid interfering with the database connection.
request_finished.disconnect(close_old_connections)
def tearDown(self):
request_finished.connect(close_old_connections)
def test_response(self):
filename = os.path.join(os.path.dirname(__file__), 'abc.txt')
        # the file is read and closed as soon as it's assigned as content.
file1 = open(filename)
r = HttpResponse(file1)
self.assertTrue(file1.closed)
r.close()
        # when multiple files are assigned as content, make sure they are all
        # closed with the response.
file1 = open(filename)
file2 = open(filename)
r = HttpResponse(file1)
r.content = file2
self.assertTrue(file1.closed)
self.assertTrue(file2.closed)
def test_streaming_response(self):
filename = os.path.join(os.path.dirname(__file__), 'abc.txt')
# file isn't closed until we close the response.
file1 = open(filename)
r = StreamingHttpResponse(file1)
self.assertFalse(file1.closed)
r.close()
self.assertTrue(file1.closed)
        # when multiple files are assigned as content, make sure they are all
        # closed with the response.
file1 = open(filename)
file2 = open(filename)
r = StreamingHttpResponse(file1)
r.streaming_content = file2
self.assertFalse(file1.closed)
self.assertFalse(file2.closed)
r.close()
self.assertTrue(file1.closed)
self.assertTrue(file2.closed)
class CookieTests(unittest.TestCase):
def test_encode(self):
"""Semicolons and commas are encoded."""
c = SimpleCookie()
c['test'] = "An,awkward;value"
self.assertNotIn(";", c.output().rstrip(';')) # IE compat
self.assertNotIn(",", c.output().rstrip(';')) # Safari compat
def test_decode(self):
"""Semicolons and commas are decoded."""
c = SimpleCookie()
c['test'] = "An,awkward;value"
c2 = SimpleCookie()
c2.load(c.output()[12:])
self.assertEqual(c['test'].value, c2['test'].value)
c3 = parse_cookie(c.output()[12:])
self.assertEqual(c['test'].value, c3['test'])
def test_nonstandard_keys(self):
"""
A single non-standard cookie name doesn't affect all cookies (#13007).
"""
self.assertIn('good_cookie', parse_cookie('good_cookie=yes;bad:cookie=yes'))
def test_repeated_nonstandard_keys(self):
"""
A repeated non-standard name doesn't affect all cookies (#15852).
"""
self.assertIn('good_cookie', parse_cookie('a:=b; a:=c; good_cookie=yes'))
def test_python_cookies(self):
"""
Test cases copied from Python's Lib/test/test_http_cookies.py
"""
self.assertEqual(parse_cookie('chips=ahoy; vienna=finger'), {'chips': 'ahoy', 'vienna': 'finger'})
# Here parse_cookie() differs from Python's cookie parsing in that it
# treats all semicolons as delimiters, even within quotes.
self.assertEqual(
parse_cookie('keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"'),
{'keebler': '"E=mc2', 'L': '\\"Loves\\"', 'fudge': '\\012', '': '"'}
)
# Illegal cookies that have an '=' char in an unquoted value.
self.assertEqual(parse_cookie('keebler=E=mc2'), {'keebler': 'E=mc2'})
# Cookies with ':' character in their name.
self.assertEqual(parse_cookie('key:term=value:term'), {'key:term': 'value:term'})
# Cookies with '[' and ']'.
self.assertEqual(parse_cookie('a=b; c=[; d=r; f=h'), {'a': 'b', 'c': '[', 'd': 'r', 'f': 'h'})
def test_cookie_edgecases(self):
# Cookies that RFC6265 allows.
self.assertEqual(parse_cookie('a=b; Domain=example.com'), {'a': 'b', 'Domain': 'example.com'})
# parse_cookie() has historically kept only the last cookie with the
# same name.
self.assertEqual(parse_cookie('a=b; h=i; a=c'), {'a': 'c', 'h': 'i'})
def test_invalid_cookies(self):
"""
Cookie strings that go against RFC6265 but browsers will send if set
via document.cookie.
"""
# Chunks without an equals sign appear as unnamed values per
# https://bugzilla.mozilla.org/show_bug.cgi?id=169091
self.assertIn('django_language', parse_cookie('abc=def; unnamed; django_language=en'))
        # Even a double quote may be an unnamed value.
self.assertEqual(parse_cookie('a=b; "; c=d'), {'a': 'b', '': '"', 'c': 'd'})
# Spaces in names and values, and an equals sign in values.
self.assertEqual(parse_cookie('a b c=d e = f; gh=i'), {'a b c': 'd e = f', 'gh': 'i'})
# More characters the spec forbids.
self.assertEqual(parse_cookie('a b,c<>@:/[]?{}=d " =e,f g'), {'a b,c<>@:/[]?{}': 'd " =e,f g'})
# Unicode characters. The spec only allows ASCII.
self.assertEqual(parse_cookie('saint=André Bessette'), {'saint': 'André Bessette'})
# Browsers don't send extra whitespace or semicolons in Cookie headers,
# but parse_cookie() should parse whitespace the same way
# document.cookie parses whitespace.
self.assertEqual(parse_cookie(' = b ; ; = ; c = ; '), {'': 'b', 'c': ''})
def test_httponly_after_load(self):
c = SimpleCookie()
c.load("name=val")
c['name']['httponly'] = True
self.assertTrue(c['name']['httponly'])
def test_load_dict(self):
c = SimpleCookie()
c.load({'name': 'val'})
self.assertEqual(c['name'].value, 'val')
def test_pickle(self):
rawdata = 'Customer="WILE_E_COYOTE"; Path=/acme; Version=1'
expected_output = 'Set-Cookie: %s' % rawdata
C = SimpleCookie()
C.load(rawdata)
self.assertEqual(C.output(), expected_output)
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
C1 = pickle.loads(pickle.dumps(C, protocol=proto))
self.assertEqual(C1.output(), expected_output)
| bsd-3-clause |
Lujeni/ansible | lib/ansible/modules/storage/netapp/na_elementsw_snapshot.py | 52 | 13120 | #!/usr/bin/python
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or
# https://www.gnu.org/licenses/gpl-3.0.txt)
'''
Element OS Software Snapshot Manager
'''
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
module: na_elementsw_snapshot
short_description: NetApp Element Software Manage Snapshots
extends_documentation_fragment:
- netapp.solidfire
version_added: '2.7'
author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com>
description:
- Create, Modify or Delete Snapshot on Element OS Cluster.
options:
name:
description:
        - Name of the new snapshot to create.
        - If unspecified, the date and time when the snapshot was taken is used.
state:
description:
- Whether the specified snapshot should exist or not.
choices: ['present', 'absent']
default: 'present'
src_volume_id:
description:
- ID or Name of active volume.
required: true
account_id:
description:
- Account ID or Name of Parent/Source Volume.
required: true
retention:
description:
- Retention period for the snapshot.
- Format is 'HH:mm:ss'.
src_snapshot_id:
description:
- ID or Name of an existing snapshot.
- Required when C(state=present), to modify snapshot properties.
- Required when C(state=present), to create snapshot from another snapshot in the volume.
- Required when C(state=absent), to delete snapshot.
enable_remote_replication:
description:
- Flag, whether to replicate the snapshot created to a remote replication cluster.
        - To enable it, specify 'true'.
type: bool
snap_mirror_label:
description:
- Label used by SnapMirror software to specify snapshot retention policy on SnapMirror endpoint.
expiration_time:
description:
- The date and time (format ISO 8601 date string) at which this snapshot will expire.
password:
description:
- Element OS access account password
aliases:
- pass
username:
description:
- Element OS access account user-name
aliases:
- user
'''
EXAMPLES = """
- name: Create snapshot
tags:
- elementsw_create_snapshot
na_elementsw_snapshot:
hostname: "{{ elementsw_hostname }}"
username: "{{ elementsw_username }}"
password: "{{ elementsw_password }}"
state: present
src_volume_id: 118
account_id: sagarsh
name: newsnapshot-1
- name: Modify Snapshot
tags:
- elementsw_modify_snapshot
na_elementsw_snapshot:
hostname: "{{ elementsw_hostname }}"
username: "{{ elementsw_username }}"
password: "{{ elementsw_password }}"
state: present
src_volume_id: sagarshansivolume
src_snapshot_id: test1
account_id: sagarsh
expiration_time: '2018-06-16T12:24:56Z'
enable_remote_replication: false
- name: Delete Snapshot
tags:
- elementsw_delete_snapshot
na_elementsw_snapshot:
hostname: "{{ elementsw_hostname }}"
username: "{{ elementsw_username }}"
password: "{{ elementsw_password }}"
state: absent
src_snapshot_id: deltest1
account_id: sagarsh
src_volume_id: sagarshansivolume
"""
RETURN = """
msg:
description: Success message
returned: success
type: str
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
from ansible.module_utils.netapp_elementsw_module import NaElementSWModule
HAS_SF_SDK = netapp_utils.has_sf_sdk()
class ElementOSSnapshot(object):
"""
Element OS Snapshot Manager
"""
def __init__(self):
self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
self.argument_spec.update(dict(
state=dict(required=False, choices=['present', 'absent'], default='present'),
account_id=dict(required=True, type='str'),
name=dict(required=False, type='str'),
src_volume_id=dict(required=True, type='str'),
retention=dict(required=False, type='str'),
src_snapshot_id=dict(required=False, type='str'),
enable_remote_replication=dict(required=False, type='bool'),
expiration_time=dict(required=False, type='str'),
snap_mirror_label=dict(required=False, type='str')
))
self.module = AnsibleModule(
argument_spec=self.argument_spec,
supports_check_mode=True
)
input_params = self.module.params
self.state = input_params['state']
self.name = input_params['name']
self.account_id = input_params['account_id']
self.src_volume_id = input_params['src_volume_id']
self.src_snapshot_id = input_params['src_snapshot_id']
self.retention = input_params['retention']
self.properties_provided = False
self.expiration_time = input_params['expiration_time']
if input_params['expiration_time'] is not None:
self.properties_provided = True
self.enable_remote_replication = input_params['enable_remote_replication']
if input_params['enable_remote_replication'] is not None:
self.properties_provided = True
self.snap_mirror_label = input_params['snap_mirror_label']
if input_params['snap_mirror_label'] is not None:
self.properties_provided = True
if self.state == 'absent' and self.src_snapshot_id is None:
self.module.fail_json(
msg="Please provide required parameter : snapshot_id")
if HAS_SF_SDK is False:
self.module.fail_json(
msg="Unable to import the SolidFire Python SDK")
else:
self.sfe = netapp_utils.create_sf_connection(module=self.module)
self.elementsw_helper = NaElementSWModule(self.sfe)
# add telemetry attributes
self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_snapshot')
def get_account_id(self):
"""
Return account id if found
"""
try:
# Update and return self.account_id
self.account_id = self.elementsw_helper.account_exists(self.account_id)
return self.account_id
except Exception as err:
self.module.fail_json(msg="Error: account_id %s does not exist" % self.account_id, exception=to_native(err))
def get_src_volume_id(self):
"""
Return volume id if found
"""
src_vol_id = self.elementsw_helper.volume_exists(self.src_volume_id, self.account_id)
if src_vol_id is not None:
# Update and return self.volume_id
self.src_volume_id = src_vol_id
# Return src_volume_id
return self.src_volume_id
return None
def get_snapshot(self, name=None):
"""
Return snapshot details if found
"""
src_snapshot = None
if name is not None:
src_snapshot = self.elementsw_helper.get_snapshot(name, self.src_volume_id)
elif self.src_snapshot_id is not None:
src_snapshot = self.elementsw_helper.get_snapshot(self.src_snapshot_id, self.src_volume_id)
if src_snapshot is not None:
# Update self.src_snapshot_id
self.src_snapshot_id = src_snapshot.snapshot_id
# Return src_snapshot
return src_snapshot
def create_snapshot(self):
"""
Create Snapshot
"""
try:
self.sfe.create_snapshot(volume_id=self.src_volume_id,
snapshot_id=self.src_snapshot_id,
name=self.name,
enable_remote_replication=self.enable_remote_replication,
retention=self.retention,
snap_mirror_label=self.snap_mirror_label,
attributes=self.attributes)
except Exception as exception_object:
self.module.fail_json(
msg='Error creating snapshot %s' % (
to_native(exception_object)),
exception=traceback.format_exc())
def modify_snapshot(self):
"""
Modify Snapshot Properties
"""
try:
self.sfe.modify_snapshot(snapshot_id=self.src_snapshot_id,
expiration_time=self.expiration_time,
enable_remote_replication=self.enable_remote_replication,
snap_mirror_label=self.snap_mirror_label)
except Exception as exception_object:
self.module.fail_json(
                msg='Error modifying snapshot %s' % (
to_native(exception_object)),
exception=traceback.format_exc())
def delete_snapshot(self):
"""
Delete Snapshot
"""
try:
self.sfe.delete_snapshot(snapshot_id=self.src_snapshot_id)
except Exception as exception_object:
self.module.fail_json(
                msg='Error deleting snapshot %s' % (
to_native(exception_object)),
exception=traceback.format_exc())
def apply(self):
"""
Check, process and initiate snapshot operation
"""
changed = False
snapshot_delete = False
snapshot_create = False
snapshot_modify = False
result_message = None
self.get_account_id()
        # Don't proceed if the source volume is not found
if self.get_src_volume_id() is None:
self.module.fail_json(msg="Volume id not found %s" % self.src_volume_id)
# Get snapshot details using source volume
snapshot_detail = self.get_snapshot()
if snapshot_detail:
if self.properties_provided:
if self.expiration_time != snapshot_detail.expiration_time:
changed = True
else: # To preserve value in case parameter expiration_time is not defined/provided.
self.expiration_time = snapshot_detail.expiration_time
if self.enable_remote_replication != snapshot_detail.enable_remote_replication:
changed = True
                else:  # To preserve value in case parameter enable_remote_replication is not defined/provided.
self.enable_remote_replication = snapshot_detail.enable_remote_replication
if self.snap_mirror_label != snapshot_detail.snap_mirror_label:
changed = True
else: # To preserve value in case parameter snap_mirror_label is not defined/provided.
self.snap_mirror_label = snapshot_detail.snap_mirror_label
if self.account_id is None or self.src_volume_id is None or self.module.check_mode:
changed = False
result_message = "Check mode, skipping changes"
elif self.state == 'absent' and snapshot_detail is not None:
self.delete_snapshot()
changed = True
elif self.state == 'present' and snapshot_detail is not None:
if changed:
self.modify_snapshot() # Modify Snapshot properties
elif not self.properties_provided:
if self.name is not None:
snapshot = self.get_snapshot(self.name)
# If snapshot with name already exists return without performing any action
if snapshot is None:
self.create_snapshot() # Create Snapshot using parent src_snapshot_id
changed = True
else:
self.create_snapshot()
changed = True
elif self.state == 'present':
if self.name is not None:
snapshot = self.get_snapshot(self.name)
# If snapshot with name already exists return without performing any action
if snapshot is None:
self.create_snapshot() # Create Snapshot using parent src_snapshot_id
changed = True
else:
self.create_snapshot()
changed = True
else:
changed = False
result_message = "No changes requested, skipping changes"
self.module.exit_json(changed=changed, msg=result_message)
def main():
"""
Main function
"""
na_elementsw_snapshot = ElementOSSnapshot()
na_elementsw_snapshot.apply()
if __name__ == '__main__':
main()
| gpl-3.0 |
eptmp3/Sick-Beard | lib/hachoir_parser/image/wmf.py | 90 | 23796 | """
Hachoir parser of Microsoft Windows Metafile (WMF) file format.
Documentation:
- Microsoft Windows Metafile; also known as: WMF,
Enhanced Metafile, EMF, APM
http://wvware.sourceforge.net/caolan/ora-wmf.html
- libwmf source code:
- include/libwmf/defs.h: enums
- src/player/meta.h: arguments parsers
- libemf source code
Author: Victor Stinner
Creation date: 26 december 2006
"""
MAX_FILESIZE = 50 * 1024 * 1024
from lib.hachoir_parser import Parser
from lib.hachoir_core.field import (FieldSet, StaticFieldSet, Enum,
MissingField, ParserError,
UInt32, Int32, UInt16, Int16, UInt8, NullBytes, RawBytes, String)
from lib.hachoir_core.endian import LITTLE_ENDIAN
from lib.hachoir_core.text_handler import textHandler, hexadecimal
from lib.hachoir_core.tools import createDict
from lib.hachoir_parser.image.common import RGBA
POLYFILL_MODE = {1: "Alternate", 2: "Winding"}
BRUSH_STYLE = {
0: u"Solid",
1: u"Null",
2: u"Hollow",
3: u"Pattern",
4: u"Indexed",
5: u"DIB pattern",
6: u"DIB pattern point",
7: u"Pattern 8x8",
8: u"DIB pattern 8x8",
}
HATCH_STYLE = {
0: u"Horizontal", # -----
1: u"Vertical", # |||||
2: u"FDIAGONAL", # \\\\\
3: u"BDIAGONAL", # /////
4: u"Cross", # +++++
5: u"Diagonal cross", # xxxxx
}
PEN_STYLE = {
0: u"Solid",
1: u"Dash", # -------
2: u"Dot", # .......
3: u"Dash dot", # _._._._
4: u"Dash dot dot", # _.._.._
5: u"Null",
6: u"Inside frame",
7: u"User style",
8: u"Alternate",
}
# Binary raster operations
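# The mnemonics use GDI's reverse-Polish notation: D = destination pixel,
# P = selected pen, a = AND, o = OR, x = XOR, n = NOT (applied postfix),
# so e.g. "DPon" means NOT(destination OR pen).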
ROP2_DESC = {
1: u"Black (0)",
2: u"Not merge pen (DPon)",
3: u"Mask not pen (DPna)",
4: u"Not copy pen (PN)",
5: u"Mask pen not (PDna)",
6: u"Not (Dn)",
7: u"Xor pen (DPx)",
8: u"Not mask pen (DPan)",
9: u"Mask pen (DPa)",
10: u"Not xor pen (DPxn)",
11: u"No operation (D)",
12: u"Merge not pen (DPno)",
13: u"Copy pen (P)",
14: u"Merge pen not (PDno)",
15: u"Merge pen (DPo)",
16: u"White (1)",
}
def parseXY(parser):
yield Int16(parser, "x")
yield Int16(parser, "y")
def parseCreateBrushIndirect(parser):
yield Enum(UInt16(parser, "brush_style"), BRUSH_STYLE)
yield RGBA(parser, "color")
yield Enum(UInt16(parser, "brush_hatch"), HATCH_STYLE)
def parsePenIndirect(parser):
yield Enum(UInt16(parser, "pen_style"), PEN_STYLE)
yield UInt16(parser, "pen_width")
yield UInt16(parser, "pen_height")
yield RGBA(parser, "color")
def parsePolyFillMode(parser):
yield Enum(UInt16(parser, "operation"), POLYFILL_MODE)
def parseROP2(parser):
yield Enum(UInt16(parser, "operation"), ROP2_DESC)
def parseObjectID(parser):
yield UInt16(parser, "object_id")
class Point(FieldSet):
static_size = 32
def createFields(self):
yield Int16(self, "x")
yield Int16(self, "y")
def createDescription(self):
return "Point (%s, %s)" % (self["x"].value, self["y"].value)
def parsePolygon(parser):
yield UInt16(parser, "count")
for index in xrange(parser["count"].value):
yield Point(parser, "point[]")
META = {
0x0000: ("EOF", u"End of file", None),
0x001E: ("SAVEDC", u"Save device context", None),
0x0035: ("REALIZEPALETTE", u"Realize palette", None),
0x0037: ("SETPALENTRIES", u"Set palette entries", None),
0x00f7: ("CREATEPALETTE", u"Create palette", None),
0x0102: ("SETBKMODE", u"Set background mode", None),
0x0103: ("SETMAPMODE", u"Set mapping mode", None),
0x0104: ("SETROP2", u"Set foreground mix mode", parseROP2),
0x0106: ("SETPOLYFILLMODE", u"Set polygon fill mode", parsePolyFillMode),
0x0107: ("SETSTRETCHBLTMODE", u"Set bitmap streching mode", None),
0x0108: ("SETTEXTCHAREXTRA", u"Set text character extra", None),
0x0127: ("RESTOREDC", u"Restore device context", None),
0x012A: ("INVERTREGION", u"Invert region", None),
0x012B: ("PAINTREGION", u"Paint region", None),
0x012C: ("SELECTCLIPREGION", u"Select clipping region", None),
0x012D: ("SELECTOBJECT", u"Select object", parseObjectID),
0x012E: ("SETTEXTALIGN", u"Set text alignment", None),
0x0142: ("CREATEDIBPATTERNBRUSH", u"Create DIB brush with specified pattern", None),
0x01f0: ("DELETEOBJECT", u"Delete object", parseObjectID),
0x0201: ("SETBKCOLOR", u"Set background color", None),
0x0209: ("SETTEXTCOLOR", u"Set text color", None),
0x020A: ("SETTEXTJUSTIFICATION", u"Set text justification", None),
0x020B: ("SETWINDOWORG", u"Set window origin", parseXY),
0x020C: ("SETWINDOWEXT", u"Set window extends", parseXY),
0x020D: ("SETVIEWPORTORG", u"Set view port origin", None),
0x020E: ("SETVIEWPORTEXT", u"Set view port extends", None),
0x020F: ("OFFSETWINDOWORG", u"Offset window origin", None),
0x0211: ("OFFSETVIEWPORTORG", u"Offset view port origin", None),
0x0213: ("LINETO", u"Draw a line to", None),
0x0214: ("MOVETO", u"Move to", None),
0x0220: ("OFFSETCLIPRGN", u"Offset clipping rectangle", None),
0x0228: ("FILLREGION", u"Fill region", None),
0x0231: ("SETMAPPERFLAGS", u"Set mapper flags", None),
0x0234: ("SELECTPALETTE", u"Select palette", None),
0x02FB: ("CREATEFONTINDIRECT", u"Create font indirect", None),
0x02FA: ("CREATEPENINDIRECT", u"Create pen indirect", parsePenIndirect),
0x02FC: ("CREATEBRUSHINDIRECT", u"Create brush indirect", parseCreateBrushIndirect),
0x0324: ("POLYGON", u"Draw a polygon", parsePolygon),
0x0325: ("POLYLINE", u"Draw a polyline", None),
0x0410: ("SCALEWINDOWEXT", u"Scale window extends", None),
0x0412: ("SCALEVIEWPORTEXT", u"Scale view port extends", None),
0x0415: ("EXCLUDECLIPRECT", u"Exclude clipping rectangle", None),
0x0416: ("INTERSECTCLIPRECT", u"Intersect clipping rectangle", None),
0x0418: ("ELLIPSE", u"Draw an ellipse", None),
0x0419: ("FLOODFILL", u"Flood fill", None),
0x041B: ("RECTANGLE", u"Draw a rectangle", None),
0x041F: ("SETPIXEL", u"Set pixel", None),
0x0429: ("FRAMEREGION", u"Fram region", None),
0x0521: ("TEXTOUT", u"Draw text", None),
0x0538: ("POLYPOLYGON", u"Draw multiple polygons", None),
0x0548: ("EXTFLOODFILL", u"Extend flood fill", None),
0x061C: ("ROUNDRECT", u"Draw a rounded rectangle", None),
0x061D: ("PATBLT", u"Pattern blitting", None),
0x0626: ("ESCAPE", u"Escape", None),
0x06FF: ("CREATEREGION", u"Create region", None),
0x0817: ("ARC", u"Draw an arc", None),
0x081A: ("PIE", u"Draw a pie", None),
0x0830: ("CHORD", u"Draw a chord", None),
0x0940: ("DIBBITBLT", u"DIB bit blitting", None),
0x0a32: ("EXTTEXTOUT", u"Draw text (extra)", None),
0x0b41: ("DIBSTRETCHBLT", u"DIB stretch blitting", None),
0x0d33: ("SETDIBTODEV", u"Set DIB to device", None),
0x0f43: ("STRETCHDIB", u"Stretch DIB", None),
}
META_NAME = createDict(META, 0)
META_DESC = createDict(META, 1)
#----------------------------------------------------------------------------
# EMF constants
# EMF mapping modes
EMF_MAPPING_MODE = {
1: "TEXT",
2: "LOMETRIC",
3: "HIMETRIC",
4: "LOENGLISH",
5: "HIENGLISH",
6: "TWIPS",
7: "ISOTROPIC",
8: "ANISOTROPIC",
}
#----------------------------------------------------------------------------
# EMF parser
def parseEmfMappingMode(parser):
yield Enum(Int32(parser, "mapping_mode"), EMF_MAPPING_MODE)
def parseXY32(parser):
yield Int32(parser, "x")
yield Int32(parser, "y")
def parseObjectID32(parser):
yield textHandler(UInt32(parser, "object_id"), hexadecimal)
def parseBrushIndirect(parser):
yield UInt32(parser, "ihBrush")
yield UInt32(parser, "style")
yield RGBA(parser, "color")
yield Int32(parser, "hatch")
class Point16(FieldSet):
static_size = 32
def createFields(self):
yield Int16(self, "x")
yield Int16(self, "y")
def createDescription(self):
return "Point16: (%i,%i)" % (self["x"].value, self["y"].value)
def parsePoint16array(parser):
yield RECT32(parser, "bounds")
yield UInt32(parser, "count")
for index in xrange(parser["count"].value):
yield Point16(parser, "point[]")
def parseGDIComment(parser):
yield UInt32(parser, "data_size")
size = parser["data_size"].value
if size:
yield RawBytes(parser, "data", size)
def parseICMMode(parser):
yield UInt32(parser, "icm_mode")
def parseExtCreatePen(parser):
yield UInt32(parser, "ihPen")
yield UInt32(parser, "offBmi")
yield UInt32(parser, "cbBmi")
yield UInt32(parser, "offBits")
yield UInt32(parser, "cbBits")
yield UInt32(parser, "pen_style")
yield UInt32(parser, "width")
yield UInt32(parser, "brush_style")
yield RGBA(parser, "color")
yield UInt32(parser, "hatch")
yield UInt32(parser, "nb_style")
for index in xrange(parser["nb_style"].value):
yield UInt32(parser, "style")
EMF_META = {
1: ("HEADER", u"Header", None),
2: ("POLYBEZIER", u"Draw poly bezier", None),
3: ("POLYGON", u"Draw polygon", None),
4: ("POLYLINE", u"Draw polyline", None),
5: ("POLYBEZIERTO", u"Draw poly bezier to", None),
6: ("POLYLINETO", u"Draw poly line to", None),
7: ("POLYPOLYLINE", u"Draw poly polyline", None),
8: ("POLYPOLYGON", u"Draw poly polygon", None),
9: ("SETWINDOWEXTEX", u"Set window extend EX", parseXY32),
10: ("SETWINDOWORGEX", u"Set window origin EX", parseXY32),
11: ("SETVIEWPORTEXTEX", u"Set viewport extend EX", parseXY32),
12: ("SETVIEWPORTORGEX", u"Set viewport origin EX", parseXY32),
13: ("SETBRUSHORGEX", u"Set brush org EX", None),
14: ("EOF", u"End of file", None),
15: ("SETPIXELV", u"Set pixel V", None),
16: ("SETMAPPERFLAGS", u"Set mapper flags", None),
17: ("SETMAPMODE", u"Set mapping mode", parseEmfMappingMode),
18: ("SETBKMODE", u"Set background mode", None),
19: ("SETPOLYFILLMODE", u"Set polyfill mode", None),
20: ("SETROP2", u"Set ROP2", None),
21: ("SETSTRETCHBLTMODE", u"Set stretching blitting mode", None),
22: ("SETTEXTALIGN", u"Set text align", None),
23: ("SETCOLORADJUSTMENT", u"Set color adjustment", None),
24: ("SETTEXTCOLOR", u"Set text color", None),
25: ("SETBKCOLOR", u"Set background color", None),
26: ("OFFSETCLIPRGN", u"Offset clipping region", None),
27: ("MOVETOEX", u"Move to EX", parseXY32),
28: ("SETMETARGN", u"Set meta region", None),
29: ("EXCLUDECLIPRECT", u"Exclude clipping rectangle", None),
30: ("INTERSECTCLIPRECT", u"Intersect clipping rectangle", None),
31: ("SCALEVIEWPORTEXTEX", u"Scale viewport extend EX", None),
32: ("SCALEWINDOWEXTEX", u"Scale window extend EX", None),
33: ("SAVEDC", u"Save device context", None),
34: ("RESTOREDC", u"Restore device context", None),
35: ("SETWORLDTRANSFORM", u"Set world transform", None),
36: ("MODIFYWORLDTRANSFORM", u"Modify world transform", None),
37: ("SELECTOBJECT", u"Select object", parseObjectID32),
38: ("CREATEPEN", u"Create pen", None),
39: ("CREATEBRUSHINDIRECT", u"Create brush indirect", parseBrushIndirect),
40: ("DELETEOBJECT", u"Delete object", parseObjectID32),
41: ("ANGLEARC", u"Draw angle arc", None),
42: ("ELLIPSE", u"Draw ellipse", None),
43: ("RECTANGLE", u"Draw rectangle", None),
44: ("ROUNDRECT", u"Draw rounded rectangle", None),
45: ("ARC", u"Draw arc", None),
46: ("CHORD", u"Draw chord", None),
47: ("PIE", u"Draw pie", None),
48: ("SELECTPALETTE", u"Select palette", None),
49: ("CREATEPALETTE", u"Create palette", None),
50: ("SETPALETTEENTRIES", u"Set palette entries", None),
51: ("RESIZEPALETTE", u"Resize palette", None),
52: ("REALIZEPALETTE", u"Realize palette", None),
53: ("EXTFLOODFILL", u"EXT flood fill", None),
54: ("LINETO", u"Draw line to", parseXY32),
55: ("ARCTO", u"Draw arc to", None),
56: ("POLYDRAW", u"Draw poly draw", None),
57: ("SETARCDIRECTION", u"Set arc direction", None),
58: ("SETMITERLIMIT", u"Set miter limit", None),
59: ("BEGINPATH", u"Begin path", None),
60: ("ENDPATH", u"End path", None),
61: ("CLOSEFIGURE", u"Close figure", None),
62: ("FILLPATH", u"Fill path", None),
63: ("STROKEANDFILLPATH", u"Stroke and fill path", None),
64: ("STROKEPATH", u"Stroke path", None),
65: ("FLATTENPATH", u"Flatten path", None),
66: ("WIDENPATH", u"Widen path", None),
67: ("SELECTCLIPPATH", u"Select clipping path", None),
68: ("ABORTPATH", u"Arbort path", None),
70: ("GDICOMMENT", u"GDI comment", parseGDIComment),
71: ("FILLRGN", u"Fill region", None),
72: ("FRAMERGN", u"Frame region", None),
73: ("INVERTRGN", u"Invert region", None),
74: ("PAINTRGN", u"Paint region", None),
75: ("EXTSELECTCLIPRGN", u"EXT select clipping region", None),
76: ("BITBLT", u"Bit blitting", None),
77: ("STRETCHBLT", u"Stretch blitting", None),
78: ("MASKBLT", u"Mask blitting", None),
79: ("PLGBLT", u"PLG blitting", None),
80: ("SETDIBITSTODEVICE", u"Set DIB bits to device", None),
81: ("STRETCHDIBITS", u"Stretch DIB bits", None),
82: ("EXTCREATEFONTINDIRECTW", u"EXT create font indirect W", None),
83: ("EXTTEXTOUTA", u"EXT text out A", None),
84: ("EXTTEXTOUTW", u"EXT text out W", None),
85: ("POLYBEZIER16", u"Draw poly bezier (16-bit)", None),
86: ("POLYGON16", u"Draw polygon (16-bit)", parsePoint16array),
87: ("POLYLINE16", u"Draw polyline (16-bit)", parsePoint16array),
88: ("POLYBEZIERTO16", u"Draw poly bezier to (16-bit)", parsePoint16array),
89: ("POLYLINETO16", u"Draw polyline to (16-bit)", parsePoint16array),
90: ("POLYPOLYLINE16", u"Draw poly polyline (16-bit)", None),
91: ("POLYPOLYGON16", u"Draw poly polygon (16-bit)", parsePoint16array),
92: ("POLYDRAW16", u"Draw poly draw (16-bit)", None),
93: ("CREATEMONOBRUSH", u"Create monobrush", None),
94: ("CREATEDIBPATTERNBRUSHPT", u"Create DIB pattern brush PT", None),
95: ("EXTCREATEPEN", u"EXT create pen", parseExtCreatePen),
96: ("POLYTEXTOUTA", u"Poly text out A", None),
97: ("POLYTEXTOUTW", u"Poly text out W", None),
98: ("SETICMMODE", u"Set ICM mode", parseICMMode),
99: ("CREATECOLORSPACE", u"Create color space", None),
100: ("SETCOLORSPACE", u"Set color space", None),
101: ("DELETECOLORSPACE", u"Delete color space", None),
102: ("GLSRECORD", u"GLS record", None),
103: ("GLSBOUNDEDRECORD", u"GLS bound ED record", None),
104: ("PIXELFORMAT", u"Pixel format", None),
}
EMF_META_NAME = createDict(EMF_META, 0)
EMF_META_DESC = createDict(EMF_META, 1)
class Function(FieldSet):
def __init__(self, *args):
FieldSet.__init__(self, *args)
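        # WMF stores the record size in 16-bit words while EMF stores it in
        # bytes; field sizes are counted in bits, hence the multipliers below.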
if self.root.isEMF():
self._size = self["size"].value * 8
else:
self._size = self["size"].value * 16
def createFields(self):
if self.root.isEMF():
yield Enum(UInt32(self, "function"), EMF_META_NAME)
yield UInt32(self, "size")
try:
parser = EMF_META[self["function"].value][2]
except KeyError:
parser = None
else:
yield UInt32(self, "size")
yield Enum(UInt16(self, "function"), META_NAME)
try:
parser = META[self["function"].value][2]
except KeyError:
parser = None
if parser:
for field in parser(self):
yield field
else:
size = (self.size - self.current_size) // 8
if size:
yield RawBytes(self, "data", size)
def isValid(self):
func = self["function"]
return func.value in func.getEnum()
def createDescription(self):
if self.root.isEMF():
return EMF_META_DESC[self["function"].value]
try:
return META_DESC[self["function"].value]
except KeyError:
return "Function %s" % self["function"].display
class RECT16(StaticFieldSet):
format = (
(Int16, "left"),
(Int16, "top"),
(Int16, "right"),
(Int16, "bottom"),
)
def createDescription(self):
return "%s: %ux%u at (%u,%u)" % (
self.__class__.__name__,
self["right"].value-self["left"].value,
self["bottom"].value-self["top"].value,
self["left"].value,
self["top"].value)
class RECT32(RECT16):
format = (
(Int32, "left"),
(Int32, "top"),
(Int32, "right"),
(Int32, "bottom"),
)
class PlaceableHeader(FieldSet):
"""
Header of Placeable Metafile (file extension .APM),
created by Aldus Corporation
"""
MAGIC = "\xD7\xCD\xC6\x9A\0\0" # (magic, handle=0x0000)
def createFields(self):
yield textHandler(UInt32(self, "signature", "Placeable Metafiles signature (0x9AC6CDD7)"), hexadecimal)
yield UInt16(self, "handle")
yield RECT16(self, "rect")
yield UInt16(self, "inch")
yield NullBytes(self, "reserved", 4)
yield textHandler(UInt16(self, "checksum"), hexadecimal)
class EMF_Header(FieldSet):
MAGIC = "\x20\x45\x4D\x46\0\0" # (magic, min_ver=0x0000)
def __init__(self, *args):
FieldSet.__init__(self, *args)
self._size = self["size"].value * 8
def createFields(self):
LONG = Int32
yield UInt32(self, "type", "Record type (always 1)")
yield UInt32(self, "size", "Size of the header in bytes")
yield RECT32(self, "Bounds", "Inclusive bounds")
yield RECT32(self, "Frame", "Inclusive picture frame")
yield textHandler(UInt32(self, "signature", "Signature ID (always 0x464D4520)"), hexadecimal)
yield UInt16(self, "min_ver", "Minor version")
yield UInt16(self, "maj_ver", "Major version")
yield UInt32(self, "file_size", "Size of the file in bytes")
yield UInt32(self, "NumOfRecords", "Number of records in the metafile")
yield UInt16(self, "NumOfHandles", "Number of handles in the handle table")
yield NullBytes(self, "reserved", 2)
yield UInt32(self, "desc_size", "Size of description in 16-bit words")
yield UInt32(self, "desc_ofst", "Offset of description string in metafile")
yield UInt32(self, "nb_colors", "Number of color palette entries")
yield LONG(self, "width_px", "Width of reference device in pixels")
yield LONG(self, "height_px", "Height of reference device in pixels")
yield LONG(self, "width_mm", "Width of reference device in millimeters")
yield LONG(self, "height_mm", "Height of reference device in millimeters")
# Read description (if any)
offset = self["desc_ofst"].value
current = (self.absolute_address + self.current_size) // 8
size = self["desc_size"].value * 2
if offset == current and size:
yield String(self, "description", size, charset="UTF-16-LE", strip="\0 ")
# Read padding (if any)
size = self["size"].value - self.current_size//8
if size:
yield RawBytes(self, "padding", size)
class WMF_File(Parser):
PARSER_TAGS = {
"id": "wmf",
"category": "image",
"file_ext": ("wmf", "apm", "emf"),
"mime": (
u"image/wmf", u"image/x-wmf", u"image/x-win-metafile",
u"application/x-msmetafile", u"application/wmf", u"application/x-wmf",
u"image/x-emf"),
"magic": (
(PlaceableHeader.MAGIC, 0),
(EMF_Header.MAGIC, 40*8),
# WMF: file_type=memory, header size=9, version=3.0
("\0\0\x09\0\0\3", 0),
# WMF: file_type=disk, header size=9, version=3.0
("\1\0\x09\0\0\3", 0),
),
"min_size": 40*8,
"description": u"Microsoft Windows Metafile (WMF)",
}
endian = LITTLE_ENDIAN
FILE_TYPE = {0: "memory", 1: "disk"}
def validate(self):
if self.isEMF():
# Check EMF header
emf = self["emf_header"]
if emf["signature"].value != 0x464D4520:
return "Invalid signature"
if emf["type"].value != 1:
return "Invalid record type"
if emf["reserved"].value != "\0\0":
return "Invalid reserved"
else:
# Check AMF header
if self.isAPM():
amf = self["amf_header"]
if amf["handle"].value != 0:
return "Invalid handle"
if amf["reserved"].value != "\0\0\0\0":
return "Invalid reserved"
# Check common header
if self["file_type"].value not in (0, 1):
return "Invalid file type"
if self["header_size"].value != 9:
return "Invalid header size"
if self["nb_params"].value != 0:
return "Invalid number of parameters"
# Check first functions
for index in xrange(5):
try:
func = self["func[%u]" % index]
except MissingField:
if self.done:
return True
return "Unable to get function #%u" % index
except ParserError:
return "Unable to create function #%u" % index
# Check first frame values
if not func.isValid():
return "Function #%u is invalid" % index
return True
def createFields(self):
if self.isEMF():
yield EMF_Header(self, "emf_header")
else:
if self.isAPM():
yield PlaceableHeader(self, "amf_header")
yield Enum(UInt16(self, "file_type"), self.FILE_TYPE)
yield UInt16(self, "header_size", "Size of header in 16-bit words (always 9)")
yield UInt8(self, "win_ver_min", "Minor version of Microsoft Windows")
yield UInt8(self, "win_ver_maj", "Major version of Microsoft Windows")
yield UInt32(self, "file_size", "Total size of the metafile in 16-bit words")
yield UInt16(self, "nb_obj", "Number of objects in the file")
yield UInt32(self, "max_record_size", "The size of largest record in 16-bit words")
yield UInt16(self, "nb_params", "Not Used (always 0)")
while not(self.eof):
yield Function(self, "func[]")
def isEMF(self):
"""File is in EMF format?"""
if 1 <= self.current_length:
return self[0].name == "emf_header"
if self.size < 44*8:
return False
magic = EMF_Header.MAGIC
return self.stream.readBytes(40*8, len(magic)) == magic
def isAPM(self):
"""File is in Aldus Placeable Metafiles format?"""
if 1 <= self.current_length:
return self[0].name == "amf_header"
else:
magic = PlaceableHeader.MAGIC
return (self.stream.readBytes(0, len(magic)) == magic)
def createDescription(self):
if self.isEMF():
return u"Microsoft Enhanced Metafile (EMF) picture"
elif self.isAPM():
return u"Aldus Placeable Metafile (APM) picture"
else:
return u"Microsoft Windows Metafile (WMF) picture"
def createMimeType(self):
if self.isEMF():
return u"image/x-emf"
else:
return u"image/wmf"
def createContentSize(self):
if self.isEMF():
return None
start = self["func[0]"].absolute_address
end = self.stream.searchBytes("\3\0\0\0\0\0", start, MAX_FILESIZE * 8)
if end is not None:
return end + 6*8
return None
| gpl-3.0 |
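A minimal usage sketch for the WMF/EMF parser above, assuming the Python 2 era hachoir-core and hachoir-parser packages this module belongs to are installed; "sample.wmf" is a placeholder path and the field names follow WMF_File.createFields():
# Hypothetical driver script (Python 2); createParser selects WMF_File via its PARSER_TAGS magic.
from hachoir_parser import createParser

parser = createParser(u"sample.wmf")  # placeholder filename, not from the original module
assert parser is not None, "unrecognized or unreadable file"
print parser.description  # e.g. u"Microsoft Windows Metafile (WMF) picture"
print parser["func[0]"]["function"].display  # name of the first metafile record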
GNOME/pygobject | tests/test_cairo.py | 1 | 11911 | # -*- Mode: Python; py-indent-offset: 4 -*-
# vim: tabstop=4 shiftwidth=4 expandtab
import unittest
import pytest
import gi
try:
gi.require_foreign('cairo')
import cairo
has_cairo = True
except ImportError:
has_cairo = False
has_region = has_cairo and hasattr(cairo, "Region")
try:
from gi.repository import Gtk, Gdk
Gtk, Gdk # pyflakes
except ImportError:
Gtk = None
Gdk = None
from gi.repository import GObject, Regress
@unittest.skipUnless(has_cairo, 'built without cairo support')
class Test(unittest.TestCase):
def test_gvalue_converters(self):
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
context = cairo.Context(surface)
matrix = cairo.Matrix()
objects = {
'CairoContext': context,
'CairoSurface': surface,
'CairoFontFace': context.get_font_face(),
'CairoScaledFont': context.get_scaled_font(),
'CairoPattern': context.get_source(),
'CairoMatrix': matrix,
}
for type_name, cairo_obj in objects.items():
gtype = GObject.type_from_name(type_name)
v = GObject.Value()
assert v.init(gtype) is None
assert v.get_value() is None
v.set_value(None)
assert v.get_value() is None
v.set_value(cairo_obj)
assert v.get_value() == cairo_obj
def test_cairo_context(self):
context = Regress.test_cairo_context_full_return()
self.assertTrue(isinstance(context, cairo.Context))
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
context = cairo.Context(surface)
Regress.test_cairo_context_none_in(context)
def test_cairo_context_full_in(self):
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
context = cairo.Context(surface)
Regress.test_cairo_context_full_in(context)
with pytest.raises(TypeError):
Regress.test_cairo_context_full_in(object())
def test_cairo_context_none_return(self):
context = Regress.test_cairo_context_none_return()
self.assertTrue(isinstance(context, cairo.Context))
def test_cairo_path_full_return(self):
path = Regress.test_cairo_path_full_return()
if hasattr(cairo, "Path"): # pycairo 1.15.1+
assert isinstance(path, cairo.Path)
def test_cairo_path_none_in(self):
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
context = cairo.Context(surface)
path = context.copy_path()
Regress.test_cairo_path_none_in(path)
surface.finish()
with pytest.raises(TypeError):
Regress.test_cairo_path_none_in(object())
def test_cairo_path_full_in_full_return(self):
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
context = cairo.Context(surface)
context.move_to(10, 10)
context.curve_to(10, 10, 3, 4, 5, 6)
path = context.copy_path()
new_path = Regress.test_cairo_path_full_in_full_return(path)
assert list(path) == list(new_path)
surface.finish()
def test_cairo_font_options_full_return(self):
options = Regress.test_cairo_font_options_full_return()
assert isinstance(options, cairo.FontOptions)
def test_cairo_font_options_none_return(self):
options = Regress.test_cairo_font_options_none_return()
assert isinstance(options, cairo.FontOptions)
def test_cairo_font_options_full_in(self):
options = cairo.FontOptions()
Regress.test_cairo_font_options_full_in(options)
with pytest.raises(TypeError):
Regress.test_cairo_font_options_full_in(object())
def test_cairo_font_options_none_in(self):
options = cairo.FontOptions()
Regress.test_cairo_font_options_none_in(options)
def test_cairo_pattern_full_in(self):
pattern = cairo.SolidPattern(1, 1, 1, 1)
Regress.test_cairo_pattern_full_in(pattern)
with pytest.raises(TypeError):
Regress.test_cairo_pattern_full_in(object())
def test_cairo_pattern_none_in(self):
pattern = cairo.SolidPattern(1, 1, 1, 1)
Regress.test_cairo_pattern_none_in(pattern)
def test_cairo_pattern_full_return(self):
pattern = Regress.test_cairo_pattern_full_return()
self.assertTrue(isinstance(pattern, cairo.Pattern))
self.assertTrue(isinstance(pattern, cairo.SolidPattern))
def test_cairo_pattern_none_return(self):
pattern = Regress.test_cairo_pattern_none_return()
self.assertTrue(isinstance(pattern, cairo.Pattern))
self.assertTrue(isinstance(pattern, cairo.SolidPattern))
def test_cairo_region_full_in(self):
region = cairo.Region()
Regress.test_cairo_region_full_in(region)
with pytest.raises(TypeError):
Regress.test_cairo_region_full_in(object())
def test_cairo_matrix_none_in(self):
matrix = cairo.Matrix()
Regress.test_cairo_matrix_none_in(matrix)
with pytest.raises(TypeError):
Regress.test_cairo_matrix_none_in(object())
def test_cairo_matrix_none_return(self):
matrix = Regress.test_cairo_matrix_none_return()
assert matrix == cairo.Matrix()
def test_cairo_matrix_out_caller_allocates(self):
matrix = Regress.test_cairo_matrix_out_caller_allocates()
assert matrix == cairo.Matrix()
def test_cairo_surface(self):
surface = Regress.test_cairo_surface_none_return()
self.assertTrue(isinstance(surface, cairo.ImageSurface))
self.assertTrue(isinstance(surface, cairo.Surface))
self.assertEqual(surface.get_format(), cairo.FORMAT_ARGB32)
self.assertEqual(surface.get_width(), 10)
self.assertEqual(surface.get_height(), 10)
surface = Regress.test_cairo_surface_full_return()
self.assertTrue(isinstance(surface, cairo.ImageSurface))
self.assertTrue(isinstance(surface, cairo.Surface))
self.assertEqual(surface.get_format(), cairo.FORMAT_ARGB32)
self.assertEqual(surface.get_width(), 10)
self.assertEqual(surface.get_height(), 10)
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
Regress.test_cairo_surface_none_in(surface)
surface = Regress.test_cairo_surface_full_out()
self.assertTrue(isinstance(surface, cairo.ImageSurface))
self.assertTrue(isinstance(surface, cairo.Surface))
self.assertEqual(surface.get_format(), cairo.FORMAT_ARGB32)
self.assertEqual(surface.get_width(), 10)
self.assertEqual(surface.get_height(), 10)
def test_cairo_surface_full_in(self):
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
Regress.test_cairo_surface_full_in(surface)
with pytest.raises(TypeError):
Regress.test_cairo_surface_full_in(object())
def test_require_foreign(self):
self.assertEqual(gi.require_foreign('cairo'), None)
self.assertEqual(gi.require_foreign('cairo', 'Context'), None)
self.assertRaises(ImportError, gi.require_foreign, 'invalid_module')
self.assertRaises(ImportError, gi.require_foreign, 'invalid_module', 'invalid_symbol')
self.assertRaises(ImportError, gi.require_foreign, 'cairo', 'invalid_symbol')
@unittest.skipUnless(has_cairo, 'built without cairo support')
@unittest.skipUnless(has_region, 'built without cairo.Region support')
@unittest.skipUnless(Gdk, 'Gdk not available')
class TestRegion(unittest.TestCase):
def test_region_to_py(self):
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
context = cairo.Context(surface)
context.paint()
region = Gdk.cairo_region_create_from_surface(surface)
r = region.get_extents()
self.assertEqual((r.height, r.width), (10, 10))
def test_region_from_py(self):
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
context = cairo.Context(surface)
region = cairo.Region(cairo.RectangleInt(0, 0, 42, 42))
Gdk.cairo_region(context, region)
self.assertTrue("42" in repr(list(context.copy_path())))
@unittest.skipUnless(has_cairo, 'built without cairo support')
@unittest.skipUnless(Gtk, 'Gtk not available')
class TestPango(unittest.TestCase):
def test_cairo_font_options(self):
window = Gtk.Window()
if Gtk._version == "4.0":
window.set_font_options(cairo.FontOptions())
font_opts = window.get_font_options()
else:
screen = window.get_screen()
font_opts = screen.get_font_options()
assert font_opts is not None
self.assertTrue(isinstance(font_opts.get_subpixel_order(), int))
if has_cairo:
from gi.repository import cairo as CairoGObject
# Use PyGI signals to test non-introspected foreign marshaling.
class CairoSignalTester(GObject.Object):
sig_context = GObject.Signal(arg_types=[CairoGObject.Context])
sig_surface = GObject.Signal(arg_types=[CairoGObject.Surface])
sig_font_face = GObject.Signal(arg_types=[CairoGObject.FontFace])
sig_scaled_font = GObject.Signal(arg_types=[CairoGObject.ScaledFont])
sig_pattern = GObject.Signal(arg_types=[CairoGObject.Pattern])
@unittest.skipUnless(has_cairo, 'built without cairo support')
class TestSignalMarshaling(unittest.TestCase):
# Tests round tripping of cairo objects through non-introspected signals.
def setUp(self):
self.surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
self.context = cairo.Context(self.surface)
self.tester = CairoSignalTester()
def pass_object_through_signal(self, obj, signal):
"""Pass the given `obj` through the `signal` emission storing the
`obj` passed through the signal and returning it."""
passthrough_result = []
def callback(instance, passthrough):
passthrough_result.append(passthrough)
signal.connect(callback)
signal.emit(obj)
return passthrough_result[0]
def test_context(self):
result = self.pass_object_through_signal(self.context, self.tester.sig_context)
self.assertTrue(isinstance(result, cairo.Context))
with pytest.raises(TypeError):
self.pass_object_through_signal(object(), self.tester.sig_context)
def test_surface(self):
result = self.pass_object_through_signal(self.surface, self.tester.sig_surface)
self.assertTrue(isinstance(result, cairo.Surface))
def test_font_face(self):
font_face = self.context.get_font_face()
result = self.pass_object_through_signal(font_face, self.tester.sig_font_face)
self.assertTrue(isinstance(result, cairo.FontFace))
with pytest.raises(TypeError):
self.pass_object_through_signal(object(), self.tester.sig_font_face)
def test_scaled_font(self):
scaled_font = cairo.ScaledFont(self.context.get_font_face(),
cairo.Matrix(),
cairo.Matrix(),
self.context.get_font_options())
result = self.pass_object_through_signal(scaled_font, self.tester.sig_scaled_font)
self.assertTrue(isinstance(result, cairo.ScaledFont))
with pytest.raises(TypeError):
result = self.pass_object_through_signal(object(), self.tester.sig_scaled_font)
def test_pattern(self):
pattern = cairo.SolidPattern(1, 1, 1, 1)
result = self.pass_object_through_signal(pattern, self.tester.sig_pattern)
self.assertTrue(isinstance(result, cairo.Pattern))
self.assertTrue(isinstance(result, cairo.SolidPattern))
with pytest.raises(TypeError):
result = self.pass_object_through_signal(object(), self.tester.sig_pattern)
| lgpl-2.1 |
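A condensed, standalone version of the GValue round-trip that test_gvalue_converters above exercises; it assumes pycairo and a PyGObject build with cairo foreign-struct support, and uses the same registered type name as the test:
import gi
gi.require_foreign('cairo')
import cairo
from gi.repository import GObject

# Round-trip a cairo surface through a GObject.Value, as in the test above.
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
value = GObject.Value()
value.init(GObject.type_from_name('CairoSurface'))
value.set_value(surface)
assert value.get_value() == surface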
hojel/calibre | src/calibre/ebooks/docx/cleanup.py | 11 | 6742 | #!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
import os
NBSP = '\xa0'
def mergeable(previous, current):
if previous.tail or current.tail:
return False
if previous.get('class', None) != current.get('class', None):
return False
if current.get('id', False):
return False
try:
return next(previous.itersiblings()) is current
except StopIteration:
return False
def append_text(parent, text):
if len(parent) > 0:
parent[-1].tail = (parent[-1].tail or '') + text
else:
parent.text = (parent.text or '') + text
def merge(parent, span):
if span.text:
append_text(parent, span.text)
for child in span:
parent.append(child)
if span.tail:
append_text(parent, span.tail)
span.getparent().remove(span)
def merge_run(run):
parent = run[0]
for span in run[1:]:
merge(parent, span)
def liftable(css):
# A <span> is liftable if all its styling would work just as well if it is
# specified on the parent element.
prefixes = {x.partition('-')[0] for x in css.iterkeys()}
return not (prefixes - {'text', 'font', 'letter', 'color', 'background'})
def add_text(elem, attr, text):
old = getattr(elem, attr) or ''
setattr(elem, attr, old + text)
def lift(span):
# Replace an element by its content (text, children and tail)
parent = span.getparent()
idx = parent.index(span)
try:
last_child = span[-1]
except IndexError:
last_child = None
if span.text:
if idx == 0:
add_text(parent, 'text', span.text)
else:
add_text(parent[idx - 1], 'tail', span.text)
for child in reversed(span):
parent.insert(idx, child)
parent.remove(span)
if span.tail:
if last_child is None:
if idx == 0:
add_text(parent, 'text', span.tail)
else:
add_text(parent[idx - 1], 'tail', span.tail)
else:
add_text(last_child, 'tail', span.tail)
def before_count(root, tag, limit=10):
body = root.xpath('//body[1]')
if not body:
return limit
ans = 0
for elem in body[0].iterdescendants():
if elem is tag:
return ans
ans += 1
if ans > limit:
return limit
def cleanup_markup(log, root, styles, dest_dir, detect_cover, XPath):
# Move <hr>s outside paragraphs, if possible.
pancestor = XPath('|'.join('ancestor::%s[1]' % x for x in ('p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6')))
for hr in root.xpath('//span/hr'):
p = pancestor(hr)
if p:
p = p[0]
descendants = tuple(p.iterdescendants())
if descendants[-1] is hr:
parent = p.getparent()
idx = parent.index(p)
parent.insert(idx+1, hr)
hr.tail = '\n\t'
# Merge consecutive spans that have the same styling
current_run = []
for span in root.xpath('//span[not(@style or @lang)]'):
if not current_run:
current_run.append(span)
else:
last = current_run[-1]
if mergeable(last, span):
current_run.append(span)
else:
if len(current_run) > 1:
merge_run(current_run)
current_run = [span]
# Remove unnecessary span tags that are the only child of a parent block
# element
class_map = dict(styles.classes.itervalues())
parents = ('p', 'div') + tuple('h%d' % i for i in xrange(1, 7))
for parent in root.xpath('//*[(%s) and count(span)=1]' % ' or '.join('name()="%s"' % t for t in parents)):
if len(parent) == 1 and not parent.text and not parent[0].tail and not parent[0].get('id', None):
# We have a block whose contents are entirely enclosed in a <span>
span = parent[0]
span_class = span.get('class', None)
span_css = class_map.get(span_class, {})
if liftable(span_css):
pclass = parent.get('class', None)
if span_class:
pclass = (pclass + ' ' + span_class) if pclass else span_class
parent.set('class', pclass)
parent.text = span.text
parent.remove(span)
if span.get('lang'):
parent.set('lang', span.get('lang'))
for child in span:
parent.append(child)
# Make spans whose only styling is bold or italic into <b> and <i> tags
for span in root.xpath('//span[@class and not(@style)]'):
css = class_map.get(span.get('class', None), {})
if len(css) == 1:
if css == {'font-style':'italic'}:
span.tag = 'i'
del span.attrib['class']
elif css == {'font-weight':'bold'}:
span.tag = 'b'
del span.attrib['class']
# Get rid of <span>s that have no styling
for span in root.xpath('//span[not(@class or @id or @style or @lang)]'):
lift(span)
# Convert <p><br style="page-break-after:always"> </p> style page breaks
# into something the viewer will render as a page break
for p in root.xpath('//p[br[@style="page-break-after:always"]]'):
if len(p) == 1 and (not p[0].tail or not p[0].tail.strip()):
p.remove(p[0])
prefix = p.get('style', '')
if prefix:
prefix += '; '
p.set('style', prefix + 'page-break-after:always')
p.text = NBSP if not p.text else p.text
if detect_cover:
# Check if the first image in the document is possibly a cover
img = root.xpath('//img[@src][1]')
if img:
img = img[0]
path = os.path.join(dest_dir, img.get('src'))
if os.path.exists(path) and before_count(root, img, limit=10) < 5:
from calibre.utils.magick.draw import identify
try:
width, height, fmt = identify(path)
except:
width, height, fmt = 0, 0, None
del fmt
try:
is_cover = 0.8 <= height/width <= 1.8 and height*width >= 160000
except ZeroDivisionError:
is_cover = False
if is_cover:
log.debug('Detected an image that looks like a cover')
img.getparent().remove(img)
return path
| gpl-3.0 |
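A toy run of the lift() helper from cleanup.py above; it assumes lift() is in scope (imported from that module) and uses lxml directly, matching the kind of trees the converter operates on:
from lxml import etree

root = etree.fromstring('<p>before <span>inner <b>bold</b> tail</span> after</p>')
lift(root.find('span'))  # assumes lift() from the module above is importable
# The <span> wrapper is gone; its text, children and tail were folded into <p>:
print(etree.tostring(root))  # <p>before inner <b>bold</b> tail after</p>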
2013Commons/HUE-SHARK | desktop/core/ext-py/django-extensions-0.5/build/lib.linux-i686-2.7/django_extensions/management/commands/sqldiff.py | 7 | 29796 | """
sqldiff.py - Prints the (approximated) difference between models and database
TODO:
- better support for relations
- better support for constraints (mainly postgresql?)
- support for table spaces with postgresql
- when a table is not managed (meta.managed==False) then only do a one-way
sqldiff ? show differences from db->table but not the other way around since
it's not managed.
KNOWN ISSUES:
- MySQL has by far the most problems with introspection. Please be
careful when using MySQL with sqldiff.
- Booleans are reported back as Integers, so there's no way to know if
there was a real change.
- Varchar sizes are reported back without unicode support, so their size
may change in comparison to the real length of the varchar.
- Some of the 'fixes' to counter these problems might create false
positives or false negatives.
"""
from django.core.management.base import BaseCommand
from django.core.management import sql as _sql
from django.core.management import CommandError
from django.core.management.color import no_style
from django.db import transaction, connection
from django.db.models.fields import IntegerField
from optparse import make_option
ORDERING_FIELD = IntegerField('_order', null=True)
def flatten(l, ltypes=(list, tuple)):
ltype = type(l)
l = list(l)
i = 0
while i < len(l):
while isinstance(l[i], ltypes):
if not l[i]:
l.pop(i)
i -= 1
break
else:
l[i:i + 1] = l[i]
i += 1
return ltype(l)
def all_local_fields(meta):
all_fields = meta.local_fields[:]
for parent in meta.parents:
all_fields.extend(all_local_fields(parent._meta))
return all_fields
class SQLDiff(object):
DATA_TYPES_REVERSE_OVERRIDE = {
}
DIFF_TYPES = [
'comment',
'table-missing-in-db',
'field-missing-in-db',
'field-missing-in-model',
'index-missing-in-db',
'index-missing-in-model',
'unique-missing-in-db',
'unique-missing-in-model',
'field-type-differ',
'field-parameter-differ',
]
DIFF_TEXTS = {
'comment': 'comment: %(0)s',
'table-missing-in-db': "table '%(0)s' missing in database",
'field-missing-in-db' : "field '%(1)s' defined in model but missing in database",
'field-missing-in-model' : "field '%(1)s' defined in database but missing in model",
'index-missing-in-db' : "field '%(1)s' INDEX defined in model but missing in database",
'index-missing-in-model' : "field '%(1)s' INDEX defined in database schema but missing in model",
'unique-missing-in-db' : "field '%(1)s' UNIQUE defined in model but missing in database",
'unique-missing-in-model' : "field '%(1)s' UNIQUE defined in database schema but missing in model",
'field-type-differ' : "field '%(1)s' not of same type: db='%(3)s', model='%(2)s'",
'field-parameter-differ' : "field '%(1)s' parameters differ: db='%(3)s', model='%(2)s'",
}
SQL_FIELD_MISSING_IN_DB = lambda self, style, qn, args: "%s %s\n\t%s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ADD'), style.SQL_FIELD(qn(args[1])), style.SQL_COLTYPE(args[2]))
SQL_FIELD_MISSING_IN_MODEL = lambda self, style, qn, args: "%s %s\n\t%s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('DROP COLUMN'), style.SQL_FIELD(qn(args[1])))
SQL_INDEX_MISSING_IN_DB = lambda self, style, qn, args: "%s %s\n\t%s %s (%s);" % (style.SQL_KEYWORD('CREATE INDEX'), style.SQL_TABLE(qn("%s_idx" % '_'.join(args[0:2]))), style.SQL_KEYWORD('ON'), style.SQL_TABLE(qn(args[0])), style.SQL_FIELD(qn(args[1])))
# FIXME: need to lookup index name instead of just appending _idx to table + fieldname
SQL_INDEX_MISSING_IN_MODEL = lambda self, style, qn, args: "%s %s;" % (style.SQL_KEYWORD('DROP INDEX'), style.SQL_TABLE(qn("%s_idx" % '_'.join(args[0:2]))))
SQL_UNIQUE_MISSING_IN_DB = lambda self, style, qn, args: "%s %s\n\t%s %s (%s);" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ADD'), style.SQL_KEYWORD('UNIQUE'), style.SQL_FIELD(qn(args[1])))
# FIXME: need to lookup unique constraint name instead of appending _key to table + fieldname
SQL_UNIQUE_MISSING_IN_MODEL = lambda self, style, qn, args: "%s %s\n\t%s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('DROP'), style.SQL_KEYWORD('CONSTRAINT'), style.SQL_TABLE(qn("%s_key" % ('_'.join(args[:2])))))
SQL_FIELD_TYPE_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD("MODIFY"), style.SQL_FIELD(qn(args[1])), style.SQL_COLTYPE(args[2]))
SQL_FIELD_PARAMETER_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD("MODIFY"), style.SQL_FIELD(qn(args[1])), style.SQL_COLTYPE(args[2]))
SQL_COMMENT = lambda self, style, qn, args: style.NOTICE('-- Comment: %s' % style.SQL_TABLE(args[0]))
SQL_TABLE_MISSING_IN_DB = lambda self, style, qn, args: style.NOTICE('-- Table missing: %s' % args[0])
def __init__(self, app_models, options):
self.app_models = app_models
self.options = options
self.dense = options.get('dense_output', False)
try:
self.introspection = connection.introspection
except AttributeError:
from django.db import get_introspection_module
self.introspection = get_introspection_module()
self.cursor = connection.cursor()
self.django_tables = self.get_django_tables(options.get('only_existing', True))
self.db_tables = self.introspection.get_table_list(self.cursor)
self.differences = []
self.unknown_db_fields = {}
self.DIFF_SQL = {
'comment': self.SQL_COMMENT,
'table-missing-in-db': self.SQL_TABLE_MISSING_IN_DB,
'field-missing-in-db': self.SQL_FIELD_MISSING_IN_DB,
'field-missing-in-model': self.SQL_FIELD_MISSING_IN_MODEL,
'index-missing-in-db': self.SQL_INDEX_MISSING_IN_DB,
'index-missing-in-model': self.SQL_INDEX_MISSING_IN_MODEL,
'unique-missing-in-db': self.SQL_UNIQUE_MISSING_IN_DB,
'unique-missing-in-model': self.SQL_UNIQUE_MISSING_IN_MODEL,
'field-type-differ': self.SQL_FIELD_TYPE_DIFFER,
'field-parameter-differ': self.SQL_FIELD_PARAMETER_DIFFER,
}
def add_app_model_marker(self, app_label, model_name):
self.differences.append((app_label, model_name, []))
def add_difference(self, diff_type, *args):
assert diff_type in self.DIFF_TYPES, 'Unknown difference type'
self.differences[-1][-1].append((diff_type, args))
def get_django_tables(self, only_existing):
try:
django_tables = self.introspection.django_table_names(only_existing=only_existing)
except AttributeError:
# backwards compatibility for before introspection refactoring (r8296)
try:
django_tables = _sql.django_table_names(only_existing=only_existing)
except AttributeError:
# backwards compatibility for before svn r7568
django_tables = _sql.django_table_list(only_existing=only_existing)
return django_tables
def sql_to_dict(self, query,param):
""" sql_to_dict(query, param) -> list of dicts
code from snippet at http://www.djangosnippets.org/snippets/1383/
"""
cursor = connection.cursor()
cursor.execute(query,param)
fieldnames = [name[0] for name in cursor.description]
result = []
for row in cursor.fetchall():
rowset = []
for field in zip(fieldnames, row):
rowset.append(field)
result.append(dict(rowset))
return result
def get_field_model_type(self, field):
return field.db_type()
def get_field_db_type(self, description, field=None, table_name=None):
from django.db import models
# DB-API cursor.description
#(name, type_code, display_size, internal_size, precision, scale, null_ok) = description
type_code = description[1]
if type_code in self.DATA_TYPES_REVERSE_OVERRIDE:
reverse_type = self.DATA_TYPES_REVERSE_OVERRIDE[type_code]
else:
try:
try:
reverse_type = self.introspection.data_types_reverse[type_code]
except AttributeError:
# backwards compatibility for before introspection refactoring (r8296)
reverse_type = self.introspection.DATA_TYPES_REVERSE.get(type_code)
except KeyError:
# type_code not found in data_types_reverse map
key = (self.differences[-1][:2], description[:2])
if key not in self.unknown_db_fields:
self.unknown_db_fields[key] = 1
self.add_difference('comment', "Unknown database type for field '%s' (%s)" % (description[0], type_code))
return None
kwargs = {}
if isinstance(reverse_type, tuple):
kwargs.update(reverse_type[1])
reverse_type = reverse_type[0]
if reverse_type == "CharField" and description[3]:
kwargs['max_length'] = description[3]
if reverse_type == "DecimalField":
kwargs['max_digits'] = description[4]
kwargs['decimal_places'] = abs(description[5])
if description[6]:
kwargs['blank'] = True
if not reverse_type in ('TextField', 'CharField'):
kwargs['null'] = True
if '.' in reverse_type:
from django.utils import importlib
# TODO: when was importlib added to django.utils ? and do we
# need to add backwards compatibility code ?
module_path, package_name = reverse_type.rsplit('.', 1)
module = importlib.import_module(module_path)
field_db_type = getattr(module, package_name)(**kwargs).db_type()
else:
field_db_type = getattr(models, reverse_type)(**kwargs).db_type()
return field_db_type
def strip_parameters(self, field_type):
if field_type:
return field_type.split(" ")[0].split("(")[0]
return field_type
def find_unique_missing_in_db(self, meta, table_indexes, table_name):
for field in all_local_fields(meta):
if field.unique:
attname = field.db_column or field.attname
if attname in table_indexes and table_indexes[attname]['unique']:
continue
self.add_difference('unique-missing-in-db', table_name, attname)
def find_unique_missing_in_model(self, meta, table_indexes, table_name):
# TODO: Postgresql does not list unique_togethers in table_indexes
# MySQL does
fields = dict([(field.db_column or field.name, field.unique) for field in all_local_fields(meta)])
for att_name, att_opts in table_indexes.iteritems():
if att_opts['unique'] and att_name in fields and not fields[att_name]:
if att_name in flatten(meta.unique_together): continue
self.add_difference('unique-missing-in-model', table_name, att_name)
def find_index_missing_in_db(self, meta, table_indexes, table_name):
for field in all_local_fields(meta):
if field.db_index:
attname = field.db_column or field.attname
if not attname in table_indexes:
self.add_difference('index-missing-in-db', table_name, attname)
def find_index_missing_in_model(self, meta, table_indexes, table_name):
fields = dict([(field.name, field) for field in all_local_fields(meta)])
for att_name, att_opts in table_indexes.iteritems():
if att_name in fields:
field = fields[att_name]
if field.db_index: continue
if att_opts['primary_key'] and field.primary_key: continue
if att_opts['unique'] and field.unique: continue
if att_opts['unique'] and att_name in flatten(meta.unique_together): continue
self.add_difference('index-missing-in-model', table_name, att_name)
def find_field_missing_in_model(self, fieldmap, table_description, table_name):
for row in table_description:
if row[0] not in fieldmap:
self.add_difference('field-missing-in-model', table_name, row[0])
def find_field_missing_in_db(self, fieldmap, table_description, table_name):
db_fields = [row[0] for row in table_description]
for field_name, field in fieldmap.iteritems():
if field_name not in db_fields:
self.add_difference('field-missing-in-db', table_name, field_name, field.db_type())
def find_field_type_differ(self, meta, table_description, table_name, func=None):
db_fields = dict([(row[0], row) for row in table_description])
for field in all_local_fields(meta):
if field.name not in db_fields: continue
description = db_fields[field.name]
model_type = self.strip_parameters(self.get_field_model_type(field))
db_type = self.strip_parameters(self.get_field_db_type(description, field))
# use callback function if defined
if func:
model_type, db_type = func(field, description, model_type, db_type)
if not model_type==db_type:
self.add_difference('field-type-differ', table_name, field.name, model_type, db_type)
def find_field_parameter_differ(self, meta, table_description, table_name, func=None):
db_fields = dict([(row[0], row) for row in table_description])
for field in all_local_fields(meta):
if field.name not in db_fields: continue
description = db_fields[field.name]
model_type = self.get_field_model_type(field)
db_type = self.get_field_db_type(description, field, table_name)
if not self.strip_parameters(model_type)==self.strip_parameters(db_type):
continue
# use callback function if defined
if func:
model_type, db_type = func(field, description, model_type, db_type)
if not model_type==db_type:
self.add_difference('field-parameter-differ', table_name, field.name, model_type, db_type)
@transaction.commit_manually
def find_differences(self):
cur_app_label = None
for app_model in self.app_models:
meta = app_model._meta
table_name = meta.db_table
app_label = meta.app_label
if cur_app_label!=app_label:
# Marker indicating start of difference scan for this table_name
self.add_app_model_marker(app_label, app_model.__name__)
#if not table_name in self.django_tables:
if not table_name in self.db_tables:
# Table is missing from database
self.add_difference('table-missing-in-db', table_name)
continue
table_indexes = self.introspection.get_indexes(self.cursor, table_name)
fieldmap = dict([(field.db_column or field.get_attname(), field) for field in all_local_fields(meta)])
# add ordering field if model uses order_with_respect_to
if meta.order_with_respect_to:
fieldmap['_order'] = ORDERING_FIELD
try:
table_description = self.introspection.get_table_description(self.cursor, table_name)
except Exception, e:
self.add_difference('comment', str(e).strip())
transaction.rollback() # reset transaction
continue
# Fields which are defined in database but not in model
# 1) find: 'unique-missing-in-model'
self.find_unique_missing_in_model(meta, table_indexes, table_name)
# 2) find: 'index-missing-in-model'
self.find_index_missing_in_model(meta, table_indexes, table_name)
# 3) find: 'field-missing-in-model'
self.find_field_missing_in_model(fieldmap, table_description, table_name)
# Fields which are defined in models but not in database
# 4) find: 'field-missing-in-db'
self.find_field_missing_in_db(fieldmap, table_description, table_name)
# 5) find: 'unique-missing-in-db'
self.find_unique_missing_in_db(meta, table_indexes, table_name)
# 6) find: 'index-missing-in-db'
self.find_index_missing_in_db(meta, table_indexes, table_name)
# Fields which have a different type or parameters
# 7) find: 'type-differs'
self.find_field_type_differ(meta, table_description, table_name)
# 8) find: 'type-parameter-differs'
self.find_field_parameter_differ(meta, table_description, table_name)
def print_diff(self, style=no_style()):
""" print differences to stdout """
if self.options.get('sql', True):
self.print_diff_sql(style)
else:
self.print_diff_text(style)
def print_diff_text(self, style):
cur_app_label = None
for app_label, model_name, diffs in self.differences:
if not diffs: continue
if not self.dense and cur_app_label != app_label:
print style.NOTICE("+ Application:"), style.SQL_TABLE(app_label)
cur_app_label = app_label
if not self.dense:
print style.NOTICE("|-+ Differences for model:"), style.SQL_TABLE(model_name)
for diff in diffs:
diff_type, diff_args = diff
text = self.DIFF_TEXTS[diff_type] % dict((str(i), style.SQL_TABLE(e)) for i, e in enumerate(diff_args))
text = "'".join(i%2==0 and style.ERROR(e) or e for i, e in enumerate(text.split("'")))
if not self.dense:
print style.NOTICE("|--+"), text
else:
print style.NOTICE("App"), style.SQL_TABLE(app_name), style.NOTICE('Model'), style.SQL_TABLE(model_name), text
def print_diff_sql(self, style):
cur_app_label = None
qn = connection.ops.quote_name
print style.SQL_KEYWORD("BEGIN;")
for app_label, model_name, diffs in self.differences:
if not diffs: continue
if not self.dense and cur_app_label != app_label:
print style.NOTICE("-- Application: %s" % style.SQL_TABLE(app_label))
cur_app_label = app_label
if not self.dense:
print style.NOTICE("-- Model: %s" % style.SQL_TABLE(model_name))
for diff in diffs:
diff_type, diff_args = diff
text = self.DIFF_SQL[diff_type](style, qn, diff_args)
if self.dense:
text = text.replace("\n\t", " ")
print text
print style.SQL_KEYWORD("COMMIT;")
class GenericSQLDiff(SQLDiff):
pass
class MySQLDiff(SQLDiff):
# All the MySQL hacks together create something of a problem
# Fixing one bug in MySQL creates another issue. So just keep in mind
# that this is way unreliable for MySQL atm.
def get_field_db_type(self, description, field=None, table_name=None):
from MySQLdb.constants import FIELD_TYPE
# weird bug? in mysql db-api where it returns three times the correct value for field length
# if I remember correctly it had something to do with unicode strings
# TODO: Fix this is a more meaningful and better understood manner
description = list(description)
if description[1] not in [FIELD_TYPE.TINY, FIELD_TYPE.SHORT]: # exclude tinyints from conversion.
description[3] = description[3]/3
description[4] = description[4]/3
db_type = super(MySQLDiff, self).get_field_db_type(description)
if not db_type:
return
if field:
if field.primary_key and (db_type=='integer' or db_type=='bigint'):
db_type += ' AUTO_INCREMENT'
# MySQL isn't really sure about char's and varchar's like sqlite
field_type = self.get_field_model_type(field)
# Fix char/varchar inconsistencies
if self.strip_parameters(field_type)=='char' and self.strip_parameters(db_type)=='varchar':
db_type = db_type.lstrip("var")
# They like to call 'bool's 'tinyint(1)' and introspection makes that an integer;
# just convert it back to its proper type, a bool is a bool and nothing else.
if db_type=='integer' and description[1]==FIELD_TYPE.TINY and description[4]==1:
db_type = 'bool'
if db_type=='integer' and description[1]==FIELD_TYPE.SHORT:
db_type = 'smallint UNSIGNED' # FIXME: what about if it's not UNSIGNED ?
return db_type
class SqliteSQLDiff(SQLDiff):
# Unique does not seem to be implied on Sqlite for Primary_key's
# if this is more generic among databases this might be useful
# to add to the superclass's find_unique_missing_in_db method
def find_unique_missing_in_db(self, meta, table_indexes, table_name):
for field in all_local_fields(meta):
if field.unique:
attname = field.db_column or field.attname
if attname in table_indexes and table_indexes[attname]['unique']:
continue
if table_indexes[attname]['primary_key']:
continue
self.add_difference('unique-missing-in-db', table_name, attname)
# Finding Indexes by using the get_indexes dictionary doesn't seem to work
# for sqlite.
def find_index_missing_in_db(self, meta, table_indexes, table_name):
pass
def find_index_missing_in_model(self, meta, table_indexes, table_name):
pass
def get_field_db_type(self, description, field=None, table_name=None):
db_type = super(SqliteSQLDiff, self).get_field_db_type(description)
if not db_type:
return
if field:
field_type = self.get_field_model_type(field)
# Fix char/varchar inconsistencies
if self.strip_parameters(field_type)=='char' and self.strip_parameters(db_type)=='varchar':
db_type = db_type.lstrip("var")
return db_type
class PostgresqlSQLDiff(SQLDiff):
DATA_TYPES_REVERSE_OVERRIDE = {
20: 'IntegerField',
1042: 'CharField',
# postgis types (TODO: support is very incomplete)
17506: 'django.contrib.gis.db.models.fields.PointField',
55902: 'django.contrib.gis.db.models.fields.MultiPolygonField',
}
# Hopefully in the future we can add constraint checking and other more
# advanced checks based on this database.
SQL_LOAD_CONSTRAINTS = """
SELECT nspname, relname, conname, attname, pg_get_constraintdef(pg_constraint.oid)
FROM pg_constraint
INNER JOIN pg_attribute ON pg_constraint.conrelid = pg_attribute.attrelid AND pg_attribute.attnum = any(pg_constraint.conkey)
INNER JOIN pg_class ON conrelid=pg_class.oid
INNER JOIN pg_namespace ON pg_namespace.oid=pg_class.relnamespace
ORDER BY CASE WHEN contype='f' THEN 0 ELSE 1 END,contype,nspname,relname,conname;
"""
SQL_FIELD_TYPE_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ALTER'), style.SQL_FIELD(qn(args[1])), style.SQL_KEYWORD("TYPE"), style.SQL_COLTYPE(args[2]))
SQL_FIELD_PARAMETER_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ALTER'), style.SQL_FIELD(qn(args[1])), style.SQL_KEYWORD("TYPE"), style.SQL_COLTYPE(args[2]))
def __init__(self, app_models, options):
SQLDiff.__init__(self, app_models, options)
self.check_constraints = {}
self.load_constraints()
def load_constraints(self):
for dct in self.sql_to_dict(self.SQL_LOAD_CONSTRAINTS, []):
key = (dct['nspname'], dct['relname'], dct['attname'])
if 'CHECK' in dct['pg_get_constraintdef']:
self.check_constraints[key] = dct
def get_field_db_type(self, description, field=None, table_name=None):
db_type = super(PostgresqlSQLDiff, self).get_field_db_type(description)
if not db_type:
return
if field:
if field.primary_key and db_type=='integer':
db_type = 'serial'
if table_name:
tablespace = field.db_tablespace
if tablespace=="":
tablespace = "public"
check_constraint = self.check_constraints.get((tablespace, table_name, field.attname),{}).get('pg_get_constraintdef', None)
if check_constraint:
check_constraint = check_constraint.replace("((", "(")
check_constraint = check_constraint.replace("))", ")")
check_constraint = '("'.join([')' in e and '" '.join(e.split(" ", 1)) or e for e in check_constraint.split("(")])
# TODO: might be more than one constraint in the definition?
db_type += ' '+check_constraint
return db_type
"""
def find_field_type_differ(self, meta, table_description, table_name):
def callback(field, description, model_type, db_type):
if field.primary_key and db_type=='integer':
db_type = 'serial'
return model_type, db_type
super(PostgresqlSQLDiff, self).find_field_type_differs(meta, table_description, table_name, callback)
"""
DATABASE_SQLDIFF_CLASSES = {
'postgresql_psycopg2' : PostgresqlSQLDiff,
'postgresql': PostgresqlSQLDiff,
'mysql': MySQLDiff,
'sqlite3': SqliteSQLDiff,
'oracle': GenericSQLDiff
}
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--all-applications', '-a', action='store_true', dest='all_applications',
help="Automaticly include all application from INSTALLED_APPS."),
make_option('--not-only-existing', '-e', action='store_false', dest='only_existing',
help="Check all tables that exist in the database, not only tables that should exist based on models."),
make_option('--dense-output', '-d', action='store_true', dest='dense_output',
help="Shows the output in dense format, normally output is spreaded over multiple lines."),
make_option('--output_text', '-t', action='store_false', dest='sql', default=True,
help="Outputs the differences as descriptive text instead of SQL"),
)
help = """Prints the (approximated) difference between models and fields in the database for the given app name(s).
It indicates how columns in the database are different from the sql that would
be generated by Django. This command is not a database migration tool. (Though
it can certainly help) It's purpose is to show the current differences as a way
to check/debug ur models compared to the real database tables and columns."""
output_transaction = False
args = '<appname appname ...>'
def handle(self, *app_labels, **options):
from django import VERSION
if VERSION[:2]<(1,0):
raise CommandError("SQLDiff only support Django 1.0 or higher!")
from django.db import models
from django.conf import settings
if settings.DATABASE_ENGINE =='dummy':
# This must be the "dummy" database backend, which means the user
# hasn't set DATABASE_ENGINE.
raise CommandError("Django doesn't know which syntax to use for your SQL statements,\n" +
"because you haven't specified the DATABASE_ENGINE setting.\n" +
"Edit your settings file and change DATABASE_ENGINE to something like 'postgresql' or 'mysql'.")
if options.get('all_applications', False):
app_models = models.get_models()
else:
if not app_labels:
raise CommandError('Enter at least one appname.')
try:
app_list = [models.get_app(app_label) for app_label in app_labels]
except (models.ImproperlyConfigured, ImportError), e:
raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)
app_models = []
for app in app_list:
app_models.extend(models.get_models(app))
## remove all models that are not managed by Django
#app_models = [model for model in app_models if getattr(model._meta, 'managed', True)]
if not app_models:
raise CommandError('Unable to execute sqldiff: no models found.')
cls = DATABASE_SQLDIFF_CLASSES.get(settings.DATABASE_ENGINE, GenericSQLDiff)
sqldiff_instance = cls(app_models, options)
sqldiff_instance.find_differences()
sqldiff_instance.print_diff(self.style)
return
| apache-2.0 |
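A quick illustration of the flatten() helper defined near the top of sqldiff.py above; the nested sample data is made up, and flatten() is assumed to be in scope:
# Python 2, matching the module above; flatten() preserves the outermost type.
nested = ('a', ('b', ['c', ('d',)]), 'e')
print flatten(nested)  # -> ('a', 'b', 'c', 'd', 'e')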
huchoi/edx-platform | cms/djangoapps/contentstore/management/commands/clone_course.py | 13 | 2005 | """
Script for cloning a course
"""
from django.core.management.base import BaseCommand, CommandError
from xmodule.modulestore.django import modulestore
from student.roles import CourseInstructorRole, CourseStaffRole
from opaque_keys.edx.keys import CourseKey
from opaque_keys import InvalidKeyError
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from xmodule.modulestore import ModuleStoreEnum
#
# To run from command line: ./manage.py cms clone_course --settings=dev master/300/cough edx/111/foo
#
class Command(BaseCommand):
"""Clone a MongoDB-backed course to another location"""
help = 'Clone a MongoDB backed course to another location'
def course_key_from_arg(self, arg):
"""
Convert the command line arg into a course key
"""
try:
return CourseKey.from_string(arg)
except InvalidKeyError:
return SlashSeparatedCourseKey.from_deprecated_string(arg)
def handle(self, *args, **options):
"Execute the command"
if len(args) != 2:
raise CommandError("clone requires 2 arguments: <source-course_id> <dest-course_id>")
source_course_id = self.course_key_from_arg(args[0])
dest_course_id = self.course_key_from_arg(args[1])
mstore = modulestore()
print("Cloning course {0} to {1}".format(source_course_id, dest_course_id))
with mstore.bulk_write_operations(dest_course_id):
if mstore.clone_course(source_course_id, dest_course_id, ModuleStoreEnum.UserID.mgmt_command):
print("copying User permissions...")
# purposely avoids auth.add_user b/c it doesn't have a caller to authorize
CourseInstructorRole(dest_course_id).add_users(
*CourseInstructorRole(source_course_id).users_with_role()
)
CourseStaffRole(dest_course_id).add_users(
*CourseStaffRole(source_course_id).users_with_role()
)
| agpl-3.0 |
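Besides the shell invocation shown in the module's own comment, the command can be driven programmatically; a minimal sketch, assuming a configured Django settings module, with the course ids taken from that comment as placeholders:
from django.core.management import call_command

# Equivalent to: ./manage.py cms clone_course master/300/cough edx/111/foo
call_command('clone_course', 'master/300/cough', 'edx/111/foo')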