blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
017913159afed747b1270dae62aabff70e28a5e2 | 677208aa111589719e24313f0c0eef6267c54cb1 | /yolagcy/urls.py | 79acc086821daad6a2618e9e95d2ad09a71a4534 | [] | no_license | amanmyrats/taksi | e97172063fb7861fb243b14142f01baf5b326423 | bef5e743c0528f14507d453cdde4617914c2c5fc | refs/heads/main | 2023-08-22T14:03:18.849574 | 2021-10-01T08:59:08 | 2021-10-01T08:59:08 | 346,410,631 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 197 | py |
# URL configuration for the yolagcy app. Both routes dispatch to the same
# view; the second adds a path-captured argument.
from django.urls import path
from .views import search_view
urlpatterns = [
    # Plain search page, no extra argument.
    path('search/', search_view, name='search'),
    # Search narrowed by a string captured from the URL as ``nira``.
    # NOTE(review): 'nira' looks like a category/registration filter passed to
    # search_view -- confirm its meaning against views.search_view.
    path('search/<str:nira>/', search_view, name='searchcategory'),
]
| [
"A.SOYUNJALIYEV@bycn.bouygues-construction.com"
] | A.SOYUNJALIYEV@bycn.bouygues-construction.com |
da1f403db95e038688dc9c46c5fa9a028823e73c | 564d6a4d305a8ac6a7e01c761831fb2081c02d0f | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_04_01/aio/operations/_network_management_client_operations.py | b5ef06c1443c98c5807d9023cf421816c15aa794 | [
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
] | permissive | paultaiton/azure-sdk-for-python | 69af4d889bac8012b38f5b7e8108707be679b472 | d435a1a25fd6097454b7fdfbbdefd53e05029160 | refs/heads/master | 2023-01-30T16:15:10.647335 | 2020-11-14T01:09:50 | 2020-11-14T01:09:50 | 283,343,691 | 0 | 0 | MIT | 2020-07-28T22:43:43 | 2020-07-28T22:43:43 | null | UTF-8 | Python | false | false | 47,161 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NetworkManagementClientOperationsMixin:
async def _put_bastion_shareable_link_initial(
    self,
    resource_group_name: str,
    bastion_host_name: str,
    bsl_request: "models.BastionShareableLinkListRequest",
    **kwargs
) -> Optional["models.BastionShareableLinkListResult"]:
    """Send the initial POST of the create-shareable-links long-running operation.

    Returns the deserialized first page on a 200 response, or ``None`` when the
    service answers 202 (operation accepted but not yet complete). Use
    ``begin_put_bastion_shareable_link`` for the full poll-and-page flow.

    :param resource_group_name: The name of the resource group.
    :param bastion_host_name: The name of the Bastion Host.
    :param bsl_request: Post request for all the Bastion Shareable Link endpoints.
    :raises ~azure.core.exceptions.HttpResponseError: for any status other than 200/202.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.BastionShareableLinkListResult"]]
    # Map well-known failure statuses to specific azure-core exceptions;
    # callers may extend/override the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-04-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self._put_bastion_shareable_link_initial.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # Serialize the request body and run it through the async pipeline.
    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(bsl_request, 'BastionShareableLinkListRequest')
    body_content_kwargs['content'] = body_content
    request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = None
    if response.status_code == 200:
        # Only a 200 carries a result page; 202 means "still provisioning".
        deserialized = self._deserialize('BastionShareableLinkListResult', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
_put_bastion_shareable_link_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}/createShareableLinks'}  # type: ignore
async def begin_put_bastion_shareable_link(
    self,
    resource_group_name: str,
    bastion_host_name: str,
    bsl_request: "models.BastionShareableLinkListRequest",
    **kwargs
) -> AsyncLROPoller[AsyncItemPaged["models.BastionShareableLinkListResult"]]:
    """Creates a Bastion Shareable Links for all the VMs specified in the request.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param bastion_host_name: The name of the Bastion Host.
    :type bastion_host_name: str
    :param bsl_request: Post request for all the Bastion Shareable Link endpoints.
    :type bsl_request: ~azure.mgmt.network.v2020_04_01.models.BastionShareableLinkListRequest
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns an iterator like instance of either BastionShareableLinkListResult or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_04_01.models.BastionShareableLinkListResult]]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Pop 'cls' exactly once. The generated code popped it a second time
    # further down, which rebound cls to None and silently discarded the
    # caller's custom deserialization hook inside extract_data.
    cls = kwargs.pop('cls', None)  # type: ClsType["models.BastionShareableLinkListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-04-01"
    content_type = "application/json"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # First page: POST the body to the operation URL.
            # FIX: the generated code referenced self.put_bastion_shareable_link,
            # which does not exist on this mixin (only the begin_* method and the
            # private _initial do) and raised AttributeError when the first page
            # of the final result was fetched. The begin_* method carries the
            # identical metadata URL.
            url = self.begin_put_bastion_shareable_link.metadata['url']  # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(bsl_request, 'BastionShareableLinkListRequest')
            body_content_kwargs['content'] = body_content
            request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        else:
            # Subsequent pages: follow the service-provided next link with GET.
            # NOTE(review): the body is still serialized here (generated
            # pattern); confirm the service ignores it on GET.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(bsl_request, 'BastionShareableLinkListRequest')
            body_content_kwargs['content'] = body_content
            request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
        return request

    async def extract_data(pipeline_response):
        # Turn one HTTP page into (next_link, async iterable of elements).
        deserialized = self._deserialize('BastionShareableLinkListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response

    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Kick off the LRO; cls handling is deferred to the pager above, so the
        # initial call just hands back the raw pipeline response.
        raw_result = await self._put_bastion_shareable_link_initial(
            resource_group_name=resource_group_name,
            bastion_host_name=bastion_host_name,
            bsl_request=bsl_request,
            cls=lambda x,y,z: x,
            **kwargs
        )

    # These kwargs were consumed by the initial call; drop them so the polling
    # method does not receive them again.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # The LRO's final result is itself paged: reuse the terminal response
        # as the first page, then follow next links.
        async def internal_get_next(next_link=None):
            if next_link is None:
                return pipeline_response
            else:
                return await get_next(next_link)
        return AsyncItemPaged(
            internal_get_next, extract_data
        )

    # 'location' final-state: poll the Location header until the operation ends.
    if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_put_bastion_shareable_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}/createShareableLinks'}  # type: ignore
async def _delete_bastion_shareable_link_initial(
    self,
    resource_group_name: str,
    bastion_host_name: str,
    bsl_request: "models.BastionShareableLinkListRequest",
    **kwargs
) -> None:
    """Send the initial POST of the delete-shareable-links long-running operation.

    Accepts 200 or 202 and returns nothing; use
    ``begin_delete_bastion_shareable_link`` for the full polling flow.

    :param resource_group_name: The name of the resource group.
    :param bastion_host_name: The name of the Bastion Host.
    :param bsl_request: Post request for all the Bastion Shareable Link endpoints.
    :raises ~azure.core.exceptions.HttpResponseError: for any status other than 200/202.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Map well-known failure statuses to specific azure-core exceptions;
    # callers may extend/override the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-04-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self._delete_bastion_shareable_link_initial.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # Serialize the request body and run it through the async pipeline.
    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(bsl_request, 'BastionShareableLinkListRequest')
    body_content_kwargs['content'] = body_content
    request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # Delete returns no body; only invoke the caller's hook, if any.
    if cls:
        return cls(pipeline_response, None, {})
_delete_bastion_shareable_link_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}/deleteShareableLinks'}  # type: ignore
async def begin_delete_bastion_shareable_link(
    self,
    resource_group_name: str,
    bastion_host_name: str,
    bsl_request: "models.BastionShareableLinkListRequest",
    **kwargs
) -> AsyncLROPoller[None]:
    """Deletes the Bastion Shareable Links for all the VMs specified in the request.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param bastion_host_name: The name of the Bastion Host.
    :type bastion_host_name: str
    :param bsl_request: Post request for all the Bastion Shareable Link endpoints.
    :type bsl_request: ~azure.mgmt.network.v2020_04_01.models.BastionShareableLinkListRequest
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Kick off the LRO; cls handling is deferred to get_long_running_output,
        # so the initial call just hands back the raw pipeline response.
        raw_result = await self._delete_bastion_shareable_link_initial(
            resource_group_name=resource_group_name,
            bastion_host_name=bastion_host_name,
            bsl_request=bsl_request,
            cls=lambda x,y,z: x,
            **kwargs
        )

    # These kwargs were consumed by the initial call; drop them so the polling
    # method does not receive them again.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Delete produces no body; only invoke the caller's hook, if any.
        if cls:
            return cls(pipeline_response, None, {})

    # 'location' final-state: poll the Location header until the operation ends.
    if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete_bastion_shareable_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}/deleteShareableLinks'}  # type: ignore
def get_bastion_shareable_link(
    self,
    resource_group_name: str,
    bastion_host_name: str,
    bsl_request: "models.BastionShareableLinkListRequest",
    **kwargs
) -> AsyncIterable["models.BastionShareableLinkListResult"]:
    """Return the Bastion Shareable Links for all the VMs specified in the request.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param bastion_host_name: The name of the Bastion Host.
    :type bastion_host_name: str
    :param bsl_request: Post request for all the Bastion Shareable Link endpoints.
    :type bsl_request: ~azure.mgmt.network.v2020_04_01.models.BastionShareableLinkListRequest
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either BastionShareableLinkListResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_04_01.models.BastionShareableLinkListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.BastionShareableLinkListResult"]
    # Map well-known failure statuses to specific azure-core exceptions;
    # callers may extend/override the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-04-01"
    content_type = "application/json"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # First page: POST the body to the operation URL.
            url = self.get_bastion_shareable_link.metadata['url']  # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(bsl_request, 'BastionShareableLinkListRequest')
            body_content_kwargs['content'] = body_content
            request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        else:
            # Subsequent pages: follow the service-provided next link with GET.
            # NOTE(review): the body is still serialized here (generated
            # pattern); confirm the service ignores it on GET.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(bsl_request, 'BastionShareableLinkListRequest')
            body_content_kwargs['content'] = body_content
            request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
        return request

    async def extract_data(pipeline_response):
        # Turn one HTTP page into (next_link, async iterable of elements).
        deserialized = self._deserialize('BastionShareableLinkListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
get_bastion_shareable_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}/getShareableLinks'}  # type: ignore
async def _get_active_sessions_initial(
    self,
    resource_group_name: str,
    bastion_host_name: str,
    **kwargs
) -> Optional["models.BastionActiveSessionListResult"]:
    """Send the initial POST of the get-active-sessions long-running operation.

    Returns the deserialized first page on a 200 response, or ``None`` when the
    service answers 202 (accepted but not yet complete). Use
    ``begin_get_active_sessions`` for the full poll-and-page flow.

    :param resource_group_name: The name of the resource group.
    :param bastion_host_name: The name of the Bastion Host.
    :raises ~azure.core.exceptions.HttpResponseError: for any status other than 200/202.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.BastionActiveSessionListResult"]]
    # Map well-known failure statuses to specific azure-core exceptions;
    # callers may extend/override the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-04-01"
    accept = "application/json"

    # Construct URL
    url = self._get_active_sessions_initial.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers (POST with no request body).
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.post(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = None
    if response.status_code == 200:
        # Only a 200 carries a result page; 202 means "still running".
        deserialized = self._deserialize('BastionActiveSessionListResult', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
_get_active_sessions_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}/getActiveSessions'}  # type: ignore
async def begin_get_active_sessions(
    self,
    resource_group_name: str,
    bastion_host_name: str,
    **kwargs
) -> AsyncLROPoller[AsyncItemPaged["models.BastionActiveSessionListResult"]]:
    """Returns the list of currently active sessions on the Bastion.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param bastion_host_name: The name of the Bastion Host.
    :type bastion_host_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns an iterator like instance of either BastionActiveSessionListResult or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_04_01.models.BastionActiveSessionListResult]]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Pop 'cls' exactly once. The generated code popped it a second time
    # further down, which rebound cls to None and silently discarded the
    # caller's custom deserialization hook inside extract_data.
    cls = kwargs.pop('cls', None)  # type: ClsType["models.BastionActiveSessionListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-04-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # First page: POST (no body) to the operation URL.
            # FIX: the generated code referenced self.get_active_sessions, which
            # does not exist on this mixin (only the begin_* method and the
            # private _initial do) and raised AttributeError when the first page
            # of the final result was fetched. The begin_* method carries the
            # identical metadata URL.
            url = self.begin_get_active_sessions.metadata['url']  # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.post(url, query_parameters, header_parameters)
        else:
            # Subsequent pages: follow the service-provided next link with GET.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Turn one HTTP page into (next_link, async iterable of elements).
        deserialized = self._deserialize('BastionActiveSessionListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response

    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Kick off the LRO; cls handling is deferred to the pager above, so the
        # initial call just hands back the raw pipeline response.
        raw_result = await self._get_active_sessions_initial(
            resource_group_name=resource_group_name,
            bastion_host_name=bastion_host_name,
            cls=lambda x,y,z: x,
            **kwargs
        )

    # These kwargs were consumed by the initial call; drop them so the polling
    # method does not receive them again.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # The LRO's final result is itself paged: reuse the terminal response
        # as the first page, then follow next links.
        async def internal_get_next(next_link=None):
            if next_link is None:
                return pipeline_response
            else:
                return await get_next(next_link)
        return AsyncItemPaged(
            internal_get_next, extract_data
        )

    # 'location' final-state: poll the Location header until the operation ends.
    if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_active_sessions.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}/getActiveSessions'}  # type: ignore
def disconnect_active_sessions(
    self,
    resource_group_name: str,
    bastion_host_name: str,
    session_ids: "models.SessionIds",
    **kwargs
) -> AsyncIterable["models.BastionSessionDeleteResult"]:
    """Returns the list of currently active sessions on the Bastion.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param bastion_host_name: The name of the Bastion Host.
    :type bastion_host_name: str
    :param session_ids: The list of sessionids to disconnect.
    :type session_ids: ~azure.mgmt.network.v2020_04_01.models.SessionIds
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either BastionSessionDeleteResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_04_01.models.BastionSessionDeleteResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.BastionSessionDeleteResult"]
    # Map well-known failure statuses to specific azure-core exceptions;
    # callers may extend/override the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-04-01"
    content_type = "application/json"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # First page: POST the session ids to the operation URL.
            url = self.disconnect_active_sessions.metadata['url']  # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(session_ids, 'SessionIds')
            body_content_kwargs['content'] = body_content
            request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        else:
            # Subsequent pages: follow the service-provided next link with GET.
            # NOTE(review): the body is still serialized here (generated
            # pattern); confirm the service ignores it on GET.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            body_content_kwargs = {}  # type: Dict[str, Any]
            body_content = self._serialize.body(session_ids, 'SessionIds')
            body_content_kwargs['content'] = body_content
            request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
        return request

    async def extract_data(pipeline_response):
        # Turn one HTTP page into (next_link, async iterable of elements).
        deserialized = self._deserialize('BastionSessionDeleteResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
disconnect_active_sessions.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}/disconnectActiveSessions'}  # type: ignore
    async def check_dns_name_availability(
        self,
        location: str,
        domain_name_label: str,
        **kwargs
    ) -> "models.DnsNameAvailabilityResult":
        """Checks whether a domain name in the cloudapp.azure.com zone is available for use.

        :param location: The location of the domain name.
        :type location: str
        :param domain_name_label: The domain name to be verified. It must conform to the following
         regular expression: ^[a-z][a-z0-9-]{1,61}[a-z0-9]$.
        :type domain_name_label: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DnsNameAvailabilityResult, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2020_04_01.models.DnsNameAvailabilityResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.DnsNameAvailabilityResult"]
        # Map well-known HTTP failures onto azure-core exceptions; callers may
        # extend/override the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-04-01"
        accept = "application/json"

        # Construct URL from the operation's metadata template.
        url = self.check_dns_name_availability.metadata['url']  # type: ignore
        path_format_arguments = {
            'location': self._serialize.url("location", location, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['domainNameLabel'] = self._serialize.query("domain_name_label", domain_name_label, 'str')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('DnsNameAvailabilityResult', pipeline_response)

        if cls:
            # Hand the caller's custom callable the raw response plus the model.
            return cls(pipeline_response, deserialized, {})

        return deserialized
    check_dns_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/CheckDnsNameAvailability'}  # type: ignore
    async def supported_security_providers(
        self,
        resource_group_name: str,
        virtual_wan_name: str,
        **kwargs
    ) -> "models.VirtualWanSecurityProviders":
        """Gives the supported security providers for the virtual wan.

        :param resource_group_name: The resource group name.
        :type resource_group_name: str
        :param virtual_wan_name: The name of the VirtualWAN for which supported security providers are
         needed.
        :type virtual_wan_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VirtualWanSecurityProviders, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2020_04_01.models.VirtualWanSecurityProviders
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.VirtualWanSecurityProviders"]
        # Map well-known HTTP failures onto azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-04-01"
        accept = "application/json"

        # Construct URL from the operation's metadata template.
        url = self.supported_security_providers.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualWANName': self._serialize.url("virtual_wan_name", virtual_wan_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('VirtualWanSecurityProviders', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    supported_security_providers.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualWans/{virtualWANName}/supportedSecurityProviders'}  # type: ignore
    async def _generatevirtualwanvpnserverconfigurationvpnprofile_initial(
        self,
        resource_group_name: str,
        virtual_wan_name: str,
        vpn_client_params: "models.VirtualWanVpnProfileParameters",
        **kwargs
    ) -> Optional["models.VpnProfileResponse"]:
        """Issue the initial POST of the GenerateVpnProfile long-running operation.

        Returns the deserialized body on a 200 response, or None on 202 (the
        operation is still running); the public ``begin_*`` wrapper polls for
        the final result.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.VpnProfileResponse"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-04-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL from the operation's metadata template.
        url = self._generatevirtualwanvpnserverconfigurationvpnprofile_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualWANName': self._serialize.url("virtual_wan_name", virtual_wan_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the request body and POST it.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(vpn_client_params, 'VirtualWanVpnProfileParameters')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = None
        if response.status_code == 200:
            # 202 carries no body; leave the result as None until polling finishes.
            deserialized = self._deserialize('VpnProfileResponse', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _generatevirtualwanvpnserverconfigurationvpnprofile_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualWans/{virtualWANName}/GenerateVpnProfile'}  # type: ignore
    async def begin_generatevirtualwanvpnserverconfigurationvpnprofile(
        self,
        resource_group_name: str,
        virtual_wan_name: str,
        vpn_client_params: "models.VirtualWanVpnProfileParameters",
        **kwargs
    ) -> AsyncLROPoller["models.VpnProfileResponse"]:
        """Generates a unique VPN profile for P2S clients for VirtualWan and associated
        VpnServerConfiguration combination in the specified resource group.

        :param resource_group_name: The resource group name.
        :type resource_group_name: str
        :param virtual_wan_name: The name of the VirtualWAN whose associated VpnServerConfigurations is
         needed.
        :type virtual_wan_name: str
        :param vpn_client_params: Parameters supplied to the generate VirtualWan VPN profile generation
         operation.
        :type vpn_client_params: ~azure.mgmt.network.v2020_04_01.models.VirtualWanVpnProfileParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either VpnProfileResponse or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_04_01.models.VpnProfileResponse]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["models.VpnProfileResponse"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # First call: kick off the long-running operation now. The lambda
            # keeps the raw pipeline response so the poller can inspect headers.
            raw_result = await self._generatevirtualwanvpnserverconfigurationvpnprofile_initial(
                resource_group_name=resource_group_name,
                virtual_wan_name=virtual_wan_name,
                vpn_client_params=vpn_client_params,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs were consumed by the initial call and must not leak
        # into the polling pipeline.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the final polled response into the result model.
            deserialized = self._deserialize('VpnProfileResponse', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        # Select the polling strategy: default ARM polling reads the final
        # state from the 'location' header; False disables polling entirely.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'},  **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its saved state.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_generatevirtualwanvpnserverconfigurationvpnprofile.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualWans/{virtualWANName}/GenerateVpnProfile'}  # type: ignore
| [
"noreply@github.com"
] | paultaiton.noreply@github.com |
c7aa0242abeaf5f3eaa96ceb9b88bbb05f027b94 | 19f673a34ec4aabbb67e9cd4b469f4591f5b9cc0 | /word2vec.py | 557d6c9720b523f7aa541a38b76e58149745b7c9 | [] | no_license | lucasjinreal/tensorflow_lstm | 9f409915b20443c4dcee17c3883725b7831d2096 | a50e530cab05d49d27944c0460b363a3ae4e322a | refs/heads/master | 2023-04-28T14:09:55.402372 | 2017-03-05T15:31:35 | 2017-03-05T15:31:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,405 | py | # -*- coding: utf-8 -*-
# file: word2vec.py
# author: JinTian
# time: 05/03/2017 11:12 AM
# Copyright 2017 JinTian. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------
"""Multi-threaded word2vec mini-batched skip-gram model.
Trains the model described in:
(Mikolov, et. al.) Efficient Estimation of Word Representations in Vector Space
ICLR 2013.
http://arxiv.org/abs/1301.3781
This model does traditional minibatching.
The key ops used are:
* placeholder for feeding in tensors for each example.
* embedding_lookup for fetching rows from the embedding matrix.
* sigmoid_cross_entropy_with_logits to calculate the loss.
* GradientDescentOptimizer for optimizing the loss.
* skipgram custom op that does input processing.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import threading
import time
import tensorflow.python.platform
from six.moves import xrange # pylint: disable=redefined-builtin
import numpy as np
import tensorflow as tf
from tensorflow.models.embedding import gen_word2vec as word2vec
flags = tf.app.flags

# Command-line flags: data locations, model hyper-parameters and reporting
# intervals. tf.app.run() parses them into FLAGS before calling main().
flags.DEFINE_string("save_path", None, "Directory to write the model and "
                    "training summaries.")
flags.DEFINE_string("train_data", None, "Training text file. "
                    "E.g., unzipped file http://mattmahoney.net/dc/text8.zip.")
flags.DEFINE_string(
    "eval_data", None, "File consisting of analogies of four tokens."
    "embedding 2 - embedding 1 + embedding 3 should be close "
    "to embedding 4."
    "E.g. https://word2vec.googlecode.com/svn/trunk/questions-words.txt.")
flags.DEFINE_integer("embedding_size", 200, "The embedding dimension size.")
flags.DEFINE_integer(
    "epochs_to_train", 15,
    "Number of epochs to train. Each epoch processes the training data once "
    "completely.")
flags.DEFINE_float("learning_rate", 0.2, "Initial learning rate.")
flags.DEFINE_integer("num_neg_samples", 100,
                     "Negative samples per training example.")
flags.DEFINE_integer("batch_size", 16,
                     "Number of training examples processed per step "
                     "(size of a minibatch).")
flags.DEFINE_integer("concurrent_steps", 12,
                     "The number of concurrent training steps.")
flags.DEFINE_integer("window_size", 5,
                     "The number of words to predict to the left and right "
                     "of the target word.")
flags.DEFINE_integer("min_count", 5,
                     "The minimum number of word occurrences for it to be "
                     "included in the vocabulary.")
flags.DEFINE_float("subsample", 1e-3,
                   "Subsample threshold for word occurrence. Words that appear "
                   "with higher frequency will be randomly down-sampled. Set "
                   "to 0 to disable.")
flags.DEFINE_boolean(
    "interactive", False,
    "If true, enters an IPython interactive session to play with the trained "
    "model. E.g., try model.analogy('france', 'paris', 'russia') and "
    "model.nearby(['proton', 'elephant', 'maxwell']")
flags.DEFINE_integer("statistics_interval", 5,
                     "Print statistics every n seconds.")
flags.DEFINE_integer("summary_interval", 5,
                     "Save training summary to file every n seconds (rounded "
                     "up to statistics interval.")
flags.DEFINE_integer("checkpoint_interval", 600,
                     "Checkpoint the model (i.e. save the parameters) every n "
                     "seconds (rounded up to statistics interval.")

# Parsed flag values, read throughout this module.
FLAGS = flags.FLAGS
class Options(object):
  """Hyper-parameters and paths for the word2vec model, snapshotted from FLAGS."""

  def __init__(self):
    # --- Model options ---
    self.emb_dim = FLAGS.embedding_size              # embedding dimension

    # --- Training options ---
    self.train_data = FLAGS.train_data               # training text file
    self.num_samples = FLAGS.num_neg_samples         # negative samples per example
    self.learning_rate = FLAGS.learning_rate         # initial learning rate
    # After this many epochs the learning rate has decayed linearly to zero
    # and training stops.
    self.epochs_to_train = FLAGS.epochs_to_train
    self.concurrent_steps = FLAGS.concurrent_steps   # concurrent training threads
    self.batch_size = FLAGS.batch_size               # examples per training step
    # Words predicted to the left and right of the target word.
    self.window_size = FLAGS.window_size
    # Minimum occurrences for a word to enter the vocabulary.
    self.min_count = FLAGS.min_count
    self.subsample = FLAGS.subsample                 # frequent-word subsampling threshold

    # --- Reporting / checkpointing ---
    self.statistics_interval = FLAGS.statistics_interval  # stats print period (s)
    # Summary writes round up to the nearest statistics interval.
    self.summary_interval = FLAGS.summary_interval
    # Checkpoint writes round up to the nearest statistics interval.
    self.checkpoint_interval = FLAGS.checkpoint_interval
    self.save_path = FLAGS.save_path                 # where summaries/models go

    # --- Eval options ---
    self.eval_data = FLAGS.eval_data                 # analogy question file
class Word2Vec(object):
"""Word2Vec model (Skipgram)."""
  def __init__(self, options, session):
    """Build the training/eval graph, save the vocab and load eval data.

    Args:
      options: an Options instance with hyper-parameters and paths.
      session: the tf.Session used to run graph construction ops.
    """
    self._options = options
    self._session = session
    self._word2id = {}  # word (bytes) -> vocabulary id
    self._id2word = []  # vocabulary id -> word (bytes)
    self.build_graph()
    self.build_eval_graph()
    self.save_vocab()
    self._read_analogies()
def _read_analogies(self):
"""Reads through the analogy question file.
Returns:
questions: a [n, 4] numpy array containing the analogy question's
word ids.
questions_skipped: questions skipped due to unknown words.
"""
questions = []
questions_skipped = 0
with open(self._options.eval_data, "rb") as analogy_f:
for line in analogy_f:
if line.startswith(b":"): # Skip comments.
continue
words = line.strip().lower().split(b" ")
ids = [self._word2id.get(w.strip()) for w in words]
if None in ids or len(ids) != 4:
questions_skipped += 1
else:
questions.append(np.array(ids))
print("Eval analogy file: ", self._options.eval_data)
print("Questions: ", len(questions))
print("Skipped: ", questions_skipped)
self._analogy_questions = np.array(questions, dtype=np.int32)
  def forward(self, examples, labels):
    """Build the graph for the forward pass.

    Args:
      examples: int tensor of center-word ids, shape [batch_size].
      labels: int tensor of context-word ids, shape [batch_size].

    Returns:
      A (true_logits, sampled_logits) pair consumed by nce_loss().
    """
    opts = self._options

    # Declare all variables we need.
    # Embedding: [vocab_size, emb_dim]
    init_width = 0.5 / opts.emb_dim
    emb = tf.Variable(
        tf.random_uniform(
            [opts.vocab_size, opts.emb_dim], -init_width, init_width),
        name="emb")
    self._emb = emb

    # Softmax weight: [vocab_size, emb_dim]. Transposed.
    sm_w_t = tf.Variable(
        tf.zeros([opts.vocab_size, opts.emb_dim]),
        name="sm_w_t")

    # Softmax bias: [emb_dim].
    sm_b = tf.Variable(tf.zeros([opts.vocab_size]), name="sm_b")

    # Global step: scalar, i.e., shape [].
    self.global_step = tf.Variable(0, name="global_step")

    # Nodes to compute the nce loss w/ candidate sampling.
    labels_matrix = tf.reshape(
        tf.cast(labels,
                dtype=tf.int64),
        [opts.batch_size, 1])

    # Negative sampling: draw noise words from the unigram distribution
    # raised to the 0.75 power, without duplicates within one draw.
    sampled_ids, _, _ = (tf.nn.fixed_unigram_candidate_sampler(
        true_classes=labels_matrix,
        num_true=1,
        num_sampled=opts.num_samples,
        unique=True,
        range_max=opts.vocab_size,
        distortion=0.75,
        unigrams=opts.vocab_counts.tolist()))

    # Embeddings for examples: [batch_size, emb_dim]
    example_emb = tf.nn.embedding_lookup(emb, examples)

    # Weights for labels: [batch_size, emb_dim]
    true_w = tf.nn.embedding_lookup(sm_w_t, labels)
    # Biases for labels: [batch_size, 1]
    true_b = tf.nn.embedding_lookup(sm_b, labels)

    # Weights for sampled ids: [num_sampled, emb_dim]
    sampled_w = tf.nn.embedding_lookup(sm_w_t, sampled_ids)
    # Biases for sampled ids: [num_sampled, 1]
    sampled_b = tf.nn.embedding_lookup(sm_b, sampled_ids)

    # True logits: [batch_size, 1]
    true_logits = tf.reduce_sum(tf.mul(example_emb, true_w), 1) + true_b

    # Sampled logits: [batch_size, num_sampled]
    # We replicate sampled noise labels for all examples in the batch
    # using the matmul.
    sampled_b_vec = tf.reshape(sampled_b, [opts.num_samples])
    sampled_logits = tf.matmul(example_emb,
                               sampled_w,
                               transpose_b=True) + sampled_b_vec
    return true_logits, sampled_logits
  def nce_loss(self, true_logits, sampled_logits):
    """Build the graph for the NCE loss.

    Returns a scalar tensor: the sigmoid cross-entropy of the true pairs
    (target 1) plus the sampled noise pairs (target 0), averaged over the
    batch.
    """
    # cross-entropy(logits, labels)
    opts = self._options
    true_xent = tf.nn.sigmoid_cross_entropy_with_logits(
        true_logits, tf.ones_like(true_logits))
    sampled_xent = tf.nn.sigmoid_cross_entropy_with_logits(
        sampled_logits, tf.zeros_like(sampled_logits))

    # NCE-loss is the sum of the true and noise (sampled words)
    # contributions, averaged over the batch.
    nce_loss_tensor = (tf.reduce_sum(true_xent) +
                       tf.reduce_sum(sampled_xent)) / opts.batch_size
    return nce_loss_tensor
  def optimize(self, loss):
    """Build the graph to optimize the loss function."""

    # Optimizer nodes.
    # Linear learning rate decay: lr shrinks from the initial value toward
    # (a floor of 0.0001x) as the processed-word count approaches the budget.
    opts = self._options
    words_to_train = float(opts.words_per_epoch * opts.epochs_to_train)
    lr = opts.learning_rate * tf.maximum(
        0.0001, 1.0 - tf.cast(self._words, tf.float32) / words_to_train)
    self._lr = lr
    optimizer = tf.train.GradientDescentOptimizer(lr)
    # GATE_NONE allows maximum parallelism when computing gradients.
    train = optimizer.minimize(loss,
                               global_step=self.global_step,
                               gate_gradients=optimizer.GATE_NONE)
    self._train = train
  def build_eval_graph(self):
    """Build the eval graph (analogy prediction and nearest-neighbor lookup)."""
    # Eval graph

    # Each analogy task is to predict the 4th word (d) given three
    # words: a, b, c.  E.g., a=italy, b=rome, c=france, we should
    # predict d=paris.

    # The eval feeds three vectors of word ids for a, b, c, each of
    # which is of size N, where N is the number of analogies we want to
    # evaluate in one batch.
    analogy_a = tf.placeholder(dtype=tf.int32)  # [N]
    analogy_b = tf.placeholder(dtype=tf.int32)  # [N]
    analogy_c = tf.placeholder(dtype=tf.int32)  # [N]

    # Normalized word embeddings of shape [vocab_size, emb_dim].
    nemb = tf.nn.l2_normalize(self._emb, 1)

    # Each row of a_emb, b_emb, c_emb is a word's embedding vector.
    # They all have the shape [N, emb_dim]
    a_emb = tf.gather(nemb, analogy_a)  # a's embs
    b_emb = tf.gather(nemb, analogy_b)  # b's embs
    c_emb = tf.gather(nemb, analogy_c)  # c's embs

    # We expect that d's embedding vectors on the unit hyper-sphere is
    # near: c_emb + (b_emb - a_emb), which has the shape [N, emb_dim].
    target = c_emb + (b_emb - a_emb)

    # Compute cosine distance between each pair of target and vocab.
    # dist has shape [N, vocab_size].
    dist = tf.matmul(target, nemb, transpose_b=True)

    # For each question (row in dist), find the top 4 words.
    _, pred_idx = tf.nn.top_k(dist, 4)

    # Nodes for computing neighbors for a given word according to
    # their cosine distance.
    nearby_word = tf.placeholder(dtype=tf.int32)  # word id
    nearby_emb = tf.gather(nemb, nearby_word)
    nearby_dist = tf.matmul(nearby_emb, nemb, transpose_b=True)
    nearby_val, nearby_idx = tf.nn.top_k(nearby_dist,
                                         min(1000, self._options.vocab_size))

    # Nodes in the construct graph which are used by training and
    # evaluation to run/feed/fetch.
    self._analogy_a = analogy_a
    self._analogy_b = analogy_b
    self._analogy_c = analogy_c
    self._analogy_pred_idx = pred_idx
    self._nearby_word = nearby_word
    self._nearby_val = nearby_val
    self._nearby_idx = nearby_idx
  def build_graph(self):
    """Build the graph for the full model."""
    opts = self._options
    # The training data. A text file. The custom skipgram op tokenizes the
    # corpus, builds the vocabulary, and yields (example, label) minibatches
    # plus epoch/word-count counters.
    (words, counts, words_per_epoch, self._epoch, self._words, examples,
     labels) = word2vec.skipgram(filename=opts.train_data,
                                 batch_size=opts.batch_size,
                                 window_size=opts.window_size,
                                 min_count=opts.min_count,
                                 subsample=opts.subsample)
    (opts.vocab_words, opts.vocab_counts,
     opts.words_per_epoch) = self._session.run([words, counts, words_per_epoch])
    opts.vocab_size = len(opts.vocab_words)
    print("Data file: ", opts.train_data)
    print("Vocab size: ", opts.vocab_size - 1, " + UNK")
    print("Words per epoch: ", opts.words_per_epoch)

    self._examples = examples
    self._labels = labels
    # Build the id <-> word mappings used by eval and nearby().
    self._id2word = opts.vocab_words
    for i, w in enumerate(self._id2word):
      self._word2id[w] = i

    true_logits, sampled_logits = self.forward(examples, labels)
    loss = self.nce_loss(true_logits, sampled_logits)
    tf.scalar_summary("NCE loss", loss)
    self._loss = loss
    self.optimize(loss)

    # Properly initialize all variables.
    tf.initialize_all_variables().run()

    self.saver = tf.train.Saver()
def save_vocab(self):
"""Save the vocabulary to a file so the model can be reloaded."""
opts = self._options
with open(os.path.join(opts.save_path, "vocab.txt"), "w") as f:
for i in xrange(opts.vocab_size):
f.write("%s %d\n" % (tf.compat.as_text(opts.vocab_words[i]),
opts.vocab_counts[i]))
  def _train_thread_body(self):
    """Worker loop: keep running training steps until the epoch advances.

    Each concurrent worker thread started by train() executes this body.
    """
    initial_epoch, = self._session.run([self._epoch])
    while True:
      _, epoch = self._session.run([self._train, self._epoch])
      if epoch != initial_epoch:
        break
  def train(self):
    """Train the model for one epoch using concurrent worker threads.

    Periodically prints throughput statistics, writes summaries, and saves
    checkpoints; returns the new epoch number once the workers finish.
    """
    opts = self._options

    initial_epoch, initial_words = self._session.run([self._epoch, self._words])

    summary_op = tf.merge_all_summaries()
    summary_writer = tf.train.SummaryWriter(opts.save_path,
                                            graph_def=self._session.graph_def)
    # Spawn workers; each runs training steps until the epoch counter moves.
    workers = []
    for _ in xrange(opts.concurrent_steps):
      t = threading.Thread(target=self._train_thread_body)
      t.start()
      workers.append(t)

    last_words, last_time, last_summary_time = initial_words, time.time(), 0
    last_checkpoint_time = 0
    while True:
      time.sleep(opts.statistics_interval)  # Reports our progress once a while.
      (epoch, step, loss, words, lr) = self._session.run(
          [self._epoch, self.global_step, self._loss, self._words, self._lr])
      now = time.time()
      # The RHS tuple is evaluated before rebinding, so `rate` is computed
      # against the previous (last_words, last_time) snapshot.
      last_words, last_time, rate = words, now, (words - last_words) / (
          now - last_time)
      print("Epoch %4d Step %8d: lr = %5.3f loss = %6.2f words/sec = %8.0f\r" %
            (epoch, step, lr, loss, rate), end="")
      sys.stdout.flush()
      if now - last_summary_time > opts.summary_interval:
        summary_str = self._session.run(summary_op)
        summary_writer.add_summary(summary_str, step)
        last_summary_time = now
      if now - last_checkpoint_time > opts.checkpoint_interval:
        self.saver.save(self._session,
                        opts.save_path + "model",
                        global_step=step.astype(int))
        last_checkpoint_time = now
      if epoch != initial_epoch:
        break

    for t in workers:
      t.join()

    return epoch
  def _predict(self, analogy):
    """Predict the top 4 answers for analogy questions.

    Args:
      analogy: int array of shape [N, 3] holding (a, b, c) word ids per row.

    Returns:
      int array of shape [N, 4]: the ids of the top-4 candidate d words.
    """
    idx, = self._session.run([self._analogy_pred_idx], {
        self._analogy_a: analogy[:, 0],
        self._analogy_b: analogy[:, 1],
        self._analogy_c: analogy[:, 2]
    })
    return idx
def eval(self):
"""Evaluate analogy questions and reports accuracy."""
# How many questions we get right at precision@1.
correct = 0
total = self._analogy_questions.shape[0]
start = 0
while start < total:
limit = start + 2500
sub = self._analogy_questions[start:limit, :]
idx = self._predict(sub)
start = limit
for question in xrange(sub.shape[0]):
for j in xrange(4):
if idx[question, j] == sub[question, 3]:
# Bingo! We predicted correctly. E.g., [italy, rome, france, paris].
correct += 1
break
elif idx[question, j] in sub[question, :3]:
# We need to skip words already in the question.
continue
else:
# The correct label is not the precision@1
break
print()
print("Eval %4d/%d accuracy = %4.1f%%" % (correct, total,
correct * 100.0 / total))
def analogy(self, w0, w1, w2):
"""Predict word w3 as in w0:w1 vs w2:w3."""
wid = np.array([[self._word2id.get(w, 0) for w in [w0, w1, w2]]])
idx = self._predict(wid)
for c in [self._id2word[i] for i in idx[0, :]]:
if c not in [w0, w1, w2]:
return c
return "unknown"
  def nearby(self, words, num=20):
    """Prints out nearby words given a list of words.

    Args:
      words: list of query words (matching the vocabulary's key type).
      num: how many neighbors to print per query word.
    """
    # Unknown query words fall back to id 0 (UNK).
    ids = np.array([self._word2id.get(x, 0) for x in words])
    vals, idx = self._session.run(
        [self._nearby_val, self._nearby_idx], {self._nearby_word: ids})
    for i in xrange(len(words)):
      print("\n%s\n=====================================" % (words[i]))
      for (neighbor, distance) in zip(idx[i, :num], vals[i, :num]):
        print("%-20s %6.4f" % (self._id2word[neighbor], distance))
def _start_shell(local_ns=None):
  """Start an interactive IPython shell for debugging/development.

  The caller's locals (if given) plus this module's globals are exposed
  inside the shell's user namespace.
  """
  import IPython
  user_ns = {}
  if local_ns:
    user_ns.update(local_ns)
  user_ns.update(globals())
  IPython.start_ipython(argv=[], user_ns=user_ns)
def main(_):
  """Train a word2vec model, evaluating analogies after every epoch."""
  if not FLAGS.train_data or not FLAGS.eval_data or not FLAGS.save_path:
    print("--train_data --eval_data and --save_path must be specified.")
    sys.exit(1)
  opts = Options()
  with tf.Graph().as_default(), tf.Session() as session:
    model = Word2Vec(opts, session)
    for _ in xrange(opts.epochs_to_train):
      model.train()  # Process one epoch
      model.eval()  # Eval analogies.
    # Perform a final save.
    model.saver.save(session,
                     os.path.join(opts.save_path, "model.ckpt"),
                     global_step=model.global_step)
    if FLAGS.interactive:
      # E.g.,
      # [0]: model.analogy('france', 'paris', 'russia')
      # [1]: model.nearby(['proton', 'elephant', 'maxwell'])
      _start_shell(locals())
if __name__ == "__main__":
tf.app.run() | [
"nicholasjela@gmail.com"
] | nicholasjela@gmail.com |
d38a64980bb07c06db2cda09547eb0528b777da0 | e206cc00299804ce2271eb5d1513620e44ee9a9b | /course1-algorithm-toolbox/assignments/assignment_003_quick_sort3_way_partrition/sorting.py | 7c0e0b028b58859d54188ef65c487733d890039d | [] | no_license | dmitri-mamrukov/coursera-data-structures-and-algorithms | 15459cd160f7bbae5464bf53d995bca868a0b415 | 01dd6f0dadf62a520bcafafddf7bf2b79e8e2603 | refs/heads/master | 2020-05-24T18:27:00.665642 | 2019-05-21T20:45:37 | 2019-05-21T20:45:37 | 187,410,737 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,537 | py | #!/usr/bin/python3
import sys
import random
def __partition3(data, left, right):
"""This function partitions a[] in three parts:
a) a[left..l - 1] contains all elements smaller than the pivot element
b) a[l..r] contains all occurrences of the pivot element
c) a[r + 1..right] contains all elements greater than the pivot element
"""
l = left
r = right
k = left + 1
pivot_value = data[left]
while k <= r:
if data[k] < pivot_value:
data[l], data[k] = data[k], data[l]
l += 1
k += 1
elif data[k] > pivot_value:
data[k], data[r] = data[r], data[k]
r -= 1
else:
k += 1
return (l - 1, r + 1)
def __partition2(data, left, right):
x = data[left]
k = left;
for i in range(left + 1, right + 1):
if data[i] <= x:
k += 1
data[i], data[k] = data[k], data[i]
data[left], data[k] = data[k], data[left]
return k
def __randomized_quick_sort(data, left, right):
    """In-place quicksort of data[left..right] using a random pivot and
    three-way partitioning (efficient when there are many duplicate keys)."""
    if left >= right:
        return
    # Swap a randomly chosen element into the pivot slot to defeat
    # adversarial (e.g. already-sorted) inputs.
    pivot_index = random.randint(left, right)
    data[left], data[pivot_index] = data[pivot_index], data[left]
    lt_end, gt_start = __partition3(data, left, right)
    __randomized_quick_sort(data, left, lt_end)
    __randomized_quick_sort(data, gt_start, right)
def solve(data):
    """Sort the list in place with randomized three-way quicksort."""
    last_index = len(data) - 1
    __randomized_quick_sort(data, 0, last_index)
if __name__ == '__main__':
    # Read all of stdin: the first token is the element count, the rest are
    # the values. (Renamed the local from `input`, which shadowed the builtin.)
    raw_text = sys.stdin.read()
    n, *a = list(map(int, raw_text.split()))
    solve(a)
    for x in a:
        print(x, end=' ')
    print()
| [
"dmitri.mamrukov@gmail.com"
] | dmitri.mamrukov@gmail.com |
058c2972e70e67a0246e21c14b00768a128b7442 | 2a63be7cf3cd26e1dd06d93ca4a6aa8dd79e0f37 | /pythonex.py | e15cfbc878e56a6818570bc59fe6f60c7483f931 | [] | no_license | DylanGuidry/python102 | f16a4adb63b118bca68a88113b84f83926dcf553 | 11efe6dea33d1d089a597737337213155322dcb1 | refs/heads/master | 2023-05-29T19:51:23.408462 | 2021-06-08T15:39:51 | 2021-06-08T15:39:51 | 373,930,219 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 487 | py | #Finding the sum:
numbers = [1, 2, 3, 4, 5]
print(sum(numbers))
#Finding the largest number:
numbers = [1, 50, 92, 91048, 492]
highest_number = 0
for num in numbers:
if num > highest_number:
highest_number =num
print(highest_number)
#Finding the smallest number:
number = [2, 1, 3, 4, 5]
smallest = 5
for num in number:
if num < smallest:
smallest = num
print(smallest)
#Finding even numbers:
numbers = [30, 45, 57, 67, 51]
even_number = 0
for num in numbers: | [
"dylanguidry20@gmail.com"
] | dylanguidry20@gmail.com |
b2f3f678e8fbf93dd31fcfe427dfa72144cd96b2 | dc365f0bbbb97d50d20c273482d83b30bfae15d1 | /calculadora.py | 5986bb5f9a90ec93f2b5ff39eddbf2325f2b78ca | [] | no_license | SheilaCampanella/X-Serv-13.6-Calculadora | 3db7a9e735ae3a5bc02738915d2c67f726751ed9 | 2a25a896536df1428aa2e261896049210c0005d7 | refs/heads/master | 2021-09-06T08:55:18.058177 | 2018-02-04T15:46:31 | 2018-02-04T15:46:31 | 119,969,369 | 0 | 0 | null | 2018-02-02T10:42:31 | 2018-02-02T10:42:31 | null | UTF-8 | Python | false | false | 528 | py | #!/usr/bin/python3
import sys
try:
operando1 = float(sys.argv[2])
operando2 = float(sys.argv[3])
except ValueError:
sys.exit("Solo se permiten operandos float")
if sys.argv[1] == 'sumar':
print(operando1 + operando2)
elif sys.argv[1] == 'restar':
print(operando1 - operando2)
elif sys.argv[1] == 'multiplicar':
print(operando1 * operando2)
elif sys.argv[1] == 'dividir':
try:
print(operando1 / operando2)
except ZeroDivisionError:
sys.exit("No se puede dividir entre cero")
| [
"sm.campanella@alumnos.urjc.es"
] | sm.campanella@alumnos.urjc.es |
96d683c068fba436d9b291a2c0b5d2c252bf3a56 | eb706c79e2294d573e85bf6474c3be5b511e079f | /Card/EPF.py | d469b6df7ff9b197d8b3571ccf8cf3a17ce5a7cc | [] | no_license | aci456852/openCV | fa1b7ce4faca083316b21cfcb1b3cc1719ab5a01 | 174965cb718c9b14862c6299f4841fdde46bad01 | refs/heads/master | 2020-12-21T05:11:46.013104 | 2020-02-18T15:03:48 | 2020-02-18T15:03:48 | 236,315,633 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 549 | py | import cv2
import numpy as np
#边缘保留滤波 EPF
def bi_demo(image):
#高斯双边模糊 保留轮廓 相当于美颜相机的滤镜
dst=cv2.bilateralFilter(image,0,100,15)
cv2.imshow("bi demo",dst)
def shift_demo(image):
#均值迁移 类似油画效果
dst=cv2.pyrMeanShiftFiltering(image,10,50)
cv2.imshow("shift demo",dst)
img = cv2.imread('tx.jpg')
cv2.namedWindow("imshow",cv2.WINDOW_AUTOSIZE)
cv2.imshow('imshow', img)
bi_demo(img)
shift_demo(img)
cv2.waitKey(0)
cv2.destroyAllWindows() | [
"1084665808@qq.com"
] | 1084665808@qq.com |
0265602b0ed2b218fc0e14b8a694681805ca5e14 | b72356d9ab9f3cc73e2944ed6284232c462dea5d | /collectors/zuche/shenzhou.py | 7f247b8b0b903dbe8f238b0a2e4149a6b475422f | [] | no_license | flyg/timeline | 5b9dae5d847f8428f616aab4d656f5069e806e75 | cea93b89173b6950e48e2bb9e186fc99c23ec461 | refs/heads/master | 2021-01-17T23:25:25.901633 | 2012-05-07T02:57:00 | 2012-05-07T02:57:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,385 | py | #.-.coding=utf8
__author__ = 'konglingkai'
import urllib
import urllib2
import cookielib
import os
import xlwt
import json
import datetime
from lxml import etree
from kernel import collector
cookie_filename = 'shenzhou.cookies'
FIRST_PAGE_URL = 'http://www.zuche.com'
FORM_UNIQUE_ID_XPATH = '//*/input[@id="_form_uniq_id"]'
SECOND_PAGE_URL = 'http://www.zuche.com/order/OrderSecondJsonControl.do_'
THIRD_PAGE_URL = 'http://www.zuche.com/jsp/order/personalOrderSecond.jsp?cid=81152&origin=shortRent'
ITEM_XPATH = '//*/table[@class="order_list_tab"]/tr[position()>1]'
CAR_NAME = 'td[2]/p'
CAR_PRICE = 'td[3]/font[1]'
class ShenZhouCollector(collector.Collector):
def __init__(self):
self.cj = cookielib.MozillaCookieJar(cookie_filename)
if os.access(cookie_filename, os.F_OK):
self.cj.load()
self.opener = urllib2.build_opener(
urllib2.HTTPRedirectHandler(),
urllib2.HTTPHandler(debuglevel=0),
urllib2.HTTPSHandler(debuglevel=0),
urllib2.HTTPCookieProcessor(self.cj)
)
# self.opener.addheaders = [
# ('User-agent', 'Mozilla/5.0'),
# ('X-Requested-With', 'XMLHTTPRequest'),
# ('Referer', 'http://www.zuche.com/'),
# ('Content-Type', 'application/x-www-form-urlencoded; charset=UTF-8'),
# ('Pragma', 'no-cache'),
# ('Cache-Control', 'no-cache'),
# ('Connection', 'keep-alive'),
# ('Accept', 'application/json, text/javascript, */*')
# ]
def fetch(self):
month = datetime.datetime.now().month
day = datetime.datetime.now().day
book = xlwt.Workbook(encoding='utf-8')
text = self.opener.open('http://www.zuche.com/city/getCityJson.do_', 'cityname=').read()
cities = json.loads(text)
for city in cities:
city_id = city['code']
city_name = city['name']
if city_id == '-1': break
print '%s: %s' % (city_id, city_name)
sheet = book.add_sheet(city_name)
text = self.opener.open('http://www.zuche.com/department/getDepartmentJson.do_', 'cityId=%s' % city_id).read()
stores = json.loads(text)
row = 0
for store in stores:
store_id = store['code']
store_name = store['name']
store_addr = store['address']
service_type = store['serviceType']
print " [%s]%s: %s - %s" % (service_type, store_id, store_name, store_addr)
cars = self.search(month, day, city_id, store_id, service_type)
for car_name, car_price in cars:
sheet.write(row, 0, '2012-%02d-%02d' % (month, day))
sheet.write(row, 1, city_name)
sheet.write(row, 2, store_name)
sheet.write(row, 3, car_name)
sheet.write(row, 4, car_price)
row += 1
print '2012-%02d-%02d %s %s %s %s' % (month, day, city_name, store_name, car_name, car_price)
book.save('shenzhou_%02d_%02d.xls' % (month, day))
def search(self, month, day, city_id, store_id, service_type):
text = self.opener.open(FIRST_PAGE_URL).read()
print 'step 1 succeed...'
parser = etree.HTMLParser(encoding='utf8')
tree = etree.HTML(text, parser)
nodes = tree.xpath(FORM_UNIQUE_ID_XPATH)
unique_id = nodes[0].attrib['value']
params = {
'leaseterm_year' : '0',
'fromMinute' : '00',
'fromDate' : '2012-%02d-%02d' % (month, day),
'toMinute' : '00',
'toDate' : '2012-%02d-%02d' % (month, day),
'servicetype' : '%s' % service_type,
'fromstoreId' : '%s' % store_id,
'fromHour' : '10',
'vehiclebrand' : '0',
'tostoreId' : '%s' % store_id,
'shortColor' : 'shortRent',
'fromHourData' : '10',
'serviceMode' : '1',
'senttype' : '0',
'toHour' : '20',
'tocityid' : '%s' % city_id,
'fromcityid' : '%s' % city_id,
'fromTime' : '2012-%02d-%02d 10:00' % (month, day),
'leaseterm_month' : '0',
'rentDay' : '3',
'picktype' : '0',
'toTime' : '2012-%02d-%02d 20:00' % (month, day),
'vehiclemode' : '0',
}
search_params = {
'paramData': urllib.urlencode(params),
'step': 'first',
'_form_uniq_id': unique_id
}
data = urllib.urlencode(search_params)
text = self.opener.open(SECOND_PAGE_URL, data).read()
if text == '[]':
print 'Step 2 succeed...'
else:
print 'Step 2 Failed!...'
text = self.opener.open(THIRD_PAGE_URL).read()
print 'Step 3 succeed...'
result = []
tree = etree.HTML(text, parser)
nodes = tree.xpath(ITEM_XPATH)
for node in nodes:
car_name = node.find(CAR_NAME).text
car_price = node.find(CAR_PRICE).text
result.append((car_name, car_price))
print '%d items found.' % len(nodes)
self.cj.save()
return result
| [
"konglingkai@egibbon.com"
] | konglingkai@egibbon.com |
8acc0122b0113f851be70ee0616a6d824d1b604b | 6d8a8619eddcdc360174607658a5473b66832bcb | /message/admin.py | 94d3a3e4b6f44b5dce160d8aa2485956b00bf4cf | [] | no_license | NKCTF/NKCTF-Backend | 636508589261b377056c587cae6ba6989df58947 | d704cd5f594eff8e5248b1ccaf80ec0ab77fb59b | refs/heads/master | 2022-12-19T08:10:57.843855 | 2018-10-08T02:18:37 | 2018-10-08T02:18:37 | 150,583,931 | 1 | 0 | null | 2021-06-10T20:50:17 | 2018-09-27T12:33:30 | Python | UTF-8 | Python | false | false | 160 | py | from django.contrib import admin
# Register your models here.
from .models import Mail, JoinRequest
admin.site.register(Mail)
admin.site.register(JoinRequest) | [
"1184249450@qq.com"
] | 1184249450@qq.com |
eea3755b4336c080de33aea738527196da0e1213 | 3bd0f44a74bf8d15cbff67d92eda4a20bcc0d5c8 | /jw_testcase/testcasename003PostImpl.py | d3ee3f23c50a27b29f4c07ce2f6d6aad75f83211 | [] | no_license | cash2one/ctrip-automation | fc88f94c7956e0d471a8650b5f0b6edfe47e2d6c | df89196418c8a6e6a9b73cc04432023d6b0c0a5a | refs/heads/master | 2020-05-22T17:03:04.261193 | 2016-12-14T10:55:02 | 2016-12-14T10:55:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,524 | py |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# encoding:utf-8
import time,os,sys,datetime,unittest
import os
sys.path.append("../")
sys.path.append("../jw_modules")
import httplib
import unittest
import json
import urllib
class testcase_api_httpsendpost(unittest.TestCase):
def setUp(self):
# self.widget = Widget('The widget')
httpClient = None
self.httpClient = httplib.HTTPConnection('open09.edaixi.cn', 81, timeout=10)
def tearDown(self):
# self.widget.dispose()
# self.widget = None
self.httpClient.close()
def test_testcasename003(self):
try:
f=open("")
strcreateoder=json.load(f)
print strcreateoder
params = urllib.urlencode(strcreateoder)
headers = {'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain'}
self.httpClient.request('POST', "", params, headers)
#response是HTTPResponse对象
response = self.httpClient.getresponse()
print response.status
statucode=response.status
print response.read()
if statucode=='200' or statucode=='201':
print "The get_order_list status is 200 or 201"
else:
raise "The interface has exception"
print response.reason
print response.read()
#self.assertEqual(statucode, 200,'incorrect default size')
except Exception, e:
print e
| [
"j_cui@jointwisdom.cn"
] | j_cui@jointwisdom.cn |
ddde87e2014f09bb5219a53771ad76642888329c | 0bee73e5bde6b4899db8e38e3fb59a75caf71ad9 | /src/utils/environment.py | 323a6f9dde13f302adc255ba5ba4678b64ff29d9 | [] | no_license | mycal-tucker/stalker-drone | 8e7710045b4d57fb4ddf19197371f55c1e5acfac | 245695009a1a1a70a009c4acbaf8533ef6877ced | refs/heads/master | 2020-04-04T11:54:01.756265 | 2018-12-04T22:00:39 | 2018-12-04T22:02:55 | 155,907,258 | 0 | 0 | null | 2018-11-27T19:58:50 | 2018-11-02T18:34:44 | Python | UTF-8 | Python | false | false | 877 | py | # Representation of the environment, which consists of stationary obstacles.
class Environment:
def __init__(self, obstacles=[]):
"""
constructor for a new environment object
:param obstacles: a list of obstacles, where each obstacle is a list of points
"""
self.obstacles = obstacles
@staticmethod
def parse_file(filename):
"""
TODO determine how we want to save obstacles
"""
pass
def add_obstacles(self, obstacles):
"""
adds a list of obstacles to the environment object
:param obstacles: the obstacles to be added to the environment
"""
self.obstacles.extend(obstacles)
def get_obstacles(self):
"""
returns a list of obstacles in the environment
"""
return [obstacle[:] for obstacle in self.obstacles]
| [
"zach.duguid@gmail.com"
] | zach.duguid@gmail.com |
a55ce84d3a6c9d704dd97209345f06e6b84efeaf | c2db19a9040dbf13673bb103135ffd7dda9e623f | /microcart/microcart/settings.py | 96c16b7897cdcdf276f50359639d8619d146f0d1 | [] | no_license | MH-sabu/storefront_api | 726fce3f86fedcb61d11b2664d619b0f01bccc3a | 4492467dc681b9df4af646c29a7dd2ee2f92b439 | refs/heads/master | 2020-04-21T00:07:52.013702 | 2019-02-05T04:01:02 | 2019-02-05T04:01:02 | 169,187,281 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,097 | py | """
Django settings for microcart project.
Generated by 'django-admin startproject' using Django 2.1.5.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'e8_=!@&0@xnpgd0vxxfhn8%9u2q2ac@3c&*0&%19sbjzr-k*53'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'microcart.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'microcart.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
| [
"sabu.prof@gmail.com"
] | sabu.prof@gmail.com |
29a38d5484f0ff397b468621d0e4122048d5a301 | 1d959f9dba8ff61d214ca74ed6885b11d8574ddb | /tests/test_valid_xhtml.py | d00a28c1ce5be3286c1d3081a26faf7b5743fc3f | [] | no_license | pediapress/mwlib.epub | 20ea516ed11926723922219cd6c4e3f174697893 | d0989cf34d58a0db95744c4c39578853eb5f40f1 | refs/heads/master | 2021-01-13T02:40:10.409044 | 2013-09-11T08:21:11 | 2013-09-11T08:21:11 | 5,872,351 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,703 | py | #! /usr/bin/env python
#! -*- coding:utf-8 -*-
# Copyright (c) 2012, PediaPress GmbH
# See README.txt for additional licensing information.
'''Test for valid xhtml 1.1 output
Since I have given up on producing valid xhtml 1.1 this is just a collection of
now disabled and non-functional tests/code.
'''
# def get_tidy_tree(html, dump=False):
# html = etree.tostring(etree.HTML(html), encoding='utf-8')
# html, errors = collection.tidy_xhtml(html)
# if dump:
# print '>'*40
# print html
# if errors:
# print '!'*40
# print errors
# return etree.HTML(html)
# def test_bare_text_trivial():
# html = '''\
# <blockquote>
# unmotivated text
# </blockquote>
# '''
# tree = get_tidy_tree(html)
# bq = tree.xpath('//blockquote')[0]
# assert len(bq.text.strip()) == 0
# @pytest.mark.xfail
# def test_bare_text_simple():
# html='''\
# <blockquote>
# <ol>
# <li>blub</li>
# </ol>
# unmotivated text
# </blockquote>
# '''
# tree = get_tidy_tree(html)
# ol = tree.xpath('//ol')[0]
# dump_tree(tree)
# assert ol.tail is None
# def test_convert_center():
# html='''\
# <center>
# this is centered text
# </center>
# '''
# tree = get_tidy_tree(html)
# assert len(tree.xpath('//center')) == 0
# def test_tidy_old_tags(tmpdir):
# frag = '''\
# <center>
# this is centered text
# </center>
# '''
# xhtml, ret, stdout, stderr = render_frag(frag, tmpdir, 'tidy_old_tags.epub')
# assert ret == 0
# def test_tidy_ids(tmpdir):
# frag = '''\
# <p id="blub17:42">bla</p>
# '''
# xhtml, ret, stdout, stderr = render_frag(frag, tmpdir, 'tidy_ids.epub')
# assert ret == 0
| [
"volker.haas@brainbot.com"
] | volker.haas@brainbot.com |
4ec94a92a53e13eac7363138ee50017b41aacc98 | 969d8ec3596844d930aa9b6ac77b3764adddc03e | /model.py | 7e841523aa51dea77f29b5e27f8aa431a62ddb50 | [] | no_license | pratikmjoshi/trumptweetgenerator | d1a2c1768680cc4723efb8b4ae9d441fe0da65b6 | 328e6707f1247088684ac8efc772f5e6b85eae11 | refs/heads/master | 2021-09-20T23:50:25.075501 | 2018-08-17T05:01:46 | 2018-08-17T05:01:46 | 109,177,064 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 681 | py | import numpy as np
import sys
import os
import keras
from keras.models import Sequential
from keras.layers import Embedding,LSTM,Dense,Dropout,Bidirectional,TimeDistributed,Activation
def get_model(vocab_size):
model = Sequential()
model.add(Embedding(vocab_size,100))
model.add(Bidirectional(LSTM(50,return_sequences = True)))
model.add(LSTM(50,return_sequences = True))
model.add(LSTM(50,return_sequences = True))
model.add(LSTM(50,return_sequences = True))
model.add(TimeDistributed(vocab_size))
model.add(Activation('softmax'))
model.compile(loss = 'categorical_crossentropy',optimizer='rmsprop')
return model
| [
"t-prjosh@microsoft.com"
] | t-prjosh@microsoft.com |
d04daf17932d454dd22f8c3a5fe6678b1fcf9138 | 14fbb3276c7a0ade8f0f30d10750b3839d87f500 | /custom/icds_reports/migrations/0099_service_delivery_report.py | 6ab1c34d19909dc8bc5e995b5dc82e7a8a03a973 | [] | no_license | developer11092/commcare-hq | a5725ca3a6fc6588438e10d804c23a9fa59149ec | 38aa42bfe8279d0af1eacf193c8f51344023a45b | refs/heads/master | 2021-10-23T22:27:47.254088 | 2019-03-20T08:23:59 | 2019-03-20T08:23:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 557 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-02-11 15:38
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations
from corehq.sql_db.operations import RawSQLMigration
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
class Migration(migrations.Migration):
dependencies = [
('icds_reports', '0098_drop_legacy_tables'),
]
operations = [
migrator.get_migration('service_delivery_monthly.sql')
]
| [
"rohit25.negi@gmail.com"
] | rohit25.negi@gmail.com |
a984fa5280233327fd1997479acb19840c8efd63 | c8d7c4ba5e949b0af2f5aa234c3ae594b1a49950 | /Python接口自动化/auto_test_old/common/scripts/temp_db_file/xj_recon_model.py | beaa278e766c8975c7a679ebc4bb38c46a2d89d4 | [] | no_license | chase001/chase_learning | 00b7396a6775fb6e2fd80950f8acf3f1737b162e | c5ee2473e49923c781212eb3f9f50341c0bc80c8 | refs/heads/master | 2022-12-12T00:08:08.407671 | 2020-03-06T14:17:39 | 2020-03-06T14:17:39 | 221,268,073 | 0 | 0 | null | 2022-12-08T05:26:49 | 2019-11-12T16:56:34 | Python | UTF-8 | Python | false | false | 55,014 | py | from common.db.MyFields import *
from common.db.func import init_database
from peewee import *
database = init_database('db_hot')
class UnknownField(object):
def __init__(self, *_, **__): pass
class BaseModel(Model):
class Meta:
database = database
class AreaCode(BaseModel):
code = BigAutoField()
level = IntegerField(null=True)
name = CharField(null=True)
parent_code = BigIntegerField(index=True, null=True)
class Meta:
table_name = 'area_code'
class BatchOrderCustomer(BaseModel):
company_id = IntegerField(constraints=[SQL("DEFAULT 100")])
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
hj_user_id = BigIntegerField(null=True)
id = BigAutoField()
order_id = BigIntegerField(null=True)
receive_address = CharField(null=True)
receive_name = CharField(null=True)
receive_phone = CharField(null=True)
reference_order_id = BigIntegerField(null=True)
rerification_status = IntegerField(null=True)
ship_to_city = CharField(null=True)
ship_to_country = CharField(null=True)
ship_to_province = CharField(null=True)
ship_to_town = CharField(null=True)
task_id = BigIntegerField(index=True, null=True)
user_name = CharField(null=True)
class Meta:
table_name = 'batch_order_customer'
class BatchOrderProduct(BaseModel):
business_product_id = BigIntegerField(null=True)
combin_discount_amount = DecimalField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
id = BigAutoField()
is_master_product = MyBitField(null=True) # bit
manual_discount = DecimalField(null=True)
master_product_id = BigIntegerField(null=True)
product_id = BigIntegerField(null=True)
product_name = CharField(null=True)
product_type = IntegerField(null=True)
promotion_discount_amount = DecimalField(null=True)
quantity = IntegerField(null=True)
shipping_fee = DecimalField(null=True)
task_id = BigIntegerField(null=True)
unit_price = DecimalField(null=True)
class Meta:
table_name = 'batch_order_product'
class BatchOrderTask(BaseModel):
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
end_date = MyDateTimeField(null=True)
operator = CharField(null=True)
operator_user_id = BigIntegerField(null=True)
order_department_id = IntegerField(null=True)
order_memo = CharField(null=True)
order_project_code = CharField(null=True)
order_reason_id = IntegerField(null=True)
start_date = MyDateTimeField(null=True)
status = IntegerField(null=True)
task_id = BigAutoField()
task_name = CharField(null=True)
class Meta:
table_name = 'batch_order_task'
class BiBusiness(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_business'
class BiCouponType(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_coupon_type'
class BiDeviceType(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_device_type'
class BiOrderReason(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_order_reason'
class BiOrderSalesChannel(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_order_sales_channel'
class BiOrderSource(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_order_source'
class BiOrderType(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_order_type'
class BiPayMethod(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
is_active = MyBitField(constraints=[SQL("DEFAULT b'1'")]) # bit
pay_method_foe = CharField(null=True)
class Meta:
table_name = 'bi_pay_method'
class BiPlatformType(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_platform_type'
class BiProductStatus(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_product_status'
class BiProductType(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_product_type'
class BiSourceType(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_source_type'
class BiSupplierType(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_supplier_type'
class DepartmentCode(BaseModel):
department_id = BigIntegerField(unique=True)
department_name = CharField()
id = BigAutoField()
is_active = MyBitField(constraints=[SQL("DEFAULT b'1'")]) # bit
class Meta:
table_name = 'department_code'
class EsIndexOrderLog(BaseModel):
create_date = MyDateTimeField(constraints=[SQL("DEFAULT 0000-00-00 00:00:00")], index=True)
custom_data = CharField(null=True)
from_ = BigIntegerField(column_name='from', null=True)
id = BigAutoField()
is_valid = MyBitField(constraints=[SQL("DEFAULT b'1'")], null=True) # bit
last_order_date = MyDateTimeField(null=True)
last_order_id = BigIntegerField(null=True)
size = IntegerField(null=True)
total_records = IntegerField(null=True)
class Meta:
table_name = 'es_index_order_log'
indexes = (
(('last_order_id', 'from_', 'create_date'), False),
)
class GroupBuyCategory(BaseModel):
added_date = DateField(null=True)
alias = CharField(null=True)
id = BigAutoField()
is_valid = MyBitField(null=True) # bit
name = CharField(null=True)
parent_id = BigIntegerField(null=True)
path = CharField(null=True)
class Meta:
table_name = 'group_buy_category'
class GroupBuyCategoryAdmin(BaseModel):
added_date = DateField(null=True)
description = CharField(null=True)
id = BigAutoField()
is_valid = MyBitField(null=True) # bit
name = CharField(null=True)
class Meta:
table_name = 'group_buy_category_admin'
class GroupBuyCoupon(BaseModel):
added_date = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
batch_id = BigIntegerField(index=True)
batch_size = BigIntegerField(null=True)
description = CharField(null=True)
id = BigAutoField()
is_active = MyBitField(null=True) # bit
mail_format = CharField(null=True)
title = CharField(null=True)
class Meta:
table_name = 'group_buy_coupon'
class GroupBuyCouponDetail(BaseModel):
added_date = MyDateTimeField(null=True)
batch_id = BigIntegerField(null=True)
batch_type = IntegerField(null=True)
coupon_code = CharField(null=True)
expired_date = DateField(null=True)
extended = CharField(null=True)
group_buy_id = BigIntegerField(null=True)
id = BigAutoField()
is_active = MyBitField(null=True) # bit
send_date = MyDateTimeField(null=True)
user_id = BigIntegerField(null=True)
class Meta:
table_name = 'group_buy_coupon_detail'
class GroupBuyGlobalSettings(BaseModel):
display_a4_list_page = MyBitField(null=True) # bit
class Meta:
table_name = 'group_buy_global_settings'
primary_key = False
class GroupBuyLuckOrders(BaseModel):
email = CharField(null=True)
group_buy_id = BigIntegerField(null=True)
invitor_user_id = BigIntegerField(null=True)
join_date = MyDateTimeField(null=True)
join_reason = CharField(null=True)
lucky_number = BigIntegerField(null=True)
user_id = BigIntegerField(null=True)
class Meta:
table_name = 'group_buy_luck_orders'
primary_key = False
class GroupBuyProduct(BaseModel):
_360_cate = CharField(column_name='360_cate', null=True)
_360_display = MyBitField(column_name='360_display', null=True) # bit
_360_hot_bus_spot_name = CharField(column_name='360_hot_bus_spot_name', null=True)
_360_img = CharField(column_name='360_img', null=True)
_360_latitude = CharField(column_name='360_latitude', null=True)
_360_longitude = CharField(column_name='360_longitude', null=True)
_360_merchant_addr = CharField(column_name='360_merchant_addr', null=True)
_360_merchant_name = CharField(column_name='360_merchant_name', null=True)
_360_merchant_phone = CharField(column_name='360_merchant_phone', null=True)
_360_spent_end_time = MyDateTimeField(column_name='360_spent_end_time', null=True)
_360_spent_start_time = MyDateTimeField(column_name='360_spent_start_time', null=True)
_360_title = CharField(column_name='360_title', null=True)
admin_memo = TextField(null=True)
big_img_name = CharField(null=True)
bulo_display_img_url = CharField(null=True)
buy_only_once = MyBitField(null=True) # bit
cate_id = BigIntegerField(null=True)
cate_id_admin = BigIntegerField(null=True)
class_id = BigIntegerField(null=True)
ct_product_code = CharField(null=True)
display_by_bulo = MyBitField(null=True) # bit
end_time = MyDateTimeField(null=True)
free_buy_type = BigIntegerField(null=True)
full_num = BigIntegerField(null=True)
group_buy_price = DecimalField(null=True)
group_buy_type = BigIntegerField(null=True)
has_notice_by_mail = MyBitField(null=True) # bit
has_notice_by_sms = MyBitField(null=True) # bit
id = BigAutoField()
is_active = MyBitField(null=True) # bit
is_free_by_count = MyBitField(null=True) # bit
is_free_delivery = MyBitField(null=True) # bit
is_hide = MyBitField(null=True) # bit
is_new_version = MyBitField(null=True) # bit
is_take_by_customer = MyBitField(null=True) # bit
is_valid = MyBitField(null=True) # bit
is_view = MyBitField(null=True) # bit
key_words = CharField(null=True)
last_notice_time_mail = MyDateTimeField(null=True)
last_notice_time_sms = MyDateTimeField(null=True)
last_update_time = MyDateTimeField(null=True)
list_price = DecimalField(null=True)
low_cate_id = BigIntegerField(null=True)
mark = BigIntegerField(null=True)
max_buy_amount = BigIntegerField(null=True)
mention = TextField(null=True)
mini_product_name = CharField(null=True)
prevision_img_name = CharField(null=True)
product_desc = TextField(null=True)
product_id = BigIntegerField(null=True)
product_name = CharField(null=True)
quantity = BigIntegerField(null=True)
related_coupon_batch = BigIntegerField(null=True)
related_coupon_batch_type = IntegerField(null=True)
related_income = DecimalField(null=True)
related_staff = CharField(null=True)
room_id = BigIntegerField(null=True)
short_product_name = CharField(null=True)
small_img_name = CharField(null=True)
sort_index = BigIntegerField(null=True)
start_time = MyDateTimeField(null=True)
supplier_id = BigIntegerField(null=True)
supplier_type = BigIntegerField(null=True)
system_remark = TextField(null=True)
tags = CharField(null=True)
time_up_warning = MyBitField(null=True) # bit
total_buy_amount = BigIntegerField(null=True)
touch_product_desc = TextField(null=True)
unit_cost = DecimalField(null=True)
unit_delivery_cost = DecimalField(null=True)
user_ce_hua = CharField(null=True)
user_ce_hua_id = BigIntegerField(null=True)
user_comment = TextField(null=True)
user_design_id = BigIntegerField(null=True)
user_tui_guang = CharField(null=True)
user_tui_guang_id = BigIntegerField(null=True)
user_wen_an = CharField(null=True)
user_wen_an_id = BigIntegerField(null=True)
virtual_buyer_amount = BigIntegerField(null=True)
class Meta:
table_name = 'group_buy_product'
class GroupBuyProductDetail(BaseModel):
class_unit_cost = DecimalField(null=True)
group_buy_id = BigIntegerField(null=True)
id = BigAutoField()
is_active = MyBitField(null=True) # bit
product_id = BigIntegerField(null=True)
quantity = BigIntegerField(null=True)
unit_cost = DecimalField(null=True)
class Meta:
table_name = 'group_buy_product_detail'
class GroupBuyProductWarehouse(BaseModel):
group_buy_product_id = BigIntegerField(null=True)
id = BigAutoField()
warehouse_id = CharField(null=True)
warehouse_product_id = CharField(null=True)
class Meta:
table_name = 'group_buy_product_warehouse'
class InvoiceManage(BaseModel):
account_bank = CharField(null=True)
account_number = CharField(null=True)
apply_user_name = CharField(null=True)
company_address = CharField(null=True)
company_id = IntegerField(null=True)
company_name = CharField(null=True)
company_phone = CharField(null=True)
courier_number = BigIntegerField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], null=True)
create_user_id = BigIntegerField(null=True)
express_name = CharField(null=True)
express_pay_method = IntegerField(null=True)
ext_param = CharField(null=True)
id = BigAutoField()
ident_number = CharField(null=True)
invoice_content = IntegerField(null=True)
invoice_fee = DecimalField(null=True)
invoice_header = CharField(null=True)
invoice_header_type = IntegerField(null=True)
invoice_status = IntegerField(constraints=[SQL("DEFAULT 1")], null=True)
invoice_type = IntegerField(null=True)
is_print = MyBitField(constraints=[SQL("DEFAULT b'0'")], null=True) # bit
is_send = MyBitField(null=True) # bit
order_id = BigIntegerField(null=True)
recipient = CharField(null=True)
recipient_address = CharField(null=True)
recipient_city = CharField(null=True)
recipient_phone = CharField(null=True)
recipient_province = CharField(null=True)
recipient_town = CharField(null=True)
remark = CharField(null=True)
update_time = MyDateTimeField(null=True)
update_user_id = BigIntegerField(null=True)
class Meta:
table_name = 'invoice_manage'
class JdHjOrders(BaseModel):
create_date = MyDateTimeField(null=True)
customer_address = CharField(null=True)
customer_phone = CharField(null=True)
hj_deal_fee = DecimalField(null=True)
hj_order_date = MyDateTimeField(null=True)
hj_order_id = BigIntegerField(null=True)
id = BigAutoField()
is_processed = MyBitField(null=True) # bit
is_same = MyBitField(null=True) # bit
jd_order_date = MyDateTimeField(null=True)
jd_order_id = CharField(unique=True)
jd_seller_price = DecimalField(null=True)
memo = CharField(null=True)
class Meta:
table_name = 'jd_hj_orders'
class OrderArchiveDetailLog(BaseModel):
archive_batch_code = CharField(index=True)
archive_time = MyDateTimeField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
delete_time = MyDateTimeField(null=True)
id = BigAutoField()
is_archive = MyBitField(null=True) # bit
is_delete = MyBitField(null=True) # bit
is_to_es = MyBitField(null=True) # bit
order_id = BigIntegerField(index=True)
to_es_time = MyDateTimeField(null=True)
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
class Meta:
table_name = 'order_archive_detail_log'
class OrderArchiveMasterLog(BaseModel):
archive_batch_code = CharField(index=True)
archive_order_quantity = BigIntegerField(null=True)
archive_status = MyBitField(null=True) # bit
begin_order_id = BigIntegerField(index=True, null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
delete_status = MyBitField(null=True) # bit
end_order_id = BigIntegerField(null=True)
id = BigAutoField()
to_es_status = MyBitField(null=True) # bit
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
class Meta:
table_name = 'order_archive_master_log'
class OrderAssessment(BaseModel):
    """Per-product financial assessment line for an order.

    The many `share_*` columns apportion order-level amounts (fees, income,
    xb/virtual-currency) to this product line. Keyed by
    (order_id, product_id, multi_product_id) via a non-unique index.
    """
    business_product_id = BigIntegerField(null=True)
    deposit_discount_amount = DecimalField(null=True)
    id = BigAutoField()
    manual_discount_amount = DecimalField(null=True)
    multi_product_id = BigIntegerField(null=True)
    new_product_id = BigIntegerField(null=True)
    order_id = BigIntegerField(index=True, null=True)
    product_id = BigIntegerField(null=True)
    quantity = IntegerField(null=True)
    share_card_fee = DecimalField(null=True)
    share_card_income = DecimalField(null=True)
    share_combine_fee = DecimalField(null=True)
    share_cost = DecimalField(null=True)
    share_coupon_fee = DecimalField(null=True)
    share_coupon_income = DecimalField(null=True)
    share_course_code_fee = DecimalField(null=True)
    share_course_code_income = DecimalField(null=True)
    share_discount_fee = DecimalField(null=True)
    share_handling_fee = DecimalField(null=True)
    share_income = DecimalField(null=True)
    share_preincome = DecimalField(null=True)
    share_purchase_xb = DecimalField(null=True)
    share_recharge_xb = DecimalField(null=True)
    share_reward_xb = DecimalField(null=True)
    share_shipping_fee = DecimalField(null=True)
    share_user_handling_fee = DecimalField(null=True)
    share_vipcard_fee = DecimalField(null=True)
    share_vipcard_income = DecimalField(null=True)
    unit_price = DecimalField(null=True)
    class Meta:
        table_name = 'order_assessment'
        indexes = (
            (('order_id', 'product_id', 'multi_product_id'), False),
        )
class OrderBaseUser(BaseModel):
    """User account record as seen by the order system.

    `user_id` is the primary key; `bbs_user_id` links to an external
    BBS/community account. Contains auth material (`user_pwd`,
    `display_pwd`, security question/answer) — handle with care.
    NOTE(review): `display_pwd` suggests a recoverable password is stored;
    verify and flag to security if so.
    """
    address = CharField(null=True)
    answer = CharField(null=True)
    bbs_user_id = BigIntegerField(index=True, null=True)
    buy_times = IntegerField(null=True)
    cellphone = CharField(null=True)
    charge = DecimalField(null=True)
    display_pwd = CharField(null=True)
    email = CharField(null=True)
    expired_date = MyDateTimeField(null=True)
    fee_mark = IntegerField(null=True)
    froze_late_fee = DecimalField(null=True)
    gender = IntegerField(null=True)
    gold = IntegerField(null=True)
    has_validate_cellphone = MyBitField(null=True)  # bit
    icon_name = CharField(null=True)
    id_card_num = CharField(null=True)
    last_login_ip = CharField(null=True)
    last_login_time = MyDateTimeField(null=True)
    late_fee = DecimalField(null=True)
    lock_flag = IntegerField(null=True)
    login_times = IntegerField(null=True)
    phone = CharField(null=True)
    question = CharField(null=True)
    rank = IntegerField(null=True)
    rank_mark = IntegerField(null=True)
    reg_date = MyDateTimeField(null=True)
    reg_ip = CharField(null=True)
    sina_weibo_account = BigIntegerField(null=True)
    timestamp = MyDateTimeField(null=True)
    true_name = CharField(null=True)
    user_custom_cata_list = CharField(null=True)
    user_fav_cata_list = CharField(null=True)
    user_id = BigAutoField()
    user_name = CharField(null=True)
    user_pwd = CharField(null=True)
    user_top_custom_cata_list = CharField(null=True)
    veri_code = CharField(null=True)
    vip_level = IntegerField(null=True)
    vip_total_days = IntegerField(null=True)
    zipcode = CharField(null=True)
    class Meta:
        table_name = 'order_base_user'
class OrderBusinessExtend(BaseModel):
    """Free-form key/value extension data attached to an order.

    Allows business lines (`business_org_code`, `company_id`) to store
    extra attributes per order without schema changes. Indexed for lookup
    by (key, order_id, business_org_code) and by (key, values).
    """
    business_org_code = CharField(null=True)
    company_id = IntegerField(null=True)
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    id = BigAutoField()
    key = CharField(null=True)
    order_id = BigIntegerField(index=True, null=True)
    values = CharField(null=True)
    class Meta:
        table_name = 'order_business_extend'
        indexes = (
            (('key', 'order_id', 'business_org_code'), False),
            (('key', 'values'), False),
        )
class OrderCancelLog(BaseModel):
    """Audit log of order cancellations: who, when, from where, and why."""
    cancel_type = IntegerField(null=True)
    create_date = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    id = BigAutoField()
    ip = CharField(null=True)
    operator_user_id = BigIntegerField(null=True)
    operator_user_name = CharField(null=True)
    order_id = BigIntegerField(null=True)
    remark = CharField(null=True)
    source_id = IntegerField(null=True)
    status = IntegerField(null=True)
    class Meta:
        table_name = 'order_cancel_log'
class OrderCarriedForward(BaseModel):
    """Order-level carried-forward (revenue recognition) amounts.

    All monetary columns default to 0; xb columns are virtual-currency
    portions (recharge/reward/purchase). See OrderCarriedForwardMulti for
    the per-product breakdown.
    """
    company_id = IntegerField(index=True, null=True)
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    create_user_id = BigIntegerField(null=True)
    id = BigAutoField()
    income = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    order_id = BigIntegerField(index=True, null=True)
    preincome = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    purchase_xb = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    recharge_xb = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    reward_xb = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    xb_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    class Meta:
        table_name = 'order_carried_forward'
class OrderCarriedForwardMulti(BaseModel):
    """Per-product-line carried-forward amounts for an order.

    Breakdown of OrderCarriedForward by (product_id, multi_product_id).
    Unlike the order-level table, the monetary columns here are NOT NULL
    with no defaults, so callers must supply every amount.
    """
    business_id = IntegerField(null=True)
    category_id = IntegerField(null=True)
    company_id = IntegerField(index=True, null=True)
    id = BigAutoField()
    income = DecimalField()
    multi_product_id = BigIntegerField(null=True)
    order_id = BigIntegerField(index=True)
    preincome = DecimalField()
    product_id = BigIntegerField()
    product_type = IntegerField(null=True)
    purchase_xb = DecimalField()
    quantity = IntegerField()
    recharge_xb = DecimalField()
    reward_xb = DecimalField()
    seller_id = BigIntegerField(null=True)
    unit_price = DecimalField()
    xb_fee = DecimalField()
    class Meta:
        table_name = 'order_carried_forward_multi'
        indexes = (
            (('business_id', 'company_id', 'seller_id'), False),
            (('multi_product_id', 'product_id'), False),
        )
class OrderChangeLog(BaseModel):
    """Audit log of shipping-recipient changes on an order.

    Records old vs. new ship-to name/zip along with the operator who made
    the change.
    """
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    create_user_company_id = IntegerField(null=True)
    create_user_id = BigIntegerField(null=True)
    create_user_name = CharField(null=True)
    id = BigAutoField()
    old_ship_to_name = CharField(null=True)
    old_ship_to_zip = CharField(null=True)
    order_id = BigIntegerField(null=True)
    ship_to_name = CharField(null=True)
    ship_to_zip = CharField(null=True)
    update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    update_user_company_id = IntegerField(null=True)
    update_user_id = BigIntegerField(null=True)
    update_user_name = CharField(null=True)
    user_id = BigIntegerField(null=True)
    class Meta:
        table_name = 'order_change_log'
class OrderConfig(BaseModel):
    """Key/value configuration entries for the order system.

    `key` is indexed but not unique, so multiple rows per key are possible;
    `is_active`/`is_delete` flags presumably select the effective row —
    confirm with the config-loading code.
    """
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    create_user_id = BigIntegerField(null=True)
    create_user_name = CharField(null=True)
    id = BigAutoField()
    is_active = MyBitField(null=True)  # bit
    is_delete = MyBitField(null=True)  # bit
    key = CharField(index=True)
    remark = CharField(null=True)
    update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    update_user_id = BigIntegerField(null=True)
    update_user_name = CharField(null=True)
    value = CharField(null=True)
    class Meta:
        table_name = 'order_config'
class OrderCouponConsum(BaseModel):
    """Record of a coupon consumed by an order, with cost/income split."""
    company_id = IntegerField(null=True)
    cost = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    coupon_code = CharField(index=True, null=True)
    coupon_discount = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    coupon_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    coupon_income = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    coupon_name = CharField(null=True)
    coupon_type = IntegerField(null=True)
    id = BigAutoField()
    order_id = BigIntegerField(index=True)
    class Meta:
        table_name = 'order_coupon_consum'
class OrderDealMemo(BaseModel):
    """Operator memo attached to an order during handling/processing."""
    deal_date = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
    deal_memo = CharField(null=True)
    deal_user = CharField(null=True)
    deal_user_company_id = IntegerField(null=True)
    id = BigAutoField()
    order_id = BigIntegerField(index=True)
    class Meta:
        table_name = 'order_deal_memo'
        indexes = (
            (('order_id', 'deal_user'), False),
        )
class OrderDeliver(BaseModel):
    """Delivery progress for one product line of an order.

    Tracks requested `quantity` vs. `delivery_qty`/`delivery_failed_qty`
    and an overall `delivery_status` code (values defined elsewhere).
    """
    batch_id = IntegerField(null=True)
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    delivery_failed_qty = IntegerField(null=True)
    delivery_qty = IntegerField(null=True)
    delivery_status = IntegerField(null=True)
    id = BigAutoField()
    master_product_id = BigIntegerField(null=True)
    order_id = BigIntegerField()
    product_id = BigIntegerField()
    product_type = IntegerField(null=True)
    quantity = IntegerField(null=True)
    update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
    class Meta:
        table_name = 'order_deliver'
        indexes = (
            (('order_id', 'product_id'), False),
        )
class OrderDetail(BaseModel):
    """Order line item: one product within an order.

    Carries quantity, unit price, cost, and the various discount buckets
    (combine/deposit/manual/point). `is_master_product` distinguishes the
    main product from attached sub-products; `is_refunded` marks refunded
    lines.
    """
    account_date = MyDateTimeField(null=True)
    batch_id = IntegerField(null=True)
    business_id = IntegerField(null=True)
    business_product_id = BigIntegerField(index=True, null=True)
    category_id = IntegerField(null=True)
    combine_discount_amount = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    company_id = IntegerField(null=True)
    coupon_code = CharField(null=True)
    deposit_discount_amount = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    discount_amount = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    id = BigAutoField()
    is_master_product = MyBitField()  # bit
    is_refunded = MyBitField(null=True)  # bit
    manual_discount_amount = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    nsource = CharField(null=True)
    order_id = BigIntegerField(null=True)
    point_discount = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    product_cate = IntegerField(null=True)
    product_cost = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    product_id = BigIntegerField()
    product_name = CharField(null=True)
    product_type = IntegerField(null=True)
    promotion_info = CharField(null=True)
    quantity = IntegerField()
    seller_id = BigIntegerField(null=True)
    timestamp = MyDateTimeField(null=True)
    unit_price = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    warehouse_id = IntegerField(null=True)
    class Meta:
        table_name = 'order_detail'
        indexes = (
            (('order_id', 'product_id'), False),
            (('order_id', 'product_type'), False),
            (('order_id', 'quantity'), False),
        )
class OrderDetailAttached(BaseModel):
    """Link from a master product line to an attached product in an order."""
    company_id = IntegerField(null=True)
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    create_user_company_id = IntegerField(null=True)
    create_user_id = BigIntegerField(null=True)
    id = BigAutoField()
    master_product_id = BigIntegerField(index=True)
    order_id = BigIntegerField(index=True)
    product_id = BigIntegerField(index=True)
    update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    update_user_company_id = IntegerField(null=True)
    update_user_id = BigIntegerField(null=True)
    class Meta:
        table_name = 'order_detail_attached'
class OrderDetailCoupon(BaseModel):
    """Coupon code issued/attached to an order line, with verification state.

    `is_verificationed`/`verify_time` track redemption;
    `reference_verify_id` links to the verification record.
    """
    batch_id = IntegerField(null=True)
    coupon_code = CharField(index=True, null=True)
    coupon_type = IntegerField(null=True)
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    id = BigAutoField()
    is_verificationed = MyBitField(null=True)  # bit
    multi_product_id = BigIntegerField(null=True)
    order_id = BigIntegerField()
    product_id = BigIntegerField(null=True)
    reference_verify_id = BigIntegerField(index=True, null=True)
    update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
    verify_time = MyDateTimeField(null=True)
    class Meta:
        table_name = 'order_detail_coupon'
        indexes = (
            (('order_id', 'multi_product_id', 'product_id'), False),
        )
class OrderDetailDiscount(BaseModel):
    """A discount applied to an order line, with its promotional source.

    `source_type`/`source_id`/`source_code` identify the promotion that
    granted the discount; `start_time`/`end_time` bound its validity.
    """
    business_product_id = BigIntegerField(null=True)
    company_id = IntegerField(null=True)
    discount_amount = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    discount_dec = CharField(null=True)
    discount_xb = DecimalField(null=True)
    end_time = MyDateTimeField(null=True)
    id = BigAutoField()
    order_id = BigIntegerField(index=True, null=True)
    product_business_id = IntegerField(null=True)
    product_id = BigIntegerField(index=True, null=True)
    product_seller_id = BigIntegerField(null=True)
    source_code = CharField(index=True, null=True)
    source_id = BigIntegerField(null=True)
    source_type = IntegerField(null=True)
    start_time = MyDateTimeField(null=True)
    class Meta:
        table_name = 'order_detail_discount'
class OrderDetailMulti(BaseModel):
    """Sub-line of an order detail for multi/bundle products.

    Each row is one `multi_product_id` under a parent product. `share_*`
    columns hold this row's apportioned amounts; `subtotal_*` columns hold
    group subtotals, valid only when `has_subtotal_value` is set.
    `sid`/`ssid`/`uid` look like tracking identifiers — confirm with the
    tracking pipeline.
    """
    add_to_cart_url = CharField(null=True)
    batch_id = IntegerField(null=True)
    business_id = IntegerField(null=True)
    business_product_id = BigIntegerField(index=True, null=True)
    category_id = IntegerField(null=True)
    combine_discount_amount = DecimalField(null=True)
    company_id = IntegerField(null=True)
    coupon_code = CharField(null=True)
    coupon_type = IntegerField(null=True)
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    deposit_discount_amount = DecimalField(null=True)
    discount_amount = DecimalField(null=True)
    has_subtotal_value = MyBitField(null=True)  # bit
    id = BigAutoField()
    manual_discount_amount = DecimalField(null=True)
    multi_product_id = BigIntegerField(null=True)
    order_id = BigIntegerField(null=True)
    point_discount = DecimalField(null=True)
    product_cost = DecimalField(null=True)
    product_id = BigIntegerField(index=True, null=True)
    product_name = CharField(null=True)
    product_type = IntegerField(null=True)
    quantity = IntegerField(null=True)
    seller_id = BigIntegerField(null=True)
    share_card_fee = DecimalField(null=True)
    share_card_income = DecimalField(null=True)
    share_coupon_fee = DecimalField(null=True)
    share_coupon_income = DecimalField(null=True)
    share_course_code_fee = DecimalField(null=True)
    share_course_code_income = DecimalField(null=True)
    share_discount_fee = DecimalField(null=True)
    share_handling_fee = DecimalField(null=True)
    share_income = DecimalField(null=True)
    share_invite_code_fee = DecimalField(null=True)
    share_preincome = DecimalField(null=True)
    share_purchase_xb = DecimalField(null=True)
    share_recharge_xb = DecimalField(null=True)
    share_reward_xb = DecimalField(null=True)
    share_shipping_fee = DecimalField(null=True)
    share_user_handling_fee = DecimalField(null=True)
    share_vipcard_fee = DecimalField(null=True)
    share_vipcard_income = DecimalField(null=True)
    sid = CharField(null=True)
    ssid = CharField(null=True)
    subtotal_card_fee = DecimalField(null=True)
    subtotal_card_income = DecimalField(null=True)
    subtotal_coupon_fee = DecimalField(null=True)
    subtotal_coupon_income = DecimalField(null=True)
    subtotal_course_code_fee = DecimalField(null=True)
    subtotal_course_code_income = DecimalField(null=True)
    subtotal_discount_amount = DecimalField(null=True)
    subtotal_handling_fee = DecimalField(null=True)
    subtotal_income = DecimalField(null=True)
    subtotal_invite_code_fee = DecimalField(null=True)
    subtotal_pre_income = DecimalField(null=True)
    subtotal_purchase_xb = DecimalField(null=True)
    subtotal_recharge_xb = DecimalField(null=True)
    subtotal_reward_xb = DecimalField(null=True)
    subtotal_shipping_fee = DecimalField(null=True)
    subtotal_user_handling_fee = DecimalField(null=True)
    subtotal_vipcard_fee = DecimalField(null=True)
    subtotal_vipcard_income = DecimalField(null=True)
    uid = CharField(null=True)
    unit_price = DecimalField(null=True)
    update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
    warehouse_id = IntegerField(null=True)
    class Meta:
        table_name = 'order_detail_multi'
        indexes = (
            (('order_id', 'multi_product_id'), False),
            (('order_id', 'product_type'), False),
        )
class OrderFromTop(BaseModel):
    """Raw order imported from an external platform (TOP/Taobao).

    `import_order` holds the imported payload as text; `ali_trade_no` and
    `taobao_token` link back to the Alibaba-side transaction.
    """
    added_date = MyDateTimeField(null=True)
    ali_trade_no = BigIntegerField(null=True)
    id = BigAutoField()
    import_order = TextField(null=True)
    operator = CharField(null=True)
    order_id = BigIntegerField(null=True)
    platform_id = IntegerField(null=True)
    taobao_token = CharField(null=True)
    class Meta:
        table_name = 'order_from_top'
class OrderHjUser(BaseModel):
    """Internal (HJ) staff user referenced by orders, keyed by bbs_user_id."""
    bbs_user_id = BigIntegerField(index=True, null=True)
    company_id = IntegerField(null=True)
    department_id = IntegerField(null=True)
    email = CharField(null=True)
    id = BigAutoField()
    nick_name = CharField(null=True)
    true_name = CharField(null=True)
    user_name = CharField(null=True)
    class Meta:
        table_name = 'order_hj_user'
class OrderIncome(BaseModel):
    """Income (revenue recognition) record for an order / refund event.

    Positive and reversing entries are distinguished by `income_type` /
    `operater_type` codes defined elsewhere; refunds reference the source
    RMA via `source_rma_id` and the original entry via
    `reference_income_id`.

    NOTE: `share_recharege_xb` and `operater_type` are misspelled but mirror
    the actual DB column names — do not rename without a `column_name`
    override and a caller audit.
    """
    batch_id = BigIntegerField(null=True)
    coupon_code = CharField(index=True, null=True)
    income_date = MyDateTimeField(index=True, null=True)
    income_id = BigAutoField()
    income_type = IntegerField(null=True)
    last_update_date = MyDateTimeField(null=True)
    master_product_id = BigIntegerField(null=True)
    old_refund_id = BigIntegerField(null=True)
    operater_type = IntegerField(null=True)
    order_type = IntegerField(null=True)
    product_name = CharField(null=True)
    product_type = IntegerField(null=True)
    quantity = IntegerField(null=True)
    reference_income_id = BigIntegerField(null=True)
    share_income_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    share_purchase_xb = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    share_recharege_xb = DecimalField()
    share_reward_xb = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    source_order_id = BigIntegerField(null=True)
    source_rma_id = BigIntegerField(index=True, null=True)
    status = IntegerField(null=True)
    sub_product_id = BigIntegerField(null=True)
    user_id = BigIntegerField(null=True)
    class Meta:
        table_name = 'order_income'
        indexes = (
            (('source_order_id', 'sub_product_id'), False),
        )
class OrderIncomeStaging(BaseModel):
    """Staging queue of orders awaiting income-record generation.

    One row per `source_order_id` (optionally with an `rma_id` for refund
    flows); `status` (default 0) tracks processing state.
    """
    # BUGFIX: the generated constraint was `DEFAULT 0000-00-00 00:00:00`
    # without quotes around the datetime literal, which is invalid SQL and
    # would break any DDL emitted from this model (e.g. create_table()).
    # The literal must be a quoted string, matching the MySQL zero-date
    # default on the underlying column.
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT '0000-00-00 00:00:00'")], index=True)
    id = BigAutoField()
    rma_id = BigIntegerField(index=True, null=True)
    source_order_id = BigIntegerField(index=True)
    status = IntegerField(constraints=[SQL("DEFAULT 0")])
    update_time = MyDateTimeField(null=True)
    class Meta:
        table_name = 'order_income_staging'
class OrderMaster(BaseModel):
    """Master order record: one row per order (`order_id` is the PK).

    Groups: buyer contact (`cell_phone`, `email`, `qq`, `msn`), ship-to
    address fields, monetary totals and fee buckets (all defaulting to 0),
    lifecycle flags (`is_cancel`, `is_bill`, `is_valid`, ...), payment info,
    and operator/audit columns. `parent_order_id`/`is_child` model split
    orders; `related_order_id` links associated orders.
    """
    ali_trade_no = CharField(null=True)
    bank_code = CharField(null=True)
    bill_date = MyDateTimeField(null=True)
    bill_no = CharField(null=True)
    cancel_date = MyDateTimeField(null=True)
    cell_phone = CharField(null=True)
    chest_fee = DecimalField(null=True)
    city_id = IntegerField(null=True)
    combine_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    company_id = IntegerField(null=True)
    coupon_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    coupon_income = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    create_user_company_id = IntegerField(null=True)
    create_user_id = BigIntegerField(null=True)
    deal_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    deal_memo = CharField(null=True)
    deal_user = CharField(null=True)
    deliver_id = CharField(null=True)
    delivery_result = IntegerField(null=True)
    delivery_status = IntegerField(null=True)
    deposit_discount_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    discount_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    email = CharField(index=True, null=True)
    express = IntegerField(null=True)
    express_id = IntegerField(null=True)
    extend_bill_status = IntegerField(null=True)
    fee_memo = CharField(null=True)
    from_ip = CharField(null=True)
    handling_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    hj_user_id = BigIntegerField(index=True, null=True)
    income = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    installment_number = IntegerField(null=True)
    invite_code_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    is_active = MyBitField()  # bit
    is_audit = MyBitField(null=True)  # bit
    is_bill = MyBitField()  # bit
    is_cancel = MyBitField()  # bit
    is_child = MyBitField(constraints=[SQL("DEFAULT b'0'")], null=True)  # bit
    is_inside = MyBitField(null=True)  # bit
    is_notify = MyBitField(null=True)  # bit
    is_phone = MyBitField(null=True)  # bit
    is_print = MyBitField(null=True)  # bit
    is_test = MyBitField(null=True)  # bit
    is_trace = MyBitField(null=True)  # bit
    is_unusual = MyBitField(constraints=[SQL("DEFAULT b'0'")], null=True)  # bit
    is_valid = MyBitField()  # bit
    manual_discount_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    mark = IntegerField(null=True)
    msn = CharField(null=True)
    notify_mark = CharField(null=True)
    nsource = CharField(null=True)
    operator_company_id = IntegerField(null=True)
    operator_user_id = BigIntegerField(null=True)
    order_date = MyDateTimeField(null=True)
    order_device_id = IntegerField(null=True)
    order_id = BigAutoField()
    order_number = CharField(index=True, null=True)
    order_type = IntegerField()
    outer_trade_no = CharField(index=True, null=True)
    parent_order_id = BigIntegerField(null=True)
    pay_card_type = IntegerField(null=True)
    pay_device_id = IntegerField(null=True)
    pay_method = CharField(null=True)
    payment_bank_discount = DecimalField(null=True)
    phone_date = MyDateTimeField(null=True)
    platform_id = IntegerField(null=True)
    point_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    pre_income = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    province_id = IntegerField(null=True)
    purchase_xb = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    qq = CharField(null=True)
    recharge_xb = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    refer_source_id = IntegerField(null=True)
    refer_url = CharField(null=True)
    refund_type = CharField(null=True)
    related_order_id = BigIntegerField(index=True, null=True)
    reward_xb = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    seller_id = BigIntegerField(index=True, null=True)
    ship_date = MyDateTimeField(null=True)
    ship_flag = IntegerField(null=True)
    ship_method = CharField(null=True)
    ship_to_addr = CharField(null=True)
    ship_to_city = CharField(null=True)
    ship_to_country = CharField(constraints=[SQL("DEFAULT '中国'")], null=True)
    ship_to_name = CharField(index=True, null=True)
    ship_to_phone = CharField(index=True, null=True)
    ship_to_province = CharField(null=True)
    ship_to_time = CharField(null=True)
    ship_to_town = CharField(null=True)
    ship_to_zip = CharField(null=True)
    shipping_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    temp_order_version = IntegerField(null=True)
    timestamp = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    total_cost = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    total_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    total_order_today = IntegerField(null=True)
    town_id = IntegerField(null=True)
    update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
    update_user_company_id = IntegerField(null=True)
    update_user_id = BigIntegerField(null=True)
    user_coupon_id = IntegerField(null=True)
    user_handling_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    user_id = BigIntegerField(null=True)
    user_memo = CharField(null=True)
    user_reg_date = MyDateTimeField(null=True)
    user_source = CharField(null=True)
    user_title = CharField(null=True)
    xb_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    class Meta:
        table_name = 'order_master'
        indexes = (
            (('bill_date', 'company_id', 'deal_fee', 'order_type', 'is_bill', 'is_cancel'), False),
            (('order_date', 'order_type', 'is_bill', 'ship_flag', 'is_cancel'), False),
            (('platform_id', 'temp_order_version'), False),
        )
class OrderMessageLog(BaseModel):
    """Log of outbound messages (MQ/events) produced by the order system."""
    id = BigAutoField()
    message_content = TextField(null=True)
    message_id = CharField(null=True)
    produce_id = CharField(null=True)
    send_date_time = MyDateTimeField(null=True)
    send_machine_ip = CharField(null=True)
    class Meta:
        table_name = 'order_message_log'
class OrderPayInfo(BaseModel):
    """Payment attempt/result for an order (possibly a child order).

    Records channel, method, status codes, the billed amount, and the xb
    (virtual currency) components. `trans_seq_no`/`pay_num` are
    gateway-side identifiers. Note the xb/amount columns are NOT NULL with
    no defaults.
    """
    bank_code = CharField(null=True)
    begin_time = MyDateTimeField(null=True)
    bill_amount = DecimalField()
    child_order_id = BigIntegerField(null=True)
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    create_user_id = BigIntegerField(null=True)
    end_time = MyDateTimeField(null=True)
    ext_param = CharField(null=True)
    id = BigAutoField()
    order_id = BigIntegerField()
    order_type = IntegerField(null=True)
    origin_order_id = BigIntegerField(null=True)
    pay_channel = CharField(null=True)
    pay_device_id = IntegerField(null=True)
    pay_method = IntegerField(null=True)
    pay_num = CharField(null=True)
    pay_status = IntegerField(null=True)
    pay_time = MyDateTimeField(null=True)
    pay_type = IntegerField(null=True)
    purchase_xb = DecimalField()
    recharge_xb = DecimalField()
    remark = CharField(null=True)
    reward_xb = DecimalField()
    trans_seq_no = CharField(null=True)
    update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
    update_user_id = BigIntegerField(null=True)
    xb_fee = DecimalField()
    class Meta:
        table_name = 'order_pay_info'
        indexes = (
            (('end_time', 'begin_time'), False),
            (('order_id', 'child_order_id'), False),
            (('order_id', 'order_type'), False),
            (('pay_time', 'pay_type'), False),
        )
class OrderProductGroupbuy(BaseModel):
    """Group-buy (团购) campaign definition for a product.

    Holds campaign window (`start_time`/`end_time`), pricing
    (`group_buy_price` vs `list_price`), purchase limits, display assets,
    staff attribution (`user_ce_hua`/`user_wen_an`/`user_tui_guang`), and
    the `a_360_*` columns for a 360-platform merchant listing integration.
    Many columns are NOT NULL without defaults — callers must populate
    them on insert.
    """
    a_360_cate = CharField()
    a_360_display = MyBitField(constraints=[SQL("DEFAULT b'0'")])  # bit
    a_360_hot_bus_spot_name = CharField()
    a_360_img = CharField()
    a_360_latitude = CharField()
    a_360_longitude = CharField()
    a_360_merchant_addr = CharField()
    a_360_merchant_name = CharField()
    a_360_merchant_phone = CharField()
    a_360_spent_end_time = MyDateTimeField(null=True)
    a_360_spent_start_time = MyDateTimeField(null=True)
    a_360_title = CharField()
    admin_memo = CharField()
    big_img_name = CharField()
    bulo_display_img_url = CharField(null=True)
    buy_only_once = MyBitField(constraints=[SQL("DEFAULT b'0'")])  # bit
    cate_id = IntegerField(constraints=[SQL("DEFAULT 0")])
    cate_idadmin = IntegerField(constraints=[SQL("DEFAULT 0")])
    class_id = IntegerField(constraints=[SQL("DEFAULT 0")])
    ctproduct_code = CharField(null=True)
    display_by_bulo = MyBitField(constraints=[SQL("DEFAULT b'0'")])  # bit
    end_time = MyDateTimeField()
    free_buy_type = IntegerField(constraints=[SQL("DEFAULT 0")])
    full_num = IntegerField(constraints=[SQL("DEFAULT 0")])
    group_buy_price = DecimalField()
    groupbuy_type = IntegerField()
    has_notice_by_mail = MyBitField(constraints=[SQL("DEFAULT b'0'")])  # bit
    has_notice_by_sms = MyBitField(constraints=[SQL("DEFAULT b'0'")])  # bit
    id = BigAutoField()
    is_active = IntegerField(constraints=[SQL("DEFAULT 0")])
    is_free_by_count = MyBitField(constraints=[SQL("DEFAULT b'0'")])  # bit
    is_free_delivery = MyBitField(constraints=[SQL("DEFAULT b'0'")])  # bit
    is_hide = MyBitField()  # bit
    is_new_version = MyBitField(null=True)  # bit
    is_takeby_customer = MyBitField(constraints=[SQL("DEFAULT b'0'")])  # bit
    is_valid = MyBitField(constraints=[SQL("DEFAULT b'1'")])  # bit
    is_view = MyBitField(constraints=[SQL("DEFAULT b'0'")])  # bit
    keywords = CharField(null=True)
    last_notice_time_mail = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    last_notice_time_sms = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    last_update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    list_price = DecimalField()
    low_cate_id = IntegerField(null=True)
    mark = IntegerField()
    max_buy_amount = IntegerField(constraints=[SQL("DEFAULT 0")])
    mention = CharField()
    mini_product_name = CharField(null=True)
    prevision_img_name = CharField()
    product_desc = TextField()
    product_id = BigIntegerField(index=True)
    product_name = CharField(null=True)
    quantity = IntegerField()
    related_coupon_batch = IntegerField(constraints=[SQL("DEFAULT 0")])
    related_coupon_batch_type = IntegerField(null=True)
    related_income = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    related_staff = CharField(null=True)
    room_id = IntegerField(null=True)
    short_product_name = CharField(null=True)
    small_img_name = CharField()
    sort_index = IntegerField(constraints=[SQL("DEFAULT 0")])
    start_time = MyDateTimeField()
    supplier_id = IntegerField(constraints=[SQL("DEFAULT 0")])
    supplier_type = IntegerField(constraints=[SQL("DEFAULT 0")])
    system_remark = TextField(null=True)
    tags = CharField(null=True)
    timeup_warning = MyBitField(constraints=[SQL("DEFAULT b'1'")])  # bit
    total_buy_amount = IntegerField(constraints=[SQL("DEFAULT 0")])
    touch_product_desc = TextField(null=True)
    unit_cost = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    unit_delivery_cost = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
    user_ce_hua = CharField()
    user_ce_hua_id = IntegerField(null=True)
    user_comment = TextField()
    user_design_id = IntegerField(null=True)
    user_tui_guang = CharField()
    user_tui_guang_id = IntegerField(null=True)
    user_wen_an = CharField()
    user_wen_an_id = IntegerField(null=True)
    virtual_buyer_amount = IntegerField()
    class Meta:
        table_name = 'order_product_groupbuy'
class OrderSplitIndex(BaseModel):
    """Routing table for order sharding.

    Maps an order-id range (`begin_order_id`..`end_order_id`) to a
    database/table shard (`database_index`/`table_index`); `last_order_id`
    tracks the highest id placed in the shard.
    """
    begin_order_id = BigIntegerField(null=True)
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    database_index = CharField(null=True)
    end_order_id = BigIntegerField(null=True)
    id = BigAutoField()
    last_order_id = BigIntegerField(null=True)
    table_index = CharField(null=True)
    update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    class Meta:
        table_name = 'order_split_index'
        indexes = (
            (('begin_order_id', 'end_order_id'), False),
        )
class OrderStageRetry(BaseModel):
    """Retry bookkeeping for a processing stage of an order.

    Composite primary key (order_id, stage): one row per stage per order,
    counting attempts in `retry_times` (default 1) with a `status` code
    (default 0).
    """
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], null=True)
    order_id = BigIntegerField()
    retry_times = IntegerField(constraints=[SQL("DEFAULT 1")])
    stage = IntegerField()
    status = IntegerField(constraints=[SQL("DEFAULT 0")])
    update_time = MyDateTimeField(null=True)
    class Meta:
        table_name = 'order_stage_retry'
        indexes = (
            (('order_id', 'stage'), True),
        )
        primary_key = CompositeKey('order_id', 'stage')
class OrderTester(BaseModel):
    """Registry of test accounts whose orders should be treated as test data."""
    company_id = IntegerField(null=True)
    hj_user_id = BigIntegerField(index=True, null=True)
    id = BigAutoField()
    status = MyBitField(null=True)  # bit
    user_id = BigIntegerField(null=True)
    user_name = CharField(null=True)
    class Meta:
        table_name = 'order_tester'
class OrderTracking(BaseModel):
    """Attribution/tracking metadata captured when an order is placed.

    Stores source/channel/platform ids, device ids, referrer URL and IP,
    plus RMA linkage (`rma_flag`, `reference_order_id`, `solution_code` /
    `swap_solution_code`). `sid`/`ssid`/`uid` look like marketing tracking
    tokens — confirm with the tracking ingestion code.
    """
    add_to_cart_url = CharField(null=True)
    app_id = CharField(null=True)
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    device_id = CharField(null=True)
    ext_param = CharField(null=True)
    from_ip = CharField(null=True)
    id = BigAutoField()
    order_department_id = IntegerField(null=True)
    order_device_id = IntegerField(null=True)
    order_id = BigIntegerField()
    order_reason_id = IntegerField(null=True)
    order_source_id = IntegerField(null=True)
    pay_device_id = IntegerField(null=True)
    refer_url = CharField(null=True)
    reference_order_id = BigIntegerField(index=True, null=True)
    rma_flag = IntegerField(null=True)
    sales_channel_id = IntegerField(null=True)
    sales_platform_id = IntegerField(null=True)
    sid = CharField(null=True)
    solution_code = CharField(null=True)
    ssid = CharField(null=True)
    swap_solution_code = CharField(null=True)
    uid = CharField(null=True)
    update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True, null=True)
    class Meta:
        table_name = 'order_tracking'
        indexes = (
            (('order_id', 'order_source_id', 'solution_code', 'sales_platform_id'), False),
        )
class OrderUserAddressLog(BaseModel):
    """Audit log of shipping-address changes (old vs. new) on an order."""
    address = CharField(null=True)
    change_date = MyDateTimeField(null=True)
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    create_user_company_id = IntegerField(null=True)
    create_user_id = BigIntegerField(null=True)
    id = BigAutoField()
    old_address = CharField(null=True)
    operator = CharField(null=True)
    order_id = BigIntegerField(null=True)
    shop_user_id = BigIntegerField(null=True)
    user_id = BigIntegerField(null=True)
    class Meta:
        table_name = 'order_user_address_log'
class OrderUserPhoneLog(BaseModel):
    """Audit log of contact-phone changes (old vs. new) on an order.

    `type` is an integer discriminator — presumably which phone field was
    changed; confirm against the writer.
    """
    change_date = MyDateTimeField(null=True)
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    create_user_company_id = IntegerField(null=True)
    create_user_id = BigIntegerField(null=True)
    id = BigAutoField()
    old_phone = CharField(null=True)
    operator = CharField(null=True)
    order_id = BigIntegerField(null=True)
    phone = CharField(null=True)
    shop_user_id = BigIntegerField(null=True)
    type = IntegerField(null=True)
    user_id = BigIntegerField(null=True)
    class Meta:
        table_name = 'order_user_phone_log'
class OrderVirtualDeliver(BaseModel):
    """Delivery record for virtual goods: the code sent for an order line."""
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    id = BigAutoField()
    order_deliver_id = BigIntegerField(index=True, null=True)
    order_id = BigIntegerField(index=True, null=True)
    send_code = CharField(null=True)
    update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    class Meta:
        table_name = 'order_virtual_deliver'
class TempOrderMetaData(BaseModel):
    """Temporary/scratch table of (user, product, domain) order metadata."""
    hj_user_id = BigIntegerField(null=True)
    id = BigAutoField()
    product_id = BigIntegerField(null=True)
    user_domain = CharField(null=True)
    class Meta:
        table_name = 'temp_order_meta_data'
class TempOrderSellerCc(BaseModel):
    """Temporary/scratch list of seller ids (purpose not evident here)."""
    id = BigAutoField()
    seller_id = BigIntegerField(null=True)
    class Meta:
        table_name = 'temp_order_seller_cc'
class TempOrderUserCc(BaseModel):
    """Temporary/scratch list of HJ user ids (purpose not evident here)."""
    hj_user_id = BigIntegerField(null=True)
    id = BigAutoField()
    class Meta:
        table_name = 'temp_order_user_cc'
class TradeControl(BaseModel):
    """Peewee ORM model mapped to the `trade_control` table.

    Holds per-trade state flags (freeze/commit/cancel/compensate) keyed by
    a unique `trade_number` -- looks like a saga/compensation control row;
    TODO confirm the state-machine semantics with the writing service.
    """
    compensate_action = CharField(null=True)
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
    create_user_id = BigIntegerField(null=True)
    has_cancel = IntegerField(null=True)
    has_commit = IntegerField(null=True)
    has_compensate = IntegerField(index=True, null=True)
    has_freeze = IntegerField(null=True)
    id = BigAutoField()
    order_id = BigIntegerField(index=True, null=True)
    trade_number = CharField(unique=True)
    update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    update_user_id = BigIntegerField(null=True)

    class Meta:
        table_name = 'trade_control'
class TradeResourceStatus(BaseModel):
    """Peewee ORM model mapped to the `trade_resource_status` table.

    Per-resource state within a trade (`trade_number` indexed, the
    `resource_code` unique), with freeze/commit/cancel timestamps and a
    retry counter.
    """
    cancel_time = MyDateTimeField(null=True)
    commit_time = MyDateTimeField(null=True)
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
    create_user_id = BigIntegerField(null=True)
    freeze_time = MyDateTimeField(null=True)
    has_cancel = IntegerField(null=True)
    has_commit = IntegerField(null=True)
    has_freeze = IntegerField(null=True)
    id = BigAutoField()
    order_id = BigIntegerField(null=True)
    resource_code = CharField(null=True, unique=True)
    resource_type = IntegerField(null=True)
    retry_count = IntegerField(null=True)
    retry_time = MyDateTimeField(null=True)
    trade_number = CharField(index=True)
    update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    update_user_id = BigIntegerField(null=True)

    class Meta:
        table_name = 'trade_resource_status'
class UserAddress(BaseModel):
    """Peewee ORM model mapped to the `user_address` table.

    A saved shipping address book entry for a shop user (recipient
    name/phones/zip plus province/city/town ids and a default flag).
    """
    city_id = IntegerField(null=True)
    create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    create_user_company_id = IntegerField(null=True)
    create_user_id = BigIntegerField(null=True)
    id = BigAutoField()
    is_default = MyBitField(null=True)  # bit
    msn = CharField(null=True)
    province_id = IntegerField(null=True)
    qq = CharField(null=True)
    ship_to_address = CharField(null=True)
    ship_to_cellphone = CharField(null=True)
    ship_to_email = CharField(null=True)
    ship_to_name = CharField(null=True)
    ship_to_phone = CharField(null=True)
    ship_to_zip = CharField(null=True)
    shop_user_id = BigIntegerField(null=True)
    town_id = IntegerField(null=True)
    update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    update_user_company_id = IntegerField(null=True)
    update_user_id = BigIntegerField(null=True)

    class Meta:
        table_name = 'user_address'
| [
"chenyuliang@globalegrow.com"
] | chenyuliang@globalegrow.com |
7f9e6d7b2d645fcd5aa6bd33457e423a8acbaae7 | 485784cea86f52c2acda0a495942689104cd391e | /schedule/migrations/0002_rinkschedule_schedule_date.py | 9b86d692f4df4d3b153c2be9115884978a11c438 | [] | no_license | BrianC68/OIC_Web_Apps | 50ec9f46868ad37dc809548d2d362a4573320539 | e75b9439b11cf2325675d76dacac38806156fb16 | refs/heads/master | 2023-08-09T07:39:33.066601 | 2023-08-07T13:22:39 | 2023-08-07T13:22:39 | 197,438,661 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 472 | py | # Generated by Django 2.2.1 on 2019-10-08 23:50
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Adds a non-null `schedule_date` DateField to the RinkSchedule model.

    `default=django.utils.timezone.now` supplies a value for existing rows
    while the column is created; `preserve_default=False` removes that
    default from the model state afterwards (standard Django behavior for
    a one-off migration default).
    """

    dependencies = [
        ('schedule', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='rinkschedule',
            name='schedule_date',
            field=models.DateField(default=django.utils.timezone.now),
            preserve_default=False,
        ),
    ]
| [
"brianc@wi.rr.com"
] | brianc@wi.rr.com |
c241357210f715247bd9310a64e055e45000c867 | 1a7051fed7e872d1217bf3a4f16eab3b20b04156 | /graph_bfs_dfs.py | 1b8fc36108645ffd78145a471b7872f9e44ef99f | [] | no_license | michaelng126/python_cp | e8aa64b41fe42efc2ae7e9bf84121b3f9caf07ff | 1b28dd9e1fd4f1dcd6cf82028613ceb431f89bb2 | refs/heads/master | 2023-01-14T08:50:39.402899 | 2020-11-22T17:25:43 | 2020-11-22T17:25:43 | 315,022,809 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,172 | py | # Graph (undirected), adjacency list form
# Connected Components via dfs
# Example BFS
# Example DFS
class G:
    """Simple undirected graph stored as an adjacency list.

    Vertices are zero-indexed integers ``0 .. V-1``. Parallel edges and
    self-loops are not prevented (add_edge records whatever it is given).
    """

    def __init__(self, V):
        self.V = V
        self.adj = [[] for i in range(V)]

    def add_edge(self, v, w):
        # Undirected: record the edge in both endpoints' lists.
        self.adj[v].append(w)
        self.adj[w].append(v)

    def degree(self, v):
        """Return the number of edges incident to vertex v.

        BUG FIX: the original did ``len(self.adj(v))`` -- calling the
        adjacency *list* like a function, which raises TypeError. The
        list must be indexed.
        """
        return len(self.adj[v])

    def conn_comp(self):
        """Return the connected components as lists of vertices, found via DFS.

        Components appear in order of their smallest vertex; vertices
        within a component appear in DFS visitation order.
        """
        vtd = [False for i in range(self.V)]  # visited flags
        cc = []
        for v in range(self.V):
            if vtd[v] == False:
                temp = []
                cc.append(self.conn_comp_dfs_help(v, vtd, temp))
        return cc

    def conn_comp_dfs_help(self, v, vtd, temp):
        """Recursive DFS helper: append every vertex reachable from v to temp."""
        vtd[v] = True
        temp.append(v)  # visit v
        for i in self.adj[v]:
            if vtd[i] == False:
                temp = self.conn_comp_dfs_help(i, vtd, temp)
        return temp
# Example Connected Components: two components expected, {1,2,3} and {0,4,5,6}.
N = 7
g = G(N)
g.add_edge(1, 2)
g.add_edge(2, 3)
g.add_edge(4, 5)
g.add_edge(0, 4)
g.add_edge(4, 6)
print('Connected components:')
print(g.conn_comp())

# Example BFS - find all distances (in edges) from vertex 0.
N = 7
g = G(N)
g.add_edge(0, 4)
g.add_edge(4, 5)
g.add_edge(4, 6)
g.add_edge(6, 1)
g.add_edge(1, 2)
g.add_edge(2, 3)
g.add_edge(1, 3)

from collections import deque

visited = set()
start = 0
# visit 0
visited.add(start)
queue = deque([start])
distances = [0 for i in range(N)]
while queue:
    s = queue.popleft()
    for v in g.adj[s]:
        if v not in visited:
            queue.append(v)
            # visit v: mark on enqueue (not dequeue) so each vertex is
            # queued at most once; BFS layer order makes this distance final.
            visited.add(v)
            distances[v] = distances[s]+1
print('Distances in example BFS:')
print(distances)

# Example DFS - print every vertex connected to 0 whose index is 1 mod 4.
N = 7
g = G(N)
g.add_edge(0, 4)
g.add_edge(4, 5)
g.add_edge(4, 6)
g.add_edge(6, 1)
g.add_edge(1, 2)
g.add_edge(2, 3)
g.add_edge(1, 3)

visited = set()

def dfs(s):
    # Recursive DFS over the module-level graph `g`; `visited` guards cycles.
    if s in visited:
        return
    visited.add(s)
    # visit v
    if s % 4 == 1:
        print(f'Vertex {s} found.')
    for v in g.adj[s]:
        dfs(v)

print('Example DFS:')
dfs(0)
| [
"michael@michaels-mbp.home"
] | michael@michaels-mbp.home |
a5ce8e3808360b1d98652c7e2b7c2658debc42d3 | 559f3dec0964d2e0f86c6c871371fe779cf3726c | /contrib/MedicalSeg/tools/preprocess_utils/dataset_json.py | 5b3372963ae365689314ba2a9ae2d83d7a9307a9 | [
"Apache-2.0"
] | permissive | PaddlePaddle/PaddleSeg | 319ab26665ea492527a1949671650135123ffc39 | 2c8c35a8949fef74599f5ec557d340a14415f20d | refs/heads/release/2.8 | 2023-08-31T09:08:06.724717 | 2023-08-18T01:59:56 | 2023-08-18T01:59:56 | 204,380,779 | 8,531 | 1,866 | Apache-2.0 | 2023-09-12T02:30:42 | 2019-08-26T02:32:22 | Python | UTF-8 | Python | false | false | 478 | py | import json
def parse_msd_basic_info(json_path):
"""
get dataset basic info from msd dataset.json
"""
dict = json.loads(open(json_path, "r").read())
info = {}
info["modalities"] = tuple(dict["modality"].values())
info["labels"] = dict["labels"]
info["dataset_name"] = dict["name"]
info["dataset_description"] = dict["description"]
info["license_desc"] = dict["licence"]
info["dataset_reference"] = dict["reference"]
return info
| [
"noreply@github.com"
] | PaddlePaddle.noreply@github.com |
3040ec4d0744d00d0c908c83374cffac423a5df1 | 01f134e6f6f4cbf148979ed4831fcf28639ced5a | /codificar.py | 8626bafd4e909707aa60dbbf6e630e984ad149c2 | [] | no_license | ikerseco/encrypt | a3db625b296da5f30967ca17aad98b7285944809 | 73e3f51d1759bc7daf2c050c7e0d8ae33ba153f8 | refs/heads/master | 2021-05-07T18:01:20.646506 | 2018-01-02T20:47:06 | 2018-01-02T20:47:06 | 108,777,952 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,711 | py | import os
codigo = [
"vx66cLsxoñU3ñp1;MLf*ÑlkBt3&dmqmFYY4iopCÑwÇs?9Py¿V9KKN00cRPF?jgR;ydwj2QÑ0w*!0kKlz2A$aO8aÑVoYsJU7QiI2q",
"Qm50QdjO@V7&9/QZCbrVM%VijS9%3;!xY3%b9Ñ*Çpg1Z1L^JbkSgs28@LMq¿ZnXmVR2uvyh5y?GU;M@RIWl33Z0zhbsXb%&WÑLS/",
"VGUsaVi!lnnoQY6@L!AP/2&%/Mb&zsx8?26w5sXhD*$rD*VñCa7ZWB!DI2B¿7vArZ@FRtHWJMx4m2ñ¿^?f3bGzCxPPlmbCofR;cY",
"UfGzaS3b7s¿/ñnLUu*ur4O?n¿KJXÇ6¿sSbhYUsQu78d&PY2A&Ck9w7uNcg1;xsc?rmKMyñ!k@Qbq9rnaO0b23S4P9UmMz¿w!3!Gn",
"ñQBJÑs6YI2&FZX5mAwm7W¿BqmaInzVwk;G*B7w6D;rvtpIrbjp//NKmWhJidzmruNzymCy4ÇadBwmuyochfVcopVVCooo40Yk!VÇ",
"fW!wjj@&Mh8C^oZD$XZDhm$!Zw^R&ngDNwh%dI3uiFHJP9mVGgqz&fRJ*mfNnosX/dI;yJWj$BKñcilu;MI?YNBXm$V&pBz?dRF7",
"uB3FNZU4rJHphmJkIi!dC;SÇ*ñF8&RPswpaysXbN¿2xpasB?BkiLIpGgFnjPGDVZ3319/6nbjOt3;UF4zfk!fnpvttFLcJaxCl^w",
"PjLG9ugKX¿wGM!F!zZVp1&mas4%*sMGddoBL1i^iPBY¿yYzfm8$*H$^/iKVt$Ç5P4PcCnd8wWKlX%3i8q3!nxYJICDi0rlaDt0qn",
"KwVZtcy!7yuI/OBWd*?X9j66Uc0VX2YoCñÑwUL18qpcD8ñSb!IJz^kFG4JNuw?¿%l^whaMNI1JqK/z?UQ$O8$5If1^IRD6q&nj8I",
"idÑ2;NU$6ÇItAzSHbwQsvV6$o0dioR!&?uwPOIah102AYif¿cHOwOt7;8;7pXa^%NLXjC9ofQ7XOV&tÇI$Çcx*WA$;%OñpKdScx6fmk",
"oP;SzbqDX!aZg?G9dUAwN@95Ls?¿7K?iS2SDLtrAO*R¿uQ8;VMñ6YdNuN3KCBnIÇzO6I;NstA1$bVqHY¿!jx@46Mzg0p¿nz08iXJ",
"u6kosZROv0$;6ARbLZt780nUo^VÇ2u?4zXq7UtÇ8Dddc¿p;2WLV!B0hpxñ%qFWuWs5;llh*%DsfJUo!wPbV78!g!/0Ñl1/IwMqqM",
"3GVqD;ZQ0UF9ls&@H%bOUKumZÑMX1K4!qVDÇQs$Pkqt!vLmD9DñXQ*5*?ni90&C$ml$BLkhBKq0HA3CaLQGi1FBF2QuIdrÑXhMHV",
"4/ñlhQyXazUG0yÇQmntgMcas^mjbÇz^9C0ij98J5HN40fÇvsM5p4%dVl;SsudP0xUCGJñ^jW6ñUqIwx$mtdÑÇñziAj*;vIGoYÇZb",
"J¿sñy¿VYQ%uB5pQÑM4Ç4cGgAf6%rGÑqñrg8JñPAC3HaNV;kBlOo¿twzBaQ3&RPaÇZ$/zym^LL9fvGJÇfXO6v2QDwÇuYxñPw;Vwvc",
"sd^ñ39rpÇ&08axDCyWz¿4d6AÇN6s01Kx2y%s0ks0NiG8O&dÑq/6¿7%d9;7ÇcgR*sFtV55NwxjtL2*PV3jshHiX5¿zQP/Y7K7m!2¿",
"rtñ4KlÑgyÇ2tAnar4*7xb*s;yXOuBa2WcO4@!pFZCqYGKkq65Zom%;vPYClO5cHxOFV/n0K/Pk9u3oOñ4k;GQzM¿5?wvOt@s&XDF",
"jHhNcO$PN9b1pajRSUÑx;*YySrpn93&5HC&S/4i@Çg1bR@arM*ñmaV0nrxdJO4watvG7Psf?9qf7XgKp@ÑÑm4nOKGIN*D^JkkFnt",
"7SBSd$D?9xO^hlznCX!sbZDs%@PfrfgwIkd6&6gyNÑ3M^gvZ^rFh5;xY?jx@w&MZ¿SBCIJj22hO/27wV4GX4ñKlñcwHCu;ss9rDn",
"¿&R5xuxCoÇm^0qKkF9g4y41¿ñm6WFk9lhVZÑ&IuLb?FdzxmB;ogoñPfFkcJaf@LGSf7!jSKB?l;nwDakCQ?!pMn$JkAgzpIIIMCn",
"¿wqA%YK37192p;u!*F;o!h¿MOgU;fHCAQmF2y2MK4Md5wXGB9gb5MvmisvqiSj%&?h?^%HZx7QKziVnwuÇyS@v9UBncuhYHY*CjA",
"ztR1CJH@ñp%$Zoma*R$VPjxNPRSnFdZZkiw&RY7klIKZoI^6Ñ0W7OZPcgd?cU2s%y1?5xNI36sI2GQQ8odwp;?mR@96Ñ%hXÑmS2J",
"3CIqOPS3A7XBZb&LrasYMo0h4M?3ZQfdyjf3F8Ç?frhN@/nGP3¿?!87RS*J$6bMzK$kPkKR?d99g3QñB4XQmr%u*UYxÇy7%vBpxo",
"ks&OfYxOoVru$Nts&h1fÇo9P/FKFCvÇ5Vp^ihdK!FUqnV9NXF$Ç*ñsaBiUK$69S&@2l25IWWdmvRÑawsOhMDNYhV/;z^lG3d$Onm",
"Sñ7DC!M$9DB68hñu?uu9GÑZLPmñyrLKDZ71ZFr¿!Ñ0dhxrC1YzCisÇN1FJq6IZrxr8k;kXSC4kgkLlLpv8ox0&InKÇtNcQUgv%nD",
"ARJotyYa^OuB39Fi%?qBX4GpNPD2XJSR^ñÇj@xa?d&HnZ!g1m4M6rbcñsXMmt4F*!vlrjCUr^6caGDOIlÑpVHdhpd¿66vtzwU/rD",
"NxOgñxfñnñCnjsdwx22v@$NLdAcUq$1UWjkÇoX?I&H?4Dh$3kñ8;ñHIg$fwD;ÇrNsIHuYi&yLHvQ04&9kP^6J$qHlIF1kVf5fbI$",
"HIkJAG/VK*xlM;%p9LxcoKtnMsJ6*L8WBÑ7Ç¿nZ%CNVJw9GZaÑjñ7QKiarrtWlXZ2%dD0ÇzJrXf5ñBV6jd!¿ipIBwM&QhWP1OÇxL"
]
caracteres = [" ","q","w","e","r","t","y","u","i","o","p","a","s","d","f","g","h","j","k","l","ñ","z","x","c","v","b","n","m"]
"""
es = "vx66cLsxoñU3ñp1;MLf*ÑlkBt3&dmqmFYY4iopCÑwÇs?9Py¿V9KKN00cRPF?jgR;ydwj2QÑ0w*!0kKlz2A$aO8aÑVoYsJU7QiI2q"
q = "Qm50QdjO@V7&9/QZCbrVM%VijS9%3;!xY3%b9Ñ*Çpg1Z1L^JbkSgs28@LMq¿ZnXmVR2uvyh5y?GU;M@RIWl33Z0zhbsXb%&WÑLS/"
w = "VGUsaVi!lnnoQY6@L!AP/2&%/Mb&zsx8?26w5sXhD*$rD*VñCa7ZWB!DI2B¿7vArZ@FRtHWJMx4m2ñ¿^?f3bGzCxPPlmbCofR;cY"
e = "UfGzaS3b7s¿/ñnLUu*ur4O?n¿KJXÇ6¿sSbhYUsQu78d&PY2A&Ck9w7uNcg1;xsc?rmKMyñ!k@Qbq9rnaO0b23S4P9UmMz¿w!3!Gn"
r = "ñQBJÑs6YI2&FZX5mAwm7W¿BqmaInzVwk;G*B7w6D;rvtpIrbjp//NKmWhJidzmruNzymCy4ÇadBwmuyochfVcopVVCooo40Yk!VÇ"
t = "fW!wjj@&Mh8C^oZD$XZDhm$!Zw^R&ngDNwh%dI3uiFHJP9mVGgqz&fRJ*mfNnosX/dI;yJWj$BKñcilu;MI?YNBXm$V&pBz?dRF7"
y = "uB3FNZU4rJHphmJkIi!dC;SÇ*ñF8&RPswpaysXbN¿2xpasB?BkiLIpGgFnjPGDVZ3319/6nbjOt3;UF4zfk!fnpvttFLcJaxCl^w"
u = "PjLG9ugKX¿wGM!F!zZVp1&mas4%*sMGddoBL1i^iPBY¿yYzfm8$*H$^/iKVt$Ç5P4PcCnd8wWKlX%3i8q3!nxYJICDi0rlaDt0qn"
i = "YWRfPP$iPgÑasv1ñ?HV@$A3j!5RHG@/vzz7iuÑFbopX;HP41sJHflGXohuLlxiAFu/uiñUQ?8;$RupCfxRHBD@R2Dd1vAv1WJ?YQ"
o = "S?h6G^GwV/pcg3NsknMk8zñgÑ$Kgx¿swuHHVFGGYW&HvglK08@N¿Bpb*uLuk%ym^QWxA@qfggjiA3;OVK8xlv¿nvbStq*S^8FYyS"
p = "vMYdk5/SCysñO9d;0@sfsIQhBco$vhñ5vqPÑ6bWJt2@9N71szHkfrZD3aMRGi/8dkHBMgW53wQWq5JGDwx96HKm0s7K3XyH2/N&I"
a = "Gg15ANhM6NlB$htu7ii1%kFQMPjfIfl@rMs52Lp^lñoO@Ñ%JG$RlMxwQÑAwFvP3!olbrWqNl/ÇzÑjXS/7By¿/Ojx364UYUDoML2?"
s = "&;tA*xyJbX@k6ZK¿vY3tvIñ1Ipgq8FgQSaVYfÇa?nvWabLFVi*lQñ&I1CudD2ZM$RiSiUF^H1Z3G556mCz$4$qul¿QDk;YzMq4y9"
d = "2yDMLyk@t*UsqJFyoLñ2¿J!JAW@sVM;uZG4OZNmXZJONl8JB0/o!3?rrÑUMBGvÑQyuply¿F*ÑtcVBIX?GZñjsl7YfWrAxm1UfNQg"
f = "nVhhgypBx6ybuCs23Og*;bnbs/7lV1!n$Suw5JR554uVWHsv?yuMvb87Gjñ%xFg40HDLlmlñqqj1IÑj3mqB0DñAmahIzHO3%iÑh*"
g = "RyI^q32aXÑVXCMXG@z3;2^moZW6RXyP¿!RgÑfS&;X^yzn%XAdPQDh;zcR/%3QffMXÇv&mxN*7*pZLnjCÑr^GC0?1x$NC7^n*500M"
h = "*%AowQrNtmVxRtw!@kr@sa*;WlHÇ%yaYjAw1BZN2I5ñzt!100HR0sj^RWabVK3bHgMj4hS@J5!H;OQRBvwvsYzxcRbup$zRHoLXW"
j = "ñV*2MglJA6uj&UyoOz3Wtn@RQJV8cZPY5lQy5/89K2ñAd6lnK2wXU^sopxJJiI9CAZXlZP*^3MVvW%Vgafñjo4H7ujPMBIa;0*Uq"
k = "jAXy5OX$gSmdgv/c@/^YOrWqQjCc¿jhbvk*UZ9R?85Z3vu120xM^m2tuX/w67a@r&@^Y%J8WL2AxFro5oA2SWlÇm@7FmUj25V;ng"
l = "0cYHnVO*8jP!yd;rvÇAoSQ9V?Bd0?qñ@l7ñd!4R6JfhRnaBCZDBmj&ñ6k$VblFfrs^ñRñVpWzÑIJn$FbzX6¿*uPns75HSQkGUHqG"
ñ = "W2D%3y^1%bY6VMq?2CzZ/ugVtht^IP8Pñ@^&¿o;v0NBZFV3W5ñ7hZ&w1MS^ñmv?1/VIOCat&C%5pA3$&l7^FW/x9a8^8fLÑKrSrG"
z = "4GÑJmKn&UR9nJm!4Çu*RRc15jÑjRhñÑAYo^fYFn!UIqwxdSKcñ2@D2f%^W@svkqW@NGODñ¿DO8mN5yV*U4pobIMwXH8M44AVZ!PF"
xi = "UoLÑZ//M!ly6d4gfy3/A5@XBPa!8ñxaZlg/PDAvt?ñIQGMW^^jgdI4r;JNp3DXwcvGlnHFiqdjCN&lbXFFxlpWfiñ¿%0C1?23Fm^"
c = "rÑhp¿rHd5C?nOKoOCoPjd!B!9?YPa3ZtJÑlÑcÑ9!SdjñYAN&jaLF3cZN^ñqAxzmIJ@¿q6k0vÑGqiS2yD*8I!Yl@9*GC4yR3t1cci"
v = "@s?pchR9CSBocqxp3ZLIC;usubRNCg/Ç?F64uWYcKh;4sÑ8ruSWWl!r9VO3lNHGrro?;VñkQL!nlg4r2Cag73%XtZÑ;2%D¿Wc¿c@"
b = "qNhPWA!¿9kxmgFY!yN¿dgXñN¿tM$3S2I;hyUm&kqMa9ArX5Cx4ÇWRyj3¿ñ1xBoñI@kW¿d$C2hW^uUNwhZNlWqsÇa8Km6MlQ@A¿h4"
n = "0¿t2nkñaN&mU5F/Zl$1Çy6now2jH6ya?s*%^*a5cUAQb6pJo2rR^iBrIWÑSXOy5!cNA6f^Zjabk¿;J5@i2Fkx??jF0GNy@kIddN%"
m = "ytis65X4?ÑAht$F6?ZLW0ñÇaq36&WQxfLo8r2D$i^^ÇG$ChlfdDCNÇW5achZbtqyg/ltÇ¿5P4FV9tGK1Ñ7ñfSPYC*J90?c^2QUhL"
"""
def iniciar(directorio):
    """Interactive driver: ask for a filename and a mode, then run it.

    Mode 'ko' encodes the file's text, 'des' decodes it (any other input
    does nothing). In both cases the result is written to a.txt inside
    `directorio` via idatzi(). Note: chdir is a process-wide side effect.
    """
    fitxategia = input("fitxategiaren izena jarri:")
    kodi_deskodi = input("kodifikatu edo deskodificatu ko/des:")
    os.chdir(directorio);
    if kodi_deskodi == "ko":
        fichategia = open(fitxategia,'rt')
        irakurri = fichategia.read()
        lotura = kodifikatu(irakurri)
        fichategia.close()
        idatzi(lotura,directorio)
    elif kodi_deskodi == "des":
        fichategia = open (fitxategia,'rt')
        irakurri = fichategia.read()
        lotura = deskodifikatu(irakurri)
        print(lotura)
        fichategia.close()
        idatzi(lotura,directorio)
#kodificatu
def kodifikatu(irakurri):
    """Encode plain text by replacing each known character with its code.

    Each character present in the module-level `caracteres` alphabet maps
    to the code string at the same index of `codigo`; characters outside
    the alphabet (digits, punctuation, most uppercase) are silently
    dropped -- same behavior as before.

    Improvement: build the character->code lookup table once, so each
    input character costs O(1) instead of a linear scan of the alphabet.
    """
    taula = dict(zip(caracteres, codigo))
    # str.join avoids quadratic repeated string concatenation.
    return "".join(taula[letrak] for letrak in irakurri if letrak in taula)
#descodificatu
def deskodifikatu(irakurri):
    """Decode text previously produced by kodifikatu().

    Greedy scan: input characters accumulate into the buffer `le` until it
    exactly equals one of the fixed-length code strings in `codigo`; that
    code is then emitted as its plaintext character (same index in
    `caracteres`) and the buffer resets. Any trailing partial code is
    silently discarded.
    """
    luzehera = len(irakurri)
    luzehera2 = len(codigo)
    le = ""
    lotura = ""
    for x in range(luzehera):
        le += irakurri[x]
        for t in range(luzehera2):
            if le == codigo[t]:
                lotura += caracteres[t]
                le = ""  # code consumed; start accumulating the next one
    return lotura
def idatzi(lotura, directorio):
    """Write `lotura` to the file a.txt inside `directorio`.

    Improvement: the file handle is managed with a `with` block so it is
    closed even if the write raises (the original closed it manually).
    Note: chdir is kept for compatibility with the rest of the script,
    but it changes the working directory process-wide.
    """
    os.chdir(directorio)
    with open('a.txt', 'wt') as fitxategia:
        fitxategia.write(lotura)
# Entry point: ask for the working directory, then run the encode/decode menu.
directorio = input("aukeratu directorio bat fitsategiak kodifikatzeko edo deskodifikatzeko:")
iniciar(directorio)
| [
"garrantsitsua@gmail.com"
] | garrantsitsua@gmail.com |
cc30e3e61bd5839e2a0450e37255e918cb0b15cc | 814e4ad96172a76d9b72ac35697553980d0db5f1 | /pyalp/gs_interface/generate_certificates.py | e7aa246d4bb2851366daaf5f91a5fe555ce9c5c2 | [
"MIT"
] | permissive | Mause/pyalp | 29785037d3b4ebc2822e6ec74621aa65f621bb8e | fb0f723070e11f8c9ed57e2475eb963599f442a6 | refs/heads/master | 2022-12-05T12:05:01.804305 | 2014-07-02T13:04:21 | 2014-07-02T13:04:21 | 15,419,434 | 0 | 0 | MIT | 2022-11-22T00:24:05 | 2013-12-24T14:00:26 | PHP | UTF-8 | Python | false | false | 692 | py | #!/usr/bin/env python
"""
Generate client and server CURVE certificate files then move them into the
appropriate store directory, private_keys or public_keys. The certificates
generated by this script are used by the stonehouse and ironhouse examples.
In practice this would be done by hand or some out-of-band process.
Author: Chris Laws
"""
import zmq.auth
from __init__ import KEYS_DIR
def generate_certificates():
    """Create CURVE certificate files for both endpoints in KEYS_DIR."""
    # One keypair per role; zmq.auth writes the .key / .key_secret files.
    for role in ("server", "client"):
        zmq.auth.create_certificates(KEYS_DIR, role)
if __name__ == '__main__':
generate_certificates()
| [
"jack.thatch@gmail.com"
] | jack.thatch@gmail.com |
3fd7828668916336350ebd4793108ab031472436 | f9d6fce3a46297321a9429ef75834e5d48da36dc | /test/test.py | a8adb64af2593f46ae209320e7223e1eb2fd75e8 | [] | no_license | rafiei91/Assignment---Scientific-Computing-using-Python-1 | 38409a60befa72b91182d5428725d6fad04e295a | fc2c99ff3bf0b75f4ce931a0b1e5e6eb6d436ca5 | refs/heads/main | 2023-06-29T13:55:55.488956 | 2021-07-31T10:56:31 | 2021-07-31T10:56:31 | 388,409,328 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,103 | py | """
In this file you may have your tests
"""
import sys
sys.path.append('../')
import unittest
from scipy.integrate import solve_ivp
import numpy as np
import lorenz
import case
def ode_lorenz_attractor_t(t, X):
    """Right-hand side of the Lorenz system with sigma=10, beta=8/3, rho=6.

    `t` is accepted (solve_ivp signature) but unused: the system is
    autonomous. Returns [dx/dt, dy/dt, dz/dt] for state X = [x, y, z].
    """
    sigma = 10
    beta = 8 / 3
    rho = 6
    x, y, z = X[0], X[1], X[2]
    return [
        sigma * (y - x),
        x * (rho - z) - y,
        x * y - beta * z,
    ]
class TestMethod(unittest.TestCase):
    """Regression test comparing the project's ODE solver against SciPy."""

    def test_solver(self):
        """Integrate the Lorenz system with both solve_ivp and
        lorenz.solver.ode_solver and require the trajectories to agree.

        NOTE(review): rtol/atol of 1e1 is an extremely loose tolerance --
        presumably chosen because the fixed-step solver drifts from the
        adaptive SciPy one; confirm this is intentional.
        """
        c0 = [4, 4, 5]          # initial state [x0, y0, z0]
        dt = 0.01               # fixed time step
        N = 50001
        T = N * dt              # total integration time
        sol = solve_ivp(ode_lorenz_attractor_t, y0=c0, t_span=[0, T], t_eval=np.arange(0, T, dt))
        x1 = sol.y[0]
        y1 = sol.y[1]
        z1 = sol.y[2]
        # Project solver under test (case.case1 supplies the same RHS).
        f = case.case1.ode_lorenz_attractor
        u, t = lorenz.solver.ode_solver(f, c0)
        x2 = u[:, 0]
        y2 = u[:, 1]
        z2 = u[:, 2]
        assert np.allclose(x1, x2, rtol=1e1, atol=1e1)
        assert np.allclose(y1, y2, rtol=1e1, atol=1e1)
        assert np.allclose(z1, z2, rtol=1e1, atol=1e1)
| [
"noreply@github.com"
] | rafiei91.noreply@github.com |
d5d3139ac75d3c0e6996ff5e91d37517035be29c | 9c60059131f9e084954c43c696472be2df640362 | /generators2.py | e064391a2ed142f8b06f8dd071cb0ed5b48d4e5c | [] | no_license | mgud1989/lear-Py | 4bf9c07fcf76b23355a2273fcf35ca882c25cc0b | 7487dfbd210684089cc9ab702527018ac7724ff2 | refs/heads/master | 2020-12-24T02:30:31.035338 | 2020-03-14T21:34:10 | 2020-03-14T21:34:10 | 237,351,869 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 329 | py | #basicamente es para acceder o manejar array anidados
def devuelveCiudades(*ciudades):
    """Generator yielding every city name one CHARACTER at a time.

    Note: `yield from` over a string yields its characters, so the whole
    stream is the concatenation of all the names, letter by letter.
    """
    for nombre in ciudades:
        for caracter in nombre:
            yield caracter
# Demo: the generator walks city names character by character, so the two
# next() calls print the first two letters of "Caracas" ("C", then "a").
ciudadesDevueltas = devuelveCiudades("Caracas", "Madrid", "Buenos Aires")
print(next(ciudadesDevueltas))
print(next(ciudadesDevueltas))
"m.gud1989@gmail.com"
] | m.gud1989@gmail.com |
28cba7aaaf5149aec561511644e027600f0e5a15 | c47a7256a6371aae98399fc1547fcc404f593549 | /jalvenv/bin/django-admin | 6ff3a8e7aefb5186b21dbb19a89712434cc6810b | [] | no_license | captainjex/my-first-blog | 7a81c90a11aa6df2b8395201a787178d51240f15 | 3cb5f5aca7ee2f87435c78664940345dc6acccc9 | refs/heads/master | 2021-06-05T22:16:36.498214 | 2016-10-26T06:23:27 | 2016-10-26T06:23:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 305 | #!/home/laborank/wkwksama/djangogirls/jalvenv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from django.core.management import execute_from_command_line
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(execute_from_command_line())
| [
"mailpakgasing@gmail.com"
] | mailpakgasing@gmail.com | |
0f5b6933686f9dfea4edde042b1f68f228d414fd | 7d40e1de5f12521bdab2ed19d37fe3798f7695ed | /scripts/functions.py | 29939a2e4a82b5f81f086797c99a78edb46b9341 | [] | no_license | bridgecrew-perf4/AWS-ECS-Terraform-Jenkins-JavaApp-Deploy | f4db2a77fc957652278881ea062987311bc38534 | 9fda4d912fd1431414669852023cde5199138418 | refs/heads/master | 2023-03-16T18:00:04.842788 | 2020-12-16T09:33:29 | 2020-12-16T09:33:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,065 | py | import urllib.request
import urllib.parse
def availability_check(host, port):
try:
if urllib.request.urlopen(host + ":" + port, timeout=10).getcode() == 200:
print('!OK --- Connection established')
except Exception as e:
print("An Error occurred, probably can't connect to host, details below")
exit(e)
def response_check(host, port):
req_list = [
[host + ':' + port + '/test', 'I\'m working!'],
[host + ':' + port, "Please use /sort or /sortStudent"]
]
request_sort_list = [
[("table", "5,2,3,1,4"), ("method", "bubble")],
[("table", "Marek,zenon,Adam,dawid"), ("method", "bubble")],
[("table", "5,2,3,1,4"), ("method", "heap")],
[("table", "Marek,zenon,Adam,dawid"), ("method", "heap")],
[("table", "5,2,3,1,4"), ("method", "insertion")],
[("table", "Marek,zenon,Adam,dawid"), ("method", "insertion")],
[("table", "5,2,3,1,4"), ("method", "merge")],
[("table", "Marek,zenon,Adam,dawid"), ("method", "merge")],
[("table", "5,2,3,1,4"), ("method", "quick")],
[("table", "Marek,zenon,Adam,dawid"), ("method", "quick")],
[("table", "5,2,3,1,4"), ("method", "selection")],
[("table", "Marek,zenon,Adam,dawid"), ("method", "selection")],
]
response_expected = ['[1,2,3,4,5]', '["Adam","Marek","dawid","zenon"]']
for x in req_list:
if urllib.request.urlopen(x[0]).read().decode("utf-8") != x[1]:
exit("Data don't match for " + x[0])
else:
print('!OK ---' + x[0])
main_url = host + ':' + port + '/sort?'
for x in request_sort_list:
sort_request = urllib.parse.urlencode(x)
response_got = urllib.request.urlopen(main_url + sort_request).read().decode("utf-8")
if response_got == response_expected[0] or response_got == response_expected[1]:
print('!OK '+ str(x[1].__getitem__(1)))
else:
exit('Data don\'t match')
print(response_got)
print(response_expected)
| [
"d.klocek@wp.pl"
] | d.klocek@wp.pl |
08ab2bdd3f7dd32a02472f4851a4d4ca0bc7851c | d2b1b6e5f1c1ab14dda148183a6cb5ce73368731 | /Raspberry/django/gestorArduino/carcel/urls.py | 280989259f5c678c470f170af315d2a252dd2563 | [] | no_license | jaabal/projecte_ASI | 70955983858e3a3f16c52d5414e8c1ae9c53cc3a | 6298a8576b5c2fe20a562ce5396ee9e092335d5f | refs/heads/master | 2020-03-09T01:27:37.523213 | 2018-05-25T17:06:37 | 2018-05-25T17:06:37 | 128,515,000 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 126 | py | import views
from django.conf.urls import url
urlpatterns = [ url(r'^action/', views.action), url(r'^proba/', views.proba) ]
| [
"erjabal@gmail.com"
] | erjabal@gmail.com |
a98a8495541ccde04ffcafcc1ec91cf60765e561 | b0056c7b2178dde4c646c4caed433b29e2bae3e9 | /BankAccountClass.py | b6db43db3c3cf394dfd87072615a05c552211b50 | [] | no_license | victoriadonohoo/ClassesAndOOP | 1170bdbf1c0977b10732eda203427649d1f91649 | fd299348b692c57142cc9490c5d1f9c08e97b5ed | refs/heads/master | 2023-03-07T10:22:14.537477 | 2021-02-25T03:43:48 | 2021-02-25T03:43:48 | 337,841,558 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 935 | py | # The BankAccount class simulates a bank account.
class BankAccount:
    """Simulates a bank account holding a single monetary balance."""

    def __init__(self, bal):
        # Double leading underscore -> name-mangled, effectively private.
        self.__balance = bal

    def deposit(self, amount):
        """Add `amount` to the balance.

        NOTE(review): no sign check -- a negative deposit silently reduces
        the balance, bypassing withdraw()'s funds check; confirm intended.
        """
        self.__balance = self.__balance + amount

    def withdraw(self, amount):
        """Take `amount` out of the account if funds allow.

        A negative amount is treated as its absolute value. An overdraft
        attempt leaves the balance untouched and prints an error.
        """
        amount = abs(amount)
        if amount > self.__balance:
            print('Error: Insufficient funds')
        else:
            self.__balance = self.__balance - amount

    def get_balance(self):
        """Return the current account balance."""
        return self.__balance

    def __str__(self):
        return f'The balance is ${self.__balance:,.2f}'
| [
"victoria_donohoo1@baylor.edu"
] | victoria_donohoo1@baylor.edu |
153ef5840765a6a35807780ba3aad81045b0e0b6 | e3825619d276b154588653477c45c11663339296 | /Youtube Trending Music Lyrics Scraper/support.py | 26903b7b8aa30c44fd418f1ad17d40aaa6982965 | [
"MIT"
] | permissive | Skygear55/youtube-trending-music-lyrics-scraper | f9be003f6064c686704e505072a994325ece0619 | f404ecefb3580278d880fc46365a4bacd4e41ba9 | refs/heads/master | 2023-01-19T07:58:31.572343 | 2020-11-21T14:10:47 | 2020-11-21T14:10:47 | 314,818,569 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,088 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Nov 11 22:36:26 2020
@author: k.kirilov
"""
from bs4 import BeautifulSoup
import requests
""" Function truncates and formats the titles so that they can be directly input to search for genius url"""
def title_truncator(title):
    """Normalise a video title into a Genius-style URL slug fragment.

    The title is cut at the first character that is not a letter, space,
    apostrophe or hyphen (e.g. the "(" of "(Official Video)"), then the
    collaboration separator " x " is dropped, spaces become hyphens,
    apostrophes are removed, and a trailing hyphen is guaranteed.

    NOTE(review): the scan deliberately(?) skips the last two characters
    of the title, so a disallowed char there survives -- preserved quirk
    of the original; confirm intent.
    """
    permissible_letters = " '-abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
    # Truncate at the first disallowed character.
    for i in range(len(title) - 2):
        if title[i] not in permissible_letters:
            title = title[:i]
            break
    title = title.replace('-', ' ')
    title = title.replace('  ', ' ')
    title = title.replace(' x ', ' ')  # drop "A x B" collaboration marker
    title = title.replace(' ', '-')
    title = title.replace("'", "")
    # BUG FIX: the original indexed title[-1] unconditionally and crashed
    # with IndexError when truncation emptied the string (e.g. a title
    # starting with a digit). Guard the empty case.
    if title and title[-1] != "-":
        title = title + "-"
    return title
def scrape_song_lyrics(url):
    """Fetch a song page and return the text of its lyrics container.

    Relies on the page markup having a <div class="lyrics"> element; if
    the site layout changes, find() returns None and the .get_text()
    call raises AttributeError. No HTTP error handling is performed.
    """
    page = requests.get(url)
    html = BeautifulSoup(page.text, 'html.parser')
    lyrics = html.find('div', class_='lyrics').get_text()
    return lyrics
| [
"noreply@github.com"
] | Skygear55.noreply@github.com |
1434600aa23894fe97502d7f97ad8e909d58c0ec | 9249947c07f8addf64dd3d2a2f9f37d379f83921 | /libs/gluon/contrib/aes.py | cecf2d9073cb71ee75006f771492d0cb4b5943e6 | [
"MIT"
] | permissive | operepo/ope | eb71aa763d157416009d7c3052ace11852660e0a | 018c82af46845315795c67c36801e2a128f515d5 | refs/heads/master | 2023-08-08T15:05:28.592589 | 2023-07-25T00:22:24 | 2023-07-25T00:22:24 | 96,855,111 | 12 | 11 | MIT | 2023-03-03T15:10:34 | 2017-07-11T05:42:14 | Perl | UTF-8 | Python | false | false | 16,671 | py | """Simple AES cipher implementation in pure Python following PEP-272 API
Homepage: https://bitbucket.org/intgr/pyaes/
The goal of this module is to be as fast as reasonable in Python while still
being Pythonic and readable/understandable. It is licensed under the permissive
MIT license.
Hopefully the code is readable and commented enough that it can serve as an
introduction to the AES cipher for Python coders. In fact, it should go along
well with the Stick Figure Guide to AES:
http://www.moserware.com/2009/09/stick-figure-guide-to-advanced.html
Contrary to intuition, this implementation numbers the 4x4 matrices from top to
bottom for efficiency reasons::
0 4 8 12
1 5 9 13
2 6 10 14
3 7 11 15
Effectively it's the transposition of what you'd expect. This actually makes
the code simpler -- except the ShiftRows step, but hopefully the explanation
there clears it up.
"""
####
# Copyright (c) 2010 Marti Raudsepp <marti@juffo.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
####
from array import array
# Globals mandated by PEP 272:
# http://www.python.org/dev/peps/pep-0272/
MODE_ECB = 1
MODE_CBC = 2
#MODE_CTR = 6
block_size = 16
key_size = None
def new(key, mode=MODE_CBC, IV=None):
    """PEP-272 factory: wrap an AES key schedule in the requested mode.

    ECB ignores IV; CBC requires one. Other modes are not implemented.
    """
    if mode == MODE_ECB:
        return ECBMode(AES(key))
    if mode == MODE_CBC:
        if IV is None:
            raise ValueError("CBC mode needs an IV value!")
        return CBCMode(AES(key), IV)
    raise NotImplementedError
#### AES cipher implementation
class AES(object):
    """Core AES block cipher (key schedule + single-block encrypt/decrypt).

    Python 2 code (uses xrange). State blocks are 16-element mutable
    byte arrays laid out column-major (the transposition described in the
    module docstring), mutated in place between round steps.
    """
    block_size = 16

    def __init__(self, key):
        self.setkey(key)

    def setkey(self, key):
        """Sets the key and performs key expansion."""
        self.key = key
        self.key_size = len(key)

        # Round count depends on key length: AES-128/192/256.
        if self.key_size == 16:
            self.rounds = 10
        elif self.key_size == 24:
            self.rounds = 12
        elif self.key_size == 32:
            self.rounds = 14
        else:
            raise ValueError("Key length must be 16, 24 or 32 bytes")

        self.expand_key()

    def expand_key(self):
        """Performs AES key expansion on self.key and stores in self.exkey"""
        # The key schedule specifies how parts of the key are fed into the
        # cipher's round functions. "Key expansion" means performing this
        # schedule in advance. Almost all implementations do this.
        #
        # Here's a description of AES key schedule:
        # http://en.wikipedia.org/wiki/Rijndael_key_schedule

        # The expanded key starts with the actual key itself
        exkey = array('B', self.key)

        # extra key expansion steps (beyond the common 4 words per cycle)
        if self.key_size == 16:
            extra_cnt = 0
        elif self.key_size == 24:
            extra_cnt = 2
        else:
            extra_cnt = 3

        # 4-byte temporary variable for key expansion
        word = exkey[-4:]

        # Each expansion cycle uses 'i' once for Rcon table lookup
        for i in xrange(1, 11):
            #### key schedule core:
            # left-rotate by 1 byte
            word = word[1:4] + word[0:1]

            # apply S-box to all bytes
            for j in xrange(4):
                word[j] = aes_sbox[word[j]]

            # apply the Rcon table to the leftmost byte
            word[0] = word[0] ^ aes_Rcon[i]
            #### end key schedule core

            for z in xrange(4):
                for j in xrange(4):
                    # mix in bytes from the last subkey
                    word[j] ^= exkey[-self.key_size + j]
                exkey.extend(word)

            # Last key expansion cycle always finishes here
            if len(exkey) >= (self.rounds+1) * self.block_size:
                break

            # Special substitution step for 256-bit key
            if self.key_size == 32:
                for j in xrange(4):
                    # mix in bytes from the last subkey XORed with S-box of
                    # current word bytes
                    word[j] = aes_sbox[word[j]] ^ exkey[-self.key_size + j]
                exkey.extend(word)

            # Twice for 192-bit key, thrice for 256-bit key
            for z in xrange(extra_cnt):
                for j in xrange(4):
                    # mix in bytes from the last subkey
                    word[j] ^= exkey[-self.key_size + j]
                exkey.extend(word)

        self.exkey = exkey

    def add_round_key(self, block, round):
        """AddRoundKey step in AES. This is where the key is mixed into plaintext"""
        # Each round consumes a 16-byte slice of the expanded key.
        offset = round * 16
        exkey = self.exkey

        for i in xrange(16):
            block[i] ^= exkey[offset + i]

        #print 'AddRoundKey:', block

    def sub_bytes(self, block, sbox):
        """SubBytes step, apply S-box to all bytes

        Depending on whether encrypting or decrypting, a different sbox array
        is passed in.
        """
        for i in xrange(16):
            block[i] = sbox[block[i]]

        #print 'SubBytes   :', block

    def shift_rows(self, b):
        """ShiftRows step. Shifts 2nd row to left by 1, 3rd row by 2, 4th row by 3

        Since we're performing this on a transposed matrix, cells are numbered
        from top to bottom::

          0  4  8 12 ->  0  4  8 12 -- 1st row doesn't change
          1  5  9 13 ->  5  9 13  1 -- row shifted to left by 1 (wraps around)
          2  6 10 14 -> 10 14  2  6 -- shifted by 2
          3  7 11 15 -> 15  3  7 11 -- shifted by 3
        """
        b[1], b[5], b[9], b[13] = b[5], b[9], b[13], b[1]
        b[2], b[6], b[10], b[14] = b[10], b[14], b[2], b[6]
        b[3], b[7], b[11], b[15] = b[15], b[3], b[7], b[11]

        #print 'ShiftRows  :', b

    def shift_rows_inv(self, b):
        """Similar to shift_rows above, but performed in inverse for decryption."""
        b[5], b[9], b[13], b[1] = b[1], b[5], b[9], b[13]
        b[10], b[14], b[2], b[6] = b[2], b[6], b[10], b[14]
        b[15], b[3], b[7], b[11] = b[3], b[7], b[11], b[15]

        #print 'ShiftRows  :', b

    def mix_columns(self, block):
        """MixColumns step. Mixes the values in each column"""
        # Cache global multiplication tables (see below)
        mul_by_2 = gf_mul_by_2
        mul_by_3 = gf_mul_by_3

        # Since we're dealing with a transposed matrix, columns are already
        # sequential
        for i in xrange(4):
            col = i * 4

            #v0, v1, v2, v3 = block[col : col+4]
            v0, v1, v2, v3 = (block[col], block[col + 1], block[col + 2],
                              block[col + 3])

            block[col] = mul_by_2[v0] ^ v3 ^ v2 ^ mul_by_3[v1]
            block[col + 1] = mul_by_2[v1] ^ v0 ^ v3 ^ mul_by_3[v2]
            block[col + 2] = mul_by_2[v2] ^ v1 ^ v0 ^ mul_by_3[v3]
            block[col + 3] = mul_by_2[v3] ^ v2 ^ v1 ^ mul_by_3[v0]

        #print 'MixColumns :', block

    def mix_columns_inv(self, block):
        """Similar to mix_columns above, but performed in inverse for decryption."""
        # Cache global multiplication tables (see below)
        mul_9 = gf_mul_by_9
        mul_11 = gf_mul_by_11
        mul_13 = gf_mul_by_13
        mul_14 = gf_mul_by_14

        # Since we're dealing with a transposed matrix, columns are already
        # sequential
        for i in xrange(4):
            col = i * 4

            v0, v1, v2, v3 = (block[col], block[col + 1], block[col + 2],
                              block[col + 3])
            #v0, v1, v2, v3 = block[col:col+4]

            block[col] = mul_14[v0] ^ mul_9[v3] ^ mul_13[v2] ^ mul_11[v1]
            block[col + 1] = mul_14[v1] ^ mul_9[v0] ^ mul_13[v3] ^ mul_11[v2]
            block[col + 2] = mul_14[v2] ^ mul_9[v1] ^ mul_13[v0] ^ mul_11[v3]
            block[col + 3] = mul_14[v3] ^ mul_9[v2] ^ mul_13[v1] ^ mul_11[v0]

        #print 'MixColumns :', block

    def encrypt_block(self, block):
        """Encrypts a single block. This is the main AES function"""
        # For efficiency reasons, the state between steps is transmitted via a
        # mutable array, not returned.
        self.add_round_key(block, 0)

        for round in xrange(1, self.rounds):
            self.sub_bytes(block, aes_sbox)
            self.shift_rows(block)
            self.mix_columns(block)
            self.add_round_key(block, round)

        self.sub_bytes(block, aes_sbox)
        self.shift_rows(block)
        # no mix_columns step in the last round
        self.add_round_key(block, self.rounds)

    def decrypt_block(self, block):
        """Decrypts a single block. This is the main AES decryption function"""
        # For efficiency reasons, the state between steps is transmitted via a
        # mutable array, not returned.
        self.add_round_key(block, self.rounds)

        # count rounds down from 15 ... 1
        for round in xrange(self.rounds-1, 0, -1):
            self.shift_rows_inv(block)
            self.sub_bytes(block, aes_inv_sbox)
            self.add_round_key(block, round)
            self.mix_columns_inv(block)

        self.shift_rows_inv(block)
        self.sub_bytes(block, aes_inv_sbox)
        self.add_round_key(block, 0)
        # no mix_columns step in the last round
#### ECB mode implementation
class ECBMode(object):
    """Electronic CodeBook (ECB) mode encryption.

    Applies the block-cipher function to each block independently with no
    chaining. NB! ECB leaks plaintext structure and is insecure for almost
    all purposes.
    """
    def __init__(self, cipher):
        self.cipher = cipher
        self.block_size = cipher.block_size

    def ecb(self, data, block_func):
        """Apply *block_func* to every block of *data*; return the result
        as bytes. Raises ValueError if *data* is not block-aligned."""
        if len(data) % self.block_size != 0:
            raise ValueError("Plaintext length must be multiple of 16")
        block_size = self.block_size
        data = array('B', data)
        # `range` replaces the Python 2-only `xrange`.
        for offset in range(0, len(data), block_size):
            block = data[offset : offset + block_size]
            block_func(block)
            data[offset : offset + block_size] = block
        # array.tobytes() replaces tostring(), which was removed in 3.9.
        return data.tobytes()

    def encrypt(self, data):
        """Encrypt data in ECB mode."""
        return self.ecb(data, self.cipher.encrypt_block)

    def decrypt(self, data):
        """Decrypt data in ECB mode."""
        return self.ecb(data, self.cipher.decrypt_block)
#### CBC mode
class CBCMode(object):
    """Cipher Block Chaining (CBC) mode encryption; avoids ECB's content
    leaks.

    Each plaintext block is XORed with the ciphertext block preceding it
    before encryption; decryption is the inverse. The running IV is kept
    on the instance so successive calls continue the chain.
    """
    # A better explanation of CBC can be found here:
    # http://en.wikipedia.org/wiki/Block_cipher_modes_of_operation#Cipher-block_chaining_.28CBC.29
    def __init__(self, cipher, IV):
        self.cipher = cipher
        self.block_size = cipher.block_size
        self.IV = array('B', IV)

    def encrypt(self, data):
        """Encrypt *data* in CBC mode; returns bytes.
        Raises ValueError if *data* is not block-aligned."""
        block_size = self.block_size
        if len(data) % block_size != 0:
            raise ValueError("Plaintext length must be multiple of 16")
        data = array('B', data)
        IV = self.IV
        # `range` replaces the Python 2-only `xrange`.
        for offset in range(0, len(data), block_size):
            block = data[offset : offset + block_size]
            # CBC chaining: XOR in the previous ciphertext block (or the
            # IV for the first block) before encrypting.
            for i in range(block_size):
                block[i] ^= IV[i]
            self.cipher.encrypt_block(block)
            data[offset : offset + block_size] = block
            IV = block
        self.IV = IV
        # array.tobytes() replaces tostring(), which was removed in 3.9.
        return data.tobytes()

    def decrypt(self, data):
        """Decrypt *data* in CBC mode; returns bytes.
        Raises ValueError if *data* is not block-aligned."""
        block_size = self.block_size
        if len(data) % block_size != 0:
            raise ValueError("Ciphertext length must be multiple of 16")
        data = array('B', data)
        IV = self.IV
        for offset in range(0, len(data), block_size):
            ctext = data[offset : offset + block_size]
            block = ctext[:]
            self.cipher.decrypt_block(block)
            # Undo the chaining XOR with the previous ciphertext block/IV.
            for i in range(block_size):
                block[i] ^= IV[i]
            data[offset : offset + block_size] = block
            # The *ciphertext* block becomes the IV for the next block.
            IV = ctext
        self.IV = IV
        return data.tobytes()
####
def galois_multiply(a, b):
    """Multiply two bytes in GF(2^8) modulo the AES reduction polynomial
    x^8 + x^4 + x^3 + x + 1 (0x11b). Classic shift-and-add."""
    product = 0
    factor, multiplier = a, b
    while multiplier:
        # Add (XOR) the current factor for each set bit of the multiplier.
        if multiplier & 1:
            product ^= factor
        # Double the factor, reducing modulo the AES polynomial on overflow.
        factor <<= 1
        if factor & 0x100:
            factor ^= 0x1b
        multiplier >>= 1
    return product & 0xff
# Precompute the multiplication tables for encryption.
# MixColumns multiplies state bytes by the constants 2 and 3 in GF(2^8);
# a 256-entry lookup table per constant avoids per-byte bit fiddling.
gf_mul_by_2 = array('B', [galois_multiply(x, 2) for x in range(256)])
gf_mul_by_3 = array('B', [galois_multiply(x, 3) for x in range(256)])
# ... and for decryption: the inverse MixColumns matrix uses 9, 11, 13, 14.
gf_mul_by_9 = array('B', [galois_multiply(x, 9) for x in range(256)])
gf_mul_by_11 = array('B', [galois_multiply(x, 11) for x in range(256)])
gf_mul_by_13 = array('B', [galois_multiply(x, 13) for x in range(256)])
gf_mul_by_14 = array('B', [galois_multiply(x, 14) for x in range(256)])
####
# The S-box is a 256-element array, that maps a single byte value to another
# byte value. Since it's designed to be reversible, each value occurs only once
# in the S-box
#
# More information: http://en.wikipedia.org/wiki/Rijndael_S-box
# bytes.fromhex() replaces the Python 2-only str.decode('hex'), which
# raises AttributeError on Python 3.
aes_sbox = array('B', bytes.fromhex(
    '637c777bf26b6fc53001672bfed7ab76'
    'ca82c97dfa5947f0add4a2af9ca472c0'
    'b7fd9326363ff7cc34a5e5f171d83115'
    '04c723c31896059a071280e2eb27b275'
    '09832c1a1b6e5aa0523bd6b329e32f84'
    '53d100ed20fcb15b6acbbe394a4c58cf'
    'd0efaafb434d338545f9027f503c9fa8'
    '51a3408f929d38f5bcb6da2110fff3d2'
    'cd0c13ec5f974417c4a77e3d645d1973'
    '60814fdc222a908846eeb814de5e0bdb'
    'e0323a0a4906245cc2d3ac629195e479'
    'e7c8376d8dd54ea96c56f4ea657aae08'
    'ba78252e1ca6b4c6e8dd741f4bbd8b8a'
    '703eb5664803f60e613557b986c11d9e'
    'e1f8981169d98e949b1e87e9ce5528df'
    '8ca1890dbfe6426841992d0fb054bb16'
))
# This is the inverse of the above. In other words:
# aes_inv_sbox[aes_sbox[val]] == val
# bytes.fromhex() replaces the Python 2-only str.decode('hex'), which
# raises AttributeError on Python 3.
aes_inv_sbox = array('B', bytes.fromhex(
    '52096ad53036a538bf40a39e81f3d7fb'
    '7ce339829b2fff87348e4344c4dee9cb'
    '547b9432a6c2233dee4c950b42fac34e'
    '082ea16628d924b2765ba2496d8bd125'
    '72f8f66486689816d4a45ccc5d65b692'
    '6c704850fdedb9da5e154657a78d9d84'
    '90d8ab008cbcd30af7e45805b8b34506'
    'd02c1e8fca3f0f02c1afbd0301138a6b'
    '3a9111414f67dcea97f2cfcef0b4e673'
    '96ac7422e7ad3585e2f937e81c75df6e'
    '47f11a711d29c5896fb7620eaa18be1b'
    'fc563e4bc6d279209adbc0fe78cd5af4'
    '1fdda8338807c731b11210592780ec5f'
    '60517fa919b54a0d2de57a9f93c99cef'
    'a0e03b4dae2af5b0c8ebbb3c83539961'
    '172b047eba77d626e169146355210c7d'
))
# The Rcon table is used in AES's key schedule (key expansion)
# It's a pre-computed table of exponentation of 2 in AES's finite field
#
# More information: http://en.wikipedia.org/wiki/Rijndael_key_schedule
# bytes.fromhex() replaces the Python 2-only str.decode('hex'), which
# raises AttributeError on Python 3.
aes_Rcon = array('B', bytes.fromhex(
    '8d01020408102040801b366cd8ab4d9a'
    '2f5ebc63c697356ad4b37dfaefc59139'
    '72e4d3bd61c29f254a943366cc831d3a'
    '74e8cb8d01020408102040801b366cd8'
    'ab4d9a2f5ebc63c697356ad4b37dfaef'
    'c5913972e4d3bd61c29f254a943366cc'
    '831d3a74e8cb8d01020408102040801b'
    '366cd8ab4d9a2f5ebc63c697356ad4b3'
    '7dfaefc5913972e4d3bd61c29f254a94'
    '3366cc831d3a74e8cb8d010204081020'
    '40801b366cd8ab4d9a2f5ebc63c69735'
    '6ad4b37dfaefc5913972e4d3bd61c29f'
    '254a943366cc831d3a74e8cb8d010204'
    '08102040801b366cd8ab4d9a2f5ebc63'
    'c697356ad4b37dfaefc5913972e4d3bd'
    '61c29f254a943366cc831d3a74e8cb'
))
| [
"ray@cmagic.biz"
] | ray@cmagic.biz |
aeb6c6f719bb4bd7980c1dcf68a4aa5fbf449e3f | 609992bf47f2f77ecd4dd7f92dc8309b4c69d91b | /authentification/templatetags/perso.py | ff520e4393c2a5a91e1d37d28bc2c66146018c93 | [] | no_license | Sokhna-cambell/projet_planning_EPT_DIC1_GIT | c9ae99723446e161210ab75bc514e69332e65065 | 3ac6dd29bf1d7d54d7c75abfb0eee5e96142792f | refs/heads/master | 2021-03-08T05:30:05.152066 | 2020-03-10T14:09:53 | 2020-03-10T14:09:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 951 | py | from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
def add(value, arg):
return value + int(arg)
@register.filter
def replace(value, arg):
return arg
class SetVarNode(template.Node):
def __init__(self, var_name, var_value):
self.var_name = var_name
self.var_value = var_value
def render(self, context):
try:
value = template.Variable(self.var_value).resolve(context)
except template.VariableDoesNotExist:
value = ""
context[self.var_name] = value
return u""
@register.tag(name='set')
def set_var(parser, token):
    """Template tag of the form ``{% set some_var = '123' %}`` that stores
    a value into the context via SetVarNode."""
    pieces = token.split_contents()
    # Expect at least: 'set', <var_name>, '=', <var_value>.
    if len(pieces) < 4:
        raise template.TemplateSyntaxError("'set' tag must be of the form: {% set <var_name> = <var_value> %}")
    return SetVarNode(pieces[1], pieces[3])
"cambelldieng76@gmail.com"
] | cambelldieng76@gmail.com |
c68caa4bbb324ddfe067f6747e712a921df68b1c | acac8f186630c1de2fca6fa06ec5ed5d03c5785e | /mmtrack/core/track/similarity.py | 66a07300d4537b87de4c783e90f997cba5dfba2e | [
"Apache-2.0"
] | permissive | akiozihao/center_track | eb1b2afb7898537d26fa2f65b65d35c9df4c97ee | 3a06c91044e9f441963f6f602a6f223bad0eeaaa | refs/heads/master | 2023-07-03T10:56:17.233695 | 2021-08-04T02:25:49 | 2021-08-04T02:25:49 | 385,299,742 | 0 | 1 | Apache-2.0 | 2021-07-18T13:36:57 | 2021-07-12T15:45:14 | Python | UTF-8 | Python | false | false | 1,549 | py | import torch
import torch.nn.functional as F
def embed_similarity(key_embeds,
                     ref_embeds,
                     method='dot_product',
                     temperature=-1,
                     transpose=True):
    """Calculate feature similarity from embeddings.

    Args:
        key_embeds (Tensor): Shape (N1, C).
        ref_embeds (Tensor): Shape (N2, C) or (C, N2).
        method (str, optional): 'dot_product' or 'cosine'. Defaults to
            'dot_product'.
        temperature (int, optional): Softmax temperature; a positive value
            switches to temperature-scaled cosine similarity. Defaults
            to -1.
        transpose (bool, optional): Whether to transpose `ref_embeds`
            before the matrix product. Defaults to True.

    Returns:
        Tensor: Similarity matrix of shape (N1, N2).
    """
    assert method in ['dot_product', 'cosine']

    num_keys = key_embeds.size(0)
    num_refs = ref_embeds.size(0)
    # Degenerate case: nothing to compare on either side.
    if num_keys == 0 or num_refs == 0:
        return torch.zeros((num_keys, num_refs), device=key_embeds.device)

    if method == 'cosine':
        key_embeds = F.normalize(key_embeds, p=2, dim=1)
        ref_embeds = F.normalize(ref_embeds, p=2, dim=1)
    elif method == 'dot_product':
        if temperature > 0:
            # Temperature-scaled similarity is cosine similarity divided
            # by the temperature.
            sims = embed_similarity(
                key_embeds, ref_embeds, method='cosine', transpose=transpose)
            return sims / temperature
    else:
        raise NotImplementedError()

    if transpose:
        ref_embeds = ref_embeds.t()
    return torch.mm(key_embeds, ref_embeds)
| [
"noreply@github.com"
] | akiozihao.noreply@github.com |
a81d1c8f8a794323a7e144230038a9cf75b5a284 | bbed50a6dac60e1a4d9a3173f116368c85ab5b61 | /contacts/factories.py | f2cfcb0e6ddbeb515b83031b1442c710a8cbee07 | [] | no_license | Elvirarp92/django-query-troubleshooting | 12b5523eace5b07edfb5a66a76484e2c4dae81b9 | 5370721e13ee811f353016834ef876e305318fc1 | refs/heads/main | 2023-05-14T07:43:16.037636 | 2021-06-07T10:33:23 | 2021-06-07T10:33:23 | 374,647,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 455 | py | import factory
from faker import Faker
from .models import Contact, ContactSubtype
fake = Faker('es_ES')
class ContactFactory(factory.django.DjangoModelFactory):
    """Factory producing Contact instances with fake (es_ES locale) names."""

    # LazyFunction defers the faker call to build time. The original bare
    # `fake.first_name_nonbinary()` ran once at class-definition time, so
    # every Contact built by this factory received identical values.
    name = factory.LazyFunction(fake.first_name_nonbinary)
    surname_1 = factory.LazyFunction(fake.last_name)
    surname_2 = factory.LazyFunction(fake.last_name)

    class Meta:
        model = Contact
class ContactSubtypeFactory(factory.django.DjangoModelFactory):
    """Factory producing ContactSubtype instances with unique fake names."""

    # LazyFunction defers the faker call to build time; evaluating
    # fake.unique.word() at class definition produced one fixed name and
    # defeated the uniqueness guarantee across built instances.
    name = factory.LazyFunction(fake.unique.word)

    class Meta:
        model = ContactSubtype
| [
"elvirarp92@gmail.com"
] | elvirarp92@gmail.com |
ee63b12e238a4138f9963f331b11ffc93c1e0fa0 | 5979cf3c79daa04706c8fef1595574c6e36c14a1 | /vgl/home/urls.py | 0547341057b158de3626eb131277cda6b03c92a1 | [] | no_license | rahuezo/valley-green-landscape-inc | f675b5242ed7a80e457b236a253fb9ed0602829c | 6dac5ed2202336a69a86c6dcafee892cbadaa5b3 | refs/heads/master | 2021-08-15T16:48:50.594629 | 2017-11-18T00:08:49 | 2017-11-18T00:08:49 | 110,613,620 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135 | py | from django.conf.urls import url
from . import views
app_name = 'home'
urlpatterns = [
url(r'^$', views.index, name='index'),
]
| [
"rahuezo@ucdavis.edu"
] | rahuezo@ucdavis.edu |
ff3d49be3bf5b78cc08abc6a3ff566006d7ac3f2 | 8beaf464e741de2da91a077634c7fb4c598269a0 | /requests模块简单使用/05.发送带参数的请求1.py | 9ea4015f87f7501cdd9b13044c040dd9e703fb5f | [] | no_license | starrye/spider_learning | d1ee3f5a571e4c7ce26dc5dc990681ea4093a7d5 | b969e6b6ff92d15478000948c9f165ff2cee6d42 | refs/heads/master | 2020-04-23T01:40:28.238512 | 2019-02-18T05:58:19 | 2019-02-18T05:58:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 270 | py | import requests
# Spoof a desktop-browser User-Agent so the search endpoint serves the
# normal HTML response.
request_headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.99 Safari/537.36"}
target_url = 'https://www.baidu.com/s?wd=python'
response = requests.get(target_url, headers=request_headers)
print(response.text)
"hello_lwz163.com"
] | hello_lwz163.com |
16e2efa0e9f7f301ed6fff3874477844dcdea2ad | 32b15ce47bae2ae08abb06dac11bbf0da7ca2e21 | /matcher/matcher_wsgi.py | 23cfd62bb378b035ecf7584fa9046496c6503ff7 | [] | no_license | yuqiaoyan/match650 | a045453b633e92258759e92a920e3ecc620933d4 | 1698b325293948d9ac545729ab833187ae3fa06d | refs/heads/master | 2016-09-06T14:33:50.750718 | 2012-08-19T18:27:32 | 2012-08-19T18:28:03 | 3,540,984 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 333 | py | from lucene import *
initVM()
getVMEnv().attachCurrentThread()
SITE_DIR = '/opt/apps/site/matcher'
import site
site.addsitedir(SITE_DIR)
import os
import sys
sys.path.append(SITE_DIR)
os.environ['DJANGO_SETTINGS_MODULE'] = 'matcher.settings'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
| [
"ke.wu@ibeca.me"
] | ke.wu@ibeca.me |
ea61199d476cb071f56b1843ee2812a6240fab8e | c6cd0b904dbb4a68eb6786bb4d62edf81e256424 | /guessinggame.py | 4d56a54f6eb2aca2c4e5f0672e60b1d6241d9c70 | [] | no_license | 18donovkyld/Programming-Portfolio | 99b10fee70c3e6229ee3f8aeeace925b16872dfc | c3a8eaf104e248b613cf4d9bedb62b0b986bcf21 | refs/heads/master | 2020-12-24T16:31:52.591527 | 2016-03-04T21:55:33 | 2016-03-04T21:55:33 | 41,106,043 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 307 | py | # High Low Guessing Game
def show_instrustions():
print "Pick a number between 1 and 100 and I will try to guess it. I can do this in \
no more than 10 guesses. \
After each guess, enter: \
0 - if I got it right. \
-1 - if I guessed too high \
1 - if I guessed too low. "
def take_guess():
| [
"kyliedonovan@Alveyworld-20.local"
] | kyliedonovan@Alveyworld-20.local |
ebd421b8ca86aef485ac23adaef83b91b32a5f0a | 201bfacc820f2ecb3d0f7921f2348648369bea75 | /Project2/src/motif_prj2_console.py | 83977eb100c7dafd96545fe55e1a67267b1af86c | [] | no_license | markkuku/Bioinformatics_project_demo | 73a6663871446ef060c31d310da31e3228935881 | 0b87716918c4f3b98d32c9f0420699d23f5bb716 | refs/heads/master | 2021-01-25T09:45:02.555351 | 2017-06-10T08:04:28 | 2017-06-10T08:04:28 | 93,876,130 | 1 | 0 | null | 2017-06-10T08:04:29 | 2017-06-09T16:11:24 | null | UTF-8 | Python | false | false | 566 | py | '''
Created on 2011/11/5
@author: markku
# The console of motif finding algorithm
'''
from time import clock, time
from motif_find_obj import *
mf = motif_finding()
filename = raw_input("input your file name:\n")
#filename = "test"
#length = 3
length = int(raw_input("input your motif length:\n"))
mf.load_seq(filename)
mf.set_motif_len(length)
start_time = time()
best = mf.motif_finding_bnb("")
end_time = time()
e_time = end_time - start_time
print "%s %s" % (mf.get_locat(),mf.get_min_dis())
print "%s " % best
print "it spends %0.2f seconds " % e_time | [
"b871626@life.nthu.edu.tw"
] | b871626@life.nthu.edu.tw |
3bad13cc5bddd857215b2177bebc8b7cae6f2551 | 20c80f722c451b64d05cc027b66a81e1976c3253 | /commons/libs/pyblish_qml/rpc/__init__.py | a461a54bd8d5068ae347c7d751ec764fdb30bacd | [] | no_license | flypotatojun/Barbarian | 2d3fcb6fcb1b4495b6d62fc5e32634abf4638312 | efe14dd24c65b4852997dad1290e503211bcc419 | refs/heads/master | 2021-07-18T01:43:14.443911 | 2017-10-24T03:37:43 | 2017-10-24T03:37:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 105 | py | from . import client, server, service
__all__ = [
"client",
"server",
"service",
]
| [
"lonegather@users.noreply.github.com"
] | lonegather@users.noreply.github.com |
dc89107f7dcfdfa9cd7401d4281ed7ea790232a3 | 0ad7f553df6b210b5ac004fbf490ed651a21d55e | /algos/discrete_esay_control_lib_01.py | ef19dd246fb029fd2da77e0c6b9a839eebbfc2a8 | [] | no_license | MarianoDel/spyder_python | fa00987eb9aa1ef61d7224679a84c05a217c6c35 | 5f5896df68f95eb860bc08c21ae2b19516432cdc | refs/heads/master | 2020-05-23T06:14:57.329478 | 2020-04-23T14:58:16 | 2020-04-23T14:58:16 | 84,753,428 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,354 | py | # -*- coding: utf-8 -*-
# Using the control library (scipy.signal) for a discrete-time demo.
import numpy as np
from scipy import signal

# 8-tap moving-average FIR filter: all coefficients 1/8.
b = [0.125, 0.125, 0.125, 0.125, 0.125, 0.125, 0.125, 0.125]
# Transfer function as (num, den, dt); make sure dt matches the time step
# used for the discrete simulation below.
tf1 = (b, [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 0.001)
#w, h = signal.freqz(b)
#w, h = signal.freqz(tf1)
# NOTE(review): freqz() expects numerator (and optional denominator)
# arrays; passing the whole (b, a, dt) tuple as one argument looks
# suspicious -- verify against scipy.signal.freqz's signature.
w, h = signal.freqz((b, [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 0.001))

import matplotlib.pyplot as plt

# Bode-style plot: magnitude (dB) on the left axis, phase on a twin axis.
fig = plt.figure()
plt.title('Digital filter frequency response')
ax1 = fig.add_subplot(111)
plt.plot(w, 20 * np.log10(abs(h)), 'b')
plt.ylabel('Amplitude [dB]', color='b')
plt.xlabel('Frequency [rad/sample]')
ax2 = ax1.twinx()
angles = np.unwrap(np.angle(h))
plt.plot(w, angles, 'g')
plt.ylabel('Angle (radians)', color='g')
plt.grid()
plt.axis('tight')
plt.show()

plt.figure(2)
plt.clf()
# Transfer function as (num, den, dt); dt must match the discrete time
# step below. The multiple poles at the origin keep len(den) >= len(num)
# so scipy does not complain that num is longer than den.
tf = (b, [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 0.001)
t_in = np.arange(0.0, 0.1, 0.001)
#t_in = np.arange(0.0, 4.0, 1.0)
#u = np.asarray([0.0, 0.0, 1.0, 1.0])
# Unit-step input; plot step response (offset the input for visibility).
u = np.ones(np.size(t_in))
t_out, y = signal.dlsim(tf, u, t=t_in)
plt.plot(t_out, y, 'b')
plt.plot(t_out, u+0.1, 'g')
plt.show()
| [
"marianodeleu@yahoo.com.ar"
] | marianodeleu@yahoo.com.ar |
67f12d8933ae63eef4aa93f09cc44f61d8f48c3d | 7801b0356b60de5a4fa6b214717a1c04942b5b62 | /crm/migrations/0003_userinfo_user.py | 2daf274b45a19a80d357f35f9323cbef54a43799 | [] | no_license | hqs2212586/CRM_demo | 365652c61c991a2098d32b5db318d55cf29baa0b | 941a896aef598d81750a96074bc63ccfaaadf0a5 | refs/heads/master | 2020-03-27T17:43:40.110992 | 2018-08-31T09:20:01 | 2018-08-31T09:20:01 | 146,869,963 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 517 | py | # Generated by Django 2.0.6 on 2018-08-31 03:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: adds a nullable one-to-one link from crm.UserInfo
    to rbac.User."""

    dependencies = [
        ('rbac', '0004_permission_action'),
        ('crm', '0002_customerdistrbute'),
    ]

    operations = [
        migrations.AddField(
            model_name='userinfo',
            name='user',
            # null=True so existing UserInfo rows stay valid after the
            # column is added.
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='rbac.User'),
        ),
    ]
| [
"443514404@qq.com"
] | 443514404@qq.com |
c462fe20d6d7cce0e51e04a3e6bcad1a9adb245d | fdbe0e560f9488d9cbeca7f633c242a0bde57f1f | /reverse_word_order.py | 67f78677caeb2326c4943d0690316215f7a93531 | [] | no_license | adamcfro/practice-python-solutions | a3e497307de715c993134f4f4fbb74b5b48a032e | e3d8a6c8e4c600bd2511a53ea32d3dca87bd9554 | refs/heads/master | 2022-07-10T04:08:23.639000 | 2022-06-28T13:25:18 | 2022-06-28T13:25:18 | 153,543,063 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 400 | py |
# def letters_reversed():
# backwords = input("Give me several words: ")
# return backwords[::-1]
# print(letters_reversed())
##########
def words_reversed():
    """Prompt for a line of text and return its space-separated words in
    reverse order."""
    tokens = input("Give me several words: ").split(' ')
    tokens.reverse()
    return ' '.join(tokens)

print(words_reversed())
##########
def reverseWord(w):
    """Return *w* with its whitespace-delimited words in reverse order."""
    pieces = w.split()
    pieces.reverse()
    return ' '.join(pieces)

print(reverseWord('totally cool'))
"adamcfro@gmail.com"
] | adamcfro@gmail.com |
2a02caa7558f764522bd58b00871216e796676d8 | d42dea822871be6027fadbf8b167be1c0b38d9c7 | /BST/debug.py | 2cc73f8d9784aa9f8a21c2b7530ff531d0bb1e4b | [] | no_license | siddhantprateek/Python-in-Practice | d8412c46dec57d512d8abd87cb0a33b71070c5ee | 0ad806f02fecb87de20078ef956f8e23bb38e342 | refs/heads/main | 2023-06-26T22:34:55.172882 | 2021-07-29T15:14:09 | 2021-07-29T15:14:09 | 354,875,226 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,479 | py | class BSTreeNode:
def __init__(self, val):
self.value = val
self.left, self.right = None, None
class BinaryST:
    """Binary search tree keyed on node values; duplicate inserts are
    silently ignored."""

    def __init__(self):
        self.root = None

    def insert(self, val):
        """Insert *val*, preserving the BST ordering invariant."""
        self.root = self.insertHelp(val, self.root)

    def insertHelp(self, value, node):
        # Recursive insertion; returns the (possibly new) subtree root.
        if node is None:
            return BSTreeNode(value)
        if value < node.value:
            node.left = self.insertHelp(value, node.left)
        elif value > node.value:
            node.right = self.insertHelp(value, node.right)
        return node

    def Sum(self):
        """Return the sum of all values stored in the tree."""
        return self.sumHelp(self.root)

    def sumHelp(self, node):
        # Recursive sum over the subtree rooted at *node*.
        if node is None:
            return 0
        return node.value + self.sumHelp(node.left) + self.sumHelp(node.right)

    def display(self):
        """Print the tree one node per line, root first (pre-order)."""
        self.displayHelper(self.root, "Root Node: ")

    def displayHelper(self, node, details):
        if node is None:
            return
        print(details, node.value)
        self.displayHelper(node.left, "left child of " + str(node.value) + ":")
        self.displayHelper(node.right, "right child of " + str(node.value) + ":")
# nums = [4, 5, 2, 7, 6, 1]
if __name__ == '__main__':
    # NOTE(review): the comment above also lists 1, but the original code
    # never inserted it -- behavior kept as-is.
    tree = BinaryST()
    for value in (4, 5, 2, 7, 6):
        tree.insert(value)
    tree.display()
"siddhantprateek@gmail.com"
] | siddhantprateek@gmail.com |
debb5cc0393534e545e36b88a2e60783faca5eef | cbbaa3fc4b747e666c18ae9ddc2b24ffa9da9a8f | /workspace1/build/mrobot_gazebo/catkin_generated/pkg.develspace.context.pc.py | 964395d3266f6d7c4228bc2520a77bf6c32f652b | [] | no_license | bucky527/roswork | 4aef0fc6b5ceea418df7ac3628cf79febbcd62ee | bcc5810e60ff7e24a73bee711fb4fe023e3beec9 | refs/heads/master | 2022-04-27T18:22:26.934289 | 2020-04-28T15:06:39 | 2020-04-28T15:06:39 | 255,950,062 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 382 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# NOTE: auto-generated by catkin at configure time; the empty strings are
# template placeholders that were substituted with this package's values.
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "mrobot_gazebo"
PROJECT_SPACE_DIR = "/home/hyuk/roswork/workspace1/devel"
PROJECT_VERSION = "0.0.0"
| [
"435133229@qq.com"
] | 435133229@qq.com |
4f25b1e5b4f7a4e50b0cdaa6f94e53e850f93c0b | d2316e3902190dfa766258471270335295d59892 | /suggestions/migrations/0001_initial.py | 3c71b0bb5dcfba8963325ac5faaf95e087b0b124 | [
"BSD-2-Clause"
] | permissive | yoniLavi/Open-Knesset | 62c18585470e5c859f96b8934b340ba03edbfeab | 8297e5431914edb48b12e4d5a17b910ac9271b59 | refs/heads/master | 2022-06-05T13:28:08.575149 | 2013-07-03T22:59:34 | 2013-07-03T22:59:34 | 7,697,672 | 0 | 0 | BSD-3-Clause | 2022-05-21T00:22:58 | 2013-01-19T01:25:48 | Python | UTF-8 | Python | false | false | 8,743 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Auto-generated South schema migration for the `suggestions` app:
    creates the Suggestion, SuggestedAction and ActionFields tables."""

    def forwards(self, orm):
        # Adding model 'Suggestion'
        db.create_table('suggestions_suggestion', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('suggested_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, db_index=True, blank=True)),
            ('suggested_by', self.gf('django.db.models.fields.related.ForeignKey')(related_name='suggestions', to=orm['auth.User'])),
            ('comment', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('resolved_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('resolved_by', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='resolved_suggestions', null=True, to=orm['auth.User'])),
            ('resolved_status', self.gf('django.db.models.fields.IntegerField')(default=0, db_index=True)),
        ))
        db.send_create_signal('suggestions', ['Suggestion'])
        # Adding model 'SuggestedAction'
        db.create_table('suggestions_suggestedaction', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('suggestion', self.gf('django.db.models.fields.related.ForeignKey')(related_name='actions', to=orm['suggestions.Suggestion'])),
            ('action', self.gf('django.db.models.fields.PositiveIntegerField')()),
            ('subject_type', self.gf('django.db.models.fields.related.ForeignKey')(related_name='action_subjects', to=orm['contenttypes.ContentType'])),
            ('subject_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
        ))
        db.send_create_signal('suggestions', ['SuggestedAction'])
        # Adding model 'ActionFields'
        db.create_table('suggestions_actionfields', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('action', self.gf('django.db.models.fields.related.ForeignKey')(related_name='action_fields', to=orm['suggestions.SuggestedAction'])),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=50)),
            ('value', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('value_type', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='action_values', null=True, to=orm['contenttypes.ContentType'])),
            ('value_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
        ))
        db.send_create_signal('suggestions', ['ActionFields'])
    def backwards(self, orm):
        # Deleting model 'Suggestion'
        db.delete_table('suggestions_suggestion')
        # Deleting model 'SuggestedAction'
        db.delete_table('suggestions_suggestedaction')
        # Deleting model 'ActionFields'
        db.delete_table('suggestions_actionfields')
    # Frozen ORM snapshot used by South while running this migration.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'suggestions.actionfields': {
            'Meta': {'object_name': 'ActionFields'},
            'action': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'action_fields'", 'to': "orm['suggestions.SuggestedAction']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'value_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'value_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_values'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"})
        },
        'suggestions.suggestedaction': {
            'Meta': {'object_name': 'SuggestedAction'},
            'action': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'subject_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'subject_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'action_subjects'", 'to': "orm['contenttypes.ContentType']"}),
            'suggestion': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actions'", 'to': "orm['suggestions.Suggestion']"})
        },
        'suggestions.suggestion': {
            'Meta': {'object_name': 'Suggestion'},
            'comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'resolved_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resolved_suggestions'", 'null': 'True', 'to': "orm['auth.User']"}),
            'resolved_status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
            'suggested_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True', 'blank': 'True'}),
            'suggested_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'suggestions'", 'to': "orm['auth.User']"})
        }
    }
    complete_apps = ['suggestions']
"mkriheli@gmail.com"
] | mkriheli@gmail.com |
5ae9b28df851a85fea96edf6169e6cf8f14c6a50 | 07f92805a75dc91b8be2ac14c238394245eda9ea | /Python生物信息学数据管理/python-for-biologists/03-modular_programming/10-functions/calc_atom_atom_distance.py | 5156d4af63c5626cef0355e63c950a6aecc07d18 | [] | no_license | 08zhangyi/Some-thing-interesting-for-me | 6ea7366ef1f0812397300259b2e9d0e7217bcba0 | f4cbda341ada98753c57a3ba07653163522dd023 | refs/heads/master | 2023-01-11T22:54:03.396911 | 2023-01-06T05:47:41 | 2023-01-06T05:47:41 | 136,426,995 | 7 | 6 | null | null | null | null | UTF-8 | Python | false | false | 917 | py | '''
Find two alpha-C atoms in a PDB structure and calculate their distance.
-----------------------------------------------------------
(c) 2013 Allegra Via and Kristian Rother
Licensed under the conditions of the Python License
This code appears in section 10.4.4 of the book
"Managing Biological Data with Python".
-----------------------------------------------------------
'''
from math import sqrt
from distance import calc_dist
from parse_pdb import parse_atom_line
pdb = open('3G5U.pdb')
points = []
while len(points) < 2:
line = pdb.readline()
if line.startswith("ATOM"):
chain, res_type, res_num, atom, x, y, z = parse_atom_line(line)
if res_num == '123' and chain == 'A' and atom == 'CA':
points.append((x, y, z))
if res_num == '209' and chain == 'A' and atom == 'CA':
points.append((x, y, z))
print calc_dist(points[0], points[1])
| [
"395871987@qq.com"
] | 395871987@qq.com |
21393f5ec3107ae718cce881c013ad295cbc9e74 | bf74f773f0c69e0ce7c5cc57a5897ca86cca6587 | /custom_collections/tree.py | b2452b64db079d1947db82cf94a240f15c822c36 | [
"BSD-3-Clause"
] | permissive | weijia/custom_collections | a532b01b18049f0e0aad9920f8e90d45e3c24812 | e9b7bcc25f83f6a9adfbee94c825835414799aab | refs/heads/master | 2016-09-06T09:01:05.969014 | 2014-09-20T17:42:48 | 2014-09-20T17:42:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,068 | py | import uuid
from django.utils import timezone
import django.db.utils
from django.contrib.auth.models import User, Group
from django.contrib import admin
from django.conf import settings
from obj_sys.models import UfsObj
from guardian.admin import GuardedModelAdmin
from guardian.shortcuts import assign_perm
try:
from models import CollectionItem
except:
pass
gRootUuid = u"4a5e8673-f2a2-4cf2-af6c-461fa9f31a15"
def register(objectClass, group_name = "scheduling"):
module_name = objectClass.__module__.split(".")[0].lower()
class_name = objectClass.__name__.lower()
url = u"view://admin/%s/%s/add"%(module_name, class_name)
try:
for i in UfsObj.objects.filter(ufs_url = url):
return
except django.db.utils.DatabaseError:
#Database is not created yet, just return, items will be created after syncdb is executed
return
o = UfsObj(ufs_url = url, uuid = unicode(uuid.uuid4()), timestamp=timezone.now(), user=User.objects.filter(username="AnonymousUser")[0])
o.save()
c = CollectionItem(obj = o, uuid = gRootUuid, id_in_col="%s_%s_add"%(module_name, class_name),
timestamp=timezone.now(), user=User.objects.filter(username="AnonymousUser")[0])
c.save()
#Add to group
try:
group = Group.objects.filter(name=group_name)[0]
except:
#Group not exist, create it
group = Group.objects.create(name=group_name)
#print 'assigning: ', group, c
assign_perm('view_collection_item', group, c)
def get_item_id(parent_path):
subitem_list = parent_path.split("/")
parent_item_uuid = gRootUuid
for i in subitem_list:
#print 'getting uuid for item: ', i, ', parent:', parent_item_uuid, 'end'
if i == "":
continue
parent_item_uuid = CollectionItem.objects.filter(uuid = parent_item_uuid, id_in_col = i)[0].obj.uuid
#print 'returning parent', parent_item_uuid
return parent_item_uuid
def register_menu(subitem_url, subitem_text, parent_path = "/", permmited_group = None):
"""
If subitem_test contains dynamic, subitem_url is not used.
Otherwise, subitem_url is the content of this menu item.
Register a menu item in the left tree in object manager, the info is stored in obj_sys.models.Collection.
:param subitem_url: menu item's URL. When the item is clicked, the URL will be loaded to the content pane
:param subitem_text: menu item's text. It is stored in to id_in_col field for Collection and if it is
"dynamic://xxxx", the parent item's children will be dynamically generated by opening
URL: xxxx. xxxx should return a collection of items as in tags.tag_list. The format is
described in tags.tag_list as well.
:param parent_path: the parent for this menu item. Root item is "/", sub menus should start with "/" as well.
:param permmited_group:
:return: N/A
"""
try:
root_uuid = get_item_id(parent_path)
url = u"view://%s"%(subitem_url)
qs = UfsObj.objects.filter(ufs_url = url)
if 0 == qs.count():
print 'creating new ufs obj'
o = UfsObj(ufs_url = url, uuid = unicode(uuid.uuid4()), timestamp=timezone.now(), user=User.objects.filter(username="AnonymousUser")[0])
o.save()
else:
#print 'use existing item'
o = qs[0]
except django.db.utils.DatabaseError:
#Database is not created yet, just return, items will be created after syncdb is executed
return
#print 'creating collection item for root: ', root_uuid
if permmited_group is None:
#If no permission requested, set anonymous user accessable.
permitted_user_or_group = User.objects.filter(pk=settings.ANONYMOUS_USER_ID)[0]
else:
try:
permitted_user_or_group = Group.objects.filter(name = permmited_group)[0]
except:
#Group not exist, create it
permitted_user_or_group = Group.objects.create(name = permmited_group)
collqs = CollectionItem.objects.filter(uuid = root_uuid, id_in_col = subitem_text)
if 0 == collqs.count():
c = CollectionItem(obj = o, uuid = root_uuid, id_in_col = subitem_text,
timestamp=timezone.now(), user=User.objects.filter(username="AnonymousUser")[0])
c.save()
else:
c = collqs[0]
#Assign group permission
assign_perm('view_collection_item', permitted_user_or_group, c)
def register_to_sys(class_inst, admin_class = None):
if admin_class is None:
admin_class = type(class_inst.__name__+"Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i) | [
"richardwangwang@gmail.com"
] | richardwangwang@gmail.com |
43589f610d031b46faaa40ca7ed51622d5c8345d | 8c9c27cb88a2d210a5e2fb5803fe89204dba95ef | /phy/gui/qt.py | fee9e2549519dba466c131d6a190d3459af0496c | [] | no_license | arnefmeyer/phy | c13b1eceb70ee72cf0ff9c4a273e195f122fabc4 | 14663e1f2baad421d6bc9f420d34170c6c969bbe | refs/heads/master | 2020-12-07T15:42:49.605432 | 2016-04-20T21:10:38 | 2016-04-20T21:10:38 | 56,718,986 | 1 | 0 | null | 2016-04-20T20:32:18 | 2016-04-20T20:32:18 | null | UTF-8 | Python | false | false | 4,107 | py | # -*- coding: utf-8 -*-
"""Qt utilities."""
# -----------------------------------------------------------------------------
# Imports
# -----------------------------------------------------------------------------
from contextlib import contextmanager
from functools import wraps
import logging
import sys
logger = logging.getLogger(__name__)
# -----------------------------------------------------------------------------
# PyQt import
# -----------------------------------------------------------------------------
from PyQt4.QtCore import (Qt, QByteArray, QMetaObject, QObject, # noqa
QVariant, QEventLoop, QTimer,
pyqtSignal, pyqtSlot, QSize, QUrl)
try:
from PyQt4.QtCore import QPyNullVariant # noqa
except: # pragma: no cover
QPyNullVariant = None
try:
from PyQt4.QtCore import QString # noqa
except: # pragma: no cover
QString = None
from PyQt4.QtGui import (QKeySequence, QAction, QStatusBar, # noqa
QMainWindow, QDockWidget, QWidget,
QMessageBox, QApplication, QMenuBar,
QInputDialog,
)
from PyQt4.QtWebKit import QWebView, QWebPage, QWebSettings # noqa
# -----------------------------------------------------------------------------
# Utility functions
# -----------------------------------------------------------------------------
def _button_enum_from_name(name):
return getattr(QMessageBox, name.capitalize())
def _button_name_from_enum(enum):
names = dir(QMessageBox)
for name in names:
if getattr(QMessageBox, name) == enum:
return name.lower()
def _prompt(message, buttons=('yes', 'no'), title='Question'):
buttons = [(button, _button_enum_from_name(button)) for button in buttons]
arg_buttons = 0
for (_, button) in buttons:
arg_buttons |= button
box = QMessageBox()
box.setWindowTitle(title)
box.setText(message)
box.setStandardButtons(arg_buttons)
box.setDefaultButton(buttons[0][1])
return box
def _show_box(box): # pragma: no cover
return _button_name_from_enum(box.exec_())
def _input_dialog(title, sentence):
return QInputDialog.getText(None, title, sentence)
@contextmanager
def _wait_signal(signal, timeout=None):
"""Block loop until signal emitted, or timeout (ms) elapses."""
# http://jdreaver.com/posts/2014-07-03-waiting-for-signals-pyside-pyqt.html
loop = QEventLoop()
signal.connect(loop.quit)
yield
if timeout is not None:
QTimer.singleShot(timeout, loop.quit)
loop.exec_()
# -----------------------------------------------------------------------------
# Qt app
# -----------------------------------------------------------------------------
def require_qt(func):
"""Specify that a function requires a Qt application.
Use this decorator to specify that a function needs a running
Qt application before it can run. An error is raised if that is not
the case.
"""
@wraps(func)
def wrapped(*args, **kwargs):
if not QApplication.instance(): # pragma: no cover
raise RuntimeError("A Qt application must be created.")
return func(*args, **kwargs)
return wrapped
# Global variable with the current Qt application.
QT_APP = None
def create_app():
"""Create a Qt application."""
global QT_APP
QT_APP = QApplication.instance()
if QT_APP is None: # pragma: no cover
QT_APP = QApplication(sys.argv)
return QT_APP
@require_qt
def run_app(): # pragma: no cover
"""Run the Qt application."""
global QT_APP
return QT_APP.exit(QT_APP.exec_())
# -----------------------------------------------------------------------------
# Testing utilities
# -----------------------------------------------------------------------------
def _debug_trace(): # pragma: no cover
"""Set a tracepoint in the Python debugger that works with Qt."""
from PyQt4.QtCore import pyqtRemoveInputHook
from pdb import set_trace
pyqtRemoveInputHook()
set_trace()
| [
"cyrille.rossant@gmail.com"
] | cyrille.rossant@gmail.com |
8fccd0cff368e16ca6d7c0dc283a75d8b574cc55 | 75b1524774479afee0c9bc1fc4beb7325f9ac021 | /adivinhacao/adivinhacao.v10.py | e002fe645a35119e4b21a3599d5d5959a534bbc6 | [] | no_license | ronaldfalcao/formacao-python-alura | 2fb8c10dbd56a5dc34a61ed9dfc40ae03d77a41e | 460ab551e6f8d43d3e8c2de27033467dd939ef7c | refs/heads/master | 2020-12-04T08:12:08.204505 | 2020-02-27T01:11:59 | 2020-02-27T01:11:59 | 231,689,854 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,767 | py |
import random
print(36 * '*')
print("* Bem Vindo ao Jogo da Adivinhação *")
print(36 * '*')
numero_secreto = random.randrange(1, 101)
total_tentativas = 0
pontos = 1000
print("Escolha seu nível de dificuldade:\n", numero_secreto)
print("(1) Fácil\t(2) Médio\t(3)Difícil")
nivel = int(input("Escolha seu nível agora: "))
if nivel == 1:
total_tentativas = 20
elif nivel == 2:
total_tentativas = 10
elif nivel == 3:
total_tentativas = 5
else:
total_tentativas = 20 # Deixando o nível Fácil como default para qualquer outro valor
for rodada in range(1, total_tentativas + 1):
print("Rodada {} de {}.".format(rodada, total_tentativas)) # string interpolation
chute = int(input("Digite o seu número: "))
if chute < 1 or chute > 100:
print("Você deve digitar um número entre 1 e 100.")
continue
print("Você digitou", chute)
acertou = numero_secreto == chute
errou_chute_menor = numero_secreto > chute
errou_chute_maior = numero_secreto < chute
if acertou:
print("Você Acertou!!!")
print("Vocês fez {}".format(pontos))
break
else:
if errou_chute_menor:
print("Você errou! O seu chute foi menor do que o número secreto.")
if rodada == total_tentativas:
print("O número secreto era {}. Você fez {}".format(numero_secreto, pontos))
elif errou_chute_maior:
print("Você errou! O seu chute foi maior do que o número secreto.")
if rodada == total_tentativas:
print("O número secreto era {}. Você fez {} pontos.".format(numero_secreto, pontos))
pontos_perdidos = abs(numero_secreto - chute)
pontos -= pontos_perdidos
print("Fim do jogo")
| [
"ronald@ronaldfalcao.com.br"
] | ronald@ronaldfalcao.com.br |
0a9353bdfb1b9fb77bb469e3de9a5a5472e7acb1 | 2283ed34956c6a3d2aecf2bd6a0d9ed61e70b17b | /python_command_process/command_ex1.py | 67a1e208a94b747126569523e156d825c0d4bd1f | [] | no_license | starshineman/frank_python_toolbox | d6ec51be5713c4222589aefdb45852ef7976a6c2 | cd59132148db16cfc7ac40202e4a80686587f9a4 | refs/heads/master | 2021-06-09T11:23:33.616931 | 2016-12-11T03:16:39 | 2016-12-11T03:16:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,648 | py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
import sys
import getopt
def get_opt():
opts, args = getopt.getopt(sys.argv[1:], "hi:o:")
input_file=""
output_file=""
for op, value in opts:
if op == "-i":
input_file = value
print input_file
elif op == "-o":
output_file = value
print output_file
elif op == "-h":
print "help!"
sys.exit()
def get_simple_params():
print "脚本名:", sys.argv[0]
for i in range(1, len(sys.argv)):
print "参数", i, sys.argv[i]
def Usage():
print 'PyTest.py usage:'
print '-h,--help: print help message.'
print '-v, --version: print script version'
print '-o, --output: input an output verb'
print '--foo: Test option '
print '--fre: another test option'
def Version():
print 'PyTest.py 1.0.0.0.1'
def OutPut(args):
print 'Hello, %s'%args
def main(argv):
try:
opts, args = getopt.getopt(argv[1:], 'hvo:', ['output=', 'foo=', 'fre='])
except getopt.GetoptError, err:
print str(err)
Usage()
sys.exit(2)
for o, a in opts:
if o in ('-h', '--help'):
Usage()
sys.exit(1)
elif o in ('-v', '--version'):
Version()
sys.exit(0)
elif o in ('-o', '--output'):
OutPut(a)
sys.exit(0)
elif o in ('--foo',):
OutPut(a)
Foo=a
elif o in ('--fre',):
OutPut(a)
Fre=a
else:
print 'unhandled option'
sys.exit(3)
if __name__ == "__main__":
# get_opt()
main(sys.argv) | [
"junjunwang1@creditease.cn"
] | junjunwang1@creditease.cn |
a92804e62a65f55e2f8b2801e043655a9d1c621c | 8ba99582cfa3ef771bb38947247dd457002ed0b7 | /poly.py | 0578eff91db2e27dac0f3458898abbc41c11c5e2 | [
"MIT"
] | permissive | psorus/graham | 928280f9324101fa63e56902d0419b131b214f5c | b986c3657ee6a679581a5c13ce17f5b784ac8fcc | refs/heads/main | 2023-03-21T13:55:46.631383 | 2021-03-23T19:36:43 | 2021-03-23T19:36:43 | 326,035,321 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,738 | py |
def basis(n=3):
return [unit(i) for i in range(n)]
def unit(i):
ret=[]
while len(ret)<i:ret.append(0.0)
ret.append(1.0)
return poly(*ret)
class poly(object):
r1=0.0#ranges for the orthogonality
r2=1
def __init__(s,*q):
s.q=q
def ql(s,l):
ret=list(s.q)
while len(ret)<l:
# ret.insert(0,0)
ret.append(0.0)
return ret
def __len__(s):
return len(s.q)
def __add__(a,b):
qa=a.ql(len(b))
qb=b.ql(len(a))
return poly(*[pa+pb for pa,pb in zip(qa,qb)])
def __subtr__(a,b):
return a+(b*-1)
def scalar(a,b):
qa=a.ql(len(b))
qb=b.ql(len(a))
rel=[0 for i in range(2*len(qb))]
for ia,aa in enumerate(qa):
for ib,bb in enumerate(qb):
rel[ia+ib]+=aa*bb
return poly(*rel).intfrom(a.r1,a.r2)
def multwithfloat(s,f):
return poly(*[q*f for q in s.q])
def __mul__(a,b):
if type(a) is int:a=float(a)
if type(b) is int:b=float(b)
if type(a) is float:
if type(b) is float:
raise Exception("THIS SHOULD NOT HAPPEN")
else:
return b.multwithfloat(a)
else:
if type(b) is float:
return a.multwithfloat(b)
else:
return a.scalar(b)
__rmul__=__mul__
def int(s,x0=0.0):
return poly(x0,*[q/(i+1) for i,q in enumerate(s.q)])
def eval(s,x):
ret=0.0
for zw in [q*x**i for i,q in enumerate(s.q)]:
ret+=zw
return ret
def __getitem__(s,key):
return s.eval(key)
def delta(s,x1,x2):
return s[x2]-s[x1]
def intfrom(s,x1,x2):
S=s.int()
return S.delta(x1,x2)
def __repr__(s):
return str(s)
def __str__(s):
return "+".join([str(q)+"x**"+str(i) for i,q in enumerate(s.q) if not q==0.0])
| [
"simon.kluettermann@rwth-aachen.de"
] | simon.kluettermann@rwth-aachen.de |
8f19500bec07cb1d58e54569fa7819da44bba7f3 | 7d95e90453752d03606e3557227f31c673b8cdad | /Basic Programs/Python/W3ForSchools/5-DataTypes.py | f1ceafedeea49dc24c9a9fe86f58c5b893543022 | [] | no_license | mayur-jagtap/Hacktoberfest | a2d70bafba03dffe2d182a986c99e5b94f67038a | 1e7096be5132ab0d51ab58cdd424440666c7a4a8 | refs/heads/main | 2023-08-26T23:10:52.017951 | 2021-10-27T06:57:25 | 2021-10-27T06:57:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,220 | py | """
Text Type: str
Numeric Types: int, float, complex
Sequence Types: list, tuple, range
Mapping Type: dict
Set Types: set, frozenset
Boolean Type: bool
Binary Types: bytes, bytearray, memoryview
"""
#Getting data types
intv = 5
print (type(intv))
strv = "This is a string"
print (type(strv))
floatv = 20.5
print (type(floatv))
complexv = 1j
print (type(complexv))
listv = ["hello", "iahooo", "ame ame ame"]
print (type(listv))
print(listv)
tuplev = ["hello", "iahooo", "ame ame ame"]
print (type(tuplev))
print(tuplev)
rangev = range(6)
print (type(rangev))
print(rangev)
dictv = {"name" : "Yuuki", "age" : 57}
print (type(dictv))
print(dictv)
setv = {"apple", "banana", "cherry"}
print (type(setv))
frozensetv = frozenset({"apple", "banana", "cherry"})
print (type(frozensetv))
boolv = True
print (type(boolv))
boolv = False
print (type(boolv))
bytesv = b"Hello"
print (type(bytesv))
bytearrayv = bytearray(5)
print (type(bytearrayv))
memoryviewv = memoryview(bytes(5))
print (type(memoryviewv))
#Setting a specific data type
x = str("Hello World") #here i say that "Hello World is a sting"
x = int(20) #here i say that 20 it is a integer | [
"noreply@github.com"
] | mayur-jagtap.noreply@github.com |
39cee95e4282f7757bc95867b9002bfc9eab11e4 | e2b4e4715e117b413650db30e5f5cb0f4e784f84 | /mysite/settings.py | 9dc79270f793674ceb392d5b36f5b26b749aa3e6 | [] | no_license | Yuxin45/CMPUT404-lab4 | f3c183e2d355b2eb41cbad005fc4749db94633e0 | 7825cb35dc51fee6beee1ab5e84c8d8ff6ce2d9c | refs/heads/master | 2023-02-26T13:15:28.619240 | 2021-02-02T21:04:13 | 2021-02-02T21:04:13 | 335,119,882 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,115 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 3.1.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '+te$0q8yu&$!d$$^7%ntv#djkkb0m)(=k4oe28d=qg55+go$kl'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'polls.apps.PollsConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
| [
"yliu17@ualberta.ca"
] | yliu17@ualberta.ca |
21a7a0d191e436778696a60dc3d9ec84201d7805 | 9e2fee3aecc462f8a7250fe31a8a43ee6bf271d3 | /11.py | 7ff8a2cfc5126954d0d78069e53ba159a7d95d26 | [] | no_license | kushalchordiya216/OpenCV-Practice | 426396fdb82a2bf69e704e53ab21c6fe90eec7a4 | 8e6d92cc6b68a58478e4a082a842a2b776d6e64f | refs/heads/master | 2020-06-16T06:27:56.715621 | 2019-07-06T05:40:16 | 2019-07-06T05:40:16 | 195,502,061 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 498 | py | import cv2
import numpy as np
img_bgr = cv2.imread('test_image.jpg')
img_gray = cv2.cvtColor(img_bgr, cv2.COLOR_BGR2GRAY)
template = cv2.imread('template.jpg', 0)
w, h = template.shape[::-1]
res = cv2.matchTemplate(img_gray, template, cv2.TM_CCOEFF_NORMED)
threshold = 0.8
loc = np.where(res >= threshold)
for pt in zip(*loc[::-1]):
cv2.rectangle(img_bgr, pt, (pt[0]+w, pt[1]+h), (0, 255, 255), 2)
cv2.imshow('detection', img_bgr)
cv2.waitKey(0)
cv2.destroyAllWindows()
| [
"chordiyakushal@gmail.com"
] | chordiyakushal@gmail.com |
3ae34a178134449c68525baf4ebdd896725934b8 | 83ba57063f31447afa4914dfe272a7ec5f82e249 | /conditions_and_loops/positive_or_negative.py | 83e0c0c64c3ae5359dfb02aa0cc2e9f98c078855 | [] | no_license | timekeeper13/python_scripts | 81085381f35548bb6a7c2d524544178487d539df | e34433394c3cf86a6059c87bf85f9a993527c81c | refs/heads/main | 2023-04-30T13:14:53.130651 | 2021-05-14T04:53:19 | 2021-05-14T04:53:19 | 366,722,522 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 182 | py |
a = float(input("enter the number : "))
if a < 0:
print(f"the number {a} is negative")
elif a == 0 :
print(f"the number {a} is zero")
else :
print(f"the number {a} is positive") | [
"noreply@github.com"
] | timekeeper13.noreply@github.com |
7c6e0f6234acef62dcc182e1e93468181f99ce5c | 187a6558f3c7cb6234164677a2bda2e73c26eaaf | /jdcloud_sdk/services/apigateway/apis/DescribeIsDeployApiGroupsRequest.py | 5734f9aeb75d8ebb2d719eb77989012b428bf204 | [
"Apache-2.0"
] | permissive | jdcloud-api/jdcloud-sdk-python | 4d2db584acc2620b7a866af82d21658cdd7cc227 | 3d1c50ed9117304d3b77a21babe899f939ae91cd | refs/heads/master | 2023-09-04T02:51:08.335168 | 2023-08-30T12:00:25 | 2023-08-30T12:00:25 | 126,276,169 | 18 | 36 | Apache-2.0 | 2023-09-07T06:54:49 | 2018-03-22T03:47:02 | Python | UTF-8 | Python | false | false | 1,460 | py | # coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
from jdcloud_sdk.core.jdcloudrequest import JDCloudRequest
class DescribeIsDeployApiGroupsRequest(JDCloudRequest):
"""
查询分组
"""
def __init__(self, parameters, header=None, version="v1"):
super(DescribeIsDeployApiGroupsRequest, self).__init__(
'/regions/{regionId}/apiGroups:isDeploy', 'GET', header, version)
self.parameters = parameters
class DescribeIsDeployApiGroupsParameters(object):
def __init__(self, regionId, ):
"""
:param regionId: 地域ID
"""
self.regionId = regionId
self.filters = None
def setFilters(self, filters):
"""
:param filters: (Optional) deployStatus - 发布状态,已发布:1,未发布:0
"""
self.filters = filters
| [
"tancong@jd.com"
] | tancong@jd.com |
28e7a8291a172f4b4e2037a1d25ea719a7d1cf32 | 3a6cec32b4faedf54ecfd418c072f4ec0c58139b | /property_management /report/__init__.py | f28ac9003c1b73ea835fa2ead3b08bfee949bda2 | [] | no_license | ketul-1997/2021-Tranie-demo | 6db5ee0e49386f5db1287c19d6cc1a857aac6b6e | 45fa89cfb6912d122bd65b65c094e3df897096e4 | refs/heads/main | 2023-02-20T22:59:44.073229 | 2020-12-30T12:33:30 | 2020-12-30T12:33:30 | 325,518,542 | 0 | 7 | null | 2020-12-30T12:34:35 | 2020-12-30T10:16:11 | Python | UTF-8 | Python | false | false | 54 | py | from . import tenancy_repot
from . import date_report
| [
"shahketul97@gmail.com"
] | shahketul97@gmail.com |
34c7b773e53917d2159f02ce52b8b596c639c5d3 | df5646391c32799e436bb2d89df2fce4d331778a | /LSTM/rnn.py | 0507ded4ae0b1b0d1d169d017dabee0173492739 | [] | no_license | sukruc/Time_series | 73af4f04ea13fdd4d7532677ddc0fc24297d8591 | 942e105aa361702297acd0c7d3699ceac25e2167 | refs/heads/master | 2020-03-28T14:48:09.202321 | 2018-10-20T23:02:34 | 2018-10-20T23:02:34 | 148,524,053 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,057 | py | # Recurrent Neural Network
# Part 1 - Data Preprocessing
# Importing the libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import statsmodels.api as sm
import statsmodels.formula.api as sf
import statsmodels.tsa.api as st
from arch import arch_model
from plots.plot_series import tsplot
from generator.GARCH_generator import GARCH_generator
# Generate some data
np.random.seed(2)
a0 = 0.2
a1 = 0.5
a2 = 0.01
b1 = 0.3
b2 = 0.15
n = 100000
sigsq,eps = GARCH_generator(n=n,a0=a0,a1=a1,a2=a2,b1=b1,b2=b2)
training_set = eps[:60000].copy()
# Feature Scaling
from sklearn.preprocessing import MinMaxScaler
sc = MinMaxScaler(feature_range = (0, 1))
training_set_scaled = sc.fit_transform(eps.reshape(-1,1))
# Creating a data structure with 60 timesteps and 1 output
X_train = []
y_train = []
for i in range(60, 60000):
X_train.append(training_set_scaled[i-60:i, 0])
y_train.append(training_set_scaled[i, 0])
X_train, y_train = np.array(X_train), np.array(y_train)
# Reshaping
X_train = np.reshape(X_train, (X_train.shape[0], X_train.shape[1], 1))
# Part 2 - Building the RNN
# Importing the Keras libraries and packages
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers import Dropout
# Initialising the RNN
regressor = Sequential()
# Adding the first LSTM layer and some Dropout regularisation
regressor.add(LSTM(units = 50, return_sequences = True, input_shape = (X_train.shape[1], 1)))
regressor.add(Dropout(0.2))
# Adding a second LSTM layer and some Dropout regularisation
regressor.add(LSTM(units = 50, return_sequences = True))
regressor.add(Dropout(0.2))
# Adding a third LSTM layer and some Dropout regularisation
regressor.add(LSTM(units = 50, return_sequences = True))
regressor.add(Dropout(0.2))
# Adding a fourth LSTM layer and some Dropout regularisation
regressor.add(LSTM(units = 50))
regressor.add(Dropout(0.2))
# Adding the output layer
regressor.add(Dense(units = 1))
# Compiling the RNN
regressor.compile(optimizer = 'adam', loss = 'mean_squared_error')
# Fitting the RNN to the Training set
regressor.fit(X_train, y_train, epochs = 100, batch_size = 32)
# Part 3 - Making the predictions and visualising the results
# Getting the predicted series
dataset_total = eps.copy()
dataset_test= eps[60000:].copy()
inputs = dataset_total[len(dataset_total) - len(dataset_test) - 60:]
inputs = inputs.reshape(-1,1)
inputs = sc.transform(inputs)
X_test = []
for i in range(59940, 100000):
X_test.append(inputs[i-60:i, 0])
X_test = np.array(X_test)
X_test = np.reshape(X_test, (X_test.shape[0], X_test.shape[1], 1))
predicted_stock_price = regressor.predict(X_test)
predicted_stock_price = sc.inverse_transform(predicted_stock_price)
# Visualising the results
plt.plot(real_stock_price, color = 'red', label = 'Real Time Series')
plt.plot(predicted_stock_price, color = 'blue', label = 'Predicted Time Series')
plt.title('Time Series Prediction')
plt.xlabel('Time')
plt.ylabel('Series Value')
plt.legend()
plt.show()
| [
"39256971+sukruc@users.noreply.github.com"
] | 39256971+sukruc@users.noreply.github.com |
a0e47b2dc0fcc47fb71038af8cc410bbb4a1d078 | 4d30d393fbd9bdf6955409c99a3122502377d9b9 | /scripts/development/zdoom/github_release.py | 48c2cd1b2bac3a0fea86361bb50b94c322fdc844 | [] | no_license | Nonamiriuki/tools | a4a141e52d50eb4db4de9157e2288dabc47ce4cd | 6cc5371a951d4b8031ed3b04987d491270a6a7fd | refs/heads/master | 2022-04-11T01:35:35.565414 | 2020-04-01T14:19:06 | 2020-04-01T14:19:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,139 | py | #!/usr/bin/env python
import sys
import os
import traceback
import github3
if 8 != len(sys.argv):
print('Usage: github_release <user> <token> <repository> <tag> <name> <description> <file-to-upload>')
sys.exit(1)
class Config(object):
pass
config = Config()
config.user = sys.argv[1]
config.token = sys.argv[2]
config.repo = sys.argv[3]
config.tag = sys.argv[4]
config.name = sys.argv[5]
config.desc = sys.argv[6].replace(r'\n', '\n')
config.path = sys.argv[7]
try:
gh = github3.login(config.user, token=config.token)
print('Connecting to GitHub...')
repo = gh.repository(config.user, config.repo)
print('Creating GitHub release...')
release = repo.create_release(config.tag, name=config.name, body=config.desc, prerelease=True)
with open(config.path, 'rb') as asset_file:
asset_name = os.path.basename(config.path)
print('Uploading GitHub release asset...')
release.upload_asset('application/octet-stream', asset_name, asset_file)
except Exception as ex:
print('\nERROR: Failed to create GitHub release\n')
traceback.print_exc(file=sys.stdout)
sys.exit(1)
| [
"alexey.lysiuk@gmail.com"
] | alexey.lysiuk@gmail.com |
e7ef376d819a5f5d02e4afb8eb7e19d9d6c7d651 | fd8b5abc390fa584266694abda7e8d558ad43f73 | /1-python基础/2-OOP/01.py | da607f0c962dbf4803b48d78ffd8b654d4d344d5 | [] | no_license | nightwarm/Python_lerning | e01899846c8d27e2f774d1da377b7dfab01ba664 | a0483f1f96f5443059d33809c15f95ebafb29cd2 | refs/heads/master | 2020-05-09T16:36:46.187539 | 2020-02-29T06:50:09 | 2020-02-29T06:50:09 | 181,277,228 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 798 | py | '''
定义一个学生类,用来形容学生
'''
#定义一个空的类
class Student():
# 一个空类,pass代表直接跳过
#此处pass必须由
pass
# 定义一个对象
mingyue = Student()
# 在定义一个类,用来描述听Python的学生
class PythonStudent():
# 用None给不确定的值赋值
name = None
age = 18
course = 'Python'
# 需要注意
# 1.def doHomework的缩进层级
# 2.系统默认除一个self参数
def doHomework(self):
print('在做作业')
# 推荐在函数末尾使用return语句
return None
# 实例化一个叫yueyue的学生,是一个具体的人
yueyue = PythonStudent()
print(yueyue.name)
print(yueyue.age)
# 注意成员函数的调用没有传递进入参数
yueyue.doHomework() | [
"nightwarm@126.com"
] | nightwarm@126.com |
9cc54889a765bab903199f828a43ea283d327182 | 252289003a312e81033c0510ffb6c74601330e30 | /yyTagManager/serializers.py | 9f358ede44b48a4399c437519e4d9904a0c7deba | [] | no_license | ComiCoder/YoYoProject | 5283d9b83d3b87077916697b9a861b1d8233c0a6 | 3fb5a80642c98a2eeef3b78c4dbca4f3e8d86a90 | refs/heads/master | 2021-01-22T10:18:27.728374 | 2014-12-17T07:03:39 | 2014-12-17T07:03:39 | 26,566,679 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 481 | py | from rest_framework import serializers
from yyTagManager.models import YYTagInfo
from rest_framework import pagination
class YYTagInfoSerializer(serializers.ModelSerializer):
class Meta:
model = YYTagInfo
fields = ('id','tagType','tagValue','status',
'createTime','updateTime','validTime')
class YYPaginatedTagInfoSerializer(pagination.PaginationSerializer):
class Meta:
object_serializer_class = YYTagInfoSerializer | [
"glutinit@gmail.com"
] | glutinit@gmail.com |
8b7641767d7456a30a42aaefeb9cee8c4c607de4 | 51888119e10cdff12dafb060a54824632edccf3f | /Folders/Python/BlackSailSubmit.py | fb3aabf18c996ca4a44e2ffda207c3e9e2ed6b01 | [
"BSD-2-Clause"
] | permissive | kuchinal/lamakaha | b64511ad8c6d2b36da5a84a266b9e7a69acd3106 | 24e3b2ff53bcac2ad1c0e5a3b9afd4593d85f22d | refs/heads/master | 2023-09-01T17:55:56.551183 | 2023-07-31T19:32:04 | 2023-07-31T19:32:04 | 182,849,747 | 0 | 0 | null | 2021-09-10T06:34:22 | 2019-04-22T19:00:02 | Python | UTF-8 | Python | false | false | 519 | py |
import nuke
import rrSubmit_Nuke_5
def BlackSailSubmit():
try :
g = nuke.selectedNode()
f = nuke.allNodes("Write")
f= nuke.allNodes("AutoWrite")+f
for a in f:
sel = a['selected'].value()
if sel == 1:
a['disable'].setValue(0)
else:
a['disable'].setValue(1)
print "selected"
rrSubmit_Nuke_5.rrSubmit_Nuke_5()
except:
rrSubmit_Nuke_5.rrSubmit_Nuke_5()
print "all" | [
"lamakaha@gmail.com"
] | lamakaha@gmail.com |
d4d954d44be3a396eee4d2792ce89b3c14160299 | c909310c5a19affd99ece040f8977f0473d43793 | /view/loading_stage.py | 2288e6bf6cef4368024366180edf0d23f7d0d637 | [] | no_license | pineapplebin/ClickHero | e57b8589d4fa85acaf0f7d2c0fa8d67fa5edf983 | 93779653526c1b00471c0fc863aaf00306351cd4 | refs/heads/master | 2021-01-10T06:16:35.410976 | 2016-02-20T13:56:43 | 2016-02-20T13:56:43 | 51,997,869 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,348 | py | import pygame
import lib
from view.base_stage import BaseStageView
from lib.constant import C_WHITE, SCREEN_SIZE
class LoadingStageView(BaseStageView):
def __init__(self):
BaseStageView.__init__(self)
self.background_color = (0, 0, 0)
self.font = pygame.font.SysFont('微软雅黑', 24)
self.dot_count = 0
self.passed_time = 0
self.rate = 500
def render(self):
BaseStageView.render(self)
self.passed_time += lib.data.frame_time
if not lib.data.is_loading_done:
self.dot_count = int(self.passed_time / self.rate) % 4
text = '请稍候' + '.'*self.dot_count
else:
text = '请点击以继续'
show = self.font.render(text, True, C_WHITE)
w, h = show.get_size()
lib.draw.draw_image(
show, ((SCREEN_SIZE[0]-w)/2, (SCREEN_SIZE[1]-h)/2))
if self.passed_time > self.rate and self.dot_count == 0:
self.passed_time = 0
self.check_continue_click()
def check_continue_click(self):
if lib.data.is_loading_done:
if lib.event.mouse_unpressed:
lib.event.post_loadingdone(
{'stagename': lib.data.loading_stagename})
lib.data.is_loading_done = False
lib.data.loading_stagename = ''
| [
"pineapplewing@126.com"
] | pineapplewing@126.com |
ca92b11ae29b542a68de49c252a4cedc2ea136ee | 9cfbc37f52ae506ef4695476c7522e97766c583b | /format.py | afc7a416e751d1e41575a0086eaf34b8931e99e7 | [] | no_license | cdeocampo/coding-challenge-dev | 3423ed80ba5243907f945cbe5aed9b110680b331 | fbbd5c950a19e04139c617f9a6e986057ef1bb29 | refs/heads/master | 2020-03-08T01:01:22.487513 | 2018-04-02T22:32:29 | 2018-04-02T22:32:29 | 127,819,570 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 94 | py | import abc
class Format(abc.ABC):
@abc.abstractmethod
def getCost(self):
pass | [
"christian.deocampo@gmail.com"
] | christian.deocampo@gmail.com |
729ee3599b771638d9ec339109ca49aec032238f | fc2553e40712aa389a46be44d269a90fd46ff153 | /snakeGame.py | c64d55bfef5108e01862856ac84f0b85f74102c6 | [] | no_license | VonHumbolt/SnakeGameWithPythonPygame | 95b397d905260348851520bd064c5e315c7ce752 | ebf25cc5e1325c5b627c06efc6eed6d11eda9754 | refs/heads/main | 2023-05-09T06:01:53.014070 | 2021-05-26T18:20:44 | 2021-05-26T18:20:44 | 371,126,882 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,736 | py | import pygame,random
pygame.init()
WIDTH , HEIGHT = 800,600
screen = pygame.display.set_mode((WIDTH , HEIGHT))
clock = pygame.time.Clock()
direction = "UNKNOWN"
class Snake(pygame.sprite.Sprite):
def __init__(self):
super(Snake, self).__init__()
self.surf = pygame.Surface((15,15))
self.surf.fill((104, 104,104))
self.rect = self.surf.get_rect()
self.rect.x = 350
self.rect.y = 200
def update(self, keyPressed):
global direction
if keyPressed[pygame.K_UP] and direction != "DOWN":
direction = "UP"
elif keyPressed[pygame.K_DOWN] and direction != "UP":
direction = "DOWN"
elif keyPressed[pygame.K_RIGHT] and direction != "LEFT":
direction = "RIGHT"
elif keyPressed[pygame.K_LEFT] and direction != "RIGHT":
direction = "LEFT"
if direction == "UP":
self.rect.move_ip(0,-20)
snakeCoordinates.append((snake.rect.x,snake.rect.y))
elif direction == "DOWN":
self.rect.move_ip(0, 20)
snakeCoordinates.append((snake.rect.x, snake.rect.y))
elif direction == "RIGHT":
self.rect.move_ip(20, 0)
snakeCoordinates.append((snake.rect.x, snake.rect.y))
elif direction == "LEFT":
self.rect.move_ip(-20, 0)
snakeCoordinates.append((snake.rect.x, snake.rect.y))
## Border Control
if self.rect.top < 0:
self.rect.top = HEIGHT
if self.rect.top > HEIGHT:
self.rect.top = 0
if self.rect.left < 0:
self.rect.left = WIDTH
if self.rect.left > WIDTH:
self.rect.left = 0
class Body(pygame.sprite.Sprite):
def __init__(self):
super(Body, self).__init__()
self.surf = pygame.Surface((15,15))
self.surf.fill((104, 104,104))
self.rect = self.surf.get_rect()
class Apple(pygame.sprite.Sprite):
def __init__(self,randomX,randomY):
super(Apple, self).__init__()
self.surf = pygame.Surface((15,15))
self.surf.fill((200,100,50))
self.rect = self.surf.get_rect()
self.rect.x = randomX
self.rect.y = randomY
## Sprite Groups
allSprites = pygame.sprite.Group()
apples = pygame.sprite.Group()
snakes = pygame.sprite.Group()
bodies = pygame.sprite.Group()
snake = Snake()
apple = Apple(100,200)
allSprites.add(apple,snake)
apples.add(apple)
snakes.add(snake)
bodyList = list()
snakeCoordinates = list()
bodyCount = 0
running = True
while running:
clock.tick(30)
for event in pygame.event.get():
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
running = False
elif event.type == pygame.QUIT:
running = False
keypressed = pygame.key.get_pressed()
screen.fill((255,255,255))
if pygame.sprite.groupcollide(snakes,apples,0,1):
randomX = random.randint(0, WIDTH)
randomY = random.randint(0, HEIGHT)
apple.kill()
newApple = Apple(randomX, randomY)
bodyCount += 1
body = Body()
allSprites.add(newApple,body)
apples.add(newApple)
bodyList.append(body)
bodies.add(body)
## Draw all Sprites
for sprite in allSprites:
screen.blit(sprite.surf,sprite.rect)
## ForLoop for the snake's body to follow its head
if len(bodyList) > 0:
for i in range(len(bodyList)):
# Her bir parça kendisinden öncekini takip ediyor!
bodyList[i].rect = snakeCoordinates[-i-2]
screen.blit(bodyList[i].surf,bodyList[i].rect)
snakes.update(keypressed)
pygame.display.flip()
| [
"noreply@github.com"
] | VonHumbolt.noreply@github.com |
d8016bc80a93118465606928587f8e8755d4e87a | aba74338092c4de7cb504419eb7b4a19a71d35d7 | /1.딥러닝과러닝머신/4/5/bmiuse-graph.py | 8f160be6a591c5441628084fee098926c061c4d8 | [] | no_license | goodlucky1215/artificial-intelligence | 469f6ec931dcd30aae4b9d2782588e2468a3635f | 07c5fd009ca86c6ceb0f5ce9c960aeb1ffcd435a | refs/heads/master | 2022-04-24T22:57:33.094666 | 2020-04-29T13:00:59 | 2020-04-29T13:00:59 | 259,822,092 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 636 | py | import matplotlib.pyplot as plt
import pandas as pd
#pandas로 CSV 파일 읽어드리기
tbl=pd.read_csv("bmi.csv", index_col=2) #???만약 테이블 내의 특정한 열을 행 인덱스로 지정하고 싶으면 index_col 인수를 사용한다.
#그래프 그리기
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
#서브 플롯 전용 - 지정한 레이블을 임의의 색으로 칠하기
def scatter(lbl, color):
b = tbl.loc[lbl]
ax.scatter(b["weight"],b["height"],c=color,label=lbl)
scatter("fat", "red")
scatter("normal","yellow")
scatter("thin", "purple")
ax.legend()
plt.savefig("bmi-test.png")
| [
"goodlucky1215@naver.com"
] | goodlucky1215@naver.com |
19f23f916ded964153a9795db3c755a1e3549688 | 31cc24893472113dfea7db427db173e4c988b0ad | /multiplayer/lib/python3.6/copyreg.py | 1cee2c63fbd91047847c73eed36d2010834bf892 | [] | no_license | hanhanhan/multiplayer | cb1dd926c58b4826dbed324f1f422719f4edb305 | 19a6614b9757ec21476c3558a75db6d2f54cd77f | refs/heads/master | 2022-12-10T14:33:35.045390 | 2018-01-05T21:03:03 | 2018-01-05T21:03:03 | 116,425,898 | 0 | 0 | null | 2022-12-07T23:47:24 | 2018-01-05T20:55:48 | Python | UTF-8 | Python | false | false | 49 | py | /Users/hannahl/anaconda3/lib/python3.6/copyreg.py | [
"hanhanhan.lazarus@gmail.com"
] | hanhanhan.lazarus@gmail.com |
1dc159782b26f0d17f41ecff2c8a15e8a94d3429 | 989badd5ac93a8ee65b6f7e7524cd31f2a265cc0 | /apps/Pymes/forms.py | 9eab28d9131ed71ce3d4fbaa134f55ceb41e2290 | [] | no_license | n4pc/uPymer | c742074c52ae1486bf1e71a10842bb7e7eefb212 | c636027c3db9206fa01764beef1ab5a261e4c54f | refs/heads/main | 2023-01-24T22:45:42.558135 | 2020-12-03T04:11:09 | 2020-12-03T04:11:09 | 311,497,624 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,037 | py | from django import forms
from .models import Cat, Pyme, Producto
class CatForm(forms.ModelForm):
class Meta:
model = Cat
fields = ['nombre', 'descripcion','img']
labels = {
'nombre': 'Nombre',
'descripcion': 'Descripción',
'img' : 'Imágen de Categoría',
}
widgets = {
'nombre': forms.TextInput(attrs={'class': 'form-control'}),
'descripcion': forms.TextInput(attrs={'class': 'form-control'}),
}
class PymeForm(forms.ModelForm):
class Meta:
model = Pyme
fields = ['nombre', 'rut_empresa', 'descripcion', 'direccion', 'cat', 'img']
labels = {
'nombre': 'Nombre',
'rut_empresa': 'Rut de Empresa',
'descripcion': 'Descripción',
'direccion': 'Dirección',
'cat': 'Categoría de la Pyme',
'img' : 'Imágen de Pyme',
}
widgets = {
'nombre': forms.TextInput(attrs={'class': 'form-control'}),
'rut_empresa': forms.TextInput(attrs={'class': 'form-control'}),
'descripcion': forms.TextInput(attrs={'class': 'form-control'}),
'direccion': forms.TextInput(attrs={'class': 'form-control'}),
'cat': forms.Select(attrs={'class': 'form-control'}),
}
class ProductoForm(forms.ModelForm):
class Meta:
model = Producto
fields = ['nombre', 'descripcion', 'precio', 'pyme', 'img']
labels = {
'nombre': 'Nombre',
'descripcion': 'Descripción',
'precio': 'Precio',
'pyme': 'Pyme asociada al producto',
'img' : 'Imágen del producto',
}
widgets = {
'nombre': forms.TextInput(attrs={'class': 'form-control'}),
'descripcion': forms.TextInput(attrs={'class': 'form-control'}),
'precio': forms.TextInput(attrs={'class': 'form-control'}),
'pyme': forms.Select(attrs={'class': 'form-control'}),
}
| [
"73497817+n4pc@users.noreply.github.com"
] | 73497817+n4pc@users.noreply.github.com |
6c1ed26e3ef5b698878b91a283ecd74d9a4e09b7 | f4583e8d54936107e4991a1b0d79362e22c4a90e | /vfwizx/VFwizv1/fbr_maincode.py | d437496a1a3e4c3cb1a78b45328a543e41f6a675 | [] | no_license | GaiaKnowledge/VFWiz | 61874c24f7b346d81ce442ca6aa997917488b551 | 7ee2ba8afe26fedabca0f153cc50222cb9f8029e | refs/heads/master | 2020-05-04T17:46:51.357124 | 2019-08-02T21:57:50 | 2019-08-02T21:57:50 | 179,326,145 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,004 | py | # Main Code for VF Wiz - Francis Baumont De Oliveira
import json
import math
from . vf_input import inputContainer
def get_input_scenario(input_file):
with open(input_file) as f:
inputs = json.load(f)
input_scenario = inputContainer()
input_scenario.iLights = inputs['light_type']
input_scenario.iCrop = inputs['crop']
input_scenario.area = inputs['grow_area']
input_scenario.iSurface = inputs['surface_area']
input_scenario.iVolume = inputs['farm_volume']
input_scenario.iBuilding = inputs['building_type']
input_scenario.iSystem = inputs['grow_system']
input_scenario.iCO2 = inputs['co2_enrichment']
input_scenario.iEnergy = inputs['energy_price']
input_scenario.Toutdoors = inputs['average_outdoor_temperature']
input_scenario.iCrop_price = inputs['crop_price_per_kilo']
input_scenario.iWages = inputs['minimum_wage']
return input_scenario
# System
def number_of_racks(grow_system, grow_area):
if grow_system == 'ziprack_8':
no_of_racks = math.floor(grow_area/4.62963) # 54 Zipracks per 250 sq-m (including aisles, work bench and plumbing kit)
else:
raise RuntimeError("Unknown grow_system: {}".format(grow_system))
return no_of_racks
# Harvest weight
def crop(crop_type):
if crop_type == "lettuce":
harvest_weight = 0.5 # kg
else:
harvest_weight = "unknown"
return harvest_weight
def gross_yield(crop_type):
if crop_type == 'lettuce':
ys = 78.5 # kg / m2 / year
else:
ys = 'unknown'
return ys
# Plant Capacity
def plant_capacity(crop_type, grow_system, no_of_racks): # Excluding propagation and only those within the vertical farming systems
if crop_type == "lettuce" and grow_system == 'ziprack_8':
no_of_towers = no_of_racks*30 # Tight spacing with lettuce (30 towers per rack)
yield_capacity = no_of_towers*3.3 # 3.3kg of greens per tower
farm_plant_capacity = yield_capacity/harvest_weight # Potential yield divided by harvest weight of each product
elif grow_system != 'ziprack_8' or crop_type != 'lettuce':
print("unknown system or crop variety. Please insert another.")
yield_capacity = "unknown"
farm_plant_capacity = "unknown"
else:
print("unknown")
yield_capacity = "unknown"
farm_plant_capacity = "unknown"
return farm_plant_capacity, yield_capacity
# Lights
def displayspec(light_type):
if light_type == "intraspectra_spectrablade_8":
light_wattage = 75
light_efficiency = 0.4
print('The' + light_type + 'light is' + light_wattage, "Watts with an efficiency of:" + light_efficiency)
else:
light_wattage = 'unknown'
light_efficiency = 'unknown'
return light_wattage, light_efficiency
def get_qty_lights(grow_system, no_of_racks):
if grow_system == 'ziprack_8':
no_of_lights == no_of_racks*24 # Assumption that 24 lighting units are require to cover crop area of 1 Ziprack (30 towers)
else:
no_of_lights == "unknown"
return no_of_lights
def get_lights_energy(light_type, qty_lights):
lights_watts, efficiency = displayspec(light_type)
lighting_kw_usage = lights_watts*qty_lights/1000
kwh_per_day = lighting_kw_usage*12 # Assuming 12 hours of light for plants
return kwh_per_day
# HVAC
def temp_crop_reqs(crop_type):
if crop_type == 'lettuce':
Tin = 23.9 # Temperature optimal for lettuce growth
else:
Tin = 22 # 'general temperature'
return Tin
def HVAC_energy(surface_area, building_type, Tin, Tout):
if building_type == 'basement':
U = 0.5
else:
U= 24
Q = U*surface_area*(Tin-Tout)
HVAC_kwh = Q*0.00666667*24 # Conversion factor of kJ/h to kWh x 24 hours
"""" Heat Transfer Equation
Notes
-----
Q = U x SA x (Tin - Tout)
Q - Heat lost or gained due to outside temperature (kJ·h−1)
U - Overall heat transfer coefficient (kJ·h−1·m−2·°C−1)
SA - Surface Area of the space
Tin - Inside air set point temperature (°C)
Tout - Outside air temperature (°C)
"""
return HVAC_kwh
# Labour
# Energy
def daily_energy_consumption(HVAC_daily_energy, lights_daily_energy):
kwh_per_day = HVAC_daily_energy + lights_daily_energy
return kwh_per_day
)
def monthly_energy_consumption(farm_kwh_per_day):
m_energy_consumption = farm_kwh_per_day * 28 # 4 weeks, 28 days a month
return mec
# Yield
"""" Adjusted Plant Yield Equation
Notes
-----
Ya = Ys x PA x PARf x CO2f x Tf x (1 - Fr)
Adjusted Plant Yield = Standard Yield x Plant Area x PAR factor
PARf = ratio of actual PAR delivered to plant canopy compared to theoretical plant requirements. In artificial lighting
VF the value was 1 as controlled at optimal level. Sun-fed plant level from EcoTect simulation.) x
CO2f = Increment by CO2 enrichment
Tf = Temperature factor (reflects reduction of yield caused by overheating or freezing of the growing area
if indoor temperature is uncontrolled by HVAC or other systems, value can be set for 0.9 for preliminary estimation)
Fr = Failure rate, by default set at 5%
"""
def get_crop_ppfd_reqs(crop_type):
if crop_type == 'lettuce':
crop_ppfd_reqs = 295
else:
crop_ppfd_reqs = 'unknown'
return crop_ppfd_reqs
# Annual yield
def adjusted_yield(building_type, ys, crop_type, pa, light_type, co2_enrichment, tf, crop_ppfd_reqs, ppfd_lights, grow_area):
#PAR factor
if light_type == "intraspectra_spectrablade_8":
PARf = 1
else:
PARf = ppfd_lights/crop_ppfd_reqs # ratio of PAR delivered to canopy to theoretical PAR reqs
# CO2 factor
if co2_enrichment == 'yes':
CO2f = 1
else:
CO2f = 'unknown'
# standard yield
if isinstance(ys, int):
ys = ys
else:
ys = gross_yield(crop_type)
pa = grow_area
#Temperature
#Failure rate
fr = 0.05
ya = ys*pa*PARf*CO2f*tf*(1-fr)
return ya
# Sales
def sales(ya, crop_price):
crop_sales = ya*crop_price
return crop_sales
input_file = 'input_file.json'
scenario = get_input_scenario(input_file)
no_of_racks = number_of_racks(scenario.iSystem, scenario.area)
qty_lights = get_qty_lights(scenario.iSystem, no_of_racks)
lights_daily_energy = get_lights_energy(scenario.iLights, qty_lights)
HVAC_daily_energy = HVAC_energy(surface_area=scenario.iSurface, building_type=scenario.iBuilding,
Tin=temp_crop_reqs(scenario.iCrop, Tout=scenario.Toutdoors))
farm_kwh_per_day = daily_energy_consumption(HVAC_daily_energy, lights_daily_energy
monthly_energy_consumption_farm = monthly_energy_consumption(farm_kwh_per_day)
farm_plant_capacity, standard_yield = plant_capacity(scenario.iCrop, scenario.iSystem, no_of_racks)
ys = standard_yield
crop_ppfd_reqs = get_crop_ppfd_reqs(scenario.iCrop)
ppfd_lights = 295 # placeholder
tf = 1
# OPEX
OpEx: int = 0
days = 366
print("Days",days-1)
OpEx_array = []
def OpEx(days,labour daily_energy_consumption):
for i in range(days):
if i % 30 == 0:
# OpEx += labour() # Fixed costs
# OpEx += energy()
elif i % 365 == 0:
OpEx += 2 # annualcosts(ienergystandingcharge, iwaterstandingcharge, iinsurance) # Fixed costs
OpEx_array.append(OpEx)
# Operations = Bill Growth Lights + Bill Environmental Control + Bill Misc Energy + Water Bill + Seed Cost
# + Nutrient Cost + Personnel Cost + Maintenance Cost + CO2 Cost - Reduction from Renewable Energy
# Inputs = Seeds + Nutrients + Grow Media
# REVENUE
sales: int = 0
sales_array = []
for i in range(days):
if i % 30 == 0:
sales += sales(ya, crop_price)) # revenue(input_data.iSystem)
#Revenue += rent(iAnnual_rent, iSize, iLocation, iLocation_type)
#Revenue += utilitiesM(energy(), water(), internet)
#Revenue += consumables(nutrients, seeds, grow_media)
#Revenue +=
elif i % 365 == 0:
sales += 50
sales_array.append(sales)
# ARRAY conversion
sales_array = np.asarray(sales_array) # Sales as an array
OpEx_array = np.asarray(OpEx_array) # OpEx as an array
# PROFIT
def profit(sales_array, OpEx_array):
profit_array = sales_array - OpEx_array # Profit = revenue from sales - running costs
return profit_array
profit_array = profit(sales_array, OpEx_array)
def gross_profit_margin(sales_array,
cogs): # Profit and Cost of Goods Sold - i.e. cost of materials and director labour costs
gross_profit_margin = (sales_array - cogs) / sales_array # Total revenue - Cost of goods sold (COGS) / revenue
return gross_profit_margin
# gross_profit_margin(sales_array, cogs)
print("Profit £:", profit_array[-1])
plt.plot(profit_array)
plt.xlabel('Days')
plt.ylabel('Gross Profit')
plt.show()
# plt.figure()
# plt.plot(gross_profit_margin)
# plt.xlabel('Days')
# plt.ylabel('Gross Profit Margin')
# plt.show()
#
# print("Gross Profit Margin:",gross_profit_margin[-1])
# print("GOT costs ", costs) | [
"44947452+FrancisBaumont@users.noreply.github.com"
] | 44947452+FrancisBaumont@users.noreply.github.com |
795c2cd25d0c63738792fa566600877cea9a2584 | 04b0ba56d7d6749e1d3946da43e19f69caefe356 | /logme/urls.py | 8bc140c7fa8584c5d04426d94248c7c29d73df8c | [] | no_license | ChristopherDuallo/django-TimeLog | e97a37403f6ca452608f628a2146f6677ed2b717 | dfa5488941459ee194d879ab5fb2585bef52a628 | refs/heads/master | 2020-12-11T04:00:32.374521 | 2015-05-07T14:13:05 | 2015-05-07T14:13:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 270 | py | from django.conf.urls import patterns, url
from logme import views
urlpatterns = patterns('',
url(r'^$', views.Index.as_view(), name='index'),
url(r'^home/$', views.Home_Page.as_view(), name='home'),
url(r'^register/$', views.Register.as_view(), name='register'),
) | [
"ark485@gmail.com"
] | ark485@gmail.com |
6cb993adb73ad3b1b6d015990d0112732cd66128 | f47c540d5e8d1b773de57a9e66e0ad2af9b13e9c | /mwana/apps/userverification/tests.py | fc6a39684e1d412df4c24db9f9cd01eb9e384ae5 | [] | no_license | mwana/mwana | e70d1b0c59f2f3ad1300452f58b908a57210415d | 698c2f693f2c49f6dc41c1c0b6669300b619b579 | refs/heads/develop | 2021-01-17T10:11:17.502708 | 2016-02-15T10:56:56 | 2016-02-15T10:56:56 | 600,738 | 5 | 6 | null | 2016-02-03T19:12:45 | 2010-04-08T13:24:18 | Python | UTF-8 | Python | false | false | 16,874 | py | # vim: ai ts=4 sts=4 et sw=4
from mwana.apps.reports.models import SupportedLocation
from mwana.apps.userverification.models import UserVerification
from datetime import timedelta
import time
from mwana.apps.userverification import tasks
from mwana.apps.labresults.testdata.reports import *
from mwana.apps.locations.models import Location
from mwana.apps.locations.models import LocationType
import mwana.const as const
from rapidsms.models import Contact
from rapidsms.tests.scripted import TestScript
from rapidsms.contrib.messagelog.models import Message
class UserVerificationSetUp(TestScript):
    """Shared fixture for the user-verification tests.

    Builds a small location tree (Luapula province -> three districts ->
    four supported clinics plus one unsupported clinic), flags each clinic
    as a SupportedLocation, and registers clinic workers, a hub worker,
    CBAs, and district/province staff via scripted SMS ``join`` messages,
    so every test in a subclass starts from the same 13 known contacts.
    """

    def setUp(self):
        # this call is required if you want to override setUp
        super(UserVerificationSetUp, self).setUp()
        # Location types used below: clinic, district and province.
        self.type = LocationType.objects.get_or_create(singular="clinic", plural="clinics", slug=const.CLINIC_SLUGS[2])[0]
        self.type1 = LocationType.objects.get_or_create(singular="district", plural="districts", slug="districts")[0]
        self.type2 = LocationType.objects.get_or_create(singular="province", plural="provinces", slug="provinces")[0]
        # Province -> districts -> clinics; slugs double as join codes in the
        # registration script further down.
        self.luapula = Location.objects.create(type=self.type2, name="Luapula Province", slug="400000")
        self.mansa = Location.objects.create(type=self.type1, name="Mansa District", slug="403000", parent=self.luapula)
        self.samfya = Location.objects.create(type=self.type1, name="Samfya District", slug="402000", parent=self.luapula)
        self.kawambwa = Location.objects.create(type=self.type1, name="Kawambwa District", slug="401000", parent=self.luapula)
        self.mibenge = Location.objects.create(type=self.type, name="Mibenge Clinic", slug="403029", parent=self.mansa, send_live_results=True)
        self.kashitu = Location.objects.create(type=self.type, name="Kashitu Clinic", slug="402026", parent=self.samfya, send_live_results=True)
        self.mansa_central = Location.objects.create(type=self.type, name="Central Clinic", slug="403012", parent=self.mansa, send_live_results=True)
        self.salanga = Location.objects.create(type=self.type, name="Salanga Clinic", slug="401012", parent=self.kawambwa, send_live_results=True)
        # Mark every clinic created so far as supported; the "unsupported"
        # clinic is deliberately created AFTER this loop so it gets no
        # SupportedLocation record.
        for loc in Location.objects.filter(type__singular="clinic"):
            SupportedLocation.objects.create(location =loc, supported =True)
        self.unsupported = Location.objects.create(type=self.type, name="Unsupported Clinic", slug="401013", parent=self.kawambwa, send_live_results=True)
        # register staff for the clinics and also their districts and provinces
        self.assertEqual(Contact.objects.count(), 0, "Contact list is not empty")
        #create different users - control and non control
        # NOTE(review): unsupported_worker joins slug 403013, which was never
        # created above (the unsupported clinic is 401013), so that join is
        # expected to fail -- hence 13 contacts but 14 reply messages below.
        # Confirm whether the mismatched slug is intentional.
        script = """
            luapula_pho > join pho 400000 Luapula PHO 1111
            mansa_dho > join dho 403000 Mansa DHO 1111
            samfya_dho > join dho 402000 Samfya DHO 1111
            kawambwa_dho > join dho 401000 Kawambwa DHO 1111
            salanga_worker > join clinic 401012 Salanga Man 1111
            peter_hubs > join hub 401012 Peter Hubs 1111
            mibenge_worker > join clinic 403029 Mibenge Man 1111
            kashitu_worker > join clinic 402026 kashitu Man 1111
            central_worker > join clinic 403012 Central Man 1111
            unsupported_worker > join clinic 403013 I Man 1111
            mibenge_cba > join cba 403029 1 Mibenge CBA
            kashitu_cba > join cba 402026 2 kashitu cba
            central_cba1 > join cba 403012 3 Central cba1
            central_cba2 > join cba 403012 4 Central cba2
            """
        self.runScript(script)
        # 14 join attempts, 13 successful registrations (one failed join).
        self.assertEqual(Contact.objects.count(), 13)
        msgs = self.receiveAllMessages()
        # Every join attempt gets a reply, including the failed one.
        self.assertEqual(14, len(msgs))

    def tearDown(self):
        # this call is required if you want to override tearDown
        super(UserVerificationSetUp, self).tearDown()
class TestUserVerications(UserVerificationSetUp):
def testSendingOnlyOnceInAPeriod(self):
"""
defaulting clinic workers should receive user verification messages only
once in a period
"""
self.assertEqual(UserVerification.objects.count(), 0, "User verication model is not empty")
time.sleep(.1)
self.startRouter()
tasks.send_verification_request(self.router)
tasks.send_verification_request(self.router)
msgs = self.receiveAllMessages()
expected_recipients = ["salanga_worker", "mibenge_worker",
"kashitu_worker", "central_worker",'mibenge_cba',
'kashitu_cba', 'central_cba1', 'central_cba2',
'luapula_pho', 'mansa_dho', 'samfya_dho',
'kawambwa_dho', 'peter_hubs']
expected_msgs = """Hello Salanga Man. Are you still working at Salanga Clinic and still using Results160? Please respond with YES or No
Hello Mibenge Man. Are you still working at Mibenge Clinic and still using Results160? Please respond with YES or No
Hello Kashitu Man. Are you still working at Kashitu Clinic and still using Results160? Please respond with YES or No
Hello Central Man. Are you still working at Central Clinic and still using Results160? Please respond with YES or No
Hello Mibenge Cba. Are you still working as a RemindMi Agent for Mibenge Clinic? Please respond with YES or No
Hello Kashitu Cba. Are you still working as a RemindMi Agent for Kashitu Clinic? Please respond with YES or No
Hello Central Cba1. Are you still working as a RemindMi Agent for Central Clinic? Please respond with YES or No
Hello Central Cba2. Are you still working as a RemindMi Agent for Central Clinic? Please respond with YES or No
Hello Luapula Pho. Are you still working at Luapula Province PHO? Please respond with YES or No
Hello Mansa Dho. Are you still working at Mansa District DHO? Please respond with YES or No
Hello Samfya Dho. Are you still working at Samfya District DHO? Please respond with YES or No
Hello Kawambwa Dho. Are you still working at Kawambwa District DHO? Please respond with YES or No
Hello Peter Hubs. Are you still working at Salanga Clinic and still using Results160? Please respond with YES or No
"""
for msg in msgs:
self.assertTrue(msg.connection.identity in expected_recipients, "%s not in expected recipients" %msg.connection.identity)
self.assertTrue(msg.text in expected_msgs.split("\n"), "%s not in expected messages" %msg.text)
self.assertEqual(len(msgs), 13)
self.stopRouter()
    def testSendingOnlyToDefautingUsers(self):
        """
        Only defaulting clinic workers should receive user verification messages

        Contacts that sent any message recently (the script below) count as
        active and must be skipped by the verification task. After a
        contact's last inbound message is artificially aged, the task should
        target that contact again on the next run.
        """
        # These 7 contacts message the system and therefore are "active";
        # the 6 remaining clinic workers / CBAs are defaulters.
        script = """
            central_worker > i use the system
            kashitu_cba > i use the system
            luapula_pho > helo
            mansa_dho > report
            samfya_dho > help
            kawambwa_dho > msg
            peter_hubs > sent
        """
        self.runScript(script)
        self.assertEqual(UserVerification.objects.count(), 0, "User verification model is not empty")
        time.sleep(.1)
        self.startRouter()
        tasks.send_verification_request(self.router)
        msgs = self.receiveAllMessages()
        # NOTE(review): this list also contains active contacts
        # (central_worker, kashitu_cba); the per-message text check and the
        # len(msgs) == 6 assertion below are what actually exclude them.
        expected_recipients = ["salanga_worker", "mibenge_worker",
                               "kashitu_worker", "central_worker",'mibenge_cba',
                               'kashitu_cba', 'central_cba1', 'central_cba2']
        expected_msgs = """Hello Salanga Man. Are you still working at Salanga Clinic and still using Results160? Please respond with YES or No
Hello Mibenge Man. Are you still working at Mibenge Clinic and still using Results160? Please respond with YES or No
Hello Kashitu Man. Are you still working at Kashitu Clinic and still using Results160? Please respond with YES or No
Hello Mibenge Cba. Are you still working as a RemindMi Agent for Mibenge Clinic? Please respond with YES or No
Hello Central Cba1. Are you still working as a RemindMi Agent for Central Clinic? Please respond with YES or No
Hello Central Cba2. Are you still working as a RemindMi Agent for Central Clinic? Please respond with YES or No
"""
        for msg in msgs:
            self.assertTrue(msg.connection.identity in expected_recipients, "%s not in expected recipients" %msg.connection.identity)
            self.assertTrue(msg.text in expected_msgs.split("\n"), "%s not in expected messages" %msg.text)
        self.assertEqual(len(msgs), 6)
        self.stopRouter()

        # let's fake that central_worker used the system a very long time ago
        # `today` comes from the star import of
        # mwana.apps.labresults.testdata.reports -- presumably today's date;
        # TODO confirm against that module.
        msg = Message.objects.get(direction="I", contact__name="Central Man", connection__identity="central_worker")
        msg.date = today - timedelta(days = 100)
        msg.save()
#        msg.delete()
        # The first task run recorded 6 verification requests.
        self.assertEqual(UserVerification.objects.count(), 6, "Number of User verications not equal")
        time.sleep(.1)
        self.startRouter()
        tasks.send_verification_request(self.router)
        msgs = self.receiveAllMessages()
        self.stopRouter()
        # Only the newly-defaulting central_worker is prompted this time.
        self.assertEqual(len(msgs), 1)
        self.assertEqual(msgs[0].connection.identity, "central_worker", "Message was sent to wrong recipient")
        self.assertEqual(msgs[0].text, "Hello Central Man. Are you still working at Central Clinic and still using Results160? Please respond with YES or No", "Message not as expected")
        self.assertEqual(UserVerification.objects.count(), 7, "Number of User verications not equal")
def testUserVerificationWorkflow(self):
self.assertEqual(UserVerification.objects.count(), 0, "User verification model is not empty")
time.sleep(.1)
self.startRouter()
tasks.send_verification_request(self.router)
msgs = self.receiveAllMessages()
expected_recipients = ["salanga_worker", "mibenge_worker",
"kashitu_worker", "central_worker",'mibenge_cba',
'kashitu_cba', 'central_cba1', 'central_cba2',
'luapula_pho', 'mansa_dho', 'samfya_dho',
'kawambwa_dho', 'peter_hubs']
expected_msgs = """Hello Salanga Man. Are you still working at Salanga Clinic and still using Results160? Please respond with YES or No
Hello Mibenge Man. Are you still working at Mibenge Clinic and still using Results160? Please respond with YES or No
Hello Kashitu Man. Are you still working at Kashitu Clinic and still using Results160? Please respond with YES or No
Hello Central Man. Are you still working at Central Clinic and still using Results160? Please respond with YES or No
Hello Mibenge Cba. Are you still working as a RemindMi Agent for Mibenge Clinic? Please respond with YES or No
Hello Kashitu Cba. Are you still working as a RemindMi Agent for Kashitu Clinic? Please respond with YES or No
Hello Central Cba1. Are you still working as a RemindMi Agent for Central Clinic? Please respond with YES or No
Hello Central Cba2. Are you still working as a RemindMi Agent for Central Clinic? Please respond with YES or No
Hello Luapula Pho. Are you still working at Luapula Province PHO? Please respond with YES or No
Hello Mansa Dho. Are you still working at Mansa District DHO? Please respond with YES or No
Hello Samfya Dho. Are you still working at Samfya District DHO? Please respond with YES or No
Hello Kawambwa Dho. Are you still working at Kawambwa District DHO? Please respond with YES or No
Hello Peter Hubs. Are you still working at Salanga Clinic and still using Results160? Please respond with YES or No
"""
for msg in msgs:
self.assertTrue(msg.connection.identity in expected_recipients, "%s not in expected recipients" %msg.connection.identity)
self.assertTrue(msg.text in expected_msgs.split("\n"), "%s not in expected messages" %msg.text)
self.assertEqual(len(msgs), 13)
self.stopRouter()
script = """
salanga_worker > yes
mibenge_worker > no
kashitu_worker > i do
central_worker > some message
luapula_pho > hello
mansa_dho > i do
samfya_dho > yes
kawambwa_dho > yes
peter_hubs > no
"""
self.runScript(script)
msgs = self.receiveAllMessages()
self.assertTrue(0==len(msgs), "%s" % ", ".join(msg.text for msg in msgs))
self.assertEqual(UserVerification.objects.count(), 13, "User verications not equal to 8")
self.assertEqual(UserVerification.objects.filter(response="yes").count(), 3, "User verications not equal")
self.assertEqual(UserVerification.objects.filter(response="no").count(), 2, "User verications not equal")
self.assertEqual(UserVerification.objects.filter(response="i do").count(), 2, "User verications not equal")
self.assertEqual(UserVerification.objects.filter(response="some message").count(), 1, "User verications not equal to 1")
self.assertEqual(UserVerification.objects.filter(response="hello").count(), 1, "User verications not equal to 1")
def testFinalUserVerificationWorkflow(self):
    """End-to-end test of the two-round user verification workflow.

    Round 1 prompts all 13 known users and only 3 reply; round 2
    (send_final_verification_request) re-prompts the 10 silent users;
    finally inactivate_lost_users deactivates every user who never
    confirmed.
    """
    # Precondition: no verification records exist yet.
    self.assertEqual(UserVerification.objects.count(), 0, "User verification model is not empty")
    time.sleep(.1)  # presumably lets the router thread settle -- TODO confirm
    self.startRouter()
    # Round 1: broadcast the verification prompt to every registered user.
    tasks.send_verification_request(self.router)
    msgs = self.receiveAllMessages()
    expected_recipients = ["salanga_worker", "mibenge_worker",
                           "kashitu_worker", "central_worker", 'mibenge_cba',
                           'kashitu_cba', 'central_cba1', 'central_cba2',
                           'luapula_pho', 'mansa_dho', 'samfya_dho',
                           'kawambwa_dho', 'peter_hubs']
    # One template line per user; msg.text must match a line exactly,
    # so these lines are deliberately flush-left inside the literal.
    expected_msgs = """Hello Salanga Man. Are you still working at Salanga Clinic and still using Results160? Please respond with YES or No
Hello Mibenge Man. Are you still working at Mibenge Clinic and still using Results160? Please respond with YES or No
Hello Kashitu Man. Are you still working at Kashitu Clinic and still using Results160? Please respond with YES or No
Hello Central Man. Are you still working at Central Clinic and still using Results160? Please respond with YES or No
Hello Mibenge Cba. Are you still working as a RemindMi Agent for Mibenge Clinic? Please respond with YES or No
Hello Kashitu Cba. Are you still working as a RemindMi Agent for Kashitu Clinic? Please respond with YES or No
Hello Central Cba1. Are you still working as a RemindMi Agent for Central Clinic? Please respond with YES or No
Hello Central Cba2. Are you still working as a RemindMi Agent for Central Clinic? Please respond with YES or No
Hello Luapula Pho. Are you still working at Luapula Province PHO? Please respond with YES or No
Hello Mansa Dho. Are you still working at Mansa District DHO? Please respond with YES or No
Hello Samfya Dho. Are you still working at Samfya District DHO? Please respond with YES or No
Hello Kawambwa Dho. Are you still working at Kawambwa District DHO? Please respond with YES or No
Hello Peter Hubs. Are you still working at Salanga Clinic and still using Results160? Please respond with YES or No
"""
    # Every outgoing prompt must target a known identity and use one of
    # the expected templates.
    for msg in msgs:
        self.assertTrue(msg.connection.identity in expected_recipients, "%s not in expected recipients" %msg.connection.identity)
        self.assertTrue(msg.text in expected_msgs.split("\n"), "%s not in expected messages" %msg.text)
    self.assertEqual(len(msgs), 13)
    self.stopRouter()
    # Only three users answer round 1 (one "yes", two "no").
    script = """
salanga_worker > yes
mibenge_worker > no
mibenge_cba > no
"""
    self.runScript(script)
    # Verification answers are recorded silently; no auto-replies expected.
    self.assertTrue(0==len(self.receiveAllMessages()))
    self.assertEqual(UserVerification.objects.count(), 13, "User verications not equal")
    self.assertEqual(UserVerification.objects.filter(response="yes").count(), 1, "User verications not equal to 1")
    self.assertEqual(UserVerification.objects.filter(response="no").count(), 2, "User verications not equal to 2")
    time.sleep(.1)
    self.startRouter()
    # Round 2: re-prompt only the 10 users without a recorded response.
    tasks.send_final_verification_request(self.router)
    msgs = self.receiveAllMessages()
    self.stopRouter()
    # 13 round-1 records plus 10 fresh round-2 records.
    self.assertEqual(UserVerification.objects.count(), 23)
    for msg in msgs:
        self.assertTrue(msg.connection.identity in expected_recipients, "%s not in expected recipients" %msg.connection.identity)
        self.assertTrue(msg.text in expected_msgs.split("\n"), "%s not in expected messages" %msg.text)
    self.assertEqual(len(msgs), 10)
    time.sleep(.1)
    self.startRouter()
    # Two contacts are already inactive before the cleanup task runs.
    self.assertEqual(Contact.objects.filter(is_active=False).count(), 2)
    tasks.inactivate_lost_users(self.router)
    # The 10 users who never confirmed are deactivated as well (2 + 10 = 12).
    self.assertEqual(Contact.objects.filter(is_active=False).count(), 12)
| [
"sinkalation@gmail.com"
] | sinkalation@gmail.com |
d39710f1547ec275d3551e81e41ed29e3e73529b | 6b374510fbc28c817941875e0d3c1ec7aab09450 | /models/shufflenet_v2.py | 4150ad147cbaac70ef0e7767be353538ce0f39a9 | [
"MIT"
] | permissive | yangtong1989/Light_CNN | 891f68118a69306036c98969e9fdb451cc24e1fe | 1465eceb47033661ee6e5c2260f71732390bac26 | refs/heads/master | 2021-05-24T11:11:09.735708 | 2018-12-19T02:10:21 | 2018-12-19T02:10:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,577 | py | import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from collections import OrderedDict
from torch.nn import init
import math
import time
def conv_bn(inp, oup, stride, padding):
    """Return a 3x3 conv (no bias) -> BatchNorm -> ReLU block.

    Children stay positionally named ("0", "1", "2") so state_dict keys
    are unchanged.
    """
    layers = [
        nn.Conv2d(inp, oup, 3, stride, padding, bias=False),
        nn.BatchNorm2d(oup),
        nn.ReLU(inplace=True),
    ]
    return nn.Sequential(*layers)
def conv_1x1_bn(inp, oup):
    """Return a pointwise (1x1) conv -> BatchNorm -> ReLU block.

    Children stay positionally named so state_dict keys are unchanged.
    """
    layers = [
        nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
        nn.BatchNorm2d(oup),
        nn.ReLU(inplace=True),
    ]
    return nn.Sequential(*layers)
def channel_shuffle(x, groups):
    """Interleave channels across `groups` (ShuffleNet channel shuffle).

    For channels laid out as [g0_0..g0_k, g1_0..g1_k, ...] the output is
    [g0_0, g1_0, ..., g0_1, g1_1, ...]: reshape to (N, groups, C/groups,
    H, W), swap the two group axes, and flatten back.
    """
    n, c, h, w = x.data.size()
    per_group = c // groups
    shuffled = x.view(n, groups, per_group, h, w)
    shuffled = shuffled.transpose(1, 2).contiguous()
    return shuffled.view(n, -1, h, w)
class InvertedResidual(nn.Module):
    """ShuffleNet V2 building block.

    benchmodel == 1: basic unit (paper Figure 3(c)) -- the input is split
    channel-wise in half; one half passes through `banch2`, the other is
    kept as-is, and the halves are concatenated and shuffled.
    benchmodel == 2: downsampling unit (Figure 3(d)) -- the whole input
    goes through both `banch1` and `banch2` and the results are
    concatenated (doubling channels while `stride` halves resolution).

    NOTE: the `banch1`/`banch2` attribute names and the Sequential layer
    order are load-bearing for pretrained state_dicts -- do not reorder.
    """
    def __init__(self, inp, oup, stride, benchmodel):
        super(InvertedResidual, self).__init__()
        self.benchmodel = benchmodel
        self.stride = stride
        assert stride in [1, 2]

        # Each branch produces half of the output channels.
        oup_inc = oup//2

        if self.benchmodel == 1:
            # Figure 3(c): only the second half of the input is transformed.
            #assert inp == oup_inc
            self.banch2 = nn.Sequential(
                # pw
                nn.Conv2d(oup_inc, oup_inc, 1, 1, 0, bias=False),
                nn.BatchNorm2d(oup_inc),
                nn.ReLU(inplace=True),
                # dw (depthwise: groups == channels); no ReLU after it, by design
                nn.Conv2d(oup_inc, oup_inc, 3, stride, 1, groups=oup_inc, bias=False),
                nn.BatchNorm2d(oup_inc),
                # pw
                nn.Conv2d(oup_inc, oup_inc, 1, 1, 0, bias=False),
                nn.BatchNorm2d(oup_inc),
                nn.ReLU(inplace=True),
            )
        else:
            # Figure 3(d): shortcut branch is dw conv + pw conv.
            self.banch1 = nn.Sequential(
                # dw
                nn.Conv2d(inp, inp, 3, stride, 1, groups=inp, bias=False),
                nn.BatchNorm2d(inp),
                # pw
                nn.Conv2d(inp, oup_inc, 1, 1, 0, bias=False),
                nn.BatchNorm2d(oup_inc),
                nn.ReLU(inplace=True),
            )

            self.banch2 = nn.Sequential(
                # pw
                nn.Conv2d(inp, oup_inc, 1, 1, 0, bias=False),
                nn.BatchNorm2d(oup_inc),
                nn.ReLU(inplace=True),
                # dw
                nn.Conv2d(oup_inc, oup_inc, 3, stride, 1, groups=oup_inc, bias=False),
                nn.BatchNorm2d(oup_inc),
                # pw-linear
                nn.Conv2d(oup_inc, oup_inc, 1, 1, 0, bias=False),
                nn.BatchNorm2d(oup_inc),
                nn.ReLU(inplace=True),
            )

    @staticmethod
    def _concat(x, out):
        # concatenate along channel axis
        return torch.cat((x, out), 1)

    def forward(self, x):
        # NOTE(review): `out` is only assigned for benchmodel 1 or 2; any
        # other value would raise UnboundLocalError here -- confirm callers
        # only construct with benchmodel in (1, 2).
        if 1==self.benchmodel:
            # Figure 3(c): channel split, transform second half only.
            x1 = x[:, :(x.shape[1]//2), :, :]  # first branch (identity)
            x2 = x[:, (x.shape[1]//2):, :, :]  # second branch
            out = self._concat(x1, self.banch2(x2))  # concat
        elif 2==self.benchmodel:
            # Figure 3(d): both branches see the full input.
            out = self._concat(self.banch1(x), self.banch2(x))

        # Shuffle across the two concatenated halves so information mixes.
        return channel_shuffle(out, 2)
class ShuffleNetV2(nn.Module):
    """ShuffleNet V2 image classifier (Ma et al., ECCV 2018).

    Args:
        n_class: number of output classes.
        input_size: square input resolution; must be a multiple of 32
            (stem + three stages downsample by 32x before global pooling).
        width_mult: channel width multiplier; one of 0.5, 1.0, 1.5, 2.0.

    Raises:
        ValueError: if ``width_mult`` is not one of the supported values.
    """

    def __init__(self, n_class=1000, input_size=224, width_mult=1.):
        super(ShuffleNetV2, self).__init__()

        assert input_size % 32 == 0

        self.stage_repeats = [4, 8, 4]
        # index 0 is invalid and should never be called.
        # only used for indexing convenience.
        if width_mult == 0.5:
            self.stage_out_channels = [-1, 24, 48, 96, 192, 1024]
        elif width_mult == 1.0:
            self.stage_out_channels = [-1, 24, 116, 232, 464, 1024]
        elif width_mult == 1.5:
            self.stage_out_channels = [-1, 24, 176, 352, 704, 1024]
        elif width_mult == 2.0:
            self.stage_out_channels = [-1, 24, 224, 488, 976, 2048]
        else:
            # BUGFIX: the original formatted an undefined name (`num_groups`),
            # so this branch raised NameError instead of the intended ValueError.
            raise ValueError(
                "width_mult {} is not supported; expected one of "
                "0.5, 1.0, 1.5 or 2.0".format(width_mult))

        # building first layer
        input_channel = self.stage_out_channels[1]
        self.conv1 = conv_bn(3, input_channel, stride=2, padding=1)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

        self.features = []
        # building inverted residual blocks
        for idxstage in range(len(self.stage_repeats)):
            numrepeat = self.stage_repeats[idxstage]
            output_channel = self.stage_out_channels[idxstage+2]
            for i in range(numrepeat):
                # first unit of each stage downsamples (stride 2, variant 2);
                # the rest preserve resolution (stride 1, variant 1)
                if i == 0:
                    self.features.append(InvertedResidual(input_channel, output_channel, 2, 2))
                else:
                    self.features.append(InvertedResidual(input_channel, output_channel, 1, 1))
                input_channel = output_channel

        # make it nn.Sequential
        self.features = nn.Sequential(*self.features)

        # building last several layers
        self.conv_last = conv_1x1_bn(input_channel, self.stage_out_channels[-1])
        # feature map is input_size/32 square here, so this pool is global
        self.globalpool = nn.Sequential(nn.AvgPool2d(int(input_size/32)))
        # building classifier
        self.classifier = nn.Sequential(nn.Linear(self.stage_out_channels[-1], n_class))

        self.init_params()

    def init_params(self):
        """Kaiming init for convs, constant init for BN, small-normal for linear."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                # use the in-place initializers; the alias spellings without the
                # trailing underscore have been deprecated since torch 0.4
                init.kaiming_normal_(m.weight, mode='fan_out')
                if m.bias is not None:
                    init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                init.constant_(m.weight, 1)
                init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                init.normal_(m.weight, std=0.001)
                if m.bias is not None:
                    init.constant_(m.bias, 0)

    def forward(self, x):
        x = self.conv1(x)
        x = self.maxpool(x)
        x = self.features(x)
        x = self.conv_last(x)
        x = self.globalpool(x)
        # flatten (N, C, 1, 1) -> (N, C) for the linear classifier
        x = x.view(-1, self.stage_out_channels[-1])
        x = self.classifier(x)
        return x
def count_parameters(model):
    """Return the number of trainable (requires_grad) parameters in `model`."""
    total = 0
    for p in model.parameters():
        if p.requires_grad:
            total += p.numel()
    return total
def speed(model, name, inputX, inputY):
    """Time one forward pass of `model` on a random (1, 3, inputX, inputY) input.

    Fixes over the original:
    - the unused `t0 = time.time()` is removed;
    - the deprecated `Variable(..., volatile=True)` is replaced with
      `torch.no_grad()`;
    - the input is created on the model's own device instead of a
      hard-coded `.cuda()`, so CPU-only setups work too.

    Prints "=> {name} cost: {seconds}" and returns None.
    """
    try:
        device = next(model.parameters()).device
    except StopIteration:
        # parameter-less model: default to CPU
        device = torch.device('cpu')
    input = torch.rand(1, 3, inputX, inputY, device=device)
    with torch.no_grad():
        t1 = time.time()
        model(input)
        t2 = time.time()
    print('=> {} cost: {}'.format(name, t2 - t1))
def shufflenet_v2_0_5x_224(num_classes=1000):
    """ShuffleNetV2 0.5x for 224x224 inputs."""
    return ShuffleNetV2(n_class=num_classes, input_size=224, width_mult=0.5)

def shufflenet_v2_1x_224(num_classes=1000):
    """ShuffleNetV2 1.0x for 224x224 inputs."""
    return ShuffleNetV2(n_class=num_classes, input_size=224, width_mult=1.)

def shufflenet_v2_1_5x_224(num_classes=1000):
    """ShuffleNetV2 1.5x for 224x224 inputs."""
    return ShuffleNetV2(n_class=num_classes, input_size=224, width_mult=1.5)

def shufflenet_v2_2x_224(num_classes=1000):
    """ShuffleNetV2 2.0x for 224x224 inputs."""
    return ShuffleNetV2(n_class=num_classes, input_size=224, width_mult=2.)
if __name__ == '__main__':
    """Testing
    """
    # Manual smoke test: builds the 1.0x model, prints it, times one
    # forward pass and reports the parameter count.
    # NOTE(review): requires a CUDA device because of `.cuda()`; on a
    # CPU-only machine drop the `.cuda()` call.
    # model = ShuffleNetV2(n_class=1000, input_size=224, width_mult=0.5).cuda()
    # # print("=> ShuffleNetV2 0.5x 224:\n {}".format(model))
    # speed(model, 'ShuffleNetV2 0.5x 224', 224, 224) # for 224x224
    # print("=> ShuffleNetV2 0.5x 224 param : {}".format(count_parameters(model)))

    model = ShuffleNetV2(n_class=1000, input_size=224, width_mult=1.).cuda()
    print("=> ShuffleNetV2 1x 224:\n {}".format(model))
    speed(model, 'ShuffleNetV2 1x 224', 224, 224) # for 224x224
    print("=> ShuffleNetV2 1x 224 param : {}".format(count_parameters(model)))

    # model = ShuffleNetV2(n_class=1000, input_size=224, width_mult=1.5).cuda()
    # # print("=> ShuffleNetV2 1.5x 224:\n {}".format(model))
    # speed(model, 'ShuffleNetV2 1.5x 224', 224, 224) # for 224x224
    # print("=> ShuffleNetV2 1.5x 224 param : {}".format(count_parameters(model)))

    # model = ShuffleNetV2(n_class=1000, input_size=224, width_mult=2.).cuda()
    # # print("=> ShuffleNetV2 2x 224:\n {}".format(model))
    # speed(model, 'ShuffleNetV2 2x 224', 224, 224) # for 224x224
    # print("=> ShuffleNetV2 2x 224 param : {}".format(count_parameters(model)))
| [
"wellzengwei@163.com"
] | wellzengwei@163.com |
c5996923e5133fa8560cb230918c501edf16e042 | ef29c84cdc26f01fd4b578e39c0f3ed4bd8d0d34 | /ozpcenter/migrations/0024_auto_20170829_1944.py | c1349aa6e8dc7416e1bee2950abbcf70a0f1a583 | [
"Apache-2.0"
] | permissive | ozoneplatform/ozp-backend | 831863bb1c6ddf8dfaa33283edd9c4ef439e225a | d31d00bb8a28a8d0c999813f616b398f41516244 | refs/heads/master | 2020-12-14T08:50:11.801877 | 2019-01-17T13:08:06 | 2019-01-17T13:08:06 | 67,625,894 | 1 | 7 | Apache-2.0 | 2019-01-17T13:08:07 | 2016-09-07T16:45:45 | Python | UTF-8 | Python | false | false | 594 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add a self-referential parent link to Review and a uniqueness rule.

    NOTE(review): the nullable `review_parent` FK presumably enables
    threaded review replies; the unique_together constraint then allows
    at most one review per (parent, author, listing) combination --
    confirm against the app's Review model before relying on this.
    """

    dependencies = [
        ('ozpcenter', '0023_auto_20170629_1323'),
    ]

    operations = [
        # Nullable so existing top-level reviews (no parent) remain valid.
        migrations.AddField(
            model_name='review',
            name='review_parent',
            field=models.ForeignKey(blank=True, null=True, to='ozpcenter.Review'),
        ),
        migrations.AlterUniqueTogether(
            name='review',
            unique_together=set([('review_parent', 'author', 'listing')]),
        ),
    ]
| [
"mannyrivera2010@gmail.com"
] | mannyrivera2010@gmail.com |
6c8a442acb14c856d7f761064e44561c82b10f6c | 212028581b4875ac2fefa9acd7b17b88b4b8eccd | /ulugugu/values.py | e5fbdd042dbd0dbd5cebbe5d437a2fa3c34403d1 | [] | no_license | jonashaag/ulugugu | 65a3b55c2fa2d624ba7cc72cc5186eb353e7b016 | 509e3ceadbb50aad34c585b63d33284357a21ed6 | refs/heads/master | 2016-08-06T07:10:53.578924 | 2015-07-25T12:54:15 | 2015-07-25T12:54:15 | 37,680,621 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 533 | py | class Value:
pass
class Integer(Value):
  """A wrapped integer constant."""

  def __init__(self, value):
    # The raw Python int carried by this node.
    self.value = value

  def __repr__(self):
    return '<Integer %d>' % self.value
class String(Value):
  """A wrapped string constant."""

  def __init__(self, value):
    # The raw Python str carried by this node.
    self.value = value

  def __repr__(self):
    return '<String %r>' % self.value
class Application(Value):
  """Application of a binary operation to two operand values."""

  def __init__(self, operation, op1, op2):
    # Operation identifier plus its two operands, kept in call order.
    self.operation = operation
    self.op1 = op1
    self.op2 = op2

  def __repr__(self):
    return '<Application %s(%s, %s)>' % (self.operation, self.op1, self.op2)
| [
"jonas@lophus.org"
] | jonas@lophus.org |
924c4786c418c5f6ba94014768067df9fd08892e | 62009d7843d120beddd696258686c8b479713384 | /tribune/urls.py | 960254574acbddbcb52308d302a510e032a17349 | [] | no_license | Janice-M/-The-Moringa-Tribune | a81d5e94b325254c78a6c0eea3c48e031935bdba | c960fa8dac1444f6fd4ea1a823f63433ae1645e4 | refs/heads/master | 2021-06-16T20:06:18.140544 | 2019-08-20T09:54:41 | 2019-08-20T09:54:41 | 203,201,486 | 1 | 1 | null | 2021-06-10T21:53:04 | 2019-08-19T15:38:57 | Python | UTF-8 | Python | false | false | 764 | py | """tribune URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
# Route table: only the Django admin site is exposed by this project.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
]
| [
"janiceink001@gmail.com"
] | janiceink001@gmail.com |
42fdb67ff09320fd1ae7ed48b1d7252395a78674 | 93db4b48741ff4ab0a3895813a6c7543e01821ea | /leetcode/Python/875_bananas.py | ce09bb5605a8310b0d9dfe00aaffbe27d27ed1c7 | [] | no_license | shubham14/Coding_Contest_solutions | f884c458d3316bdafc6f1b1a52cf3e962c58bc47 | 1b67497f35b892c25e3d9600214fa37a738ffd40 | refs/heads/master | 2021-06-22T13:34:10.581101 | 2019-10-09T02:56:01 | 2019-10-09T02:56:01 | 131,326,516 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 440 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Jul 24 00:09:31 2018
@author: Shubham
"""
class Solution(object):
    def minEatingSpeed(self, piles, H):
        """Return the minimum integer eating speed K so all banana piles
        can be finished within H hours (LeetCode 875, "Koko Eating Bananas").

        Eating a pile of p bananas at speed K takes ceil(p / K) hours.
        Binary-search the smallest feasible K in [1, max(piles)].

        BUGFIX: the original used true division (`/`), which under
        Python 3 produced fractional hour totals and a fractional
        midpoint, yielding wrong answers; integer arithmetic restores
        the intended ceiling semantics.
        """
        def possible(K):
            # sum of ceil(p / K) over all piles, integer math only
            return sum((p + K - 1) // K for p in piles) <= H

        s, e = 1, max(piles)
        while s < e:
            m = (s + e) // 2
            if not possible(m):
                s = m + 1  # m is too slow; search the upper half
            else:
                e = m      # m works; try to go slower
        return s
| [
"shubham.ddash@gmail.com"
] | shubham.ddash@gmail.com |
0b6ba993f844ba2f1c864a92e3e56c18727c11d0 | cf7f2f9a89ec0b48a2eedbe2b09ee0eb52944d23 | /july15_numpy/july15_2.py | 0a717f1f756543be4b55ca7db6322a0e1f554e89 | [] | no_license | DongXiaLei/study_day | cc63ea4123cc60c0f50f8d85f47ffdf757bb8abe | 376762b168c4c06c3be1db15016937c2db02f18d | refs/heads/master | 2020-03-23T02:54:20.268671 | 2018-07-15T08:26:59 | 2018-07-15T08:26:59 | 140,998,528 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 529 | py | from datetime import datetime
import numpy
import sys
def numpysum(n):
    """Return the NumPy array of i**2 + i**3 for i in range(n)."""
    indices = numpy.arange(n)
    return indices ** 2 + indices ** 3
def pythonsum(n):
    """Return [i**2 + i**3 for i in range(n)] using plain Python.

    The original built two throwaway lists and mutated them element by
    element; a single list comprehension produces the same result
    directly.
    """
    return [i ** 2 + i ** 3 for i in range(n)]
# Benchmark driver: compare the pure-Python and NumPy implementations of
# the elementwise i**2 + i**3 sum over `size` items.
size = int(sys.argv[1])  # problem size from the CLI; raises IndexError if omitted
start = datetime.now()
c = pythonsum(size)
count = datetime.now()-start
print(count)  # wall-clock time of the pure-Python version
start = datetime.now()
c = numpysum(size)
count = datetime.now()-start
print(count)  # wall-clock time of the NumPy version
| [
"1594003819@qq.com"
] | 1594003819@qq.com |
24bb895264a229cc4ada928fa9f9dc331e5c44ff | b6df966960d3f4f2d9aadf6a6984e7c9d9ded540 | /utils/data_loader.py | b68715c3b662c9eaad9872cd14fee4120a39029b | [] | no_license | xhuangcv/lf-reconstruction | f8781465830829862d681c29bf11dd7a576395ec | 2d31e2c599be459d0d488b8aab89b3876cd35832 | refs/heads/main | 2023-06-03T00:33:44.739864 | 2021-06-22T07:16:17 | 2021-06-22T07:16:17 | 379,169,861 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,289 | py | import os
import configparser
import pickle
import tensorflow as tf
import numpy as np
def get_batch(path, sence, h, w, bs, data_type):
    """Build a TF1 initializable iterator over PNG images in a scene folder.

    Files are globbed (unshuffled) from `path/sence/` using a pattern
    chosen by `data_type` ('HCI' -> 'input*.png', 'Mic' -> '*.png',
    anything else -> 'lf*.png'), decoded to float32, resized to (h, w, 3),
    then batched to `bs` and repeated indefinitely.
    """
    with tf.name_scope('data_batch'):
        def decode_and_resize(filename):
            raw = tf.read_file(filename)
            img = tf.image.convert_image_dtype(tf.image.decode_png(raw), tf.float32)
            # img = img[:h,:w,:]
            resized = tf.image.resize_images(img, [h, w])
            resized.set_shape([h, w, 3])
            return resized

        glob_by_type = {'HCI': 'input*.png', 'Mic': '*.png'}
        pattern = glob_by_type.get(data_type, 'lf*.png')

        files = tf.data.Dataset.list_files('/'.join([path, sence, pattern]), shuffle=False)
        pipeline = files.map(decode_and_resize, num_parallel_calls=bs).batch(bs).repeat()
        return pipeline.make_initializable_iterator()
# def get_batch(root_path, imgs_glob, h, w, batch_size):
# with tf.name_scope('data_batch'):
# assert tf.gfile.Glob(root_path)
# def load_image_data(scene_name):
# imgs_path = tf.data.Dataset.list_files(scene_name + '/' + imgs_glob, shuffle=False)
# images = tf.contrib.data.get_single_element(imgs_path.map(load_single_image).batch(81))
# config = configparser.ConfigParser()
# config.read( scene_name + '/' + 'parameters.cfg')
# disps= tf.constant([[config.get('meta','disp_min'), config.get('meta','disp_max')]], tf.float32)
# return images, disps
# def load_single_image(filename):
# contents = tf.read_file(filename)
# images = tf.image.convert_image_dtype(tf.image.decode_png(contents), tf.float32)
# images_resize = tf.image.resize_images(images, [h, w])
# images_resize.set_shape([h,w,3])
# return images_resize
# scenes_path = tf.data.Dataset.list_files(root_path + '/*', shuffle=True)
# lf_sences = scenes_path.map(load_image_data)
# lf_sences = lf_sences.batch(batch_size).repeat()
# iterator = lf_sences.make_initializable_iterator()
# return iterator | [
"noreply@github.com"
] | xhuangcv.noreply@github.com |
51b35645ddfec9c5ce1aa5da3d87cd26d207073e | 484804c0ae919cabe5454c6e44d99cf4934027c0 | /lerPaginaWebMostrarLinhas.py | 3f49c874a1d3182bb58d47806cc1d29b876b1d6b | [] | no_license | rlaecio/segur | c00ccbc05e1ebcbf9bd919aa252d1ad7066d4b31 | af7addf6e45f15f75406cd2cafcd11c77607ce56 | refs/heads/master | 2022-10-27T22:40:33.686293 | 2020-06-16T13:12:56 | 2020-06-16T13:12:56 | 272,712,318 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 619 | py | # coding: latin-1
# versao python 3
import urllib.request, urllib.error, urllib.parse, base64
# Fetch the ISMAI homepage and print every line that mentions both
# search phrases (case-insensitively).
request = urllib.request.Request('http://www.ismai.pt')
# Context manager ensures the HTTP response is closed (the original
# leaked the open connection).
with urllib.request.urlopen(request) as result:
    lista = result.read()
listaLinhasDecod = lista.decode()
listaDelinhas = listaLinhasDecod.split('\n')
frase1 = ('ISMAI').lower()
frase2 = ('Instituto').lower()
# Iterate the lines directly instead of indexing via range(len(...)).
for umaLinha in listaDelinhas:
    umalinhaFormatada = umaLinha.lower()
    # `in` is the idiomatic membership test (same as .find(...) != -1).
    if frase1 in umalinhaFormatada and frase2 in umalinhaFormatada:
        print('{}' .format(umaLinha))
| [
"rlaecio@hotmail.com"
] | rlaecio@hotmail.com |
9fd29c1d0357ec0049c8fb9e6e66c6d6e576c565 | cf8dcb53848e873901d44cf49c3ce0d363dcab0f | /B-555 Machine Learning/SahilTyagiProject/webservice/main.py | 3f479565fee1bc22a0cc844ed1f239952940afe8 | [] | no_license | sahiltyagi4/miscellaneous | d8d24b4be8cf0e2da9a9a8d80414c78d740dabd8 | ab962de1f9980fdfcc8dd8aad8356617cdc0b1a5 | refs/heads/master | 2022-11-07T10:37:48.981921 | 2021-05-23T07:54:47 | 2021-05-23T07:54:47 | 130,197,092 | 0 | 1 | null | 2022-11-04T22:46:26 | 2018-04-19T10:08:43 | Jupyter Notebook | UTF-8 | Python | false | false | 1,088 | py | def hello_world(request):
"""Responds to any HTTP request.
Args:
request (flask.Request): HTTP request object.
Returns:
The response text or any set of values that can be turned into a
Response object using
`make_response <http://flask.pocoo.org/docs/1.0/api/#flask.Flask.make_response>`.
"""
    # Maps a Jane Austen work's title to its comma-separated topic keywords
    # (presumably the top LDA/topic-model terms per book -- TODO confirm).
    book = {}
    book['Emma'] = 'emma,churchill,hartfield'
    book['Lady Susan'] = 'courcy,mother,james'
    book['Love and Freindship'] = 'father,married,affection'
    book['Mansfield Park'] = 'house,thomas,norris'
    book['Northanger Abbey'] = 'henry,eleanor,friend'
    book['Persuasion'] = 'captain,musgrove,elizabeth'
    book['Pride and Prejudice'] = 'family,bennet,darcy'
    book['Sense and Sensibility'] = 'colonel,heart,dashwood'
    book['The Letters of Jane Austen'] = 'london,love,woman'
    book['The Watsons/ By Jane Austen and Concluded by L. Oulton'] = 'country,marry,honor'
    # Only GET is handled; other methods fall through and implicitly return None.
    if request.method == 'GET':
        book_name = request.args.get('link')  # book title passed as the 'link' query parameter
        # NOTE(review): an unknown or missing title raises KeyError here
        # (HTTP 500) -- confirm whether a friendlier response is intended.
        topics = book[book_name]
return str(topics) | [
"sahilt.tyagi@gmail.com"
] | sahilt.tyagi@gmail.com |
3cce302430399686b13d6cc49040ace97eb052a2 | 632eee486e432d1bc2a7c771db7e9a06f7cad7a9 | /2812-laercio.py | 3d976d923a342bfca9597d558079aab8c6f0269b | [] | no_license | L4res/URI-Python | d1c578d87201151540876a6b8eca2aecd833a953 | 2f59387ca38e16f6396a6ea677d71f7c2c919fc2 | refs/heads/master | 2023-03-25T17:34:57.635187 | 2020-06-15T18:20:03 | 2020-06-15T18:20:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 864 | py | for i in range(int(input())):
    # One test case per outer iteration: read the count and the values.
    n = int(input())
    lista = list(map(int, input().split()))
    lista.sort()
    impares = []
    resultado = []
    # Collect the odd values (ascending, since `lista` is sorted):
    for j in range(n):
        if lista[j] % 2 != 0:
            impares.append(lista[j])
    # Take values alternating largest, smallest, largest, ...:
    while len(impares) != 0:
        try:
            resultado.append(impares[len(impares)-1])
            impares.pop()
            resultado.append(impares[0])
            impares.pop(0)
        except IndexError:
            # Odd count: the list became empty right after taking a maximum.
            break
    # Print the result (space-separated; newline only after the last value):
    if len(resultado) == 0:
        print()
    else:
        for k in range(len(resultado)):
            if k != len(resultado)-1:
                print(resultado[k], end=" ")
            else:
print(resultado[k]) | [
"noreply@github.com"
] | L4res.noreply@github.com |
cca18cb825f175d67fcd6a0e72a4dfe55e9e107f | 2f174ad60697f3447fdfba1361ab98776e740f0f | /Chapter_5/colchester_practice.py | b9d7012d28a26acff9cb8db503cc2f62603038c1 | [] | no_license | cwb4/Impractical_Python_Projects | 3979e9633f3a035be02044afe8b25d87a43b8a86 | 7f0defc9d2f264aa63d85c2b9d3466d21adcee72 | refs/heads/master | 2022-04-24T12:58:04.727716 | 2020-04-28T20:20:53 | 2020-04-28T20:44:42 | 258,514,430 | 1 | 0 | null | 2020-04-24T13:04:53 | 2020-04-24T13:04:53 | null | UTF-8 | Python | false | false | 1,270 | py | """Solve a null cipher based on every nth-letter in every nth-word."""
import sys
def load_text(file):
    """Read `file` and return its contents with surrounding whitespace stripped."""
    with open(file) as infile:
        return infile.read().strip()
# load & process message:
filename = input("\nEnter full filename for message to translate: ")
try:
    loaded_message = load_text(filename)
except IOError as e:
    # Unreadable/missing file: report and abort with a non-zero exit code.
    print("{}. Terminating program.".format(e), file=sys.stderr)
    sys.exit(1)
# check loaded message & # of lines
print("\nORIGINAL MESSAGE = {}\n".format(loaded_message))
# convert message to list and get length
message = loaded_message.split()
end = len(message)  # NOTE(review): unused -- candidate for removal
# get user input on interval to check
increment = int(input("Input max word & letter position to \
check (e.g., every 1 of 1, 2 of 2, etc.): "))
print()
# find letters at designated intervals
for i in range(1, increment + 1):
    print("\nUsing increment letter {} of word {}".format(i, i))
    print()
    count = i - 1     # 0-based index of the next word to inspect (i-1, 2i-1, 3i-1, ...)
    location = i - 1  # fixed 0-based letter position within each selected word
    for index, word in enumerate(message):
        if index == count:
            if location < len(word):
                print("letter = {}".format(word[location]))
                count += i
            else:
                # Selected word is shorter than the letter position.
                print("Interval doesn't work", file=sys.stderr)
| [
"31315095+rlvaugh@users.noreply.github.com"
] | 31315095+rlvaugh@users.noreply.github.com |
562caff292e6ed33755469e42b6fedb48fb84855 | fcff6d0d8cdcaecb7e6c833ccd7c5e888b1533c6 | /ExoPython/conca_t_ner.py | e4a0daec6e96d06712a95941dcb07fd7838d9d85 | [] | no_license | Qais17/PYTEST | a6fb5e28a884b922c9882ecc0b7e3d97e693a593 | ae356519343d9009870df52b46bbba3246e793e6 | refs/heads/master | 2020-03-30T10:56:34.206333 | 2018-11-18T20:01:18 | 2018-11-18T20:01:18 | 151,145,019 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 207 | py | #!/usr/bin/python3.5
# -*-coding:Utf-8 -*
prenom = "Morgan"
Hello = "Good morning"
message = "today is monday March 15, 2032"
# Build the greeting with str.format instead of chained `+` concatenation
# (f-strings are avoided: the shebang targets Python 3.5).
chaine_complete = "{} {} {}".format(Hello, prenom, message)
print(chaine_complete)
| [
"qaisbewebpro@gmail.com"
] | qaisbewebpro@gmail.com |
bccecc6995a1fcb323de9ae3254a02a54008d43a | e1a2c6ed4a4b93b4697974e3b0a32a4d67daa6f6 | /venv/Lib/site-packages/pybrain3/rl/environments/ode/tasks/ccrl.py | 74047621ad42288a4b7988d703bcde82599a35e9 | [
"MIT"
] | permissive | ishatserka/MachineLearningAndDataAnalysisCoursera | cdf0f23a58617e17d6b938e3a9df17daae8585e4 | e82e772df2f4aec162cb34ac6127df10d14a625a | refs/heads/master | 2021-09-11T01:39:26.228392 | 2018-04-05T14:33:39 | 2018-04-05T14:33:39 | 117,153,454 | 0 | 0 | MIT | 2018-03-27T05:20:37 | 2018-01-11T21:05:33 | Python | UTF-8 | Python | false | false | 15,329 | py | __author__ = 'Frank Sehnke, sehnke@in.tum.de'
from pybrain3.rl.environments import EpisodicTask
from pybrain3.rl.environments.ode.sensors import SpecificBodyPositionSensor
from scipy import tanh, zeros, array, random, sqrt, asarray
#Basic class for all ccrl tasks
class CCRLTask(EpisodicTask):
    """Base episodic task for the CCRL robot-arm reaching/grasping family.

    Wraps the ODE environment with normalized sensors/actions, a simple
    PID angle controller, and a distance-based reward toward the target
    object.  Subclasses mainly override episode length, target placement
    (`pertGlasPos`) and the reward.
    """
    def __init__(self, env):
        EpisodicTask.__init__(self, env)
        # Overall maximal torque - multiplied with the relative max torque
        # of each individual joint.
        self.maxPower = 100.0
        self.reward_history = []
        self.count = 0  # timestep counter
        self.epiLen = 1500  # suggested episode length for normal tasks
        self.incLearn = 0  # counts task resets, for incremental learning
        self.env.FricMu = 20.0  # CCRL needs higher friction
        self.env.dt = 0.002  # ...and a finer simulation timestep
        # normalize standard sensors to (-1, 1)
        self.sensor_limits = []
        # Angle sensors: per-joint limits from the environment.
        for i in range(self.env.actLen):
            self.sensor_limits.append((self.env.cLowList[i], self.env.cHighList[i]))
        # Joint velocity sensors
        for i in range(self.env.actLen):
            self.sensor_limits.append((-20, 20))
        # Norm all actor dimensions to (-1, 1)
        self.actor_limits = [(-1, 1)] * env.actLen
        self.oldAction = zeros(env.actLen, float)
        # dist[0:3]: hand->object vector, dist[3:6] and dist[6:9]:
        # hand-orientation terms (see getObservation).
        self.dist = zeros(9, float)
        self.dif = array([0.0, 0.0, 0.0])  # per-task offset added to the object position
        self.target = array([-6.5, 1.75, -10.5])
        self.grepRew = 0.0
        self.tableFlag = 0.0
        # Extra task-specific position sensors: object, palm, two fingers.
        self.env.addSensor(SpecificBodyPositionSensor(['objectP00'], "glasPos"))
        self.env.addSensor(SpecificBodyPositionSensor(['palmLeft'], "palmPos"))
        self.env.addSensor(SpecificBodyPositionSensor(['fingerLeft1'], "finger1Pos"))
        self.env.addSensor(SpecificBodyPositionSensor(['fingerLeft2'], "finger2Pos"))
        # we changed sensors, so update the environment's sensor length
        self.env.obsLen = len(self.env.getSensors())
        # normalization range for the task-specific sensors
        for i in range(self.env.obsLen - 2 * self.env.actLen):
            self.sensor_limits.append((-4, 4))

    def getObservation(self):
        """ a filtered mapping to getSample of the underlying environment. """
        sensors = self.env.getSensors()
        # The last 12 raw sensor values are the four 3-D positions added in
        # __init__ (object, palm, finger1, finger2), in that order.
        # Sensor: hand (mean of palm+fingers) to target object, scaled.
        for i in range(3):
            self.dist[i] = ((sensors[self.env.obsLen - 9 + i] + sensors[self.env.obsLen - 6 + i] + sensors[self.env.obsLen - 3 + i]) / 3.0 - (sensors[self.env.obsLen - 12 + i] + self.dif[i])) * 4.0 #sensors[self.env.obsLen-12+i]
        # Sensor: hand angle to horizontal plane, X-axis (finger2 - finger1).
        for i in range(3):
            self.dist[i + 3] = (sensors[self.env.obsLen - 3 + i] - sensors[self.env.obsLen - 6 + i]) * 5.0
        # Sensor: hand angle to horizontal plane, Y-axis (finger mean - palm).
        for i in range(3):
            self.dist[i + 6] = ((sensors[self.env.obsLen - 3 + i] + sensors[self.env.obsLen - 6 + i]) / 2.0 - sensors[self.env.obsLen - 9 + i]) * 10.0
        if self.sensor_limits:
            sensors = self.normalize(sensors)
        # Observation = normalized raw sensors (minus the 12 position values)
        # + the 9 derived distance terms + the previous action.
        sens = []
        for i in range(self.env.obsLen - 12):
            sens.append(sensors[i])
        for i in range(9):
            sens.append(self.dist[i])
        for i in self.oldAction:
            sens.append(i)
        return sens

    def performAction(self, action):
        # Filtered mapping towards performAction of the underlying environment.
        # The standard CCRL task uses a PID controller to directly control
        # angles instead of forces, which makes most tasks easier to learn.
        self.oldAction = action
        # Grasping as reflex depending on the distance to target - comment in for easier grasping
        #if abs(self.dist[2])<2.0: action[15]=(1.0+2.0*action[15])*.3333 #self.grepRew=action[15]*.01
        #else: action[15]=(-1.0+2.0*action[15])*.3333 #self.grepRew=action[15]*-.03
        isJoints=array(self.env.getSensorByName('JointSensor')) #The joint angles
        isSpeeds=array(self.env.getSensorByName('JointVelocitySensor')) #The joint angular velocitys
        # Map the (-1, 1) network output onto each joint's angle interval.
        act=(action+1.0)/2.0*(self.env.cHighList-self.env.cLowList)+self.env.cLowList #norm output to action intervall
        # Proportional term on angle error, damping term on velocity,
        # squashed and scaled by per-joint max torque ("simple PID").
        action=tanh((act-isJoints-0.9*isSpeeds*self.env.tourqueList)*16.0)*self.maxPower*self.env.tourqueList #simple PID
        EpisodicTask.performAction(self, action)
        #self.env.performAction(action)

    def isFinished(self):
        # returns True once the timestep counter exceeds the episode length,
        # resetting the task as a side effect
        if self.count > self.epiLen:
            self.res()
            return True
        else:
            self.count += 1
            return False

    def res(self):
        # resets counter and per-episode flags; bumps the incremental counter
        self.count = 0
        self.incLearn += 1
        self.reward_history.append(self.getTotalReward())
        self.tableFlag = 0.0

    def getReward(self):
        # rewarded for approaching the object; penalized while touching the table
        dis = sqrt((self.dist[0:3] ** 2).sum())
        return (25.0 - dis) / float(self.epiLen) - float(self.env.tableSum) * 0.1
#Learn to grasp a glas at a fixed location
class CCRLGlasTask(CCRLTask):
    """Learn to grasp a glass at one fixed location (shorter episodes)."""
    def __init__(self, env):
        CCRLTask.__init__(self, env)
        self.dif = array([0.0, 0.0, 0.0])
        self.epiLen = 1000  # suggested episode length for this task

    def isFinished(self):
        # returns True once the episode length is reached; also places the
        # glass at its fixed position on the first timestep
        if self.count > self.epiLen:
            self.res()
            return True
        else:
            if self.count == 1: self.pertGlasPos(0)
            self.count += 1
            return False

    def pertGlasPos(self, num):
        # Single fixed glass perturbation; `num` other than 0 is a no-op.
        if num == 0: self.env.pert = asarray([0.0, 0.0, 0.5])

    def getReward(self):
        # grip: bonus once at least two grasp contacts are registered,
        # growing with additional contacts
        if self.env.glasSum >= 2: grip = 1.0 + float(self.env.glasSum - 2)
        else: grip = 0.0
        # Any table contact during the episode sets a sticky end penalty.
        if self.env.tableSum > 0: self.tableFlag = 10.0
        # Ignore two orientation components for this task's distance.
        self.dist[3] = 0.0
        self.dist[8] = 0.0
        dis = sqrt((self.dist ** 2).sum())
        nig = (abs(self.dist[4]) + 1.0)  # discounts grip by hand tilt
        if self.env.stepCounter == self.epiLen:
            # Terminal reward: large base minus distance and table penalty.
            return 25.0 + grip / nig - dis - self.tableFlag #-dis
        else:
            # Small shaping reward each step.
            return (25.0 - dis) / float(self.epiLen) + (grip / nig - float(self.env.tableSum)) * 0.1 #+self.grepRew (10.0-dis)/float(self.epiLen)+
#Learn to grasp a plate at a fixed location
class CCRLPlateTask(CCRLTask):
    """Learn to grasp a plate at one fixed location.

    Same structure as CCRLGlasTask but with a target offset (`dif`)
    suited to the plate and a flat grip bonus.
    """
    def __init__(self, env):
        CCRLTask.__init__(self, env)
        self.dif = array([0.0, 0.2, 0.8])  # hand target offset relative to the plate
        self.epiLen = 1000  # suggested episode length for this task

    def isFinished(self):
        # returns True once the episode length is reached; places the plate
        # on the first timestep
        if self.count > self.epiLen:
            self.res()
            return True
        else:
            if self.count == 1: self.pertGlasPos(0)
            self.count += 1
            return False

    def pertGlasPos(self, num):
        # Single fixed object perturbation; `num` other than 0 is a no-op.
        if num == 0: self.env.pert = asarray([0.0, 0.0, 0.5])

    def getReward(self):
        # Flat grip bonus once two grasp contacts are registered.
        if self.env.glasSum >= 2: grip = 1.0
        else: grip = 0.0
        # Any table contact sets a sticky end penalty.
        if self.env.tableSum > 0: self.tableFlag = 10.0
        #self.dist[4]=0.0
        #self.dist[8]=0.0
        # Only the positional distance (not orientation) is penalized here.
        dis = sqrt((self.dist[0:3] ** 2).sum())
        if self.count == self.epiLen:
            # Terminal reward.
            return 25.0 + grip - dis - self.tableFlag #/nig
        else:
            # Per-step shaping reward.
            return (25.0 - dis) / float(self.epiLen) + (grip - float(self.env.tableSum)) * 0.1 #/nig -(1.0+self.oldAction[15])
#Learn to grasp a glas at 5 different locations
class CCRLGlasVarTask(CCRLGlasTask):
    """Learn to grasp a glass at 5 different locations.

    One long episode is split into 5 equal segments; at the start of each
    segment the environment is reset and the glass moved to the next of
    five fixed positions.
    """
    def __init__(self, env):
        CCRLGlasTask.__init__(self, env)
        self.epiLen = 5000  # 5 segments of 1000 steps each

    def isFinished(self):
        # returns True once the episode length is reached; at each segment
        # boundary the environment is reset and the glass repositioned.
        # NOTE: `self.epiLen / 5` is float division under Python 3 (this is
        # Python-2-era code), but int == float comparisons still match.
        if self.count > self.epiLen:
            self.res()
            return True
        else:
            if self.count == 1:
                self.pertGlasPos(0)
            if self.count == self.epiLen / 5 + 1:
                self.env.reset()
                self.pertGlasPos(1)
            if self.count == 2 * self.epiLen / 5 + 1:
                self.env.reset()
                self.pertGlasPos(2)
            if self.count == 3 * self.epiLen / 5 + 1:
                self.env.reset()
                self.pertGlasPos(3)
            if self.count == 4 * self.epiLen / 5 + 1:
                self.env.reset()
                self.pertGlasPos(4)
            self.count += 1
            return False

    def pertGlasPos(self, num):
        # Five fixed glass positions, indexed 0..4.
        if num == 0: self.env.pert = asarray([1.0, 0.0, 0.5])
        if num == 1: self.env.pert = asarray([-1.0, 0.0, 0.5])
        if num == 2: self.env.pert = asarray([1.0, 0.0, 0.0])
        if num == 3: self.env.pert = asarray([-1.0, 0.0, 0.0])
        if num == 4: self.env.pert = asarray([0.0, 0.0, 0.25])

    def getReward(self):
        # Flat grip bonus once two grasp contacts are registered.
        if self.env.glasSum >= 2: grip = 1.0
        else: grip = 0.0
        # Any table contact sets a sticky end penalty.
        if self.env.tableSum > 0: self.tableFlag = 10.0
        # Ignore two orientation components for this task's distance.
        self.dist[3] = 0.0
        self.dist[8] = 0.0
        dis = sqrt((self.dist ** 2).sum())
        nig = (abs(self.dist[4]) + 1.0)  # discounts grip by hand tilt
        # Terminal-style reward at the end of each of the five segments.
        if self.count == self.epiLen or self.count == self.epiLen / 5 or self.count == 2 * self.epiLen / 5 or self.count == 3 * self.epiLen / 5 or self.count == 4 * self.epiLen / 5:
            return 25.0 + grip / nig - dis - self.tableFlag #/nig
        else:
            # Per-step shaping reward.
            return (25.0 - dis) / float(self.epiLen) + (grip / nig - float(self.env.tableSum)) * 0.1 #/nig
#Learn to grasp a glas at random locations
class CCRLGlasVarRandTask(CCRLGlasVarTask):
def pertGlasPos(self, num):
self.env.pert = asarray([random.random()*2.0 - 1.0, 0.0, random.random()*0.5 + 0.5])
#Some experimental stuff
class CCRLPointTask(CCRLGlasVarTask):
def __init__(self, env):
CCRLGlasVarTask.__init__(self, env)
self.epiLen = 1000 #suggestet episodic length for normal Johnnie tasks
def isFinished(self):
#returns true if episode timesteps has reached episode length and resets the task
if self.count > self.epiLen:
self.res()
return True
else:
if self.count == 1:
self.pertGlasPos(0)
self.count += 1
return False
def getObservation(self):
""" a filtered mapping to getSample of the underlying environment. """
sensors = self.env.getSensors()
sensSort = []
#Angle and angleVelocity
for i in range(32):
sensSort.append(sensors[i])
#Angles wanted (old action)
for i in self.oldAction:
sensSort.append(i)
#Hand position
for i in range(3):
sensSort.append((sensors[38 + i] + sensors[41 + i]) / 2)
#Hand orientation (Hack - make correkt!!!!)
sensSort.append((sensors[38] - sensors[41]) / 2 - sensors[35]) #pitch
sensSort.append((sensors[38 + 1] - sensors[41 + 1]) / 2 - sensors[35 + 1]) #yaw
sensSort.append((sensors[38 + 1] - sensors[41 + 1])) #roll
#Target position
for i in range(3):
sensSort.append(self.target[i])
#Target orientation
for i in range(3):
sensSort.append(0.0)
#Object type (start with random)
sensSort.append(float(random.randint(-1, 1))) #roll
#normalisation
if self.sensor_limits:
sensors = self.normalize(sensors)
sens = []
for i in range(32):
sens.append(sensors[i])
for i in range(29):
sens.append(sensSort[i + 32])
#calc dist to target
self.dist = array([(sens[54] - sens[48]), (sens[55] - sens[49]), (sens[56] - sens[50]), sens[51], sens[52], sens[53], sens[15]])
return sens
def pertGlasPos(self, num):
if num == 0: self.target = asarray([0.0, 0.0, 1.0])
self.env.pert = self.target.copy()
self.target = self.target.copy() + array([-6.5, 1.75, -10.5])
def getReward(self):
dis = sqrt((self.dist ** 2).sum())
return (25.0 - dis) / float(self.epiLen) - float(self.env.tableSum) * 0.1
class CCRLPointVarTask(CCRLPointTask):
def __init__(self, env):
CCRLPointTask.__init__(self, env)
self.epiLen = 2000 #suggestet episodic length for normal Johnnie tasks
def isFinished(self):
#returns true if episode timesteps has reached episode length and resets the task
if self.count > self.epiLen:
self.res()
return True
else:
if self.count == 1:
self.pertGlasPos(0)
if self.count == self.epiLen / 2 + 1:
self.env.reset()
self.pertGlasPos(1)
self.count += 1
return False
def getObservation(self):
""" a filtered mapping to getSample of the underlying environment. """
sensors = self.env.getSensors()
sensSort = []
#Angle and angleVelocity
for i in range(32):
sensSort.append(sensors[i])
#Angles wanted (old action)
for i in self.oldAction:
sensSort.append(i)
#Hand position
for i in range(3):
sensSort.append((sensors[38 + i] + sensors[41 + i]) / 2)
#Hand orientation (Hack - make correkt!!!!)
sensSort.append((sensors[38] - sensors[41]) / 2 - sensors[35]) #pitch
sensSort.append((sensors[38 + 1] - sensors[41 + 1]) / 2 - sensors[35 + 1]) #yaw
sensSort.append((sensors[38 + 1] - sensors[41 + 1])) #roll
#Target position
for i in range(3):
sensSort.append(self.target[i])
#Target orientation
for i in range(3):
sensSort.append(0.0)
#Object type (start with random)
sensSort.append(float(random.randint(-1, 1))) #roll
#normalisation
if self.sensor_limits:
sensors = self.normalize(sensors)
sens = []
for i in range(32):
sens.append(sensors[i])
for i in range(29):
sens.append(sensSort[i + 32])
#calc dist to target
self.dist = array([(sens[54] - sens[48]) * 10.0, (sens[55] - sens[49]) * 10.0, (sens[56] - sens[50]) * 10.0, sens[51], sens[52], sens[53], 1.0 + sens[15]])
return sens
def pertGlasPos(self, num):
if num == 0: self.target = asarray([1.0, 0.0, 1.0])
if num == 1: self.target = asarray([-1.0, 0.0, 1.0])
if num == 2: self.target = asarray([1.0, 0.0, 0.0])
if num == 3: self.target = asarray([-1.0, 0.0, 0.0])
if num == 4: self.target = asarray([0.0, 0.0, 0.5])
self.env.pert = self.target.copy()
self.target = self.target.copy() + array([-6.5, 1.75, -10.5])
def getReward(self):
dis = sqrt((self.dist ** 2).sum())
subEpi = self.epiLen / 2
if self.count == self.epiLen or self.count == subEpi:
return (25.0 - dis) / 2.0
else:
return (25.0 - dis) / float(self.epiLen) - float(self.env.tableSum) * 0.1
| [
"shatserka@gmail.com"
] | shatserka@gmail.com |
7b020eaaeb0622fa1efc754bb3ed788aeedd1ff8 | d17706b3061c93b9f83f2e9c28e58eadcbe058cb | /problems/9.py | effd6ea462593ec4f6d513b7cffab8a37e27a993 | [] | no_license | Luctia/project-euler | b89c75a40c175216c9db00288edfcceb19e77ae3 | 56c207b37d67dcb9ae24e2afe61f4b145d69773a | refs/heads/master | 2023-08-25T12:58:49.430470 | 2021-10-30T17:03:20 | 2021-10-30T17:03:20 | 391,651,581 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 294 | py | def bruteforce(target):
for a in range(1, target + 1):
for b in range(a, target + 1 - a):
for c in range(b, target + 1 - a - b):
if a**2 + b**2 == c**2 and a + b + c == target:
return a * b * c
return -1
print(bruteforce(1000))
| [
"timmerman.luc1999@gmail.com"
] | timmerman.luc1999@gmail.com |
aa611c1670449e133c290241f9c2bbbc38d1505a | ed21823488a1cca51009793efa0b124e40d224a4 | /neurobioseg/170111_avoid_redundant_path_calculation/p170111_03_compute_paths.py | b61c0656c06859d665bca2fe34b87a67a2cc9716 | [] | no_license | jhennies/py_devel | 4a41e13ec8cd9b834c3d5acf64becc0fa8ffc479 | 9fc860be95ae91064a40f25e26d4024fbae6eb1f | refs/heads/master | 2021-01-16T23:25:56.716283 | 2017-03-10T17:49:55 | 2017-03-10T17:49:55 | 45,381,183 | 1 | 0 | null | 2017-03-10T17:49:56 | 2015-11-02T08:21:35 | Python | UTF-8 | Python | false | false | 4,945 | py |
import os
import inspect
from hdf5_image_processing import Hdf5ImageProcessing as IP, Hdf5ImageProcessingLib as ipl
from hdf5_processing import RecursiveDict as rdict
from shutil import copy, copyfile
import numpy as np
import matplotlib.pyplot as plt
import processing_libip as libip
import sys
from yaml_parameters import YamlParams
__author__ = 'jhennies'
def load_images(filepath, skeys=None, recursive_search=False, logger=None):
if logger is not None:
logger.logging('Loading data from \n{}', filepath)
logger.logging('With skeys = {}', skeys)
else:
print 'Loading data from \n{}'.format(filepath)
data = ipl()
data.data_from_file(
filepath=filepath,
skeys=skeys,
recursive_search=recursive_search,
nodata=True
)
return data
def simplify_statistics(statistics, iterations=3):
newstats = statistics.dcp()
for i in xrange(0, iterations):
for d, k, v, kl in statistics.data_iterator(yield_short_kl=True):
if v == 0 or not v:
newstats[kl].pop(k)
statistics = newstats.dcp()
return newstats
def compute_paths(yparams):
all_params = yparams.get_params()
# Zero'th layer:
# --------------
zeroth = rdict(all_params['compute_paths'])
if 'default' in zeroth:
zeroth_defaults = zeroth.pop('default')
else:
zeroth_defaults = ipl()
for exp_lbl, experiment in zeroth.iteritems():
# First layer
# -----------
# An experiment is now selected and performed
yparams.logging('Performing experiment {}\n==============================\n', exp_lbl)
first = zeroth_defaults.dcp()
first.merge(experiment)
if 'default' in first:
first_defaults = first.pop('default')
else:
first_defaults = ipl()
statistics = rdict()
for exp_class_lbl in ['truepaths', 'falsepaths']:
# Final layer
# -----------
# The true or false paths for the current experiment are here computed, respectively
yparams.logging('Computing {}...\n------------------------------\n', exp_class_lbl)
final = first_defaults.dcp()
final.merge(first[exp_class_lbl])
exp_sources = final['sources']
exp_params = final['params']
exp_target = final['target']
# Load the necessary images
data=ipl()
for datakey, content in exp_sources.iteritems():
data[datakey] = load_images(
all_params[content[0]] + all_params[content[1]],
skeys=content[2]['skeys'],
recursive_search=False,
logger=yparams
)
yparams.logging('\nInitial datastructure: \n\n{}', data.datastructure2string(maxdepth=4))
yparams.logging('experiment_params: \n{}', exp_params)
# Compute the paths
# -----------------
paths = ipl()
for_class = False
if exp_class_lbl == 'truepaths':
for_class = True
paths[exp_lbl][exp_class_lbl], statistics[exp_lbl][exp_class_lbl] = libip.compute_paths_for_class(
data, 'segm', 'conts', 'dt', 'gt',
exp_params, for_class=for_class, ignore=[], debug=all_params['debug'],
logger=yparams
)
yparams.logging(
'\nPaths datastructure after running {}: \n\n{}',
exp_class_lbl,
paths.datastructure2string()
)
def val(x):
return x
yparams.logging(
'\nStatistics after {}: \n\n{}', exp_class_lbl,
simplify_statistics(statistics[exp_lbl]).datastructure2string(function=val)
)
# Save the result to disk
# -----------------------
targetfile = all_params[exp_target[0]] + all_params[exp_target[1]]
paths.write(filepath=targetfile)
def val(x):
return x
yparams.logging(
'\nStatistics after full experiment: \n\n{}',
simplify_statistics(statistics[exp_lbl]).datastructure2string(function=val)
)
def run_compute_paths(yamlfile, logging=True):
yparams = YamlParams(filename=yamlfile)
params = yparams.get_params()
# Logger stuff
yparams.set_indent(1)
yparams.startlogger(
filename=params['resultfolder'] + 'compute_paths.log',
type='w', name='ComputePaths'
)
try:
compute_paths(yparams)
yparams.logging('')
yparams.stoplogger()
except:
yparams.errout('Unexpected error')
if __name__ == '__main__':
yamlfile = os.path.dirname(os.path.abspath(__file__)) + '/parameters_ref.yml'
run_compute_paths(yamlfile, logging=False) | [
"julianhennies@hotmail.de"
] | julianhennies@hotmail.de |
15248ba595c0c8d26c0e60ebde078f523c33804a | ce9386a806af873c566f281527795e4b2db0276a | /dwr/ds_partition/query26/voila_sampling.py | 07b0ce14528adca9e898145663ce2f9d6ffd1318 | [] | no_license | rischanlab/cvopt | 05d7df9406a441207ba2ad7e508e82bedbd5bbb4 | bbd75b61957c97507e45613a041b6d84213cf897 | refs/heads/master | 2023-03-10T02:31:36.563970 | 2021-02-24T22:57:47 | 2021-02-24T22:57:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,022 | py | from dwr import *
from dwr.ds_partition.query26 import *
SAMPLE_TYPE = VOILA
for DS in ds_list:
logging.info("DS: {0} - Get statistic".format(DS))
# language=HQL
hiveql.execute("""
SELECT
i_item_id,
COUNT(*),
ABS(STDDEV(cs_quantity) / AVG(cs_quantity)),
ABS(STDDEV(cs_list_price) / AVG(cs_list_price)),
ABS(STDDEV(cs_coupon_amt) / AVG(cs_coupon_amt)),
ABS(STDDEV(cs_sales_price) / AVG(cs_sales_price))
FROM catalog_sales
JOIN item
ON catalog_sales.cs_item_sk = i_item_sk
WHERE catalog_sales.ds = '{0}'
AND item.ds = '{0}'
GROUP BY i_item_id
""".format(DS))
result = hiveql.fetchall()
catalog_sales_count = {r[0]: r[1] for r in result}
catalog_sales_coeff = {r[0]: r[2:] for r in result}
for sample_rate in sample_rates:
logging.info("DS: {0} sample rate: {1}".format(DS, sample_rate))
# senate sample for catalog sale
catalog_sales_sampled = masg_sample(
sample_rate=sample_rate,
table_name=CATALOG_SALES,
group_by=['i_item_id'],
aggregate_column=['agg1', 'agg2', 'agg3', 'agg4'],
partition={'ds': DS},
frequency=catalog_sales_count,
coefficient=catalog_sales_coeff,
# language=HQL
select="""
SELECT *
FROM {table_name} full_table
JOIN item
ON full_table.cs_item_sk = i_item_sk
AND item.ds='{ds}'
LEFT JOIN voila_allocation
ON voila_allocation.ds = '{ds}'
AND voila_allocation.table_name = '{table_name}'
AND voila_allocation.default_rate = {sample_rate}
AND item.i_item_id = voila_allocation.stratum
WHERE full_table.ds = '{ds}'
AND rand() <= COALESCE(sample_rate, '{sample_rate}')
""".format(
ds=DS,
table_name=CATALOG_SALES,
sample_rate=sample_rate,
),
overwrite= True
)
# create result table
hiveql.execute(query26_create_table.format(sample_table_name('query26', SAMPLE_TYPE, sample_rate, 'ds')))
# run query over sample table
hiveql.execute(query26_sampled.format(
ds=DS,
query26_table=sample_table_name('query26', SAMPLE_TYPE, sample_rate, 'ds'),
catalog_sales_table=catalog_sales_sampled,
))
sample_evaluate(
table_name='query26',
sample_type=SAMPLE_TYPE,
sample_rate=sample_rate,
group_by_columns=['i_item_id'],
aggregation_columns=['agg1','agg2','agg3','agg4'],
partition={'ds': DS},
)
| [
"trongdn@fb.com"
] | trongdn@fb.com |
9bee2ad20e29a95bb54d466366ddd596291713d2 | 8d2ebc43f91e1d5d84c621e6cbc781472880344e | /capital.py | bf88b28d11a6b3695ccbdd31699bc20485c55b55 | [] | no_license | ifferus/bottravel | 93f2f0da1d39c93b859b9c2deaed9dac27e198d8 | 61e4ff966f5065876b12bbd6ffd5b51c63af15b0 | refs/heads/master | 2020-09-16T10:07:31.963071 | 2019-11-25T07:20:11 | 2019-11-25T07:20:11 | 223,737,855 | 0 | 0 | null | 2019-11-24T12:07:19 | 2019-11-24T12:07:18 | null | UTF-8 | Python | false | false | 282 | py | class Search_capital:
def __init__(self,search):
self.searchIn=search
def search(self):
a=self.searchIn
file=open('base_date_city.txt','r',encoding='UTF-8')
for line in file:
if a in line:
capital=line[0:line.index(':')]
return(capital)
| [
"noreply@github.com"
] | ifferus.noreply@github.com |
8b3422f2c6a46bbedc7e442bd7ee3dd98a44e761 | cbb3afbbf76d68d694276612155674e8333f6785 | /apps/user/__init__.py | 5274503e412eab81a44d38b8246ae436e9a2cce7 | [] | no_license | lyzhwj/MobileManageItem | 6a745dbd022273a958bc5875ba8f0418e026ca11 | 71ddc736b97a5a2f03464d7193d28246e8d62652 | refs/heads/master | 2022-12-13T06:20:50.943115 | 2019-12-05T02:21:27 | 2019-12-05T02:21:27 | 224,790,996 | 0 | 0 | null | 2022-12-08T06:17:01 | 2019-11-29T06:29:47 | JavaScript | UTF-8 | Python | false | false | 152 | py | from flask.blueprints import Blueprint
user_bp = Blueprint('user', __name__)
category_bp = Blueprint('category', __name__)
from apps.user import view
| [
"614680720@qq.com"
] | 614680720@qq.com |
46e7155c122fe2b89291a70967d3ced59f4c38ce | cf1f1d3f7a4aaaaaee322b0101f7b294909c5a67 | /Code/Al/loop_index.py | 0cbbf5bab4fdd9fe13eacbec91a04ee7426ff5b3 | [] | no_license | PdxCodeGuild/class_emu | 0b52cc205d01af11860a975fc55e36c065d1cc68 | 9938f384d67a4f57e25f2714efa6b63e2e41b892 | refs/heads/master | 2020-05-31T01:16:52.911660 | 2019-12-09T05:22:06 | 2019-12-09T05:22:06 | 190,046,342 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 185 | py | # Write your code here :-)
import string
print(string.ascii_lowercase)
abc_list = list(string.ascii_lowercase)
print(abc_list)
for num in range(len(abc_list)):
print(abc_list[num])
| [
"al.burns.email@gmail.com"
] | al.burns.email@gmail.com |
c0943021abd5401db74b8a9ee7af084331bb607c | e58290627862b6a3f3c7f4552832170b524967aa | /main.py | 89f47c48e0d471e9b4cd7dcd3d03f95bd722ee02 | [] | no_license | Phay-mann-khaing/Script_for_Excel_Odoo | f886580eb44719d97edd8800dfaa6f757acfb9e8 | a054f38b7b4df9a1a3e182845707283c2d228f6c | refs/heads/master | 2021-01-11T14:10:21.908454 | 2017-06-26T04:57:50 | 2017-06-26T04:57:50 | 94,989,744 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,039 | py | import csv2xlsx
import os
from Tkinter import Tk
from tkFileDialog import askopenfilename
import convert_into_importable
Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
ft = [('Excel file', '*.xlsx'), ('All Files', '*')]
filename = askopenfilename(filetypes=ft) # show an "Open" dialog box and return the path to the selected file
# print(filename)
# filename = raw_input("File Name: ")
if os.path.isfile(filename):
if filename.endswith('.csv'):
csv_to_xlsx = csv2xlsx.Convert2xlsx(filename, is_csv=1)
print 'csv file converted to xlsx extension'
# convert_into_importable.Convert2importable(csv2xlsx)
elif filename.endswith('.xls'):
conv_2_xlsx = csv2xlsx.Convert2xlsx(filename, is_xls=1)
print 'xls file converted to xlsx extension'
# convert_into_importable.Convert2importable(conv_2_xlsx)
elif filename.endswith('.xlsx'):
con_2_importable = convert_into_importable.Convert2importable(filename)
else:
print 'Choose a file'
| [
"phaymann009@gmail.com"
] | phaymann009@gmail.com |
c76da95e5f7e2c4118bcc22f272ef5631f04054b | 0e21ecd5dc834b8a164cbeda770d536f07584390 | /Fingerprints.py | 42afb8fcee456ae535d0c6a497976fe985184aa2 | [
"MIT"
] | permissive | NehaAgarwal2598/Codeforces | 162b33334998fa27f03e2d8889ccf6017712db5d | 859196a734cb24834736adf9517b351647da6e73 | refs/heads/master | 2022-10-19T02:04:59.631309 | 2020-06-13T14:45:32 | 2020-06-13T14:45:32 | 272,025,667 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 173 | py | n, m = list(map(int, input().split()))
x = list(map(int, input().split()))
y = list(map(int, input().split()))
for i in x:
if i in y:
print(i,end = ' ')
| [
"noreply@github.com"
] | NehaAgarwal2598.noreply@github.com |
06abedeeb86ea84b63cc44a7445aca6a9ef0a24a | 9e213e75806ea91b30085d51502423f044f5df07 | /q08_gradientboosting_regressor/tests/test.py | cea0d15749f1285be701bfb5af7dcedd3348baa0 | [] | no_license | greyatomtestuser2/time_series_day_02_project | c9073e33a16b5d8c7605196141d742daba13bb57 | 18e83b1ac59e3c1a2fd29b1fba866b66b582524d | refs/heads/master | 2021-08-28T06:02:41.871599 | 2017-12-11T10:06:31 | 2017-12-11T10:06:31 | 113,818,783 | 0 | 0 | null | 2017-12-11T06:00:14 | 2017-12-11T06:00:14 | null | UTF-8 | Python | false | false | 1,645 | py | import unittest
from inspect import getargspec
from ..build import q08_gradientboosting_regressor as student
from greyatomlib.time_series_day_02_project.q08_gradientboosting_regressor.build import q08_gradientboosting_regressor as original
import dill
import pandas as pd
from pandas.util.testing import assert_frame_equal
fe = ["WorkDay", "Peakhours", "Peakmonths"]
class Testing(unittest.TestCase):
def setUp(self):
with open('q08_gradientboosting_regressor/tests/user_sol.pkl', 'wb') as f:
dill.dump(student, f)
with open('q08_gradientboosting_regressor/tests/test_sol.pkl', 'wb') as f:
dill.dump(original, f)
with open('q08_gradientboosting_regressor/tests/user_sol.pkl', 'rb') as f:
self.student_func = dill.load(f)
with open('q08_gradientboosting_regressor/tests/test_sol.pkl', 'rb') as f:
self.solution_func = dill.load(f)
self.data = 'data/elecdemand.csv'
self.student_return = self.student_func(self.data)
self.original_return = self.solution_func(self.data)
# Check the arguements of the function
def test_timeseries(self):
# Input parameters tests
args = getargspec(student)
self.assertEqual(len(args[0]), 3, "Expected argument(s) %d, Given %d" % (3, len(args)))
def test_timeseries_default(self):
args = getargspec(student)
self.assertEqual(args[3], (fe,9), "Expected default values do not match given default values")
def test_return(self):
self.assertEqual(self.student_return, self.original_return, "The return values do not match expected values")
| [
"alex19427@gmail.com"
] | alex19427@gmail.com |
501efc03e712d21a0a76e29634ed02d611170f9e | 8fbd8b98cdf04d319f7b5789d6dc1a738a90566b | /th_mastodon/tests.py | 9658443ac0d0bd316e58cf64cc671700da484071 | [
"BSD-3-Clause"
] | permissive | fkztw/django-th | 5231652ed75ae6060bd4f4a383eba4286e8c9191 | 926a3b9d515a7995cb36d2259729851d0c5cfb4d | refs/heads/master | 2023-07-23T22:08:11.898683 | 2017-10-27T12:38:21 | 2017-10-27T12:38:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,978 | py | # coding: utf-8
from django.conf import settings
from django.core.cache import caches
from django_th.tests.test_main import MainTest
from django_th.models import ServicesActivated
from mastodon import Mastodon as MastodonAPI
from th_mastodon.forms import MastodonProviderForm, MastodonConsumerForm
from th_mastodon.models import Mastodon
from th_mastodon.my_mastodon import ServiceMastodon
from unittest.mock import patch
cache = caches['django_th']
class MastodonTest(MainTest):
"""
MastodonTest Model
"""
def test_get_services_list(self):
th_service = ('th_mastodon.my_mastodon.ServiceMastodon',)
for service in th_service:
self.assertIn(service, settings.TH_SERVICES)
def create_masto(self, tooter='foxmask@mamot.fr', timeline='home',
tag='mastodon', fav=False, since_id=1, max_id=0):
trigger = self.create_triggerservice(consumer_name='ServiceMastodon')
ServicesActivated.objects.get(name='ServiceMastodon')
resu = Mastodon.objects.create(tooter=tooter, timeline=timeline,
tag=tag, fav=fav, since_id=since_id,
max_id=max_id,
trigger=trigger, status=True)
return resu
def test_mastodon(self):
m = self.create_masto()
self.assertTrue(isinstance(m, Mastodon))
self.assertEqual(m.show(), "My Mastodon %s %s" %
(m.timeline, m.trigger))
self.assertEqual(m.__str__(), "{}".format(m.timeline))
"""
Form
"""
# provider
def test_valid_provider_form(self):
m = self.create_masto()
data = {'tooter': m.tooter,
'timeline': m.timeline,
'tag': m.tag,
'fav': m.fav}
form = MastodonProviderForm(data=data)
self.assertTrue(form.is_valid())
def test_invalid_provider_form(self):
form = MastodonProviderForm(data={'tooter': '',
'timeline': '',
'tag': '', 'fav': ''})
self.assertFalse(form.is_valid())
# consumer
def test_valid_consumer_form(self):
m = self.create_masto()
data = {'tooter': m.tooter,
'timeline': m.timeline,
'tag': m.tag,
'fav': m.fav}
form = MastodonConsumerForm(data=data)
self.assertTrue(form.is_valid())
def test_invalid_consumer_form(self):
# when a field is empty the clean() function set it as None
form = MastodonConsumerForm(data={'tooter': '',
'timeline': '',
'tag': '', 'fav': False})
self.assertFalse(form.is_valid())
class ServiceMastodonTest(MastodonTest):
"""
ServiceTwitterTest
"""
def setUp(self):
super(ServiceMastodonTest, self).setUp()
self.data = {'text': 'something #thatworks'}
self.token = 'AZERTY1234'
self.trigger_id = 1
self.service = ServiceMastodon(self.token)
"""
def test_read_data_tooter(self):
search = {'id': 1}
t = self.create_masto(since_id=0, tag='')
kwargs = dict({'date_triggered': '2013-05-11 13:23:58+00:00',
'model_name': 'Mastodon',
'trigger_id': t.trigger_id,
'user': 'foxmask'})
user_id = []
user_id[0]['id'] = 1
with patch.object(MastodonAPI, 'account_statuses') as mock1:
se = ServiceMastodon(self.token)
with patch.object(MastodonAPI, 'account_search') as mock2:
se.read_data(**kwargs)
mock2.assert_called_with(q='foxmask@mamot.fr')
mock2.return_value = user_id[0]['id']
mock1.assert_called_once_with(**search)
"""
@patch.object(MastodonAPI, 'favourites')
def test_read_data_fav(self, mock1):
search = {'max_id': 0, 'since_id': 1}
t = self.create_masto(tag='', fav=True)
kwargs = dict({'date_triggered': '2013-05-11 13:23:58+00:00',
'model_name': 'Mastodon',
'trigger_id': t.trigger_id,
'user': 'foxmask'})
se = ServiceMastodon(self.token)
se.read_data(**kwargs)
mock1.assert_called_with(**search)
@patch.object(MastodonAPI, 'search')
def test_read_data_tag(self, mock1):
search = {'q': 'mastodon', 'since_id': 1}
t = self.create_masto()
kwargs = dict({'date_triggered': '2013-05-11 13:23:58+00:00',
'model_name': 'Mastodon',
'trigger_id': t.trigger_id,
'user': 'foxmask'})
se = ServiceMastodon(self.token)
se.read_data(**kwargs)
mock1.assert_called_with(**search)
@patch.object(MastodonAPI, 'status_post')
def test_save_data_toot(self, mock1):
self.create_masto()
token = self.token
trigger_id = self.trigger_id
kwargs = {'user': 1}
self.data['title'] = 'Toot from'
self.data['link'] = 'http://domain.ltd'
content = str("{title} {link}").format(
title=self.data.get('title'),
link=self.data.get('link'))
content += ' #mastodon'
self.data['content'] = content
self.assertTrue(token)
self.assertTrue(isinstance(trigger_id, int))
se = ServiceMastodon(self.token, **kwargs)
se.save_data(trigger_id, **self.data)
mock1.assert_called_with(content, media_ids=None)
"""
@patch.object(MastodonAPI, 'status_post')
@patch.object(MastodonAPI, 'media_post')
@patch.object(ServiceMastodon, 'media_in_content')
def test_save_data_toot_media(self, mock1, mock2, mock3):
self.create_masto()
token = self.token
trigger_id = self.trigger_id
kwargs = {'user': 1}
self.data['title'] = 'Tweet from xxxx'
self.data['link'] = 'http://domain.ltd'
content = ' https://pbs.twimg.com/media/foobar.jpg '
local_file = os.path.dirname(__file__) + '/../cache/foobar.jpg'
self.data['content'] = content
content += str("{link} #mastodon").format(
link=self.data.get('link'))
self.assertTrue(token)
self.assertTrue(isinstance(trigger_id, int))
self.assertIn('text', self.data)
self.assertNotEqual(self.data['text'], '')
se = ServiceMastodon(self.token, **kwargs)
se.save_data(trigger_id, **self.data)
mock1.assert_called_with(content)
mock1.return_value = (content, local_file)
mock2.assert_called_with(content)
mock2.return_value = 1234 # fake media id
mock3.assert_called_with(content)
"""
def test_auth(self):
pass
def test_callback(self):
pass
| [
"foxmaskhome@gmail.com"
] | foxmaskhome@gmail.com |
48f85e06c833dfe92109428305e04cffda329a1b | 01411b609236c258c5592704befb5cb99a7314f8 | /65.ham.py | fcac558e99b7bff4a17c6faedc4fddaf2293ca07 | [] | no_license | ZeroHX/PSIT2018 | 31515002ec124651dff49ee7e3a08d6594cdfcda | 5ef16bc51ef8b8b402ace197e26ae63377602ce1 | refs/heads/master | 2020-03-29T09:30:16.034742 | 2018-10-07T08:07:46 | 2018-10-07T08:07:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 327 | py | """61070023 Hamburger"""
def main(front, back):
"""print hamburger like ||**********|||
inp = 2, 3 <a bread front and back hamburger>
in middle print "*" * (inp1 + inp2) *2"""
print("|" * front, end="")
print("*" * ((front+back)*2), end="")
print("|" * back, end="")
main(int(input()), int(input()))
| [
"41178369+ZeroHX@users.noreply.github.com"
] | 41178369+ZeroHX@users.noreply.github.com |
3a1e60bcb78bcb63bbc2f4c77fdc013d158aeacf | 60503391d3d64a4689d1cc4fc75b0f4d692df694 | /build/catkin_generated/order_packages.py | f0b3b9b316ca56f03d476473071c1d3fdec667b6 | [] | no_license | Cutiino/ROSWebCam | 821d5ec0dd94ff5476e818577d79b5fc25bb4477 | 4528472b6eaa7c115954e5482481d7781c68e488 | refs/heads/master | 2023-06-28T22:11:21.796628 | 2021-07-23T00:50:23 | 2021-07-23T00:50:23 | 388,627,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 373 | py | # generated from catkin/cmake/template/order_packages.context.py.in
source_root_dir = '/home/agustin/TareaRos/src'
whitelisted_packages = ''.split(';') if '' != '' else []
blacklisted_packages = ''.split(';') if '' != '' else []
underlay_workspaces = '/home/agustin/TareaRos/devel;/opt/ros/noetic'.split(';') if '/home/agustin/TareaRos/devel;/opt/ros/noetic' != '' else []
| [
"agustin.cutino@gmail.com"
] | agustin.cutino@gmail.com |
3fc5b0bea4c9effacae47dda4bdb51a60a002a2d | 1c8554b39967ce19fb67de80ca01722fc8517181 | /vk.py | 6c882b4c6e274c8199fe22ea23c6082a141400f8 | [] | no_license | vadikTheBest/vki_api_python | 7a7d449652975e7ac39358a09cd01fd0eb848b5d | 6863327b5855446dde9c9313a9357ae7805544b3 | refs/heads/main | 2023-03-24T08:38:21.946704 | 2021-03-20T08:43:27 | 2021-03-20T08:43:27 | 343,154,874 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,118 | py | import requests
from api_key import my_token
api_url = 'https://api.vk.com/method/'
id_group = '-202262558'
def getjson(api_method):
response = requests.get(f"{api_url}{api_method}", params={"owner_id": id_group, "count": 10, "offset": 0, "access_token": my_token, "v": "5.52"})
response = response.json()
return response
def crud_read():
wall = getjson("wall.get")
counter = wall['response']['count']
while counter != 0:
print("id поста =", wall['response']['items'][counter-1]['id'], " Текст сообщения = ", wall['response']['items'][counter-1]['text'], '\n')
counter -= 1
def crud_create(text_message):
api_method = 'wall.post'
response = requests.get(f"{api_url}{api_method}", params={"owner_id": id_group, "message": text_message, "offset": 0, "access_token": my_token, "v": "5.52"})
print("Запись на стене успешно была добавлена\n")
crud_read()
def crud_delete(id_post):
api_method = 'wall.delete'
response = requests.get(f"{api_url}{api_method}", params={"owner_id": id_group, "post_id": id_post, "offset": 0, "access_token": my_token, "v": "5.52"})
print("Запись на стене успешно была удалена, если вы ввели правильный id\n")
crud_read()
def crud_update(id_post, new_message):
api_method = 'wall.edit'
response = requests.get(f"{api_url}{api_method}", params={"owner_id": id_group, "post_id": id_post, "message": new_message, "offset": 0, "access_token": my_token, "v": "5.52"})
print("Запись на стене успешно была изменена, если вы ввели правильный id\n")
crud_read()
print("Добро пожаловать в приложение для работы с текстовами постами в вк\nЧтобы создать запись, напишите create\nЧтобы удалить запись, напишите delete\nЧтобы изменить запись, напишите edit\nЧтобы прочесть записи, напишите read\nЧтобы выйти из приложения, напишите exit\n")
while True:
keyboard = input()
if keyboard == 'create':
new_message = input("Пожалуйста, введите новое сообщение\n")
crud_create(new_message)
elif keyboard == 'delete':
id_delete_post = input("Пожалуйста, введите существующий id сообщения, который хотите удалить\n")
crud_delete(id_delete_post)
elif keyboard == 'edit':
id_edit_post = input("Пожалуйста, введите существующий id сообщения, который собирайтесь изменить\n")
edit_message = input("Пожалуйста, введите новое измененное сообщение\n")
crud_update(id_edit_post, edit_message)
elif keyboard == 'read':
crud_read()
elif keyboard == 'exit':
break
| [
"noreply@github.com"
] | vadikTheBest.noreply@github.com |
1a8c95fb36269e7d2b68c2e4d27a52658f6cc0a7 | ed5dc6a659c131027e2d90bf5810d428515ddfce | /auctions/migrations/0008_alter_listing_image_url.py | e247f6076782f1eb49be240c5998a96d53ca9893 | [] | no_license | ivanmclennon/cs50_commerce | 1e0e4dd9a03867a20d3c97dfbcd6cef73ab7fc7a | 4835d4a25419c2c5a0689caacfd161b5b635143d | refs/heads/main | 2023-06-20T15:02:31.754773 | 2021-07-17T18:03:20 | 2021-07-17T18:03:20 | 376,960,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 415 | py | # Generated by Django 3.2.4 on 2021-06-16 13:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('auctions', '0007_auto_20210616_1556'),
]
operations = [
migrations.AlterField(
model_name='listing',
name='image_url',
field=models.CharField(blank=True, max_length=128, null=True),
),
]
| [
"iagninenko@edu.hse.ru"
] | iagninenko@edu.hse.ru |
84ed986e2026f850538971e4e12cd76e5eb232c5 | 8218ac4b1ad2cf0ac55d7eda19b2e466ad078402 | /venv/lib/python3.7/site-packages/pyecharts/options/series_options.py | 444ae93cfe26a54b0aa1751d222b1c4eb49f1bbe | [] | no_license | william-xiangzi/NetworkTest | 07044c58976aa0d3d6325f81d3b17d51e5e9bc54 | 89500dabd09b64407056c8a45997cfdea2b14a41 | refs/heads/master | 2020-07-04T01:53:57.757693 | 2019-08-13T09:52:48 | 2019-08-13T09:52:48 | 202,114,781 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,042 | py | # coding=utf-8
from ..commons.types import JSFunc, List, Numeric, Optional, Sequence, Tuple, Union
class ItemStyleOpts:
    """Visual style options for chart items (color, border, opacity).

    Collects the pythonic keyword arguments into ``self.opts``, a dict
    whose camelCase keys match the ECharts ``itemStyle`` option schema.
    Unset options stay ``None`` so the serializer can drop them.
    """

    def __init__(
        self,
        color: Optional[str] = None,
        color0: Optional[str] = None,
        border_color: Optional[str] = None,
        border_color0: Optional[str] = None,
        border_width: Optional[Numeric] = None,
        opacity: Optional[Numeric] = None,
    ):
        # snake_case parameters -> camelCase ECharts keys.
        self.opts: dict = dict(
            color=color,
            color0=color0,
            borderColor=border_color,
            borderColor0=border_color0,
            borderWidth=border_width,
            opacity=opacity,
        )
class TextStyleOpts:
def __init__(
self,
color: Optional[str] = None,
font_style: Optional[str] = None,
font_weight: Optional[str] = None,
font_family: Optional[str] = None,
font_size: Optional[Numeric] = None,
line_height: Optional[str] = None,
):
self.opts: dict = {
"color": color,
"fontStyle": font_style,
"fontWeight": font_weight,
"fontFamily": font_family,
"fontSize": font_size,
"lineHeight": line_height,
}
class LabelOpts:
def __init__(
self,
is_show: bool = True,
position: Union[str, Sequence] = "top",
color: Optional[str] = None,
font_size: Numeric = 12,
font_style: Optional[str] = None,
font_weight: Optional[str] = None,
font_family: Optional[str] = None,
rotate: Optional[Numeric] = None,
horizontal_align: Optional[str] = None,
vertical_align: Optional[str] = None,
formatter: Union[str, JSFunc, None] = None,
):
self.opts: dict = {
"show": is_show,
"position": position,
"color": color,
"rotate": rotate,
"fontSize": font_size,
"fontStyle": font_style,
"fontWeight": font_weight,
"fontFamily": font_family,
"align": horizontal_align,
"verticalAlign": vertical_align,
"formatter": formatter,
}
class LineStyleOpts:
def __init__(
self,
width: Numeric = 1,
opacity: Numeric = 1,
curve: Numeric = 0,
type_: str = "solid",
color: Optional[str] = None,
):
self.opts: dict = {
"width": width,
"opacity": opacity,
"curveness": curve,
"type": type_,
"color": color,
}
class SplitLineOpts:
def __init__(
self, is_show: bool = False, linestyle_opts: LineStyleOpts = LineStyleOpts()
):
if isinstance(linestyle_opts, LineStyleOpts):
linestyle_opts = linestyle_opts.opts
self.opts: dict = {"show": is_show, "lineStyle": linestyle_opts}
class MarkPointItem:
def __init__(
self,
name: Optional[str] = None,
type_: Optional[str] = None,
value_index: Optional[Numeric] = None,
value_dim: Optional[str] = None,
coord: Optional[List] = None,
x: Optional[Numeric] = None,
y: Optional[Numeric] = None,
value: Optional[Numeric] = None,
symbol: Optional[str] = None,
symbol_size: Union[Numeric, List] = None,
):
self.opts: dict = {
"name": name,
"type": type_,
"valueIndex": value_index,
"valueDim": value_dim,
"coord": coord,
"x": x,
"y": y,
"value": value,
"symbol": symbol,
"symbol_size": symbol_size,
}
class MarkPointOpts:
def __init__(
self,
data: List[Union[MarkPointItem, dict]] = None,
symbol: Optional[str] = None,
symbol_size: Union[None, Numeric] = None,
label_opts: LabelOpts = LabelOpts(position="inside", color="#fff"),
):
if isinstance(label_opts, LabelOpts):
label_opts = label_opts.opts
_data = []
if data:
for d in data:
if isinstance(d, dict):
_data.append(d)
else:
_data.append(d.opts)
self.opts: dict = {
"symbol": symbol,
"symbolSize": symbol_size,
"label": label_opts,
"data": _data,
}
class MarkLineItem:
def __init__(
self,
name: Optional[str] = None,
type_: Optional[str] = None,
x: Union[str, Numeric, None] = None,
y: Union[str, Numeric, None] = None,
value_index: Optional[Numeric] = None,
value_dim: Optional[str] = None,
coord: Optional[Sequence] = None,
symbol: Optional[str] = None,
symbol_size: Optional[Numeric] = None,
):
self.opts: dict = {
"name": name,
"type": type_,
"valueIndex": value_index,
"valueDim": value_dim,
"xAxis": x,
"yAxis": y,
"coord": coord,
"symbol": symbol,
"symbolSize": symbol_size,
}
class MarkLineOpts:
def __init__(
self,
is_silent: bool = False,
data: List[Union[MarkLineItem, dict]] = None,
symbol: Optional[str] = None,
symbol_size: Union[None, Numeric] = None,
precision: int = 2,
label_opts: LabelOpts = LabelOpts(),
):
if isinstance(label_opts, LabelOpts):
label_opts = label_opts.opts
_data = []
if data:
for d in data:
if isinstance(d, dict):
_data.append(d)
else:
_data.append(d.opts)
self.opts: dict = {
"silent": is_silent,
"symbol": symbol,
"symbolSize": symbol_size,
"precision": precision,
"label": label_opts,
"data": _data,
}
class MarkAreaItem:
def __init__(
self,
name: Optional[str] = None,
type_: Tuple[Optional[str], Optional[str]] = (None, None),
value_index: Tuple[Optional[Numeric], Optional[Numeric]] = (None, None),
value_dim: Tuple[Optional[str], Optional[str]] = (None, None),
x: Tuple[Union[str, Numeric, None], Union[str, Numeric, None]] = (None, None),
y: Tuple[Union[str, Numeric, None], Union[str, Numeric, None]] = (None, None),
label_opts: Union[LabelOpts, dict, None] = None,
itemstyle_opts: Union[ItemStyleOpts, dict, None] = None,
):
if isinstance(label_opts, LabelOpts):
label_opts = label_opts.opts
if isinstance(itemstyle_opts, ItemStyleOpts):
itemstyle_opts = itemstyle_opts.opts
self.opts: List = [
{
"name": name,
"type": type_[0],
"valueIndex": value_index[0],
"valueDim": value_dim[0],
"xAxis": x[0],
"yAxis": y[0],
"label": label_opts,
"itemStyle": itemstyle_opts,
},
{
"type": type_[1],
"valueIndex": value_index[1],
"valueDim": value_dim[1],
"xAxis": x[1],
"yAxis": y[1],
},
]
class MarkAreaOpts:
def __init__(
self,
is_silent: bool = False,
label_opts: LabelOpts = LabelOpts(),
data: List[Union[MarkAreaItem, dict]] = None,
):
if isinstance(label_opts, LabelOpts):
label_opts = label_opts.opts
_data = []
if data:
for d in data:
if isinstance(d, dict):
_data.append(d)
else:
_data.append(d.opts)
self.opts: dict = {"silent": is_silent, "label": label_opts, "data": _data}
class EffectOpts:
def __init__(
self,
is_show: bool = True,
brush_type: str = "stroke",
scale: Numeric = 2.5,
period: Numeric = 4,
color: Optional[str] = None,
symbol: Optional[str] = None,
symbol_size: Optional[Numeric] = None,
):
self.opts: dict = {
"show": is_show,
"brushType": brush_type,
"scale": scale,
"period": period,
"color": color,
"symbol": symbol,
"symbolSize": symbol_size,
}
class AreaStyleOpts:
def __init__(self, opacity: Optional[Numeric] = 0, color: Optional[str] = None):
self.opts: dict = {"opacity": opacity, "color": color}
class SplitAreaOpts:
def __init__(self, is_show=True, areastyle_opts: AreaStyleOpts = AreaStyleOpts()):
if isinstance(areastyle_opts, AreaStyleOpts):
areastyle_opts = areastyle_opts.opts
self.opts: dict = {"show": is_show, "areaStyle": areastyle_opts}
class GraphNode:
def __init__(
self,
name: Optional[str] = None,
x: Optional[Numeric] = None,
y: Optional[Numeric] = None,
is_fixed: bool = False,
value: Union[str, Sequence, None] = None,
category: Optional[int] = None,
symbol: Optional[str] = None,
symbol_size: Union[Numeric, Sequence, None] = None,
label_opts: Optional[LabelOpts] = None,
):
if isinstance(label_opts, LabelOpts):
label_opts = label_opts.opts
self.opts: dict = {
"name": name,
"x": x,
"y": y,
"fixed": is_fixed,
"value": value,
"category": category,
"symbol": symbol,
"symbolSize": symbol_size,
"label": label_opts,
}
class GraphLink:
def __init__(
self,
source: Union[str, int, None] = None,
target: Union[str, int, None] = None,
value: Optional[Numeric] = None,
symbol: Union[str, Sequence, None] = None,
symbol_size: Union[Numeric, Sequence, None] = None,
linestyle_opts: Optional[LineStyleOpts] = None,
label_opts: Optional[LabelOpts] = None,
):
if isinstance(linestyle_opts, LineStyleOpts):
linestyle_opts = linestyle_opts.opts
if isinstance(label_opts, LabelOpts):
label_opts = label_opts.opts
self.opts: dict = {
"source": source,
"target": target,
"value": value,
"symbol": symbol,
"symbolSize": symbol_size,
"lineStyle": linestyle_opts,
"label": label_opts,
}
class GraphCategory:
def __init__(
self,
name: Optional[str] = None,
symbol: Optional[str] = None,
symbol_size: Union[Numeric, Sequence, None] = None,
label_opts: Optional[LabelOpts] = None,
):
if isinstance(label_opts, LabelOpts):
label_opts = label_opts.opts
self.opts: dict = {
"name": name,
"symbol": symbol,
"symbolSize": symbol_size,
"label": label_opts,
}
class TreeItem:
def __init__(
self,
name: Optional[str] = None,
value: Optional[Numeric] = None,
label_opts: Optional[LabelOpts] = None,
children: Optional[Sequence] = None,
):
if isinstance(label_opts, LabelOpts):
label_opts = label_opts.opts
self.opts: dict = {
"name": name,
"value": value,
"children": children,
"label": label_opts,
}
| [
"jiaochengxiang@3commas.cn"
] | jiaochengxiang@3commas.cn |
55ee36fda763641ceb51241c524d5e1b9ba444e2 | ea0f1fda7e7826bc9ef64aa08c062207be2bbcc0 | /DollyFish/ChickyDaoWord.py | 4ada0647869c2883c7be17fd17582556eb25e82b | [] | no_license | DollyFish/Myproject | bd7bfc620e4663bab507de9cc06350436bad1fa6 | c02b0e23f96e5c0960fe52bb84039c62071811a4 | refs/heads/main | 2023-08-21T16:03:25.017100 | 2021-10-30T12:46:44 | 2021-10-30T12:46:44 | 422,083,517 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,870 | py | import random, Menu, csv, pickle,time
def maingame():
username = "Guess"
stamp = ["NOT HAVE", "NOT HAVE", "NOT HAVE"]
with open('stamprally.txt', 'rb') as file:
try:
data = pickle.load(file)
except EOFError:
data = {}
with open('stamprally.txt', 'wb') as file:
data[username] = stamp
pickle.dump(data, file, protocol=pickle.HIGHEST_PROTOCOL)
file.close()
main(username)
def main(username):
welcome =[" ▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼",
"",
" Welcome to ChickyDaoWord",
"",
" ▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲"]
for i in welcome:
print(i)
time.sleep(0.3)
print()
choice = input(" >> Press Enter << ")
print()
menugame(username)
def menugame(username):
print("=======================♥ ChickyDaoWord ♥========================")
choice = input("""
-> 1. Start Game
-> 2. Tutorial
-> S. CheckStamp
-> R. Return to Menu
Please enter your choice: """)
if choice == "1":
game(username)
elif choice == "2":
Tutorial(username)
elif choice == "S" or choice == "s":
stamp(username)
elif choice == "R" or choice == "r":
print()
print("Chicky : Bye Bye ")
time.sleep(1)
print()
Menu.displaygame(username)
else:
print("Chicky : You must only select from the given choice")
print("Chicky : Please try again")
menugame(username)
def Tutorial(username):
print()
tutorial = ["Chicky : Welcome to ChickyDaoWord",
"Chicky : I will give you the meaning of the word",
"Chicky : Enter the letter that you think. You have 10 chance to wrong and every chance I will substract your score.",
"",
"Example: Word : Ant",
"Mean: A small insect, often with a sting, that usually lives in a complex social colony with breeding queens.",
"Please enter one letter or a 3-letter word.",
"",
"Chicky : If you enter A or a , the game will display",
"Oh! You correct this word contain 1 \"A\"",
"A**",
"",
"Chicky : If you enter wrong letter example is X , the game will display",
"Huh! This word doesn't contain this letter.",
"",
"Chicky : If you enter same letter that you already enter it, the game will display",
"You have already guessed letter",
"",
"Chicky : If you know the word, you can enter full word",
"",
"Chicky : If you enter wrong 10 times, the game will over",
"Chicky : !! But !!",
"Chicky : If you win, the game will display the word",
"Chicky : And you will get CHICKYDAO STAMP!"]
for i in tutorial:
print(i)
time.sleep(1.3)
print()
print(" >> Press Enter << ")
choice = input(" - Return To Menu - ")
print()
menugame(username)
#random word
def ran_word():
words = ['academy', 'addition', 'address', 'adventure', 'alternate', 'archaeologist',
'blackmail', 'brilliant', 'camouflage', 'capillary', 'casualties', 'communicate',
'decentralize', 'democracy', 'demonstrate', 'equivalent', 'exile', 'experiment',
'family', 'federal', 'fiscal', 'fluke', 'generate', 'geography', 'grateful',
'handicraft', 'history', 'hospital', 'millennium', 'read']
return random.choice(words).upper()
#Check and print * for put the letter to it
def check(word,guesses,guess,counter):
status = ""
matches = 0
for letter in word:
if letter in guesses:
status += letter
else:
status += "*"
if letter == guess:
matches += 1
if matches > 1:
print("Chicky : Oh! You correct this word contain",matches,'"'+ guess +'"'+'s' )
elif matches == 1:
print("Chicky : Oh! You correct this word contain",matches,'"'+ guess +'"')
else:
print("Chicky : Huh! This word doesn't contain this letter.")
counterincrease(counter)
counter = counterincrease(counter)
return status, counter
def counterincrease(counter):
counter += 1
return counter
def meanword(word):
if word.lower() == "academy":
mean = "connected with education, especially studying in schools and universities."
elif word.lower() == "addition":
mean = "a thing that is added to something else."
elif word.lower() == "address":
mean = "details of where somebody lives or works and where letters, etc. can be sent."
elif word.lower() == "adventure":
mean = "an unusual, exciting or dangerous experience, journey or series of events."
elif word.lower() == "alternate":
mean = "if something happens on alternate days, nights, etc. it happens on one day, etc. but not on the next."
elif word.lower() == "archaeologist":
mean = "a person who studies archaeology."
elif word.lower() == "blackmail":
mean = "the crime of demanding money from a person by threatening to tell somebody else a secret about them."
elif word.lower() == "brilliant":
mean = "extremely clever or impressive."
elif word.lower() == "camouflage":
mean = "a way of hiding soldiers and military equipment, using paint, leaves or nets, so that they look like part of what is around or near them."
elif word.lower() == "capillary":
mean = "any of the smallest tubes in the body that carry blood."
elif word.lower() == "casualties":
mean = "a person who is killed or injured in war or in an accident Our primary objective is reducing road casualties."
elif word.lower() == "communicate":
mean = "to share or exchange information, news, ideas, feelings, etc."
elif word.lower() == "decentralize":
mean = "to give some of the power of a central government, organization, etc. to smaller parts or organizations around the country."
elif word.lower() == "democracy":
mean = "a system of government in which the people of a country can vote to elect their representatives."
elif word.lower() == "demonstrate":
mean = "to show something clearly by giving proof or evidence."
elif word.lower() == "democracy":
mean = "a system of government in which the people of a country can vote to elect their representatives."
elif word.lower() == "demonstrate":
mean = "to show something clearly by giving proof or evidence."
elif word.lower() == "equivalent":
mean = "a thing, amount, word, etc. that is equal in value, meaning or purpose to something else."
elif word.lower() == "exile":
mean = "the state of being sent to live in another country that is not your own, especially for political reasons or as a punishment."
elif word.lower() == "experiment":
mean = "a scientific test that is done in order to study what happens and to gain new knowledge."
elif word.lower() == "family":
mean = "a group consisting of one or two parents and their children."
elif word.lower() == "federal":
mean = "having a system of government in which the individual states of a country have control over their own affairs, but are controlled by a central government for national decisions, etc."
elif word.lower() == "fiscal":
mean = "connected with government or public money, especially taxes."
elif word.lower() == "fluke":
mean = "a lucky or unusual thing that happens by accident, not because of planning or skill."
elif word.lower() == "generate":
mean = "generate something to produce energy, especially electricity."
elif word.lower() == "geography":
mean = "the scientific study of the earth’s surface, physical features, divisions, products, population, etc."
elif word.lower() == "grateful":
mean = "feeling or showing thanks because somebody has done something kind for you or has done as you asked."
elif word.lower() == "handicraft":
mean = "the activity of making attractive objects by hand."
elif word.lower() == "history":
mean = "all the events that happened in the past."
elif word.lower() == "hospital":
mean = "a large building where people who are ill or injured are given medical treatment and care."
elif word.lower() == "millennium":
mean = "a period of 1000 years, especially as calculated before or after the birth of Christ."
elif word.lower() == "read":
mean = "to look at and understand the meaning of written or printed words or symbols."
time.sleep(0.8)
print ("Chicky : The word contain",len(word),"letters.")
time.sleep(0.8)
print("Chicky : This word means", mean)
time.sleep(0.8)
def game(username):
print()
print("========================== ",end='')
gen = "ChickyDaoWord "
for i in range(14):
print(gen[i], sep='', end='', flush=True);
time.sleep(0.1)
print(end ="===========================")
print("\n\n")
word = ran_word()
guesses = []
counter = 0
guessed = False
generate = 'Loading to ChickyDaoWord...'
for i in range(27):
print(generate[i], sep=' ', end=' ', flush=True);
time.sleep(0.1)
print()
print()
meanword(word)
time.sleep(1)
while not guessed:
text = "Please enter one letter or a {}-letter word. ".format(len(word))
guess = input(text)
guess = guess.upper()
if guess in guesses:
print('Chicky : You have already guessed '+ guess +'')
counterincrease(counter)
counter = counterincrease(counter)
elif len(guess) == len(word):
guesses.append(guess)
if guess == word:
guessed = True
else:
print("Chicky : Sorry, That's not correct😊")
counterincrease(counter)
counter = counterincrease(counter)
elif len(guess) == 1:
guesses.append(guess)
result = check(word,guesses,guess,counter)
counter = result[1]
if result[0] == word:
guessed = True
else:
print(result[0])
else:
print("Chicky : Invalid value")
counterincrease(counter)
counter = counterincrease(counter)
if counter >= 10:
gameover(username, word)
guessed = True
time.sleep(0.3)
print("Chicky : Ah! You got it. The word is", word,"you got it in",len(guesses),"tries.")
time.sleep(0.6)
with open('stamprally.txt', 'rb') as file:
try:
data = pickle.load(file)
except EOFError:
data = {}
with open('stamprally.txt', 'wb') as file:
data[username][0] = "CHICKYDAO STAMP"
pickle.dump(data, file, protocol=pickle.HIGHEST_PROTOCOL)
print("%s, YOU GET \"CHICKYDAO STAMP\" !! " % username)
print()
save = 'SAVING SUCCESS ...'
for i in save:
print(i , sep=' ', end=' ', flush=True);
time.sleep(0.1)
print()
print()
time.sleep(0.5)
file.close()
endgame(username)
def gameover(username,word):
print("Chicky : HaHa!! Game Over...")
time.sleep(1)
print("Chicky : You Lost")
time.sleep(1)
print("Chicky : The answer is ", word)
time.sleep(1)
print()
print(" >> Press Enter << ")
choice = input(" - Return To Menu - ")
print()
menugame(username)
def endgame(username):
print("Please select your choice")
choice = input("""
-> R. Replay
-> Q. Back to menu
-> S. CheckStamp
Your Choice: """)
if choice.lower() == "r":
game(username)
elif choice.lower() == "q":
menugame(username)
elif choice.lower() == "s":
stamp(username)
else:
print("You must only select from the given choice")
print("Please try again")
endgame(username)
def stamp(username):
print()
with open('stamprally.txt', 'rb') as file:
display = pickle.load(file)
for row in display:
if row == username:
print("================================================================")
print(" > |%s 's stamps| < " % row,
"\n ♦", display[row][0],"♦",
"\n ♦", display[row][1],"♦",
"\n ♦", display[row][2],"♦")
print("================================================================")
else:
pass
choice = input(" >> Press Enter To Return << ")
print()
menugame(username)
if __name__ == '__main__':
maingame() | [
"noreply@github.com"
] | DollyFish.noreply@github.com |
9c3e7efe4f11de9d2d352605026b21608815d9e9 | 6e4448d99733d6cabba8fc725e3f5132161e49f7 | /pre_2016_17_cool_season/prism_precip_ncar.py | 642981c52f13da509e7e2be9ea7433f5f4bee27c | [] | no_license | tomgowan/model-climatology | 00a4d75e11bb0a8599121aeb0cd6831f32b04329 | 1fab1c15535311c3ff4258bd4670ccdd81239ca2 | refs/heads/master | 2020-03-22T20:28:45.907527 | 2018-07-11T16:54:32 | 2018-07-11T16:54:32 | 140,603,732 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,501 | py | import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap, maskoceans
import pygrib, os, sys
from netCDF4 import Dataset
from numpy import *
import numpy as np
from pylab import *
import time
from datetime import date, timedelta
import pyart
from matplotlib import animation
import matplotlib.animation as animation
import types
###############################################################################
############## Read in ncar and prism precip #############################
###############################################################################
Date2= '20150930'
Date = zeros((184))
precip_ncar = zeros((621,1405))
precip_tot = zeros((621,1405))
num_days = 183
for i in range(0,183):
t=time.strptime(Date2,'%Y%m%d')
newdate=date(t.tm_year,t.tm_mon,t.tm_mday)+timedelta(i)
Date3 = newdate.strftime('%Y%m%d')
Date[i] = int(Date3)
y = 0
for i in range(0,num_days-1):
x = 0
z = 0
#### Make sure all ncar and prism files are present
for j in range(13,37):
NCARens_file = '/uufs/chpc.utah.edu/common/home/steenburgh-group5/tom/model_raw_output/ncarens/regridded.precip.ncar_3km_%08d00' % Date[i] + '_mem1_f0%02d' % j + '.grb2'
if os.path.exists(NCARens_file):
x = x + 1
try:
prism_file = '/uufs/chpc.utah.edu/common/home/steenburgh-group5/tom/climatology/prism/PRISM_ppt_stable_4kmD2_%08d' % Date[i] + '_asc.asc'
if os.path.exists(prism_file):
z = 1
except:
pass
try:
prism_file = '/uufs/chpc.utah.edu/common/home/steenburgh-group5/tom/climatology/prism/PRISM_ppt_provisional_4kmD2_%08d' % Date[i] + '_asc.asc'
if os.path.exists(prism_file):
z = 1
except:
pass
print x
if x == 24 and z == 1:
y = y + 1
for j in range(13,37):#32
############# NCAR ############################
NCARens_file = '/uufs/chpc.utah.edu/common/home/steenburgh-group5/tom/model_raw_output/ncarens/regridded.precip.ncar_3km_%08d00' % Date[i] + '_mem1_f0%02d' % j + '.grb2'
print NCARens_file
grb = grbs.select(name='Total Precipitation')[0]
lat_ncar,lon_ncar = grb.latlons()
grbs = pygrib.open(NCARens_file)
tmpmsgs = grbs.select(name='Total Precipitation')
msg = grbs[1]
precip_vals = msg.values
precip_vals = precip_vals*0.0393689*25.4
precip_ncar = precip_ncar + precip_vals
############### Prism #####################################
try:
precip = np.loadtxt("/uufs/chpc.utah.edu/common/home/steenburgh-group5/tom/climatology/prism/PRISM_ppt_stable_4kmD2_%08d" % Date[i] + "_asc.asc", skiprows = 6)
except:
print(prism_file)
try:
precip = np.loadtxt("/uufs/chpc.utah.edu/common/home/steenburgh-group5/tom/climatology/prism/PRISM_ppt_provisional_4kmD2_%08d" % Date[i] + "_asc.asc", skiprows = 6)
except:
print(prism_file)
precip_tot = precip_tot + precip
precip_tot = precip_tot/y
precip_ncar = precip_ncar/y
## Attempt to fix notation of lons so basemap understands it
lon_ncar = lon_ncar-360
###############################################################################
############## Create lat lon grid for psirm #############################
###############################################################################
lats_prism = zeros((621,1405))
lons_prism = zeros((621,1405))
for i in range(621):
lats_prism[620-i,:] = 24.062500000000 + i*.0416666666666666666666666667
for i in range(1405):
lons_prism[:,i] = -125.02083333333333333333 + i*.0416666666666666666666667
################## Saveprism and ncar array ################################
np.savetxt('ncar_dailymean.txt', precip_ncar)
np.savetxt('prism_ncar_dailymean.txt', precip_tot)
'''
###############################################################################
######################## Plot #############################################
###############################################################################
cmap = matplotlib.cm.get_cmap('pyart_NWSRef')
fig = plt.figure(figsize=(20,13))
levels = [0, 0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2, 2.5, 3, 3.5, 4, 4.5, 5, 6, 6.5, 7, 7.5, 8 ,8.5, 9,9.5, 10,11, 12, 13, 14, 15, 16, 18, 20, 22,26,30,34,38,42]
######################## NCAR #############################################
ax = fig.add_subplot(231)
map = Basemap(projection='merc',llcrnrlon=latlon[0],llcrnrlat=latlon[1],urcrnrlon=latlon[2],urcrnrlat=latlon[3],resolution='i')
x, y = map(lons_prism, lats_prism)
precip_ncar = maskoceans(lons_prism, lats_prism, precip_ncar)
#map.drawlsmask(land_color=(0, 0, 0, 0), ocean_color='deeppink', lakes=False)
csAVG = map.contourf(x,y,precip_ncar, levels, cmap = cmap, norm=matplotlib.colors.BoundaryNorm(levels,cmap.N))
map.drawcoastlines(linewidth = .5)
map.drawstates()
map.drawcountries()
cbar = map.colorbar(csAVG, location='bottom', pad="5%")
cbar.ax.tick_params(labelsize=12)
plt.title('NCAR Ensemble Control', fontsize = 18)
#cbar.ax.set_xlabel('Mean Daily Precipitation from Oct. 2015 to Mar. 2016 (mm)', fontsize = 10)
######################## prism #############################################
ax = fig.add_subplot(232)
map = Basemap(projection='merc',llcrnrlon=latlon[0],llcrnrlat=latlon[1],urcrnrlon=latlon[2],urcrnrlat=latlon[3],resolution='i')
x, y = map(lons_prism, lats_prism)
precip_tot = maskoceans(lons_prism, lats_prism, precip_tot)
csAVG = map.contourf(x,y,precip_tot, levels, cmap = cmap, norm=matplotlib.colors.BoundaryNorm(levels,cmap.N))
map.drawcoastlines(linewidth = .5)
map.drawstates()
map.drawcountries()
cbar = map.colorbar(csAVG, location='bottom', pad="5%")
cbar.ax.tick_params(labelsize=12)
plt.title('PRISM', fontsize = 18)
#cbar.ax.set_xlabel('Mean Daily Precipitation from Oct. 2015 to Mar. 2016 (mm)', fontsize = 10)
avg1 = precip_ncar[17:453, 0:540]/precip_tot[17:453, 0:540]
avg = avg1[(avg1 > 0.1) & (avg1 < 5)]
bias_mean = np.average(avg)
######################## bias #############################################
ax = fig.add_subplot(233)
map = Basemap(projection='merc',llcrnrlon=latlon[0],llcrnrlat=latlon[1],urcrnrlon=latlon[2],urcrnrlat=latlon[3],resolution='i')
cmap=plt.cm.BrBG
levels = [0.1, 0.5, 0.6, 0.7, 0.8, 0.9, 1, 1.2, 1.4, 1.6, 1.8,2, 5]
#plt.text(1,1,'Mean bias = %1.3f' % bias_mean,rotation = 0, fontsize = 12)
#levels = np.arange(.45.000001,.1)
ax.set_title('NCAR/PRISM', fontsize = 18)
x, y = map(lons_prism, lats_prism)
csAVG = map.contourf(x,y,precip_ncar/precip_tot, levels,cmap=cmap, norm=matplotlib.colors.BoundaryNorm(levels,cmap.N), vmin = 0.1, vmax = 5)
map.drawcoastlines(linewidth = .5)
map.drawstates()
map.drawcountries()
cbar.ax.tick_params(labelsize=12)
cbar = map.colorbar(csAVG, location='bottom', pad="5%", ticks= [0.1, 0.5, 0.6, 0.7, 0.8, 0.9, 1, 1.2, 1.4, 1.6, 1.8,2,5])
cbar.ax.set_xticklabels(['<0.5','0.5','0.6', '0.7', '0.8', '0.9', '1', '1.2', '1.4', '1.6', '1.8','2','>2'])
#set(cbar,'visible','off')
#cbar.ax.set_xlabel('Mean Daily Precipitation Bias from Oct. 2015 to Mar. 2016 (mm)', fontsize = 10)
plt.annotate('Mean bias = %1.3f' % bias_mean, xy=(0.01, .01), xycoords='axes fraction', fontsize = 11)
plt.savefig("./plots/ncar_prism_climo_%s" % region + ".pdf")
plt.show()
###############################################################################
############ plot hrr data also #############################################
###############################################################################
'''
'''
precip_hrrr = np.loadtxt('hrrr_dailymean.txt')
precip_tot = np.loadtxt('prism_hrrr_dailymean.txt')
###############################################################################
######################## Plot #############################################
###############################################################################
cmap = matplotlib.cm.get_cmap('pyart_NWSRef')
levels = np.arange(.0001,37,.5)
levels = [0, 0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2, 2.5, 3, 3.5, 4, 4.5, 5, 6, 6.5, 7, 7.5, 8 ,8.5, 9,9.5, 10,11, 12, 13, 14, 15, 16, 18, 20, 22,26,30,34,38,42]
######################## hrrr #############################################
ax = fig.add_subplot(234)
#map = Basemap(projection='merc',llcrnrlon=latlon[0],llcrnrlat=latlon[1],urcrnrlon=latlon[2],urcrnrlat=latlon[3],resolution='i')
x, y = map(lons_prism, lats_prism)
precip_hrrr = maskoceans(lons_prism, lats_prism, precip_hrrr)
#map.drawlsmask(land_color=(0, 0, 0, 0), ocean_color='deeppink', lakes=False)
csAVG = map.contourf(x,y,precip_hrrr, levels, cmap = cmap,norm=matplotlib.colors.BoundaryNorm(levels,cmap.N))
map.drawcoastlines(linewidth = .5)
map.drawstates()
map.drawcountries()
cbar = map.colorbar(csAVG, location='bottom', pad="5%")
cbar.ax.tick_params(labelsize=12)
plt.title('HRRR', fontsize = 18)
cbar.ax.set_xlabel('Mean Daily Precipitation from Oct. 2015 to Mar. 2016 (mm)', fontsize = 10)
######################## prism #############################################
ax = fig.add_subplot(235)
map = Basemap(projection='merc',llcrnrlon=latlon[0],llcrnrlat=latlon[1],urcrnrlon=latlon[2],urcrnrlat=latlon[3],resolution='i')
x, y = map(lons_prism, lats_prism)
precip_tot = maskoceans(lons_prism, lats_prism, precip_tot)
csAVG = map.contourf(x,y,precip_tot, levels, cmap = cmap, norm=matplotlib.colors.BoundaryNorm(levels,cmap.N))
map.drawcoastlines(linewidth = .5)
map.drawstates()
map.drawcountries()
cbar = map.colorbar(csAVG, location='bottom', pad="5%")
cbar.ax.tick_params(labelsize=12)
plt.title('PRISM', fontsize = 18)
cbar.ax.set_xlabel('Mean Daily Precipitation from Oct. 2015 to Mar. 2016 (mm)', fontsize = 10)
### Calcualte bias mean of whole array (only include data from the WESTERN US)
avg1 = precip_hrrr[17:453, 0:540]/precip_tot[17:453, 0:540]
avg = avg1[(avg1 > 0.1) & (avg1 < 5)]
bias_mean = np.average(avg)
######################## bias #############################################
ax = fig.add_subplot(236)
map = Basemap(projection='merc',llcrnrlon=latlon[0],llcrnrlat=latlon[1],urcrnrlon=latlon[2],urcrnrlat=latlon[3],resolution='i')
cmap=plt.cm.BrBG
levels = [0.1, 0.5, 0.6, 0.7, 0.8, 0.9, 1, 1.2, 1.4, 1.6, 1.8,2, 5]
#levels = np.arange(.45.000001,.1)
x, y = map(lons_prism, lats_prism)
csAVG = map.contourf(x,y,precip_hrrr/precip_tot, levels,cmap=cmap, norm=matplotlib.colors.BoundaryNorm(levels,cmap.N), vmin = 0.1, vmax = 5)
map.drawcoastlines(linewidth = .5)
map.drawstates()
map.drawcountries()
cbar.ax.tick_params(labelsize=12)
cbar = map.colorbar(csAVG, location='bottom', pad="5%", ticks= [0.1, 0.5, 0.6, 0.7, 0.8, 0.9, 1, 1.2, 1.4, 1.6, 1.8,2,5])
cbar.ax.set_xticklabels(['<0.5','0.5','0.6', '0.7', '0.8', '0.9', '1', '1.2', '1.4', '1.6', '1.8','2','>2'])
plt.title('HRRR/PRISM', fontsize = 18)
cbar.ax.set_xlabel('Mean Daily Precipitation Bias from Oct. 2015 to Mar. 2016 (mm)', fontsize = 10)
#leg = ([], [], label='Mean bias = %1.3f' % bias_mean)
#plt.legend(handles = [leg],loc = "lower left")
#plt.text(.5,.5,'Mean bias = %1.3f' % bias_mean,rotation = 0, fontsize = 12)
plt.annotate('Mean bias = %1.3f' % bias_mean, xy=(0.01, .01), xycoords='axes fraction', fontsize = 11)
plt.savefig("./plots/hrrr_ncar_prism_climo_%s" % region + ".pdf")
plt.show()
'''
| [
"tom.gowan@gmail.com"
] | tom.gowan@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.