blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2
values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313
values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107
values | src_encoding stringclasses 20
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 4 6.02M | extension stringclasses 78
values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ec0809d7d4f26628ab9e2b11572f22c7eb2e0b88 | b652c9863d0f89b71c43164bb56f742253525241 | /manage.py | 3f9046916aa11d07cb2d3aa015ffa7d90ffe772a | [] | no_license | juney3/belt_second | 71bb0a4e6c8bc04cefc3db541ba1f2e2f4072486 | a637fe9d6ef8b340269a9b064ebe4351a49ebad0 | refs/heads/master | 2020-06-18T03:42:42.477003 | 2016-11-28T08:08:34 | 2016-11-28T08:08:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 808 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings module unless the caller has
    # already set one in the environment.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "beltsequel.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        # Django imports fine, so re-raise the original (non-Django) error.
        raise
    # Dispatch to Django's CLI with the command-line arguments as given.
    execute_from_command_line(sys.argv)
| [
"juney37@gmail.com"
] | juney37@gmail.com |
5a3439982d4b8a6ea165a851ae6837298986e0d3 | 6612e406240f2799689bbdc1e53ae93f635268d6 | /superlists/urls.py | ab9126d067d6b5df78837c4e2c71f25a0ebe684d | [] | no_license | jpapadopoulos/ttd | 046bb6380a860d02c20aa608c9fc83d98421fc52 | 29383f604a82591d04edef1407308e537b507ebb | refs/heads/master | 2021-01-25T06:00:56.870274 | 2015-02-26T09:52:05 | 2015-02-26T09:52:05 | 29,437,166 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 345 | py | from django.conf.urls import patterns, include, url
from django.contrib import admin
# URL routes for the superlists project (Django 1.x `patterns` style;
# views are referenced by dotted-path strings).
urlpatterns = patterns('',
    # Site root -> home page of the lists app.
    url(r'^$', 'lists.views.home_page', name='home'),
    # Hard-coded page for the single shared list.
    url(r'^lists/the-only-list-in-the-world/$',
        'lists.views.view_list', name='view_list'
    ),
    # Endpoint for creating a new list.
    url(r'^lists/new$', 'lists.views.new_list', name='new_list'),
)
| [
"pan.papadopoulos80@gmail.com"
] | pan.papadopoulos80@gmail.com |
56292ac34351028af2060c65ffeaacfbb1f9b8b9 | 994d62f3ac064c74e739eac73c615c87f29b8fa1 | /assignment-3/question22.py | b669ae9ebeee2de099078ea487a30a9a081f42ed | [] | no_license | chethanadukkipati/Documents-Similarity | 1190e87c07b0ab4259a4bf4689deb741426a5943 | e6669413f72d81eb89e41fa2e1e768c8695f8e30 | refs/heads/master | 2020-12-18T18:06:31.877566 | 2020-01-22T01:56:38 | 2020-01-22T01:56:38 | 235,479,058 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,548 | py | # -*- coding: utf-8 -*-
from pyspark import SparkContext,SparkConf
from pyspark.sql import SQLContext
from pyspark import sql
from pyspark.sql.types import Row
from pyspark.streaming import StreamingContext
from operator import add,truediv,mul
from nltk.corpus import stopwords
from nltk.tokenize import wordpunct_tokenize
import re
# Spark job: for a target list of words, compute each word's relative
# frequency (occurrences / document length) in every document, then store
# (word -> [(document, frequency), ...]) as Avro output.
# NOTE: uses Python 2 tuple-unpacking lambdas; runs only under Python 2.
conf=SparkConf()
conf.setAppName("Inverted Index")
conf.set("spark.executor.memory","4g")
stop_words = set(stopwords.words('english'))  # built but never used below — TODO confirm intent
sc=SparkContext(conf=conf)
sqlContext=sql.SQLContext(sc)
# (filename, full text) pair per book in the corpus directory.
textrdd=sc.wholeTextFiles("/cosc6339_hw2/gutenberg-500/")
lowerwords=textrdd.map(lambda(file,contents):(file,contents.lower()))
# Drop the first 57 chars of the path (HDFS prefix) and replace
# non-alphanumeric characters with spaces before tokenizing.
removespecial=lowerwords.map(lambda(file,contents):(file[57:],re.sub('[^a-zA-Z0-9]',' ',contents)))
splitingwords=removespecial.map(lambda(file,contents):(file,contents.split()))
# One (file, document_word_count, word) triple per token occurrence.
numberof_words=splitingwords.flatMap(lambda(file,contents):[(file,len(contents),word) for word in contents])
count_words=numberof_words.map(lambda w:(w,1))
# Occurrence count keyed by (file, document_word_count, word).
no_words=count_words.reduceByKey(add,numPartitions=5)
# Words of interest come from a previous job's output; take the first CSV
# field of each line and strip 3 leading / 1 trailing characters.
questiontext=sc.textFile("/bigd11/output_t1_m_5")
list1=[]
for value in questiontext.take(questiontext.count()):
    list1.append(value.split(",")[0][3:-1])
no1_words=no_words.filter(lambda (x,y):x[2] in list1)
# Relative frequency = occurrences / document length, keyed by word.
no2_words=no1_words.map(lambda (x,y):(x[2],(x[0],truediv(y,x[1]))))
# Collect all (document, frequency) pairs per word via list concatenation.
no3_words=no2_words.map(lambda (x,y):(x,[y])).reduceByKey(add,numPartitions=5)
dataframe_result=no3_words.toDF(["Word","List"])
dataframe_result.write.format("com.databricks.spark.avro").save("/bigd11/output3b53.avro")
| [
"noreply@github.com"
] | noreply@github.com |
e57bd5b1c83a97d32364399c502c8686a7c0dce3 | f5a9990f83a4d65cb4cb03d435417f13a66f4dd0 | /229. Majority Element II/majority.py | 4256dca29390fb5c032f1b0e6bf41e6da10f4050 | [] | no_license | Macielyoung/LeetCode | 8faafd8be8f2297625680b01a40ca6759cf661f7 | 0fc4c7af59246e3064db41989a45d9db413a624b | refs/heads/master | 2021-07-06T10:23:39.742416 | 2020-07-16T15:27:39 | 2020-07-16T15:27:39 | 99,790,690 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,537 | py | #-*- coding: UTF-8 -*-
from collections import Counter
class Solution:
    """LeetCode 229: find all elements appearing more than ⌊n/3⌋ times."""

    def majorityElement(self, nums):
        """Return elements occurring more than len(nums) // 3 times.

        Counting approach: O(n) time, O(n) extra space.

        :type nums: List[int]
        :rtype: List[int]
        """
        threshold = len(nums) // 3
        counts = Counter(nums)  # renamed from `dict`, which shadowed the builtin
        return [value for value, cnt in counts.items() if cnt > threshold]

    def majorityElement2(self, nums):
        """Return elements occurring more than len(nums) // 3 times.

        Boyer-Moore majority vote extended to two candidates:
        O(n) time, O(1) extra space. At most two elements can exceed
        the n/3 threshold, so two candidate slots suffice.

        :type nums: List[int]
        :rtype: List[int]
        """
        # Guard: the original indexed nums[0] unconditionally and raised
        # IndexError on an empty list.
        if not nums:
            return []
        # Both candidates start as nums[0]; the `elif` chains below keep the
        # two slots from double-counting while they are still equal.
        cand1 = cand2 = nums[0]
        vote1 = vote2 = 0
        for x in nums:
            if x == cand1:
                vote1 += 1
            elif x == cand2:
                vote2 += 1
            elif vote1 == 0:
                cand1, vote1 = x, 1
            elif vote2 == 0:
                cand2, vote2 = x, 1
            else:
                vote1 -= 1
                vote2 -= 1
        # Second pass: candidates are only *possible* majorities; verify
        # their true counts against the threshold.
        vote1 = vote2 = 0
        for x in nums:
            if x == cand1:
                vote1 += 1
            elif x == cand2:
                vote2 += 1
        threshold = len(nums) // 3
        res = []
        if vote1 > threshold:
            res.append(cand1)
        if vote2 > threshold:
            res.append(cand2)
        return res
if __name__ == '__main__':
    # Manual smoke check: 1 and 2 both appear more than ⌊8/3⌋ = 2 times.
    sample = [1, 1, 1, 3, 3, 2, 2, 2]
    solver = Solution()
    print(solver.majorityElement2(sample))
| [
"291938109@qq.com"
] | 291938109@qq.com |
4c33e3aca15d5af803da190c1a4f63b0f3779bc9 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/resources/azure-mgmt-resource/azure/mgmt/resource/resources/v2016_09_01/operations/_operations.py | 13f814257aab9d3cc4f66a805c212581c9ced361 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 251,605 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
import sys
from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
from ..._serialization import Serializer
from .._vendor import _convert_request, _format_url_section
# MutableMapping moved out of `typing`; pick the right home per interpreter.
if sys.version_info >= (3, 9):
    from collections.abc import MutableMapping
else:
    from typing import MutableMapping  # type: ignore  # pylint: disable=ungrouped-imports
# Loose alias for arbitrary JSON-shaped request/response payloads.
JSON = MutableMapping[str, Any]  # pylint: disable=unsubscriptable-object
T = TypeVar("T")
# Signature of the optional `cls` response-transformation callback.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
# Skip client-side validation of constraints (max_length, pattern, ...).
_SERIALIZER.client_side_validation = False
def build_deployments_delete_request(
    resource_group_name: str, deployment_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP DELETE request for removing a resource-group deployment."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    # Construct URL
    _url = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}",
    )  # pylint: disable=line-too-long
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "deploymentName": _SERIALIZER.url(
            "deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)
def build_deployments_check_existence_request(
    resource_group_name: str, deployment_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP HEAD request that checks whether a deployment exists."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    # Construct URL
    _url = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}",
    )  # pylint: disable=line-too-long
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "deploymentName": _SERIALIZER.url(
            "deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    return HttpRequest(method="HEAD", url=_url, params=_params, **kwargs)
def build_deployments_create_or_update_request(
    resource_group_name: str, deployment_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP PUT request that creates or updates a deployment.

    The request body (the Deployment payload) is attached by the caller via
    ``**kwargs``; this helper only assembles URL, query and headers.
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
    _url = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}",
    )  # pylint: disable=line-too-long
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "deploymentName": _SERIALIZER.url(
            "deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_deployments_get_request(
    resource_group_name: str, deployment_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request that retrieves a single deployment."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
    _url = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}",
    )  # pylint: disable=line-too-long
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "deploymentName": _SERIALIZER.url(
            "deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_deployments_cancel_request(
    resource_group_name: str, deployment_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP POST request that cancels a running deployment."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    # Construct URL
    _url = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}/cancel",
    )  # pylint: disable=line-too-long
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "deploymentName": _SERIALIZER.url(
            "deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    return HttpRequest(method="POST", url=_url, params=_params, **kwargs)
def build_deployments_validate_request(
    resource_group_name: str, deployment_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP POST request that validates a deployment template."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
    _url = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}/validate",
    )  # pylint: disable=line-too-long
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "deploymentName": _SERIALIZER.url(
            "deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_deployments_export_template_request(
    resource_group_name: str, deployment_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP POST request that exports a deployment's template."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
    _url = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}/exportTemplate",
    )  # pylint: disable=line-too-long
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "deploymentName": _SERIALIZER.url(
            "deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_deployments_list_request(
    resource_group_name: str,
    subscription_id: str,
    *,
    filter: Optional[str] = None,
    top: Optional[int] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request listing deployments in a resource group.

    ``filter`` and ``top`` map to the optional ``$filter`` / ``$top`` OData
    query parameters and are omitted when ``None``.
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
    _url = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/",
    )  # pylint: disable=line-too-long
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    if filter is not None:
        _params["$filter"] = _SERIALIZER.query("filter", filter, "str")
    if top is not None:
        _params["$top"] = _SERIALIZER.query("top", top, "int")
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_deployments_calculate_template_hash_request(*, json: JSON, **kwargs: Any) -> HttpRequest:
    """Build the HTTP POST request that hashes a template.

    ``json`` is the template document, sent as the JSON request body; the
    endpoint is tenant-wide (no subscription or resource-group path segment).
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
    _url = kwargs.pop("template_url", "/providers/Microsoft.Resources/calculateTemplateHash")
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, json=json, **kwargs)
def build_providers_unregister_request(
    resource_provider_namespace: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP POST request that unregisters a resource provider."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
    _url = kwargs.pop(
        "template_url", "/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/unregister"
    )
    path_format_arguments = {
        "resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_providers_register_request(
    resource_provider_namespace: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP POST request that registers a resource provider."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
    _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/register")
    path_format_arguments = {
        "resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_providers_list_request(
    subscription_id: str, *, top: Optional[int] = None, expand: Optional[str] = None, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request listing resource providers.

    ``top`` / ``expand`` map to the optional ``$top`` / ``$expand`` query
    parameters and are omitted when ``None``.
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
    _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers")
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    if top is not None:
        _params["$top"] = _SERIALIZER.query("top", top, "int")
    if expand is not None:
        _params["$expand"] = _SERIALIZER.query("expand", expand, "str")
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_providers_get_request(
    resource_provider_namespace: str, subscription_id: str, *, expand: Optional[str] = None, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request that retrieves one resource provider."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
    _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}")
    path_format_arguments = {
        "resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    if expand is not None:
        _params["$expand"] = _SERIALIZER.query("expand", expand, "str")
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_resource_groups_list_resources_request(
    resource_group_name: str,
    subscription_id: str,
    *,
    filter: Optional[str] = None,
    expand: Optional[str] = None,
    top: Optional[int] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request listing resources in a resource group.

    ``filter`` / ``expand`` / ``top`` map to the optional ``$filter`` /
    ``$expand`` / ``$top`` query parameters and are omitted when ``None``.
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
    _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/resources")
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    if filter is not None:
        _params["$filter"] = _SERIALIZER.query("filter", filter, "str")
    if expand is not None:
        _params["$expand"] = _SERIALIZER.query("expand", expand, "str")
    if top is not None:
        _params["$top"] = _SERIALIZER.query("top", top, "int")
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_resource_groups_check_existence_request(
    resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP HEAD request that checks whether a resource group exists."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    # Construct URL
    _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}")
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    return HttpRequest(method="HEAD", url=_url, params=_params, **kwargs)
def build_resource_groups_create_or_update_request(
    resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP PUT request that creates or updates a resource group."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
    _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}")
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_resource_groups_delete_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
    """Build the HTTP DELETE request that deletes a resource group."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    # Construct URL
    _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}")
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)
def build_resource_groups_get_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
    """Build the HTTP GET request that retrieves a resource group."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
    _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}")
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_resource_groups_patch_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
    """Build the HTTP PATCH request that updates a resource group in place."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
    _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}")
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }
    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
def build_resource_groups_export_template_request(
    resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Assemble the POST request that exports a resource group as a template."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", params.pop("api-version", "2016-09-01"))
    content_type: Optional[str] = kwargs.pop("content_type", headers.pop("Content-Type", None))
    accept = headers.pop("Accept", "application/json")

    # Substitute path placeholders into the URL template.
    template = kwargs.pop(
        "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/exportTemplate"
    )
    url: str = _format_url_section(  # type: ignore
        template,
        resourceGroupName=_SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
    )

    # Query string and headers.
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    if content_type is not None:
        headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="POST", url=url, params=params, headers=headers, **kwargs)
def build_resource_groups_list_request(
    subscription_id: str, *, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> HttpRequest:
    """Assemble the GET request that lists the resource groups of a subscription."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", params.pop("api-version", "2016-09-01"))
    accept = headers.pop("Accept", "application/json")

    # Substitute path placeholders into the URL template.
    template = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups")
    url: str = _format_url_section(  # type: ignore
        template,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
    )

    # Optional OData query options, then the mandatory api-version.
    if filter is not None:
        params["$filter"] = _SERIALIZER.query("filter", filter, "str")
    if top is not None:
        params["$top"] = _SERIALIZER.query("top", top, "int")
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=url, params=params, headers=headers, **kwargs)
def build_resources_move_resources_request(
    source_resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Assemble the POST request that moves resources out of a source resource group."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", params.pop("api-version", "2016-09-01"))
    content_type: Optional[str] = kwargs.pop("content_type", headers.pop("Content-Type", None))

    # Substitute path placeholders into the URL template.
    template = kwargs.pop(
        "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{sourceResourceGroupName}/moveResources"
    )
    url: str = _format_url_section(  # type: ignore
        template,
        sourceResourceGroupName=_SERIALIZER.url(
            "source_resource_group_name",
            source_resource_group_name,
            "str",
            max_length=90,
            min_length=1,
            pattern=r"^[-\w\._\(\)]+$",
        ),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
    )

    # Query string and headers (no Accept header: the operation returns no body on success).
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    if content_type is not None:
        headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")

    return HttpRequest(method="POST", url=url, params=params, headers=headers, **kwargs)
def build_resources_list_request(
    subscription_id: str,
    *,
    filter: Optional[str] = None,
    expand: Optional[str] = None,
    top: Optional[int] = None,
    **kwargs: Any
) -> HttpRequest:
    """Assemble the GET request that lists all resources in a subscription."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", params.pop("api-version", "2016-09-01"))
    accept = headers.pop("Accept", "application/json")

    # Substitute path placeholders into the URL template.
    template = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resources")
    url: str = _format_url_section(  # type: ignore
        template,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
    )

    # Optional OData query options, then the mandatory api-version.
    if filter is not None:
        params["$filter"] = _SERIALIZER.query("filter", filter, "str")
    if expand is not None:
        params["$expand"] = _SERIALIZER.query("expand", expand, "str")
    if top is not None:
        params["$top"] = _SERIALIZER.query("top", top, "int")
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=url, params=params, headers=headers, **kwargs)
def build_resources_check_existence_request(
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    subscription_id: str,
    *,
    api_version: str,
    **kwargs: Any
) -> HttpRequest:
    """Assemble the HEAD request that checks whether a resource exists."""
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Substitute path placeholders into the URL template.
    template = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}",
    )  # pylint: disable=line-too-long
    url: str = _format_url_section(  # type: ignore
        template,
        resourceGroupName=_SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        resourceProviderNamespace=_SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
        parentResourcePath=_SERIALIZER.url("parent_resource_path", parent_resource_path, "str", skip_quote=True),
        resourceType=_SERIALIZER.url("resource_type", resource_type, "str", skip_quote=True),
        resourceName=_SERIALIZER.url("resource_name", resource_name, "str"),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
    )

    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return HttpRequest(method="HEAD", url=url, params=params, **kwargs)
def build_resources_delete_request(
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    subscription_id: str,
    *,
    api_version: str,
    **kwargs: Any
) -> HttpRequest:
    """Assemble the DELETE request for a single resource."""
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Substitute path placeholders into the URL template.
    template = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}",
    )  # pylint: disable=line-too-long
    url: str = _format_url_section(  # type: ignore
        template,
        resourceGroupName=_SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        resourceProviderNamespace=_SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
        parentResourcePath=_SERIALIZER.url("parent_resource_path", parent_resource_path, "str", skip_quote=True),
        resourceType=_SERIALIZER.url("resource_type", resource_type, "str", skip_quote=True),
        resourceName=_SERIALIZER.url("resource_name", resource_name, "str"),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
    )

    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return HttpRequest(method="DELETE", url=url, params=params, **kwargs)
def build_resources_create_or_update_request(
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    subscription_id: str,
    *,
    api_version: str,
    **kwargs: Any
) -> HttpRequest:
    """Assemble the PUT request that creates or replaces a single resource."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", headers.pop("Content-Type", None))
    accept = headers.pop("Accept", "application/json")

    # Substitute path placeholders into the URL template.
    template = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}",
    )  # pylint: disable=line-too-long
    url: str = _format_url_section(  # type: ignore
        template,
        resourceGroupName=_SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        resourceProviderNamespace=_SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
        parentResourcePath=_SERIALIZER.url("parent_resource_path", parent_resource_path, "str", skip_quote=True),
        resourceType=_SERIALIZER.url("resource_type", resource_type, "str", skip_quote=True),
        resourceName=_SERIALIZER.url("resource_name", resource_name, "str"),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
    )

    # Query string and headers.
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    if content_type is not None:
        headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=url, params=params, headers=headers, **kwargs)
def build_resources_update_request(
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    subscription_id: str,
    *,
    api_version: str,
    **kwargs: Any
) -> HttpRequest:
    """Assemble the PATCH request that updates a single resource."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", headers.pop("Content-Type", None))
    accept = headers.pop("Accept", "application/json")

    # Substitute path placeholders into the URL template.
    template = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}",
    )  # pylint: disable=line-too-long
    url: str = _format_url_section(  # type: ignore
        template,
        resourceGroupName=_SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        resourceProviderNamespace=_SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
        parentResourcePath=_SERIALIZER.url("parent_resource_path", parent_resource_path, "str", skip_quote=True),
        resourceType=_SERIALIZER.url("resource_type", resource_type, "str", skip_quote=True),
        resourceName=_SERIALIZER.url("resource_name", resource_name, "str"),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
    )

    # Query string and headers.
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    if content_type is not None:
        headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PATCH", url=url, params=params, headers=headers, **kwargs)
def build_resources_get_request(
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    subscription_id: str,
    *,
    api_version: str,
    **kwargs: Any
) -> HttpRequest:
    """Assemble the GET request that retrieves a single resource."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    accept = headers.pop("Accept", "application/json")

    # Substitute path placeholders into the URL template.
    template = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}",
    )  # pylint: disable=line-too-long
    url: str = _format_url_section(  # type: ignore
        template,
        resourceGroupName=_SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        resourceProviderNamespace=_SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
        parentResourcePath=_SERIALIZER.url("parent_resource_path", parent_resource_path, "str", skip_quote=True),
        resourceType=_SERIALIZER.url("resource_type", resource_type, "str", skip_quote=True),
        resourceName=_SERIALIZER.url("resource_name", resource_name, "str"),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
    )

    # Query string and headers.
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=url, params=params, headers=headers, **kwargs)
def build_resources_check_existence_by_id_request(resource_id: str, *, api_version: str, **kwargs: Any) -> HttpRequest:
    """Assemble the HEAD request that checks resource existence by full resource ID."""
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # The resource ID is a full ARM path, so it is inserted without quoting.
    template = kwargs.pop("template_url", "/{resourceId}")
    url: str = _format_url_section(  # type: ignore
        template,
        resourceId=_SERIALIZER.url("resource_id", resource_id, "str", skip_quote=True),
    )

    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return HttpRequest(method="HEAD", url=url, params=params, **kwargs)
def build_resources_delete_by_id_request(resource_id: str, *, api_version: str, **kwargs: Any) -> HttpRequest:
    """Assemble the DELETE request for a resource addressed by full resource ID."""
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # The resource ID is a full ARM path, so it is inserted without quoting.
    template = kwargs.pop("template_url", "/{resourceId}")
    url: str = _format_url_section(  # type: ignore
        template,
        resourceId=_SERIALIZER.url("resource_id", resource_id, "str", skip_quote=True),
    )

    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return HttpRequest(method="DELETE", url=url, params=params, **kwargs)
def build_resources_create_or_update_by_id_request(resource_id: str, *, api_version: str, **kwargs: Any) -> HttpRequest:
    """Assemble the PUT request that creates or replaces a resource by full resource ID."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", headers.pop("Content-Type", None))
    accept = headers.pop("Accept", "application/json")

    # The resource ID is a full ARM path, so it is inserted without quoting.
    template = kwargs.pop("template_url", "/{resourceId}")
    url: str = _format_url_section(  # type: ignore
        template,
        resourceId=_SERIALIZER.url("resource_id", resource_id, "str", skip_quote=True),
    )

    # Query string and headers.
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    if content_type is not None:
        headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=url, params=params, headers=headers, **kwargs)
def build_resources_update_by_id_request(resource_id: str, *, api_version: str, **kwargs: Any) -> HttpRequest:
    """Assemble the PATCH request that updates a resource by full resource ID."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", headers.pop("Content-Type", None))
    accept = headers.pop("Accept", "application/json")

    # The resource ID is a full ARM path, so it is inserted without quoting.
    template = kwargs.pop("template_url", "/{resourceId}")
    url: str = _format_url_section(  # type: ignore
        template,
        resourceId=_SERIALIZER.url("resource_id", resource_id, "str", skip_quote=True),
    )

    # Query string and headers.
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    if content_type is not None:
        headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PATCH", url=url, params=params, headers=headers, **kwargs)
def build_resources_get_by_id_request(resource_id: str, *, api_version: str, **kwargs: Any) -> HttpRequest:
    """Assemble the GET request that retrieves a resource by full resource ID."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    accept = headers.pop("Accept", "application/json")

    # The resource ID is a full ARM path, so it is inserted without quoting.
    template = kwargs.pop("template_url", "/{resourceId}")
    url: str = _format_url_section(  # type: ignore
        template,
        resourceId=_SERIALIZER.url("resource_id", resource_id, "str", skip_quote=True),
    )

    # Query string and headers.
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=url, params=params, headers=headers, **kwargs)
def build_tags_delete_value_request(tag_name: str, tag_value: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
    """Assemble the DELETE request that removes a value from a subscription tag."""
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", params.pop("api-version", "2016-09-01"))

    # Substitute path placeholders into the URL template.
    template = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/tagNames/{tagName}/tagValues/{tagValue}")
    url: str = _format_url_section(  # type: ignore
        template,
        tagName=_SERIALIZER.url("tag_name", tag_name, "str"),
        tagValue=_SERIALIZER.url("tag_value", tag_value, "str"),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
    )

    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return HttpRequest(method="DELETE", url=url, params=params, **kwargs)
def build_tags_create_or_update_value_request(
    tag_name: str, tag_value: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Assemble the PUT request that adds a value to a subscription tag."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", params.pop("api-version", "2016-09-01"))
    accept = headers.pop("Accept", "application/json")

    # Substitute path placeholders into the URL template.
    template = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/tagNames/{tagName}/tagValues/{tagValue}")
    url: str = _format_url_section(  # type: ignore
        template,
        tagName=_SERIALIZER.url("tag_name", tag_name, "str"),
        tagValue=_SERIALIZER.url("tag_value", tag_value, "str"),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
    )

    # Query string and headers.
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=url, params=params, headers=headers, **kwargs)
def build_tags_create_or_update_request(tag_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
    """Assemble the PUT request that creates a subscription tag name."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", params.pop("api-version", "2016-09-01"))
    accept = headers.pop("Accept", "application/json")

    # Substitute path placeholders into the URL template.
    template = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/tagNames/{tagName}")
    url: str = _format_url_section(  # type: ignore
        template,
        tagName=_SERIALIZER.url("tag_name", tag_name, "str"),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
    )

    # Query string and headers.
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=url, params=params, headers=headers, **kwargs)
def build_tags_delete_request(tag_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
    """Assemble the DELETE request that removes a subscription tag name."""
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", params.pop("api-version", "2016-09-01"))

    # Substitute path placeholders into the URL template.
    template = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/tagNames/{tagName}")
    url: str = _format_url_section(  # type: ignore
        template,
        tagName=_SERIALIZER.url("tag_name", tag_name, "str"),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
    )

    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return HttpRequest(method="DELETE", url=url, params=params, **kwargs)
def build_tags_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest:
    """Assemble the GET request that lists the subscription's tag names."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", params.pop("api-version", "2016-09-01"))
    accept = headers.pop("Accept", "application/json")

    # Substitute path placeholders into the URL template.
    template = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/tagNames")
    url: str = _format_url_section(  # type: ignore
        template,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
    )

    # Query string and headers.
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=url, params=params, headers=headers, **kwargs)
def build_deployment_operations_get_request(
    resource_group_name: str, deployment_name: str, operation_id: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Assemble the GET request for a single deployment operation."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", params.pop("api-version", "2016-09-01"))
    accept = headers.pop("Accept", "application/json")

    # Substitute path placeholders into the URL template.
    template = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/deployments/{deploymentName}/operations/{operationId}",
    )  # pylint: disable=line-too-long
    url: str = _format_url_section(  # type: ignore
        template,
        resourceGroupName=_SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        deploymentName=_SERIALIZER.url(
            "deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        operationId=_SERIALIZER.url("operation_id", operation_id, "str"),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
    )

    # Query string and headers.
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=url, params=params, headers=headers, **kwargs)
def build_deployment_operations_list_request(
    resource_group_name: str, deployment_name: str, subscription_id: str, *, top: Optional[int] = None, **kwargs: Any
) -> HttpRequest:
    """Assemble the GET request that lists the operations of a deployment."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", params.pop("api-version", "2016-09-01"))
    accept = headers.pop("Accept", "application/json")

    # Substitute path placeholders into the URL template.
    template = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/deployments/{deploymentName}/operations",
    )  # pylint: disable=line-too-long
    url: str = _format_url_section(  # type: ignore
        template,
        resourceGroupName=_SERIALIZER.url(
            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        deploymentName=_SERIALIZER.url(
            "deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
        ),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str"),
    )

    # Optional page-size limit, then the mandatory api-version.
    if top is not None:
        params["$top"] = _SERIALIZER.query("top", top, "int")
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=url, params=params, headers=headers, **kwargs)
class DeploymentsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.resource.resources.v2016_09_01.ResourceManagementClient`'s
:attr:`deployments` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
    def _delete_initial(  # pylint: disable=inconsistent-return-statements
        self, resource_group_name: str, deployment_name: str, **kwargs: Any
    ) -> None:
        """Send the initial DELETE call of the long-running deployment delete.

        Returns ``None`` (or ``cls(...)`` when a custom response hook is
        supplied); ``begin_delete`` wraps this call in an LROPoller.
        """
        # Status-code -> azure-core exception mapping; callers may extend/override
        # it via the ``error_map`` keyword.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        # Caller-supplied api-version (kwargs or query params) wins over the default.
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        cls: ClsType[None] = kwargs.pop("cls", None)
        request = build_deployments_delete_request(
            resource_group_name=resource_group_name,
            deployment_name=deployment_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self._delete_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        # Only 202 (delete accepted/in progress) and 204 are treated as success.
        if response.status_code not in [202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    # URL template consumed by build_deployments_delete_request above.
    _delete_initial.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}"
    }
    @distributed_trace
    def begin_delete(self, resource_group_name: str, deployment_name: str, **kwargs: Any) -> LROPoller[None]:
        """Deletes a deployment from the deployment history.

        A template deployment that is currently running cannot be deleted. Deleting a template
        deployment removes the associated deployment operations. Deleting a template deployment does
        not affect the state of the resource group. This is an asynchronous operation that returns a
        status of 202 until the template deployment is successfully deleted. The Location response
        header contains the URI that is used to obtain the status of the process. While the process is
        running, a call to the URI in the Location header returns a status of 202. When the process
        finishes, the URI in the Location header returns a status of 204 on success. If the
        asynchronous request failed, the URI in the Location header returns an error-level status code.

        :param resource_group_name: The name of the resource group with the deployment to delete. The
         name is case insensitive. Required.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment to delete. Required.
        :type deployment_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        cls: ClsType[None] = kwargs.pop("cls", None)
        polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        # Without a continuation token, issue the initial DELETE; ``cls`` here
        # captures the raw pipeline response so the poller can read the headers.
        if cont_token is None:
            raw_result = self._delete_initial(  # type: ignore
                resource_group_name=resource_group_name,
                deployment_name=deployment_name,
                api_version=api_version,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        # Drop error_map so the remaining kwargs can be forwarded to the polling method.
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            # Final deserialization hook: delete has no body, so only the
            # optional user-supplied ``cls`` produces a value.
            if cls:
                return cls(pipeline_response, None, {})

        # Resolve the polling strategy: default ARM polling, explicit opt-out, or custom.
        if polling is True:
            polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)  # type: ignore
    # URL template consumed by the delete request builder.
    begin_delete.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}"
    }
    @distributed_trace
    def check_existence(self, resource_group_name: str, deployment_name: str, **kwargs: Any) -> bool:
        """Checks whether the deployment exists.

        :param resource_group_name: The name of the resource group with the deployment to check. The
         name is case insensitive. Required.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment to check. Required.
        :type deployment_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: bool or the result of cls(response)
        :rtype: bool
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Status-code -> azure-core exception mapping; callers may extend/override
        # it via the ``error_map`` keyword.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        # Caller-supplied api-version (kwargs or query params) wins over the default.
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        cls: ClsType[bool] = kwargs.pop("cls", None)
        request = build_deployments_check_existence_request(
            resource_group_name=resource_group_name,
            deployment_name=deployment_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.check_existence.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        # HEAD semantics: 204 and 404 are both expected; anything else is an error.
        if response.status_code not in [204, 404]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
        # 204 falls in the 2xx range (exists -> True); 404 yields False.
        return 200 <= response.status_code <= 299
    # URL template consumed by the check-existence request builder.
    check_existence.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}"
    }
    def _create_or_update_initial(
        self, resource_group_name: str, deployment_name: str, parameters: Union[_models.Deployment, IO], **kwargs: Any
    ) -> _models.DeploymentExtended:
        """Issue the initial PUT of the create-or-update long-running operation.

        Returns the first ``DeploymentExtended`` payload; polling to completion is
        handled by the public ``begin_create_or_update`` wrapper.
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.DeploymentExtended] = kwargs.pop("cls", None)
        content_type = content_type or "application/json"
        _json = None
        _content = None
        # Raw byte streams are sent as-is; model objects are serialized to JSON.
        if isinstance(parameters, (IOBase, bytes)):
            _content = parameters
        else:
            _json = self._serialize.body(parameters, "Deployment")
        request = build_deployments_create_or_update_request(
            resource_group_name=resource_group_name,
            deployment_name=deployment_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self._create_or_update_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # Both 200 (updated) and 201 (created) carry the same payload shape.
        if response.status_code == 200:
            deserialized = self._deserialize("DeploymentExtended", pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize("DeploymentExtended", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore
        return deserialized  # type: ignore

    _create_or_update_initial.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}"
    }
    @overload  # typing-only stub: request body given as a Deployment model
    def begin_create_or_update(
        self,
        resource_group_name: str,
        deployment_name: str,
        parameters: _models.Deployment,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.DeploymentExtended]:
        """Deploys resources to a resource group.

        You can provide the template and parameters directly in the request or link to JSON files.

        :param resource_group_name: The name of the resource group to deploy the resources to. The name
         is case insensitive. The resource group must already exist. Required.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment. Required.
        :type deployment_name: str
        :param parameters: Additional parameters supplied to the operation. Required.
        :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.Deployment
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either DeploymentExtended or the result of
         cls(response)
        :rtype:
         ~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentExtended]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @overload  # typing-only stub: request body given as a raw IO stream
    def begin_create_or_update(
        self,
        resource_group_name: str,
        deployment_name: str,
        parameters: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.DeploymentExtended]:
        """Deploys resources to a resource group.

        You can provide the template and parameters directly in the request or link to JSON files.

        :param resource_group_name: The name of the resource group to deploy the resources to. The name
         is case insensitive. The resource group must already exist. Required.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment. Required.
        :type deployment_name: str
        :param parameters: Additional parameters supplied to the operation. Required.
        :type parameters: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either DeploymentExtended or the result of
         cls(response)
        :rtype:
         ~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentExtended]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @distributed_trace
    def begin_create_or_update(
        self, resource_group_name: str, deployment_name: str, parameters: Union[_models.Deployment, IO], **kwargs: Any
    ) -> LROPoller[_models.DeploymentExtended]:
        """Deploys resources to a resource group.

        You can provide the template and parameters directly in the request or link to JSON files.

        :param resource_group_name: The name of the resource group to deploy the resources to. The name
         is case insensitive. The resource group must already exist. Required.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment. Required.
        :type deployment_name: str
        :param parameters: Additional parameters supplied to the operation. Is either a Deployment type
         or a IO type. Required.
        :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.Deployment or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either DeploymentExtended or the result of
         cls(response)
        :rtype:
         ~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentExtended]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.DeploymentExtended] = kwargs.pop("cls", None)
        polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        # Only issue the initial PUT when not resuming from a saved poller state;
        # ``cls=lambda ...`` keeps the raw pipeline response for the poller.
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                deployment_name=deployment_name,
                parameters=parameters,
                api_version=api_version,
                content_type=content_type,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            # Deserialization callback invoked by the poller on the final response.
            deserialized = self._deserialize("DeploymentExtended", pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        # polling=True -> default ARM polling; polling=False -> no polling;
        # otherwise the caller supplied their own PollingMethod instance.
        if polling is True:
            polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)  # type: ignore

    begin_create_or_update.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}"
    }
    @distributed_trace
    def get(self, resource_group_name: str, deployment_name: str, **kwargs: Any) -> _models.DeploymentExtended:
        """Gets a deployment.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment to get. Required.
        :type deployment_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DeploymentExtended or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentExtended
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Status -> exception mapping; extensible via the ``error_map`` kwarg.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        cls: ClsType[_models.DeploymentExtended] = kwargs.pop("cls", None)
        request = build_deployments_get_request(
            resource_group_name=resource_group_name,
            deployment_name=deployment_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.get.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize("DeploymentExtended", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    get.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}"
    }
    @distributed_trace
    def cancel(  # pylint: disable=inconsistent-return-statements
        self, resource_group_name: str, deployment_name: str, **kwargs: Any
    ) -> None:
        """Cancels a currently running template deployment.

        You can cancel a deployment only if the provisioningState is Accepted or Running. After the
        deployment is canceled, the provisioningState is set to Canceled. Canceling a template
        deployment stops the currently running template deployment and leaves the resource group
        partially deployed.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment to cancel. Required.
        :type deployment_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None or the result of cls(response)
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        cls: ClsType[None] = kwargs.pop("cls", None)
        request = build_deployments_cancel_request(
            resource_group_name=resource_group_name,
            deployment_name=deployment_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.cancel.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        # The service replies 204 No Content on a successful cancel; there is no body.
        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})

    cancel.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}/cancel"
    }
    @overload  # typing-only stub: request body given as a Deployment model
    def validate(
        self,
        resource_group_name: str,
        deployment_name: str,
        parameters: _models.Deployment,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.DeploymentValidateResult:
        """Validates whether the specified template is syntactically correct and will be accepted by Azure
        Resource Manager..

        :param resource_group_name: The name of the resource group the template will be deployed to.
         The name is case insensitive. Required.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment. Required.
        :type deployment_name: str
        :param parameters: Parameters to validate. Required.
        :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.Deployment
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DeploymentValidateResult or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentValidateResult
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @overload  # typing-only stub: request body given as a raw IO stream
    def validate(
        self,
        resource_group_name: str,
        deployment_name: str,
        parameters: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.DeploymentValidateResult:
        """Validates whether the specified template is syntactically correct and will be accepted by Azure
        Resource Manager..

        :param resource_group_name: The name of the resource group the template will be deployed to.
         The name is case insensitive. Required.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment. Required.
        :type deployment_name: str
        :param parameters: Parameters to validate. Required.
        :type parameters: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DeploymentValidateResult or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentValidateResult
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @distributed_trace
    def validate(
        self, resource_group_name: str, deployment_name: str, parameters: Union[_models.Deployment, IO], **kwargs: Any
    ) -> _models.DeploymentValidateResult:
        """Validates whether the specified template is syntactically correct and will be accepted by Azure
        Resource Manager..

        :param resource_group_name: The name of the resource group the template will be deployed to.
         The name is case insensitive. Required.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment. Required.
        :type deployment_name: str
        :param parameters: Parameters to validate. Is either a Deployment type or a IO type. Required.
        :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.Deployment or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DeploymentValidateResult or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentValidateResult
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.DeploymentValidateResult] = kwargs.pop("cls", None)
        content_type = content_type or "application/json"
        _json = None
        _content = None
        # Raw byte streams are sent as-is; model objects are serialized to JSON.
        if isinstance(parameters, (IOBase, bytes)):
            _content = parameters
        else:
            _json = self._serialize.body(parameters, "Deployment")
        request = build_deployments_validate_request(
            resource_group_name=resource_group_name,
            deployment_name=deployment_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self.validate.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        # 400 is an *expected* outcome for this operation: the response body still
        # deserializes to a DeploymentValidateResult describing the validation failure.
        if response.status_code not in [200, 400]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if response.status_code == 200:
            deserialized = self._deserialize("DeploymentValidateResult", pipeline_response)
        if response.status_code == 400:
            deserialized = self._deserialize("DeploymentValidateResult", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore
        return deserialized  # type: ignore

    validate.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}/validate"
    }
    @distributed_trace
    def export_template(
        self, resource_group_name: str, deployment_name: str, **kwargs: Any
    ) -> _models.DeploymentExportResult:
        """Exports the template used for specified deployment.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment from which to get the template. Required.
        :type deployment_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DeploymentExportResult or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentExportResult
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        cls: ClsType[_models.DeploymentExportResult] = kwargs.pop("cls", None)
        request = build_deployments_export_template_request(
            resource_group_name=resource_group_name,
            deployment_name=deployment_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.export_template.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize("DeploymentExportResult", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    export_template.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}/exportTemplate"
    }
    @distributed_trace
    def list(
        self, resource_group_name: str, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
    ) -> Iterable["_models.DeploymentExtended"]:
        """Get all the deployments for a resource group.

        :param resource_group_name: The name of the resource group with the deployments to get. The
         name is case insensitive. Required.
        :type resource_group_name: str
        :param filter: The filter to apply on the operation. For example, you can use
         $filter=provisioningState eq '{state}'. Default value is None.
        :type filter: str
        :param top: The number of results to get. If null is passed, returns all deployments. Default
         value is None.
        :type top: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either DeploymentExtended or the result of cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentExtended]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        cls: ClsType[_models.DeploymentListResult] = kwargs.pop("cls", None)
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page: build the request from the operation's URL template.
            if not next_link:
                request = build_deployments_list_request(
                    resource_group_name=resource_group_name,
                    subscription_id=self._config.subscription_id,
                    filter=filter,
                    top=top,
                    api_version=api_version,
                    template_url=self.list.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            # Pull one page's items plus the link to the following page.
            deserialized = self._deserialize("DeploymentListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch one page through the pipeline, raising on unexpected statuses.
            request = prepare_request(next_link)
            _stream = False
            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
                request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response

        # ItemPaged lazily drives get_next/extract_data as the caller iterates.
        return ItemPaged(get_next, extract_data)

    list.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/"
    }
    @distributed_trace
    def calculate_template_hash(self, template: JSON, **kwargs: Any) -> _models.TemplateHashResult:
        """Calculate the hash of the given template.

        :param template: The template provided to calculate hash. Required.
        :type template: JSON
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TemplateHashResult or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.TemplateHashResult
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json"))
        cls: ClsType[_models.TemplateHashResult] = kwargs.pop("cls", None)
        # The template is arbitrary JSON, hence serialized as a plain "object".
        _json = self._serialize.body(template, "object")
        request = build_deployments_calculate_template_hash_request(
            api_version=api_version,
            content_type=content_type,
            json=_json,
            template_url=self.calculate_template_hash.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize("TemplateHashResult", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # Tenant-level endpoint: no subscription or resource group in the path.
    calculate_template_hash.metadata = {"url": "/providers/Microsoft.Resources/calculateTemplateHash"}
class ProvidersOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.resource.resources.v2016_09_01.ResourceManagementClient`'s
:attr:`providers` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
    @distributed_trace
    def unregister(self, resource_provider_namespace: str, **kwargs: Any) -> _models.Provider:
        """Unregisters a subscription from a resource provider.

        :param resource_provider_namespace: The namespace of the resource provider to unregister.
         Required.
        :type resource_provider_namespace: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Provider or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.Provider
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        cls: ClsType[_models.Provider] = kwargs.pop("cls", None)
        request = build_providers_unregister_request(
            resource_provider_namespace=resource_provider_namespace,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.unregister.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # The response reflects the provider's updated registration state.
        deserialized = self._deserialize("Provider", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    unregister.metadata = {"url": "/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/unregister"}
    @distributed_trace
    def register(self, resource_provider_namespace: str, **kwargs: Any) -> _models.Provider:
        """Registers a subscription with a resource provider.

        :param resource_provider_namespace: The namespace of the resource provider to register.
         Required.
        :type resource_provider_namespace: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Provider or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.Provider
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        cls: ClsType[_models.Provider] = kwargs.pop("cls", None)
        request = build_providers_register_request(
            resource_provider_namespace=resource_provider_namespace,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.register.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # The response reflects the provider's updated registration state.
        deserialized = self._deserialize("Provider", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    register.metadata = {"url": "/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/register"}
@distributed_trace
def list(
    self, top: Optional[int] = None, expand: Optional[str] = None, **kwargs: Any
) -> Iterable["_models.Provider"]:
    """Gets all resource providers for a subscription.

    :param top: The number of results to return. If null is passed returns all deployments. Default
     value is None.
    :type top: int
    :param expand: The properties to include in the results. For example, use &$expand=metadata in
     the query string to retrieve resource provider metadata. To include property aliases in
     response, use $expand=resourceTypes/aliases. Default value is None.
    :type expand: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either Provider or the result of cls(response)
    :rtype:
     ~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2016_09_01.models.Provider]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    cls: ClsType[_models.ProviderListResult] = kwargs.pop("cls", None)

    # Default status-code -> typed-exception mapping; callers may extend it
    # through the ``error_map`` keyword argument.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    def prepare_request(next_link=None):
        # Build the request for the first page (from the operation's template
        # URL) or for a continuation page (from the service-supplied next_link).
        if not next_link:
            request = build_providers_list_request(
                subscription_id=self._config.subscription_id,
                top=top,
                expand=expand,
                api_version=api_version,
                template_url=self.list.metadata["url"],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)

        else:
            # make call to next link with the client's api-version: re-quote the
            # existing query parameters and force the client's api-version onto
            # the continuation request.
            _parsed_next_link = urllib.parse.urlparse(next_link)
            _next_request_params = case_insensitive_dict(
                {
                    key: [urllib.parse.quote(v) for v in value]
                    for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                }
            )
            _next_request_params["api-version"] = self._config.api_version
            request = HttpRequest(
                "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
            request.method = "GET"
        return request

    def extract_data(pipeline_response):
        # Deserialize one page and return (continuation token, page iterator).
        deserialized = self._deserialize("ProviderListResult", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)  # type: ignore
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch a single page, surfacing HTTP failures as typed exceptions.
        request = prepare_request(next_link)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    # ItemPaged drives get_next/extract_data lazily as the caller iterates.
    return ItemPaged(get_next, extract_data)

list.metadata = {"url": "/subscriptions/{subscriptionId}/providers"}
@distributed_trace
def get(self, resource_provider_namespace: str, expand: Optional[str] = None, **kwargs: Any) -> _models.Provider:
    """Gets the specified resource provider.

    :param resource_provider_namespace: The namespace of the resource provider. Required.
    :type resource_provider_namespace: str
    :param expand: The $expand query parameter. For example, to include property aliases in
     response, use $expand=resourceTypes/aliases. Default value is None.
    :type expand: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: Provider or the result of cls(response)
    :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.Provider
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Default status-code -> typed-exception mapping; extendable via ``error_map``.
    status_to_error = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    status_to_error.update(kwargs.pop("error_map", {}) or {})

    header_params = kwargs.pop("headers", {}) or {}
    query_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", query_params.pop("api-version", "2016-09-01"))
    cls: ClsType[_models.Provider] = kwargs.pop("cls", None)

    http_request = build_providers_get_request(
        resource_provider_namespace=resource_provider_namespace,
        subscription_id=self._config.subscription_id,
        expand=expand,
        api_version=api_version,
        template_url=self.get.metadata["url"],
        headers=header_params,
        params=query_params,
    )
    http_request = _convert_request(http_request)
    http_request.url = self._client.format_url(http_request.url)

    pipeline_result: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        http_request, stream=False, **kwargs
    )
    raw_response = pipeline_result.http_response

    # Only 200 is a success for this operation.
    if raw_response.status_code != 200:
        map_error(status_code=raw_response.status_code, response=raw_response, error_map=status_to_error)
        raise HttpResponseError(response=raw_response, error_format=ARMErrorFormat)

    provider = self._deserialize("Provider", pipeline_result)
    if cls:
        return cls(pipeline_result, provider, {})
    return provider

get.metadata = {"url": "/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}"}
class ResourceGroupsOperations:
    """Operations for Azure resource groups (create, read, update, delete, list, export).

    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.resource.resources.v2016_09_01.ResourceManagementClient`'s
        :attr:`resource_groups` attribute.
    """

    # Expose the generated models module so request/response types are reachable
    # as ``client.resource_groups.models``.
    models = _models
def __init__(self, *args, **kwargs):
    """Wire up the operation group's client, config, and (de)serializers.

    Accepts the four collaborators either positionally (client, config,
    serializer, deserializer) or as the equivalent keyword arguments.
    """
    remaining = list(args)

    def _take(kw_name):
        # Prefer the next positional argument; otherwise require the keyword.
        return remaining.pop(0) if remaining else kwargs.pop(kw_name)

    self._client = _take("client")
    self._config = _take("config")
    self._serialize = _take("serializer")
    self._deserialize = _take("deserializer")
@distributed_trace
def list_resources(
    self,
    resource_group_name: str,
    filter: Optional[str] = None,
    expand: Optional[str] = None,
    top: Optional[int] = None,
    **kwargs: Any
) -> Iterable["_models.GenericResourceExpanded"]:
    """Get all the resources for a resource group.

    :param resource_group_name: The resource group with the resources to get. Required.
    :type resource_group_name: str
    :param filter: The filter to apply on the operation. Default value is None.
    :type filter: str
    :param expand: Comma-separated list of additional properties to be included in the response.
     Valid values include ``createdTime``\ , ``changedTime`` and ``provisioningState``. For example,
     ``$expand=createdTime,changedTime``. Default value is None.
    :type expand: str
    :param top: The number of results to return. If null is passed, returns all resources. Default
     value is None.
    :type top: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either GenericResourceExpanded or the result of
     cls(response)
    :rtype:
     ~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResourceExpanded]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    cls: ClsType[_models.ResourceListResult] = kwargs.pop("cls", None)

    # Default status-code -> typed-exception mapping; callers may extend it
    # through the ``error_map`` keyword argument.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    def prepare_request(next_link=None):
        # Build the request for the first page (from the operation's template
        # URL) or for a continuation page (from the service-supplied next_link).
        if not next_link:
            request = build_resource_groups_list_resources_request(
                resource_group_name=resource_group_name,
                subscription_id=self._config.subscription_id,
                filter=filter,
                expand=expand,
                top=top,
                api_version=api_version,
                template_url=self.list_resources.metadata["url"],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)

        else:
            # make call to next link with the client's api-version: re-quote the
            # existing query parameters and force the client's api-version onto
            # the continuation request.
            _parsed_next_link = urllib.parse.urlparse(next_link)
            _next_request_params = case_insensitive_dict(
                {
                    key: [urllib.parse.quote(v) for v in value]
                    for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                }
            )
            _next_request_params["api-version"] = self._config.api_version
            request = HttpRequest(
                "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
            request.method = "GET"
        return request

    def extract_data(pipeline_response):
        # Deserialize one page and return (continuation token, page iterator).
        deserialized = self._deserialize("ResourceListResult", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)  # type: ignore
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch a single page, surfacing HTTP failures as typed exceptions.
        request = prepare_request(next_link)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    # ItemPaged drives get_next/extract_data lazily as the caller iterates.
    return ItemPaged(get_next, extract_data)

list_resources.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/resources"}
@distributed_trace
def check_existence(self, resource_group_name: str, **kwargs: Any) -> bool:
    """Checks whether a resource group exists.

    :param resource_group_name: The name of the resource group to check. The name is case
     insensitive. Required.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: bool or the result of cls(response)
    :rtype: bool
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Default status-code -> typed-exception mapping; extendable via ``error_map``.
    status_to_error = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    status_to_error.update(kwargs.pop("error_map", {}) or {})

    header_params = kwargs.pop("headers", {}) or {}
    query_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", query_params.pop("api-version", "2016-09-01"))
    cls: ClsType[None] = kwargs.pop("cls", None)

    http_request = build_resource_groups_check_existence_request(
        resource_group_name=resource_group_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.check_existence.metadata["url"],
        headers=header_params,
        params=query_params,
    )
    http_request = _convert_request(http_request)
    http_request.url = self._client.format_url(http_request.url)

    pipeline_result: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        http_request, stream=False, **kwargs
    )
    raw_response = pipeline_result.http_response

    # Existence check semantics: 204 = the group exists, 404 = it does not.
    # Anything else is a genuine error.
    if raw_response.status_code not in (204, 404):
        map_error(status_code=raw_response.status_code, response=raw_response, error_map=status_to_error)
        raise HttpResponseError(response=raw_response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_result, None, {})
    # A 2xx status means "exists"; the allowed 404 therefore yields False.
    return raw_response.status_code // 100 == 2

check_existence.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}"}
@overload
def create_or_update(
    self,
    resource_group_name: str,
    parameters: _models.ResourceGroup,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> _models.ResourceGroup:
    """Creates a resource group.

    :param resource_group_name: The name of the resource group to create or update. Required.
    :type resource_group_name: str
    :param parameters: Parameters supplied to the create or update a resource group. Required.
    :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
    :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ResourceGroup or the result of cls(response)
    :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
    :raises ~azure.core.exceptions.HttpResponseError:
    """

@overload
def create_or_update(
    self, resource_group_name: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.ResourceGroup:
    """Creates a resource group.

    :param resource_group_name: The name of the resource group to create or update. Required.
    :type resource_group_name: str
    :param parameters: Parameters supplied to the create or update a resource group. Required.
    :type parameters: IO
    :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ResourceGroup or the result of cls(response)
    :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
    :raises ~azure.core.exceptions.HttpResponseError:
    """

@distributed_trace
def create_or_update(
    self, resource_group_name: str, parameters: Union[_models.ResourceGroup, IO], **kwargs: Any
) -> _models.ResourceGroup:
    """Creates a resource group.

    :param resource_group_name: The name of the resource group to create or update. Required.
    :type resource_group_name: str
    :param parameters: Parameters supplied to the create or update a resource group. Is either a
     ResourceGroup type or a IO type. Required.
    :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup or IO
    :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
     Default value is None.
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ResourceGroup or the result of cls(response)
    :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Default status-code -> typed-exception mapping; extendable via ``error_map``.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[_models.ResourceGroup] = kwargs.pop("cls", None)

    content_type = content_type or "application/json"
    _json = None
    _content = None
    # Raw streams/bytes are sent as-is; model instances are serialized to JSON.
    if isinstance(parameters, (IOBase, bytes)):
        _content = parameters
    else:
        _json = self._serialize.body(parameters, "ResourceGroup")

    request = build_resource_groups_create_or_update_request(
        resource_group_name=resource_group_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=_json,
        content=_content,
        template_url=self.create_or_update.metadata["url"],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    _stream = False
    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=_stream, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200, 201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # 200 (updated) and 201 (created) both carry a ResourceGroup body.
    if response.status_code == 200:
        deserialized = self._deserialize("ResourceGroup", pipeline_response)

    if response.status_code == 201:
        deserialized = self._deserialize("ResourceGroup", pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})  # type: ignore

    return deserialized  # type: ignore

create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}"}
def _delete_initial(  # pylint: disable=inconsistent-return-statements
    self, resource_group_name: str, **kwargs: Any
) -> None:
    """Issue the initial DELETE call of the long-running delete operation."""
    # Default status-code -> typed-exception mapping; extendable via ``error_map``.
    status_to_error = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    status_to_error.update(kwargs.pop("error_map", {}) or {})

    header_params = kwargs.pop("headers", {}) or {}
    query_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", query_params.pop("api-version", "2016-09-01"))
    cls: ClsType[None] = kwargs.pop("cls", None)

    http_request = build_resource_groups_delete_request(
        resource_group_name=resource_group_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self._delete_initial.metadata["url"],
        headers=header_params,
        params=query_params,
    )
    http_request = _convert_request(http_request)
    http_request.url = self._client.format_url(http_request.url)

    pipeline_result: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        http_request, stream=False, **kwargs
    )
    raw_response = pipeline_result.http_response

    # 200 = deleted synchronously; 202 = accepted for asynchronous deletion.
    if raw_response.status_code not in (200, 202):
        map_error(status_code=raw_response.status_code, response=raw_response, error_map=status_to_error)
        raise HttpResponseError(response=raw_response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_result, None, {})

_delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}"}
@distributed_trace
def begin_delete(self, resource_group_name: str, **kwargs: Any) -> LROPoller[None]:
    """Deletes a resource group.

    When you delete a resource group, all of its resources are also deleted. Deleting a resource
    group deletes all of its template deployments and currently stored operations.

    :param resource_group_name: The name of the resource group to delete. The name is case
     insensitive. Required.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    cls: ClsType[None] = kwargs.pop("cls", None)
    polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token: Optional[str] = kwargs.pop("continuation_token", None)
    # Only issue the initial DELETE when not resuming from a saved poller state.
    if cont_token is None:
        raw_result = self._delete_initial(  # type: ignore
            resource_group_name=resource_group_name,
            api_version=api_version,
            cls=lambda x, y, z: x,
            headers=_headers,
            params=_params,
            **kwargs
        )
    kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
        # The delete LRO produces no body; just invoke the user callback, if any.
        if cls:
            return cls(pipeline_response, None, {})

    # Resolve the polling strategy: default ARM polling, no polling, or custom.
    if polling is True:
        polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
    elif polling is False:
        polling_method = cast(PollingMethod, NoPolling())
    else:
        polling_method = polling
    if cont_token:
        # Rehydrate a poller from a previously saved continuation token.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)  # type: ignore

begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}"}
@distributed_trace
def get(self, resource_group_name: str, **kwargs: Any) -> _models.ResourceGroup:
    """Gets a resource group.

    :param resource_group_name: The name of the resource group to get. The name is case
     insensitive. Required.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ResourceGroup or the result of cls(response)
    :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Default status-code -> typed-exception mapping; extendable via ``error_map``.
    status_to_error = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    status_to_error.update(kwargs.pop("error_map", {}) or {})

    header_params = kwargs.pop("headers", {}) or {}
    query_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", query_params.pop("api-version", "2016-09-01"))
    cls: ClsType[_models.ResourceGroup] = kwargs.pop("cls", None)

    http_request = build_resource_groups_get_request(
        resource_group_name=resource_group_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.get.metadata["url"],
        headers=header_params,
        params=query_params,
    )
    http_request = _convert_request(http_request)
    http_request.url = self._client.format_url(http_request.url)

    pipeline_result: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        http_request, stream=False, **kwargs
    )
    raw_response = pipeline_result.http_response

    # Only 200 is a success for this operation.
    if raw_response.status_code != 200:
        map_error(status_code=raw_response.status_code, response=raw_response, error_map=status_to_error)
        raise HttpResponseError(response=raw_response, error_format=ARMErrorFormat)

    resource_group = self._deserialize("ResourceGroup", pipeline_result)
    if cls:
        return cls(pipeline_result, resource_group, {})
    return resource_group

get.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}"}
@overload
def patch(
    self,
    resource_group_name: str,
    parameters: _models.ResourceGroup,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> _models.ResourceGroup:
    """Updates a resource group.

    Resource groups can be updated through a simple PATCH operation to a group address. The format
    of the request is the same as that for creating a resource group. If a field is unspecified,
    the current value is retained.

    :param resource_group_name: The name of the resource group to update. The name is case
     insensitive. Required.
    :type resource_group_name: str
    :param parameters: Parameters supplied to update a resource group. Required.
    :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
    :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ResourceGroup or the result of cls(response)
    :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
    :raises ~azure.core.exceptions.HttpResponseError:
    """

@overload
def patch(
    self, resource_group_name: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.ResourceGroup:
    """Updates a resource group.

    Resource groups can be updated through a simple PATCH operation to a group address. The format
    of the request is the same as that for creating a resource group. If a field is unspecified,
    the current value is retained.

    :param resource_group_name: The name of the resource group to update. The name is case
     insensitive. Required.
    :type resource_group_name: str
    :param parameters: Parameters supplied to update a resource group. Required.
    :type parameters: IO
    :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ResourceGroup or the result of cls(response)
    :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
    :raises ~azure.core.exceptions.HttpResponseError:
    """

@distributed_trace
def patch(
    self, resource_group_name: str, parameters: Union[_models.ResourceGroup, IO], **kwargs: Any
) -> _models.ResourceGroup:
    """Updates a resource group.

    Resource groups can be updated through a simple PATCH operation to a group address. The format
    of the request is the same as that for creating a resource group. If a field is unspecified,
    the current value is retained.

    :param resource_group_name: The name of the resource group to update. The name is case
     insensitive. Required.
    :type resource_group_name: str
    :param parameters: Parameters supplied to update a resource group. Is either a ResourceGroup
     type or a IO type. Required.
    :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup or IO
    :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
     Default value is None.
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ResourceGroup or the result of cls(response)
    :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Default status-code -> typed-exception mapping; extendable via ``error_map``.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[_models.ResourceGroup] = kwargs.pop("cls", None)

    content_type = content_type or "application/json"
    _json = None
    _content = None
    # Raw streams/bytes are sent as-is; model instances are serialized to JSON.
    if isinstance(parameters, (IOBase, bytes)):
        _content = parameters
    else:
        _json = self._serialize.body(parameters, "ResourceGroup")

    request = build_resource_groups_patch_request(
        resource_group_name=resource_group_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=_json,
        content=_content,
        template_url=self.patch.metadata["url"],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    _stream = False
    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=_stream, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize("ResourceGroup", pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

patch.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}"}
@overload
def export_template(
    self,
    resource_group_name: str,
    parameters: _models.ExportTemplateRequest,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> _models.ResourceGroupExportResult:
    """Captures the specified resource group as a template.

    :param resource_group_name: The name of the resource group to export as a template. Required.
    :type resource_group_name: str
    :param parameters: Parameters for exporting the template. Required.
    :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.ExportTemplateRequest
    :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ResourceGroupExportResult or the result of cls(response)
    :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroupExportResult
    :raises ~azure.core.exceptions.HttpResponseError:
    """

@overload
def export_template(
    self, resource_group_name: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.ResourceGroupExportResult:
    """Captures the specified resource group as a template.

    :param resource_group_name: The name of the resource group to export as a template. Required.
    :type resource_group_name: str
    :param parameters: Parameters for exporting the template. Required.
    :type parameters: IO
    :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ResourceGroupExportResult or the result of cls(response)
    :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroupExportResult
    :raises ~azure.core.exceptions.HttpResponseError:
    """

@distributed_trace
def export_template(
    self, resource_group_name: str, parameters: Union[_models.ExportTemplateRequest, IO], **kwargs: Any
) -> _models.ResourceGroupExportResult:
    """Captures the specified resource group as a template.

    :param resource_group_name: The name of the resource group to export as a template. Required.
    :type resource_group_name: str
    :param parameters: Parameters for exporting the template. Is either a ExportTemplateRequest
     type or a IO type. Required.
    :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.ExportTemplateRequest or IO
    :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
     Default value is None.
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ResourceGroupExportResult or the result of cls(response)
    :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroupExportResult
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Default status-code -> typed-exception mapping; extendable via ``error_map``.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[_models.ResourceGroupExportResult] = kwargs.pop("cls", None)

    content_type = content_type or "application/json"
    _json = None
    _content = None
    # Raw streams/bytes are sent as-is; model instances are serialized to JSON.
    if isinstance(parameters, (IOBase, bytes)):
        _content = parameters
    else:
        _json = self._serialize.body(parameters, "ExportTemplateRequest")

    request = build_resource_groups_export_template_request(
        resource_group_name=resource_group_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=_json,
        content=_content,
        template_url=self.export_template.metadata["url"],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    _stream = False
    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=_stream, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize("ResourceGroupExportResult", pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

export_template.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/exportTemplate"
}
@distributed_trace
def list(
    self, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> Iterable["_models.ResourceGroup"]:
    """Gets all the resource groups for a subscription.

    :param filter: The filter to apply on the operation. Default value is None.
    :type filter: str
    :param top: The number of results to return. If null is passed, returns all resource groups.
     Default value is None.
    :type top: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either ResourceGroup or the result of cls(response)
    :rtype:
     ~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    cls: ClsType[_models.ResourceGroupListResult] = kwargs.pop("cls", None)

    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    def prepare_request(next_link=None):
        # First page: build from the operation's URL template. Subsequent
        # pages: re-issue the service-provided next_link as a plain GET.
        if not next_link:

            request = build_resource_groups_list_request(
                subscription_id=self._config.subscription_id,
                filter=filter,
                top=top,
                api_version=api_version,
                template_url=self.list.metadata["url"],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)

        else:
            # make call to next link with the client's api-version
            _parsed_next_link = urllib.parse.urlparse(next_link)
            _next_request_params = case_insensitive_dict(
                {
                    key: [urllib.parse.quote(v) for v in value]
                    for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                }
            )
            _next_request_params["api-version"] = self._config.api_version
            request = HttpRequest(
                "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
            request.method = "GET"
        return request

    def extract_data(pipeline_response):
        # Returns (continuation token or None, iterator over this page's items).
        deserialized = self._deserialize("ResourceGroupListResult", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)  # type: ignore
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch one page and validate the status code before handing it to extract_data.
        request = prepare_request(next_link)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return ItemPaged(get_next, extract_data)

list.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups"}
class ResourcesOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.resource.resources.v2016_09_01.ResourceManagementClient`'s
:attr:`resources` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
    """Wire up the operation group's collaborators.

    Accepts the client, config, serializer and deserializer either
    positionally (in that order) or as keyword arguments.
    """
    remaining = list(args)

    def _take(key):
        # Positional arguments win; fall back to the keyword of the same role.
        return remaining.pop(0) if remaining else kwargs.pop(key)

    self._client = _take("client")
    self._config = _take("config")
    self._serialize = _take("serializer")
    self._deserialize = _take("deserializer")
def _move_resources_initial(  # pylint: disable=inconsistent-return-statements
    self, source_resource_group_name: str, parameters: Union[_models.ResourcesMoveInfo, IO], **kwargs: Any
) -> None:
    """Initial (non-polling) request of the move-resources long-running operation.

    Sends the POST once and validates the status code; ``begin_move_resources``
    wraps this call in an ``LROPoller``.
    """
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[None] = kwargs.pop("cls", None)

    content_type = content_type or "application/json"
    _json = None
    _content = None
    # Raw streams/bytes are sent through untouched; model objects are serialized to JSON.
    if isinstance(parameters, (IOBase, bytes)):
        _content = parameters
    else:
        _json = self._serialize.body(parameters, "ResourcesMoveInfo")

    request = build_resources_move_resources_request(
        source_resource_group_name=source_resource_group_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=_json,
        content=_content,
        template_url=self._move_resources_initial.metadata["url"],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    _stream = False
    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=_stream, **kwargs
    )

    response = pipeline_response.http_response

    # 202 = accepted (async), 204 = completed with no content.
    if response.status_code not in [202, 204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})

_move_resources_initial.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourceGroups/{sourceResourceGroupName}/moveResources"
}
@overload
def begin_move_resources(
    self,
    source_resource_group_name: str,
    parameters: _models.ResourcesMoveInfo,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> LROPoller[None]:
    # Overload: typed-model body.
    """Moves resources from one resource group to another resource group.

    The resources to move must be in the same source resource group. The target resource group may
    be in a different subscription. When moving resources, both the source group and the target
    group are locked for the duration of the operation. Write and delete operations are blocked on
    the groups until the move completes.

    :param source_resource_group_name: The name of the resource group containing the resources to
     move. Required.
    :type source_resource_group_name: str
    :param parameters: Parameters for moving resources. Required.
    :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourcesMoveInfo
    :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
@overload
def begin_move_resources(
    self, source_resource_group_name: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
) -> LROPoller[None]:
    # Overload: raw binary/stream body.
    """Moves resources from one resource group to another resource group.

    The resources to move must be in the same source resource group. The target resource group may
    be in a different subscription. When moving resources, both the source group and the target
    group are locked for the duration of the operation. Write and delete operations are blocked on
    the groups until the move completes.

    :param source_resource_group_name: The name of the resource group containing the resources to
     move. Required.
    :type source_resource_group_name: str
    :param parameters: Parameters for moving resources. Required.
    :type parameters: IO
    :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
@distributed_trace
def begin_move_resources(
    self, source_resource_group_name: str, parameters: Union[_models.ResourcesMoveInfo, IO], **kwargs: Any
) -> LROPoller[None]:
    """Moves resources from one resource group to another resource group.

    The resources to move must be in the same source resource group. The target resource group may
    be in a different subscription. When moving resources, both the source group and the target
    group are locked for the duration of the operation. Write and delete operations are blocked on
    the groups until the move completes.

    :param source_resource_group_name: The name of the resource group containing the resources to
     move. Required.
    :type source_resource_group_name: str
    :param parameters: Parameters for moving resources. Is either a ResourcesMoveInfo type or a IO
     type. Required.
    :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourcesMoveInfo or IO
    :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
     Default value is None.
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[None] = kwargs.pop("cls", None)
    polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token: Optional[str] = kwargs.pop("continuation_token", None)
    # With a continuation token the initial request is skipped and the poller
    # is rebuilt from the saved state below.
    if cont_token is None:
        raw_result = self._move_resources_initial(  # type: ignore
            source_resource_group_name=source_resource_group_name,
            parameters=parameters,
            api_version=api_version,
            content_type=content_type,
            cls=lambda x, y, z: x,  # keep the raw PipelineResponse for the poller
            headers=_headers,
            params=_params,
            **kwargs
        )
    kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
    elif polling is False:
        polling_method = cast(PollingMethod, NoPolling())
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)  # type: ignore

begin_move_resources.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourceGroups/{sourceResourceGroupName}/moveResources"
}
@distributed_trace
def list(
    self, filter: Optional[str] = None, expand: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> Iterable["_models.GenericResourceExpanded"]:
    """Get all the resources in a subscription.

    :param filter: The filter to apply on the operation. Default value is None.
    :type filter: str
    :param expand: Comma-separated list of additional properties to be included in the response.
     Valid values include ``createdTime``\ , ``changedTime`` and ``provisioningState``. For example,
     ``$expand=createdTime,changedTime``. Default value is None.
    :type expand: str
    :param top: The number of results to return. If null is passed, returns all resources. Default
     value is None.
    :type top: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either GenericResourceExpanded or the result of
     cls(response)
    :rtype:
     ~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResourceExpanded]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
    cls: ClsType[_models.ResourceListResult] = kwargs.pop("cls", None)

    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    def prepare_request(next_link=None):
        # First page: build from the operation's URL template. Subsequent
        # pages: re-issue the service-provided next_link as a plain GET.
        if not next_link:

            request = build_resources_list_request(
                subscription_id=self._config.subscription_id,
                filter=filter,
                expand=expand,
                top=top,
                api_version=api_version,
                template_url=self.list.metadata["url"],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)

        else:
            # make call to next link with the client's api-version
            _parsed_next_link = urllib.parse.urlparse(next_link)
            _next_request_params = case_insensitive_dict(
                {
                    key: [urllib.parse.quote(v) for v in value]
                    for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                }
            )
            _next_request_params["api-version"] = self._config.api_version
            request = HttpRequest(
                "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
            request.method = "GET"
        return request

    def extract_data(pipeline_response):
        # Returns (continuation token or None, iterator over this page's items).
        deserialized = self._deserialize("ResourceListResult", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)  # type: ignore
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch one page and validate the status code before handing it to extract_data.
        request = prepare_request(next_link)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return ItemPaged(get_next, extract_data)

list.metadata = {"url": "/subscriptions/{subscriptionId}/resources"}
@distributed_trace
def check_existence(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    **kwargs: Any
) -> bool:
    """Checks whether a resource exists.

    Issues a HEAD-style existence probe: the service answers 204 when the
    resource exists and 404 when it does not, so the boolean result is derived
    from the status code rather than a response body.

    :param resource_group_name: The name of the resource group containing the resource to check.
     The name is case insensitive. Required.
    :type resource_group_name: str
    :param resource_provider_namespace: The resource provider of the resource to check. Required.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity. Required.
    :type parent_resource_path: str
    :param resource_type: The resource type. Required.
    :type resource_type: str
    :param resource_name: The name of the resource to check whether it exists. Required.
    :type resource_name: str
    :param api_version: The API version to use for the operation. Required.
    :type api_version: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: bool or the result of cls(response)
    :rtype: bool
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    header_parameters = kwargs.pop("headers", {}) or {}
    query_parameters = kwargs.pop("params", {}) or {}
    cls: ClsType[None] = kwargs.pop("cls", None)

    probe_request = build_resources_check_existence_request(
        resource_group_name=resource_group_name,
        resource_provider_namespace=resource_provider_namespace,
        parent_resource_path=parent_resource_path,
        resource_type=resource_type,
        resource_name=resource_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.check_existence.metadata["url"],
        headers=header_parameters,
        params=query_parameters,
    )
    probe_request = _convert_request(probe_request)
    probe_request.url = self._client.format_url(probe_request.url)

    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        probe_request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    # Anything other than exists (204) / not-found (404) is a genuine error.
    if response.status_code not in (204, 404):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
    # 204 falls in the 2xx range -> True; 404 does not -> False.
    return 200 <= response.status_code <= 299

check_existence.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}"
}
def _delete_initial(  # pylint: disable=inconsistent-return-statements
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    **kwargs: Any
) -> None:
    """Initial (non-polling) request of the delete long-running operation.

    Sends the DELETE once and validates the status code; ``begin_delete``
    wraps this call in an ``LROPoller``.
    """
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = kwargs.pop("params", {}) or {}

    cls: ClsType[None] = kwargs.pop("cls", None)

    request = build_resources_delete_request(
        resource_group_name=resource_group_name,
        resource_provider_namespace=resource_provider_namespace,
        parent_resource_path=parent_resource_path,
        resource_type=resource_type,
        resource_name=resource_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self._delete_initial.metadata["url"],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    _stream = False
    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=_stream, **kwargs
    )

    response = pipeline_response.http_response

    # 200/202 = accepted or in progress, 204 = already gone / completed.
    if response.status_code not in [200, 202, 204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})

_delete_initial.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}"
}
@distributed_trace
def begin_delete(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    **kwargs: Any
) -> LROPoller[None]:
    """Deletes a resource.

    :param resource_group_name: The name of the resource group that contains the resource to
     delete. The name is case insensitive. Required.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource provider. Required.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity. Required.
    :type parent_resource_path: str
    :param resource_type: The resource type. Required.
    :type resource_type: str
    :param resource_name: The name of the resource to delete. Required.
    :type resource_name: str
    :param api_version: The API version to use for the operation. Required.
    :type api_version: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = kwargs.pop("params", {}) or {}

    cls: ClsType[None] = kwargs.pop("cls", None)
    polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token: Optional[str] = kwargs.pop("continuation_token", None)
    # With a continuation token the initial request is skipped and the poller
    # is rebuilt from the saved state below.
    if cont_token is None:
        raw_result = self._delete_initial(  # type: ignore
            resource_group_name=resource_group_name,
            resource_provider_namespace=resource_provider_namespace,
            parent_resource_path=parent_resource_path,
            resource_type=resource_type,
            resource_name=resource_name,
            api_version=api_version,
            cls=lambda x, y, z: x,  # keep the raw PipelineResponse for the poller
            headers=_headers,
            params=_params,
            **kwargs
        )
    kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
    elif polling is False:
        polling_method = cast(PollingMethod, NoPolling())
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)  # type: ignore

begin_delete.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}"
}
def _create_or_update_initial(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    parameters: Union[_models.GenericResource, IO],
    **kwargs: Any
) -> Optional[_models.GenericResource]:
    """Initial (non-polling) request of the create-or-update long-running operation.

    Sends the PUT once; ``begin_create_or_update`` wraps this call in an
    ``LROPoller``. Returns the deserialized resource when the service responds
    with a body (200/201), otherwise None (202 accepted).
    """
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = kwargs.pop("params", {}) or {}

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[Optional[_models.GenericResource]] = kwargs.pop("cls", None)

    content_type = content_type or "application/json"
    _json = None
    _content = None
    # Raw streams/bytes are sent through untouched; model objects are serialized to JSON.
    if isinstance(parameters, (IOBase, bytes)):
        _content = parameters
    else:
        _json = self._serialize.body(parameters, "GenericResource")

    request = build_resources_create_or_update_request(
        resource_group_name=resource_group_name,
        resource_provider_namespace=resource_provider_namespace,
        parent_resource_path=parent_resource_path,
        resource_type=resource_type,
        resource_name=resource_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=_json,
        content=_content,
        template_url=self._create_or_update_initial.metadata["url"],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    _stream = False
    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=_stream, **kwargs
    )

    response = pipeline_response.http_response

    if response.status_code not in [200, 201, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = None
    # 200 (updated) and 201 (created) carry the resource body; 202 does not.
    if response.status_code == 200:
        deserialized = self._deserialize("GenericResource", pipeline_response)

    if response.status_code == 201:
        deserialized = self._deserialize("GenericResource", pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

_create_or_update_initial.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}"
}
@overload
def begin_create_or_update(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    parameters: _models.GenericResource,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> LROPoller[_models.GenericResource]:
    # Overload: typed-model body.
    """Creates a resource.

    :param resource_group_name: The name of the resource group for the resource. The name is case
     insensitive. Required.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource provider. Required.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity. Required.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource to create. Required.
    :type resource_type: str
    :param resource_name: The name of the resource to create. Required.
    :type resource_name: str
    :param api_version: The API version to use for the operation. Required.
    :type api_version: str
    :param parameters: Parameters for creating or updating the resource. Required.
    :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource
    :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either GenericResource or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
@overload
def begin_create_or_update(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    parameters: IO,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> LROPoller[_models.GenericResource]:
    # Overload: raw binary/stream body.
    """Creates a resource.

    :param resource_group_name: The name of the resource group for the resource. The name is case
     insensitive. Required.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource provider. Required.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity. Required.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource to create. Required.
    :type resource_type: str
    :param resource_name: The name of the resource to create. Required.
    :type resource_name: str
    :param api_version: The API version to use for the operation. Required.
    :type api_version: str
    :param parameters: Parameters for creating or updating the resource. Required.
    :type parameters: IO
    :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either GenericResource or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
@distributed_trace
def begin_create_or_update(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    parameters: Union[_models.GenericResource, IO],
    **kwargs: Any
) -> LROPoller[_models.GenericResource]:
    """Creates a resource.

    :param resource_group_name: The name of the resource group for the resource. The name is case
     insensitive. Required.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource provider. Required.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity. Required.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource to create. Required.
    :type resource_type: str
    :param resource_name: The name of the resource to create. Required.
    :type resource_name: str
    :param api_version: The API version to use for the operation. Required.
    :type api_version: str
    :param parameters: Parameters for creating or updating the resource. Is either a
     GenericResource type or a IO type. Required.
    :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource or IO
    :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
     Default value is None.
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either GenericResource or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = kwargs.pop("params", {}) or {}

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[_models.GenericResource] = kwargs.pop("cls", None)
    polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token: Optional[str] = kwargs.pop("continuation_token", None)
    # With a continuation token the initial request is skipped and the poller
    # is rebuilt from the saved state below.
    if cont_token is None:
        raw_result = self._create_or_update_initial(
            resource_group_name=resource_group_name,
            resource_provider_namespace=resource_provider_namespace,
            parent_resource_path=parent_resource_path,
            resource_type=resource_type,
            resource_name=resource_name,
            api_version=api_version,
            parameters=parameters,
            content_type=content_type,
            cls=lambda x, y, z: x,  # keep the raw PipelineResponse for the poller
            headers=_headers,
            params=_params,
            **kwargs
        )
    kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Deserialize the final response once polling has completed.
        deserialized = self._deserialize("GenericResource", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    if polling is True:
        polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
    elif polling is False:
        polling_method = cast(PollingMethod, NoPolling())
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)  # type: ignore

begin_create_or_update.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}"
}
def _update_initial(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    parameters: Union[_models.GenericResource, IO],
    **kwargs: Any
) -> Optional[_models.GenericResource]:
    """Make the initial (non-polling) request of the update LRO.

    Returns the deserialized ``GenericResource`` for a 200 response, and
    ``None`` for a 202 response (operation accepted, still in progress).
    """
    # Map well-known failure status codes to typed exceptions; callers can
    # extend or override the mapping via the ``error_map`` kwarg.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = kwargs.pop("params", {}) or {}

    # Content-Type may be supplied either as a keyword or as an explicit header.
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[Optional[_models.GenericResource]] = kwargs.pop("cls", None)

    content_type = content_type or "application/json"
    _json = None
    _content = None
    # Raw streams/bytes are sent as-is; model objects are serialized to JSON.
    if isinstance(parameters, (IOBase, bytes)):
        _content = parameters
    else:
        _json = self._serialize.body(parameters, "GenericResource")

    request = build_resources_update_request(
        resource_group_name=resource_group_name,
        resource_provider_namespace=resource_provider_namespace,
        parent_resource_path=parent_resource_path,
        resource_type=resource_type,
        resource_name=resource_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=_json,
        content=_content,
        template_url=self._update_initial.metadata["url"],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    _stream = False
    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=_stream, **kwargs
    )

    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # Only a 200 carries a resource body; a 202 leaves ``deserialized`` as None.
    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize("GenericResource", pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

_update_initial.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}"
}
@overload
def begin_update(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    parameters: _models.GenericResource,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> LROPoller[_models.GenericResource]:
    # Typing-only overload; the runtime implementation is the undecorated
    # ``begin_update`` defined below.
    """Updates a resource.

    Overload accepting ``parameters`` as a GenericResource model.

    :param resource_group_name: The name of the resource group for the resource. The name is case
     insensitive. Required.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource provider. Required.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity. Required.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource to update. Required.
    :type resource_type: str
    :param resource_name: The name of the resource to update. Required.
    :type resource_name: str
    :param api_version: The API version to use for the operation. Required.
    :type api_version: str
    :param parameters: Parameters for updating the resource. Required.
    :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource
    :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either GenericResource or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
@overload
def begin_update(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    parameters: IO,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> LROPoller[_models.GenericResource]:
    # Typing-only overload; the runtime implementation is the undecorated
    # ``begin_update`` defined below.
    """Updates a resource.

    Overload accepting ``parameters`` as a binary stream (IO).

    :param resource_group_name: The name of the resource group for the resource. The name is case
     insensitive. Required.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource provider. Required.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity. Required.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource to update. Required.
    :type resource_type: str
    :param resource_name: The name of the resource to update. Required.
    :type resource_name: str
    :param api_version: The API version to use for the operation. Required.
    :type api_version: str
    :param parameters: Parameters for updating the resource. Required.
    :type parameters: IO
    :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either GenericResource or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
@distributed_trace
def begin_update(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    parameters: Union[_models.GenericResource, IO],
    **kwargs: Any
) -> LROPoller[_models.GenericResource]:
    """Updates a resource.

    :param resource_group_name: The name of the resource group for the resource. The name is case
     insensitive. Required.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource provider. Required.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity. Required.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource to update. Required.
    :type resource_type: str
    :param resource_name: The name of the resource to update. Required.
    :type resource_name: str
    :param api_version: The API version to use for the operation. Required.
    :type api_version: str
    :param parameters: Parameters for updating the resource. Is either a GenericResource type or a
     IO type. Required.
    :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource or IO
    :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
     Default value is None.
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either GenericResource or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = kwargs.pop("params", {}) or {}

    # Content-Type may be supplied either as a keyword or as an explicit header.
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[_models.GenericResource] = kwargs.pop("cls", None)
    polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token: Optional[str] = kwargs.pop("continuation_token", None)
    # Only issue the initial request when not resuming from a saved poller state.
    if cont_token is None:
        raw_result = self._update_initial(
            resource_group_name=resource_group_name,
            resource_provider_namespace=resource_provider_namespace,
            parent_resource_path=parent_resource_path,
            resource_type=resource_type,
            resource_name=resource_name,
            api_version=api_version,
            parameters=parameters,
            content_type=content_type,
            cls=lambda x, y, z: x,  # keep the raw PipelineResponse for the poller
            headers=_headers,
            params=_params,
            **kwargs
        )
    kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Deserialization callback applied to the LRO's final response.
        deserialized = self._deserialize("GenericResource", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # polling=True -> default ARMPolling; polling=False -> no polling;
    # any other value is treated as a caller-supplied PollingMethod.
    if polling is True:
        polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
    elif polling is False:
        polling_method = cast(PollingMethod, NoPolling())
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)  # type: ignore

begin_update.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}"
}
@distributed_trace
def get(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    **kwargs: Any
) -> _models.GenericResource:
    """Gets a resource.

    :param resource_group_name: The name of the resource group containing the resource to get. The
     name is case insensitive. Required.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource provider. Required.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity. Required.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource. Required.
    :type resource_type: str
    :param resource_name: The name of the resource to get. Required.
    :type resource_name: str
    :param api_version: The API version to use for the operation. Required.
    :type api_version: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: GenericResource or the result of cls(response)
    :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Well-known failure codes mapped to typed exceptions; overridable via kwargs.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    header_parameters = kwargs.pop("headers", {}) or {}
    query_parameters = kwargs.pop("params", {}) or {}
    cls: ClsType[_models.GenericResource] = kwargs.pop("cls", None)

    # Build and normalize the GET request for the resource URL template.
    request = build_resources_get_request(
        resource_group_name=resource_group_name,
        resource_provider_namespace=resource_provider_namespace,
        parent_resource_path=parent_resource_path,
        resource_type=resource_type,
        resource_name=resource_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.get.metadata["url"],
        headers=header_parameters,
        params=query_parameters,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    # Anything other than 200 is an error for this operation.
    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize("GenericResource", pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized

get.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}"
}
@distributed_trace
def check_existence_by_id(self, resource_id: str, api_version: str, **kwargs: Any) -> bool:
    """Checks by ID whether a resource exists.

    :param resource_id: The fully qualified ID of the resource, including the resource name and
     resource type. Use the format,
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
     Required.
    :type resource_id: str
    :param api_version: The API version to use for the operation. Required.
    :type api_version: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: bool or the result of cls(response)
    :rtype: bool
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Well-known failure codes mapped to typed exceptions; overridable via kwargs.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    header_parameters = kwargs.pop("headers", {}) or {}
    query_parameters = kwargs.pop("params", {}) or {}
    cls: ClsType[None] = kwargs.pop("cls", None)

    request = build_resources_check_existence_by_id_request(
        resource_id=resource_id,
        api_version=api_version,
        template_url=self.check_existence_by_id.metadata["url"],
        headers=header_parameters,
        params=query_parameters,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    # 204 means the resource exists, 404 means it does not; both are expected.
    if response.status_code not in (204, 404):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
    # A 2xx status (204) -> True; 404 -> False.
    return 200 <= response.status_code <= 299

check_existence_by_id.metadata = {"url": "/{resourceId}"}
def _delete_by_id_initial(  # pylint: disable=inconsistent-return-statements
    self, resource_id: str, api_version: str, **kwargs: Any
) -> None:
    """Issue the initial (non-polling) DELETE request of the delete-by-ID LRO."""
    # Well-known failure codes mapped to typed exceptions; overridable via kwargs.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    header_parameters = kwargs.pop("headers", {}) or {}
    query_parameters = kwargs.pop("params", {}) or {}
    cls: ClsType[None] = kwargs.pop("cls", None)

    request = build_resources_delete_by_id_request(
        resource_id=resource_id,
        api_version=api_version,
        template_url=self._delete_by_id_initial.metadata["url"],
        headers=header_parameters,
        params=query_parameters,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in (200, 202, 204):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})

_delete_by_id_initial.metadata = {"url": "/{resourceId}"}
@distributed_trace
def begin_delete_by_id(self, resource_id: str, api_version: str, **kwargs: Any) -> LROPoller[None]:
    """Deletes a resource by ID.

    :param resource_id: The fully qualified ID of the resource, including the resource name and
     resource type. Use the format,
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
     Required.
    :type resource_id: str
    :param api_version: The API version to use for the operation. Required.
    :type api_version: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = kwargs.pop("params", {}) or {}

    cls: ClsType[None] = kwargs.pop("cls", None)
    polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token: Optional[str] = kwargs.pop("continuation_token", None)
    # Only issue the initial request when not resuming from a saved poller state.
    if cont_token is None:
        raw_result = self._delete_by_id_initial(  # type: ignore
            resource_id=resource_id,
            api_version=api_version,
            cls=lambda x, y, z: x,  # keep the raw PipelineResponse for the poller
            headers=_headers,
            params=_params,
            **kwargs
        )
    kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
        # Delete has no body to deserialize; only invoke the custom callback if given.
        if cls:
            return cls(pipeline_response, None, {})

    # polling=True -> default ARMPolling; polling=False -> no polling;
    # any other value is treated as a caller-supplied PollingMethod.
    if polling is True:
        polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
    elif polling is False:
        polling_method = cast(PollingMethod, NoPolling())
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)  # type: ignore

begin_delete_by_id.metadata = {"url": "/{resourceId}"}
def _create_or_update_by_id_initial(
    self, resource_id: str, api_version: str, parameters: Union[_models.GenericResource, IO], **kwargs: Any
) -> Optional[_models.GenericResource]:
    """Make the initial (non-polling) request of the create-or-update-by-ID LRO.

    Returns the deserialized ``GenericResource`` for a 200/201 response, and
    ``None`` for a 202 response (operation accepted, still in progress).
    """
    # Map well-known failure status codes to typed exceptions; callers can
    # extend or override the mapping via the ``error_map`` kwarg.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = kwargs.pop("params", {}) or {}

    # Content-Type may be supplied either as a keyword or as an explicit header.
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[Optional[_models.GenericResource]] = kwargs.pop("cls", None)

    content_type = content_type or "application/json"
    _json = None
    _content = None
    # Raw streams/bytes are sent as-is; model objects are serialized to JSON.
    if isinstance(parameters, (IOBase, bytes)):
        _content = parameters
    else:
        _json = self._serialize.body(parameters, "GenericResource")

    request = build_resources_create_or_update_by_id_request(
        resource_id=resource_id,
        api_version=api_version,
        content_type=content_type,
        json=_json,
        content=_content,
        template_url=self._create_or_update_by_id_initial.metadata["url"],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    _stream = False
    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=_stream, **kwargs
    )

    response = pipeline_response.http_response

    if response.status_code not in [200, 201, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # 200 (updated) and 201 (created) carry a resource body; 202 leaves None.
    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize("GenericResource", pipeline_response)

    if response.status_code == 201:
        deserialized = self._deserialize("GenericResource", pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

_create_or_update_by_id_initial.metadata = {"url": "/{resourceId}"}
@overload
def begin_create_or_update_by_id(
    self,
    resource_id: str,
    api_version: str,
    parameters: _models.GenericResource,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> LROPoller[_models.GenericResource]:
    # Typing-only overload; the runtime implementation is the undecorated
    # ``begin_create_or_update_by_id`` defined below.
    """Create a resource by ID.

    Overload accepting ``parameters`` as a GenericResource model.

    :param resource_id: The fully qualified ID of the resource, including the resource name and
     resource type. Use the format,
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
     Required.
    :type resource_id: str
    :param api_version: The API version to use for the operation. Required.
    :type api_version: str
    :param parameters: Create or update resource parameters. Required.
    :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource
    :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either GenericResource or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
@overload
def begin_create_or_update_by_id(
    self,
    resource_id: str,
    api_version: str,
    parameters: IO,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> LROPoller[_models.GenericResource]:
    # Typing-only overload; the runtime implementation is the undecorated
    # ``begin_create_or_update_by_id`` defined below.
    """Create a resource by ID.

    Overload accepting ``parameters`` as a binary stream (IO).

    :param resource_id: The fully qualified ID of the resource, including the resource name and
     resource type. Use the format,
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
     Required.
    :type resource_id: str
    :param api_version: The API version to use for the operation. Required.
    :type api_version: str
    :param parameters: Create or update resource parameters. Required.
    :type parameters: IO
    :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either GenericResource or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
@distributed_trace
def begin_create_or_update_by_id(
    self, resource_id: str, api_version: str, parameters: Union[_models.GenericResource, IO], **kwargs: Any
) -> LROPoller[_models.GenericResource]:
    """Create a resource by ID.

    :param resource_id: The fully qualified ID of the resource, including the resource name and
     resource type. Use the format,
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
     Required.
    :type resource_id: str
    :param api_version: The API version to use for the operation. Required.
    :type api_version: str
    :param parameters: Create or update resource parameters. Is either a GenericResource type or a
     IO type. Required.
    :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource or IO
    :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
     Default value is None.
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either GenericResource or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = kwargs.pop("params", {}) or {}

    # Content-Type may be supplied either as a keyword or as an explicit header.
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[_models.GenericResource] = kwargs.pop("cls", None)
    polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token: Optional[str] = kwargs.pop("continuation_token", None)
    # Only issue the initial request when not resuming from a saved poller state.
    if cont_token is None:
        raw_result = self._create_or_update_by_id_initial(
            resource_id=resource_id,
            api_version=api_version,
            parameters=parameters,
            content_type=content_type,
            cls=lambda x, y, z: x,  # keep the raw PipelineResponse for the poller
            headers=_headers,
            params=_params,
            **kwargs
        )
    kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Deserialization callback applied to the LRO's final response.
        deserialized = self._deserialize("GenericResource", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # polling=True -> default ARMPolling; polling=False -> no polling;
    # any other value is treated as a caller-supplied PollingMethod.
    if polling is True:
        polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
    elif polling is False:
        polling_method = cast(PollingMethod, NoPolling())
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)  # type: ignore

begin_create_or_update_by_id.metadata = {"url": "/{resourceId}"}
def _update_by_id_initial(
    self, resource_id: str, api_version: str, parameters: Union[_models.GenericResource, IO], **kwargs: Any
) -> Optional[_models.GenericResource]:
    """Make the initial (non-polling) request of the update-by-ID LRO.

    Returns the deserialized ``GenericResource`` for a 200 response, and
    ``None`` for a 202 response (operation accepted, still in progress).
    """
    # Map well-known failure status codes to typed exceptions; callers can
    # extend or override the mapping via the ``error_map`` kwarg.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = kwargs.pop("params", {}) or {}

    # Content-Type may be supplied either as a keyword or as an explicit header.
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[Optional[_models.GenericResource]] = kwargs.pop("cls", None)

    content_type = content_type or "application/json"
    _json = None
    _content = None
    # Raw streams/bytes are sent as-is; model objects are serialized to JSON.
    if isinstance(parameters, (IOBase, bytes)):
        _content = parameters
    else:
        _json = self._serialize.body(parameters, "GenericResource")

    request = build_resources_update_by_id_request(
        resource_id=resource_id,
        api_version=api_version,
        content_type=content_type,
        json=_json,
        content=_content,
        template_url=self._update_by_id_initial.metadata["url"],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    _stream = False
    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=_stream, **kwargs
    )

    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # Only a 200 carries a resource body; a 202 leaves ``deserialized`` as None.
    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize("GenericResource", pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

_update_by_id_initial.metadata = {"url": "/{resourceId}"}
@overload
def begin_update_by_id(
    self,
    resource_id: str,
    api_version: str,
    parameters: _models.GenericResource,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> LROPoller[_models.GenericResource]:
    # Typing-only overload; the runtime implementation is the undecorated
    # ``begin_update_by_id`` defined below.
    """Updates a resource by ID.

    Overload accepting ``parameters`` as a GenericResource model.

    :param resource_id: The fully qualified ID of the resource, including the resource name and
     resource type. Use the format,
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
     Required.
    :type resource_id: str
    :param api_version: The API version to use for the operation. Required.
    :type api_version: str
    :param parameters: Update resource parameters. Required.
    :type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource
    :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either GenericResource or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
@overload
def begin_update_by_id(
    self,
    resource_id: str,
    api_version: str,
    parameters: IO,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> LROPoller[_models.GenericResource]:
    # Typing-only overload; the runtime implementation is the undecorated
    # ``begin_update_by_id`` defined below.
    """Updates a resource by ID.

    Overload accepting ``parameters`` as a binary stream (IO).

    :param resource_id: The fully qualified ID of the resource, including the resource name and
     resource type. Use the format,
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
     Required.
    :type resource_id: str
    :param api_version: The API version to use for the operation. Required.
    :type api_version: str
    :param parameters: Update resource parameters. Required.
    :type parameters: IO
    :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either GenericResource or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
@distributed_trace
def begin_update_by_id(
self, resource_id: str, api_version: str, parameters: Union[_models.GenericResource, IO], **kwargs: Any
) -> LROPoller[_models.GenericResource]:
"""Updates a resource by ID.
:param resource_id: The fully qualified ID of the resource, including the resource name and
resource type. Use the format,
/subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
Required.
:type resource_id: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:param parameters: Update resource parameters. Is either a GenericResource type or a IO type.
Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either GenericResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.GenericResource] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._update_by_id_initial(
resource_id=resource_id,
api_version=api_version,
parameters=parameters,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("GenericResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_update_by_id.metadata = {"url": "/{resourceId}"}
@distributed_trace
def get_by_id(self, resource_id: str, api_version: str, **kwargs: Any) -> _models.GenericResource:
"""Gets a resource by ID.
:param resource_id: The fully qualified ID of the resource, including the resource name and
resource type. Use the format,
/subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
Required.
:type resource_id: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: GenericResource or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[_models.GenericResource] = kwargs.pop("cls", None)
request = build_resources_get_by_id_request(
resource_id=resource_id,
api_version=api_version,
template_url=self.get_by_id.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("GenericResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_by_id.metadata = {"url": "/{resourceId}"}
class TagsOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.
        Instead, you should access the following operations through
        :class:`~azure.mgmt.resource.resources.v2016_09_01.ResourceManagementClient`'s
        :attr:`tags` attribute.
    """
    # Generated models module, exposed as a class attribute.
    models = _models
    def __init__(self, *args, **kwargs):
        # The service client injects its pipeline client, configuration,
        # serializer and deserializer, either positionally or by keyword.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
    @distributed_trace
    def delete_value(  # pylint: disable=inconsistent-return-statements
        self, tag_name: str, tag_value: str, **kwargs: Any
    ) -> None:
        """Deletes a tag value.
        :param tag_name: The name of the tag. Required.
        :type tag_name: str
        :param tag_value: The value of the tag to delete. Required.
        :type tag_value: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None or the result of cls(response)
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Default status-code -> exception mapping; callers may extend it via
        # the ``error_map`` keyword argument.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        # api-version can be overridden via kwargs or query params.
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        cls: ClsType[None] = kwargs.pop("cls", None)
        # Build the request from this operation's URL template.
        request = build_tags_delete_value_request(
            tag_name=tag_name,
            tag_value=tag_value,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.delete_value.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        # Synchronous, non-streaming send through the client pipeline.
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        # 200 and 204 are accepted; anything else raises a mapped error.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    delete_value.metadata = {"url": "/subscriptions/{subscriptionId}/tagNames/{tagName}/tagValues/{tagValue}"}
    @distributed_trace
    def create_or_update_value(self, tag_name: str, tag_value: str, **kwargs: Any) -> _models.TagValue:
        """Creates a tag value. The name of the tag must already exist.
        :param tag_name: The name of the tag. Required.
        :type tag_name: str
        :param tag_value: The value of the tag to create. Required.
        :type tag_value: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TagValue or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.TagValue
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        cls: ClsType[_models.TagValue] = kwargs.pop("cls", None)
        request = build_tags_create_or_update_value_request(
            tag_name=tag_name,
            tag_value=tag_value,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.create_or_update_value.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # Both 200 and 201 responses deserialize to the same TagValue model.
        if response.status_code == 200:
            deserialized = self._deserialize("TagValue", pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize("TagValue", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore
        return deserialized  # type: ignore
    create_or_update_value.metadata = {"url": "/subscriptions/{subscriptionId}/tagNames/{tagName}/tagValues/{tagValue}"}
    @distributed_trace
    def create_or_update(self, tag_name: str, **kwargs: Any) -> _models.TagDetails:
        """Creates a tag in the subscription.
        The tag name can have a maximum of 512 characters and is case insensitive. Tag names created by
        Azure have prefixes of microsoft, azure, or windows. You cannot create tags with one of these
        prefixes.
        :param tag_name: The name of the tag to create. Required.
        :type tag_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TagDetails or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.TagDetails
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        cls: ClsType[_models.TagDetails] = kwargs.pop("cls", None)
        request = build_tags_create_or_update_request(
            tag_name=tag_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.create_or_update.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # Both 200 and 201 responses deserialize to the same TagDetails model.
        if response.status_code == 200:
            deserialized = self._deserialize("TagDetails", pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize("TagDetails", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore
        return deserialized  # type: ignore
    create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/tagNames/{tagName}"}
    @distributed_trace
    def delete(self, tag_name: str, **kwargs: Any) -> None:  # pylint: disable=inconsistent-return-statements
        """Deletes a tag from the subscription.
        You must remove all values from a resource tag before you can delete it.
        :param tag_name: The name of the tag. Required.
        :type tag_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None or the result of cls(response)
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        cls: ClsType[None] = kwargs.pop("cls", None)
        request = build_tags_delete_request(
            tag_name=tag_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.delete.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    delete.metadata = {"url": "/subscriptions/{subscriptionId}/tagNames/{tagName}"}
    @distributed_trace
    def list(self, **kwargs: Any) -> Iterable["_models.TagDetails"]:
        """Gets the names and values of all resource tags that are defined in a subscription.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either TagDetails or the result of cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2016_09_01.models.TagDetails]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        cls: ClsType[_models.TagsListResult] = kwargs.pop("cls", None)
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        def prepare_request(next_link=None):
            # First page uses the operation's URL template; later pages follow
            # the service-provided next_link, re-applying the client api-version.
            if not next_link:
                request = build_tags_list_request(
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.list.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        def extract_data(pipeline_response):
            # Deserialize one page: return (link-to-next-page, iterator of items).
            deserialized = self._deserialize("TagsListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            # Fetch a single page and raise on any non-200 status.
            request = prepare_request(next_link)
            _stream = False
            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
                request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(get_next, extract_data)
    list.metadata = {"url": "/subscriptions/{subscriptionId}/tagNames"}
class DeploymentOperationsOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.
        Instead, you should access the following operations through
        :class:`~azure.mgmt.resource.resources.v2016_09_01.ResourceManagementClient`'s
        :attr:`deployment_operations` attribute.
    """
    # Generated models module, exposed as a class attribute.
    models = _models
    def __init__(self, *args, **kwargs):
        # The service client injects its pipeline client, configuration,
        # serializer and deserializer, either positionally or by keyword.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
    @distributed_trace
    def get(
        self, resource_group_name: str, deployment_name: str, operation_id: str, **kwargs: Any
    ) -> _models.DeploymentOperation:
        """Gets a deployments operation.
        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment. Required.
        :type deployment_name: str
        :param operation_id: The ID of the operation to get. Required.
        :type operation_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DeploymentOperation or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentOperation
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Default status-code -> exception mapping; callers may extend it via
        # the ``error_map`` keyword argument.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        cls: ClsType[_models.DeploymentOperation] = kwargs.pop("cls", None)
        # Build the request from this operation's URL template.
        request = build_deployment_operations_get_request(
            resource_group_name=resource_group_name,
            deployment_name=deployment_name,
            operation_id=operation_id,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.get.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        # Synchronous, non-streaming send through the client pipeline.
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize("DeploymentOperation", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/deployments/{deploymentName}/operations/{operationId}"
    }
    @distributed_trace
    def list(
        self, resource_group_name: str, deployment_name: str, top: Optional[int] = None, **kwargs: Any
    ) -> Iterable["_models.DeploymentOperation"]:
        """Gets all deployments operations for a deployment.
        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment with the operation to get. Required.
        :type deployment_name: str
        :param top: The number of results to return. Default value is None.
        :type top: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either DeploymentOperation or the result of cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentOperation]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
        cls: ClsType[_models.DeploymentOperationsListResult] = kwargs.pop("cls", None)
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        def prepare_request(next_link=None):
            # First page uses the operation's URL template; later pages follow
            # the service-provided next_link, re-applying the client api-version.
            if not next_link:
                request = build_deployment_operations_list_request(
                    resource_group_name=resource_group_name,
                    deployment_name=deployment_name,
                    subscription_id=self._config.subscription_id,
                    top=top,
                    api_version=api_version,
                    template_url=self.list.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        def extract_data(pipeline_response):
            # Deserialize one page: return (link-to-next-page, iterator of items).
            deserialized = self._deserialize("DeploymentOperationsListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            # Fetch a single page and raise on any non-200 status.
            request = prepare_request(next_link)
            _stream = False
            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
                request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(get_next, extract_data)
    list.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/deployments/{deploymentName}/operations"
    }
| [
"noreply@github.com"
] | noreply@github.com |
24cae832f6190135baaa6606b8ed53befc8c96ff | 680c8334b09a7d85f351e50eea37b6658e9f58a8 | /api/v1/externalApi/agentsApi.py | 657577159929696411e9e6f3ec33e9bcdb4dde46 | [] | no_license | matiascifuentes/incidenciasApi | 4bd2b78ccb9ae1da2da860ab35dab61c13afa57e | 41e17d14751c7641576c3f09f896873e0ea7fde3 | refs/heads/main | 2023-03-04T09:01:51.393679 | 2021-02-17T14:46:30 | 2021-02-17T14:46:30 | 338,838,133 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,182 | py | import requests
import json
class AgentsApi:
    """Thin client for the remote agents mock API.

    Every public method returns a ``(success, payload)`` tuple: ``success`` is
    False when the HTTP call failed, and ``payload`` carries the result (or an
    HTTP-like status code for :meth:`create_agent`).

    Fixes over the original: every request now carries a timeout (the calls
    could previously hang forever), network errors are translated into the
    documented failure tuples instead of raising, and the duplicated linear
    scan in verify_agent/agent_exists is shared.
    """

    # Seconds to wait before giving up on a request; prevents indefinite hangs.
    REQUEST_TIMEOUT = 10

    def __init__(self):
        self.endpoint = 'https://60280ddadd4afd001754aea8.mockapi.io/agents'

    def create_agent(self, agent):
        """Create *agent* remotely unless its name ('nombre') is already taken.

        :param agent: dict with at least a 'nombre' key, posted as form data.
        :return: (True, 201) on creation, (False, 409) if the name exists,
            (False, 502) when any backend call fails.
        """
        success, exists = self.agent_exists(agent['nombre'])
        if not success:
            return False, 502
        if exists:
            return False, 409
        try:
            result = requests.post(self.endpoint, data=agent, timeout=self.REQUEST_TIMEOUT)
        except requests.RequestException:
            # Connection/timeout problems are reported, not raised.
            return False, 502
        if result.status_code == 201:
            return True, 201
        return False, 502

    def get_all(self):
        """Fetch every agent. Returns (True, list-of-dicts) or (False, None)."""
        try:
            result = requests.get(self.endpoint, timeout=self.REQUEST_TIMEOUT)
        except requests.RequestException:
            return False, None
        if result.status_code == 200:
            return True, result.json()
        return False, None

    def _find_agent(self, predicate):
        """Shared linear scan over all agents.

        :return: (success, matched) where matched is None when the agent list
            could not be fetched.
        """
        success, agents = self.get_all()
        if not success:
            return success, None
        return success, any(predicate(a) for a in agents)

    def verify_agent(self, agent):
        """Check that an agent with this 'nombre' and 'contrasena' exists.

        NOTE(review): credentials travel and compare in plaintext -- acceptable
        only against a mock API.
        """
        return self._find_agent(
            lambda a: a['nombre'] == agent['nombre'] and a['contrasena'] == agent['contrasena']
        )

    def agent_exists(self, usuario):
        """Return (success, bool): whether an agent named *usuario* exists."""
        return self._find_agent(lambda a: a['nombre'] == usuario)
| [
"matiascifuenteslara@gmail.com"
] | matiascifuenteslara@gmail.com |
def power(x, y):
    """Return x**y for an integer exponent y using fast exponentiation.

    Runs in O(log |y|) multiplications. Fixes over the original:
    ``y // 2`` (floor division) instead of ``y / 2``, which under Python 3
    produced floats and extremely deep recursion; negative exponents are
    handled by taking the reciprocal instead of recursing forever.

    :raises ZeroDivisionError: for ``power(0, negative)``.
    """
    if y < 0:
        return 1.0 / power(x, -y)
    if y == 0:
        return 1
    temp = power(x, y // 2)  # exact integer halving in both Py2 and Py3
    if y % 2 == 0:
        return temp * temp
    return x * temp * temp
if __name__ == "__main__":
    # Read base and exponent from stdin. The original used the Python-2-only
    # `print expr` statement (a SyntaxError on Python 3) and bare input();
    # int(input()) plus print(...) works identically on both interpreters.
    base = int(input())
    exponent = int(input())
    print(power(base, exponent))
| [
"haikenthk@gmail.com"
] | haikenthk@gmail.com |
b8457379ab2d5f54fb43bbbc50a627ba24fc6ac7 | 83a52a38689f2f2202271f40b38aa3371efdf1e4 | /data/handle_raw.py | cfc8f34be7e841369362375ad21748c270eae6b5 | [] | no_license | zhysora/PSGan-Family | 0131f8ddb17ef0e2d0cd21fd7ec9af394515a52d | 8b7ba358365500d8079634bde3aa13ebd198f6c1 | refs/heads/master | 2022-05-23T15:07:32.778291 | 2022-03-31T07:07:00 | 2022-03-31T07:07:00 | 246,801,374 | 37 | 13 | null | null | null | null | UTF-8 | Python | false | false | 4,411 | py | ############## import ##############
from __future__ import division
import gdal, ogr, os, osr
import numpy as np
import cv2
import sys
sys.path.append('../')
from utils import array2raster
############## arguments ##############
# Configuration for the raw -> dataset conversion performed below.
dataDir = '/data/zh/PSData'  # root of data directory (contains Raw/ and Dataset/)
satellite = 'WV-2'  # name of dataset (sub-directory under Raw/ and Dataset/)
tot = 9  # number of raw images
imagesIndex1 = range(1, tot + 1)  # image indices for the training/testing set
imagesIndex2 = range(1, 2)  # image indices for the origin_test set
def downsample(img, ratio=4):
    """Shrink *img* so each spatial dimension is divided by ``ratio``.

    :param img: HxW(xC) image array (anything cv2.resize accepts).
    :param ratio: integer downscale factor, default 4.
    :return: the resized image of size (h // ratio, w // ratio).
    """
    height, width = img.shape[0], img.shape[1]
    target_size = (width // ratio, height // ratio)  # cv2 expects (w, h)
    return cv2.resize(img, target_size)
def upsample(img, ratio=4):
    """Enlarge *img* so each spatial dimension is multiplied by ``ratio``.

    :param img: HxW(xC) image array (anything cv2.resize accepts).
    :param ratio: integer upscale factor, default 4.
    :return: the resized image of size (h * ratio, w * ratio).
    """
    height, width = img.shape[0], img.shape[1]
    target_size = (width * ratio, height * ratio)  # cv2 expects (w, h)
    return cv2.resize(img, target_size)
if __name__ == "__main__":
    rasterOrigin = (0, 0)
    outDir = '%s/Dataset/%s' % (dataDir, satellite)
    if not os.path.exists(outDir):
        os.makedirs(outDir)
    # MUL -> mul(1), lr(1/4), lr_u(1/4*4)
    # PAN -> pan(1/4), pan_d(1/4*1/4*4)
    # For each raw pair: crop H and W to multiples of 4, then derive
    #   mul   - full-resolution multispectral image,
    #   lr    - mul downsampled by 4,
    #   lr_u  - lr upsampled back to mul's size,
    #   pan   - raw panchromatic image resized to mul's size,
    #   pan_d - pan downsampled by 4 then upsampled back.
    for i in imagesIndex1:
        newMul = '%s/Dataset/%s/%d_mul.tif' % (dataDir, satellite , i)
        newLR = '%s/Dataset/%s/%d_lr.tif' % (dataDir, satellite, i)
        newLR_U = '%s/Dataset/%s/%d_lr_u.tif' % (dataDir, satellite, i)
        newPan = '%s/Dataset/%s/%d_pan.tif' % (dataDir, satellite, i)
        newPan_D = '%s/Dataset/%s/%d_pan_d.tif' % (dataDir, satellite, i)
        rawMul = gdal.Open( '%s/Raw/%s/%d-MUL.TIF' % (dataDir, satellite, i) ).ReadAsArray()
        rawPan = gdal.Open( '%s/Raw/%s/%d-PAN.TIF' % (dataDir, satellite, i) ).ReadAsArray()
        print ("rawMul:", rawMul.shape, " rawPan:", rawPan.shape)
        rawMul = rawMul.transpose(1, 2, 0) # (h, w, c)
        h, w = rawMul.shape[:2]
        # Round H and W down to multiples of 4 so the 1/4 downsampling is exact.
        h = h // 4 * 4
        w = w // 4 * 4
        imgMul = cv2.resize(rawMul, (w, h))
        imgLR = cv2.resize(imgMul, (w // 4, h // 4))
        imgLR_U = cv2.resize(imgLR, (w, h))
        imgPan = cv2.resize(rawPan, (w, h))
        imgPan_D = upsample(downsample(imgPan))
        # Back to channel-first (c, h, w) layout expected by array2raster.
        imgMul = imgMul.transpose(2, 0, 1)
        imgLR = imgLR.transpose(2, 0, 1)
        imgLR_U = imgLR_U.transpose(2, 0, 1)
        array2raster(newMul, rasterOrigin, 2.4, 2.4, imgMul, 4) # mul
        array2raster(newLR_U, rasterOrigin, 2.4, 2.4, imgLR_U, 4) # lr_u
        array2raster(newLR, rasterOrigin, 2.4, 2.4, imgLR, 4) # lr
        array2raster(newPan, rasterOrigin, 2.4, 2.4, imgPan, 1) # pan
        array2raster(newPan_D, rasterOrigin, 2.4, 2.4, imgPan_D, 1) # pan_d
        print ('mul:', imgMul.shape, ' lr_u:', imgLR_U.shape,
               ' lr:', imgLR.shape, ' pan:', imgPan.shape, ' pan_d:', imgPan_D.shape)
        print ('done%s' % i)
    # origin
    # MUL(crop 1/4) -> mul_o(1), mul_o_u(1*4)
    # PAN(crop 1/4) -> pan_o(1), pan_o_d(1/4*4)
    # Second pass: carve fixed crop windows for the origin_test set. The MS
    # offset (1500) times 4 equals the PAN offset (6000), so the two crops
    # cover corresponding regions on their respective grids.
    for i in imagesIndex2:
        newMul_o = '%s/Dataset/%s/%d_mul_o.tif' % (dataDir, satellite , i)
        newMul_o_u = '%s/Dataset/%s/%d_mul_o_u.tif' % (dataDir, satellite, i)
        newPan_o = '%s/Dataset/%s/%d_pan_o.tif' % (dataDir, satellite, i)
        newPan_o_d = '%s/Dataset/%s/%d_pan_o_d.tif' % (dataDir, satellite, i)
        rawMul = gdal.Open( '%s/Raw/%s/%d-MUL.TIF' % (dataDir, satellite, i) ).ReadAsArray()
        rawPan = gdal.Open( '%s/Raw/%s/%d-PAN.TIF' % (dataDir, satellite, i) ).ReadAsArray()
        print ("rawMul:", rawMul.shape, " rawPan:", rawPan.shape)
        rawMul = rawMul.transpose(1, 2, 0) # (h, w, c)
        h, w = rawMul.shape[:2]
        h = h // 4 * 4
        w = w // 4 * 4
        imgMul_o = cv2.resize(rawMul, (w, h))[1500:1500+h//4, 1500:1500+w//4, :] # (1/4, 1/4)
        imgPan_o = cv2.resize(rawPan, (w*4, h*4))[6000:6000+h, 6000:6000+w] # (1, 1)
        imgMul_o_u = upsample(imgMul_o)
        imgPan_o_d = upsample(downsample(imgPan_o))
        imgMul_o = imgMul_o.transpose(2, 0, 1)
        imgMul_o_u = imgMul_o_u.transpose(2, 0, 1)
        array2raster(newMul_o, rasterOrigin, 2.4, 2.4, imgMul_o, 4)
        array2raster(newMul_o_u, rasterOrigin, 2.4, 2.4, imgMul_o_u, 4)
        array2raster(newPan_o, rasterOrigin, 2.4, 2.4, imgPan_o, 1)
        array2raster(newPan_o_d, rasterOrigin, 2.4, 2.4, imgPan_o_d, 1)
        print ('mul_o:', imgMul_o.shape, ' mul_o_u:', imgMul_o_u.shape,
               ' pan_o:', imgPan_o.shape, ' pan_o_d:', imgPan_o_d.shape, )
        print ('done %d' % i)
| [
"602171565@qq.com"
] | 602171565@qq.com |
8849959f26a02a64d2d77a028c48084c8fc9310d | 955060597d643c695dff53b6cff0ea649db68a94 | /dequorum/urls.py | 44cfd19538837d6340d8c57944e6fc065b461a4c | [
"BSD-2-Clause"
] | permissive | pombredanne/django-dequorum | e99386fd01d640776d3ac6f2851c4ddc15316713 | b790e9b8b0920581a48c67679648a6df811e505b | refs/heads/master | 2021-01-18T10:10:33.571111 | 2013-12-13T23:04:16 | 2013-12-13T23:04:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | py |
from django.conf.urls import patterns, include, url
from nap import api
# Django 1.x-style URL configuration for the dequorum app.
urlpatterns = patterns('',
    # Site root: render the single-page shell template directly.
    (u'^$', 'django.shortcuts.render', {'template_name': 'dequorum/index.html'}),
    # Mount the nap-generated 'dequorum' API routes under /api/.
    (u'^api/', include(api.APIS['dequorum'].patterns(flat=True))),
)
| [
"curtis@tinbrain.net"
] | curtis@tinbrain.net |
c97ceedc42fc5ab575532a6b5a504c4ce998ed0f | 6fb51c87493003399b019f36f8b99ca060455593 | /Game/tmp3.py | 9395817a842e323a8f32b332e5172932200ac9aa | [] | no_license | eran505/nnPyTorch | 3ffb98321de3e8f1e71872eeb41d0580fe3aa694 | 60ebe4790e72a8a453d2325de6f2562274ecd873 | refs/heads/master | 2023-09-02T15:19:59.561552 | 2021-11-22T09:01:30 | 2021-11-22T09:01:30 | 288,995,526 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,554 | py | # Instantiate the env
import argparse
from simulator import Game_Sim
from stable_baselines3.common.callbacks import EvalCallback, StopTrainingOnRewardThreshold
from stable_baselines3.common.evaluation import evaluate_policy
#from stable_baselines.common.policies import MlpPolicy, MlpLstmPolicy, MlpLnLstmPolicy
# from stable_baselines import ACER, ACKTR, DQN, PPO2
from stable_baselines3 import PPO
import os
import numpy as np
# from stable_baselines.common.policies import FeedForwardPolicy, register_policy
from time import time
import torch as th
import torch
from os_util import walk_rec
def get_config_csv(p):
    """Group the .csv config files under *p* by trailing experiment id.

    File names are expected to look like '<prefix>_<id>.csv'; the integer
    after the last underscore is the experiment id. Exactly two csv files
    must exist per id (enforced by the assert below).

    Returns:
        dict mapping int experiment id -> list of the two matching paths.
    """
    res = walk_rec(p,[],"csv")
    d={}
    for item in res:
        # Strip directory and the '.csv' suffix, then take the text after
        # the final '_' as the experiment id.
        name = str(item).split(os.sep)[-1][:-4]
        id_exp = int(name.split("_")[-1])
        if id_exp not in d:
            d[id_exp]=[item]
        else:
            d[id_exp].append(item)
    for entry in d.keys():
        assert len(d[entry])==2
    return d
def inset_file_env(list_file, dest="./env_files/"):
    """Replace the contents of *dest* with the given environment files.

    Every file currently under *dest* (as reported by walk_rec) is deleted,
    then each path in *list_file* is copied into *dest*.

    Raises:
        OSError: if a delete or copy fails (previously silently ignored by
            os.system).
        AssertionError: if the file count in *dest* afterwards does not
            equal len(list_file).
    """
    import shutil  # local import: keeps the file-level import block untouched

    for item in walk_rec(dest, [], ""):
        # os.remove instead of os.system("rm ..."): no shell involved, so
        # paths containing spaces or shell metacharacters are safe, and
        # failures raise instead of being ignored.
        os.remove(item)
    for item in list_file:
        # shutil.copy2 mirrors `cp -p`: copies data plus file metadata.
        shutil.copy2(item, dest)
    res = walk_rec(dest, [], "")
    assert len(res) == len(list_file)
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('--steps', type=int,default=200000,
help='A required integer positional argument')
parser.add_argument('--mode', type=str, choices=["cpu","gpu"],default="cpu",
help='device name')
parser.add_argument('--eval_freq', type=int,default=5000,
help='policy eval freq')
parser.add_argument('--train', type=bool,default=True,
help='Train the RL agent')
args = parser.parse_args()
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
if args.mode=="cpu":
device='cpu'
print('Using device:', device)
# Additional Info when using cuda
if device == 'cuda':
print(torch.cuda.get_device_name(0))
print('Memory Usage:')
print('Allocated:', round(torch.cuda.memory_allocated(0) / 1024 ** 3, 1), 'GB')
print('Cached: ', round(torch.cuda.memory_cached(0) / 1024 ** 3, 1), 'GB')
if device=="cpu" and args.mode=="gpu":
os.system("nvidia-smi")
print(torch.backends.cudnn.enabled)
os.system("nvcc --version")
print(torch.__version__)
print(torch.version.cuda)
print(torch.cuda.is_available())
assert False
policy_kwargs = dict(activation_fn=th.nn.ReLU,
net_arch=[dict(pi=[128,128,128], vf=[128,128,128])])
get_all_config = get_config_csv("./conf/")
for ky,val in get_all_config.items():
path_to_log = "./logs/{}/".format(ky)
inset_file_env(val)
env = Game_Sim("./env_files/",is_option=False, action_mode='all',is_scale="None",discrete_actions=True)
callback_on_best = StopTrainingOnRewardThreshold(reward_threshold=1.0, verbose=1)
eval_callback = EvalCallback(env, best_model_save_path=path_to_log, n_eval_episodes=len(env.path_indexes),
log_path=path_to_log, eval_freq=args.eval_freq, callback_on_new_best=callback_on_best,
deterministic=True, verbose=1, render=False)
if args.train:
start_time = time()
model = PPO("MlpPolicy", env, policy_kwargs=policy_kwargs,batch_size=128,verbose=1).learn(total_timesteps=args.steps,callback=eval_callback)
end_time = time()
print( "Leraning Time:", end_time -start_time)
with open(path_to_log+"time.txt","+w") as f:
f.write("train time:{}".format(end_time -start_time))
'''####################################### EVAL ###################################################'''
model = PPO.load(path_to_log+"best_model.zip")
num_iter = 100
l_reward = np.zeros(num_iter)
l_len_ep = np.zeros(num_iter)
for j in range(num_iter):
res = evaluate_policy(model, env, n_eval_episodes=1, return_episode_rewards=True)
l_reward[j] = res[0][0]
l_len_ep[j] = res[1][0]
l_reward[l_reward < 1] = 0
print("------ Policy Eval ------\nEpisodes: {}".format(num_iter))
print("Collision rate:", np.mean(l_reward))
print("Mean episodes length:", np.mean(l_len_ep))
print("-------------------------")
with open(path_to_log + "eval.txt", "+w") as f:
f.write("Episodes:{}".format(num_iter))
f.write("Collision rate:{}".format(np.mean(l_reward)))
f.write("Mean episodes length:{}".format(np.mean(l_len_ep)) )
| [
"eran505@gmail.com"
] | eran505@gmail.com |
727954071496b5b9620e8e952c2e407adc55867c | 59d65d101e5a6149a9252b0f01ac9cd86c3dd669 | /shout.py | 3c30128c1684cb34033b2b1e3c00ec60cff89fb4 | [] | no_license | pjlorenz/myappsample | 1753f4097429a18e01b4b2827036b512edfd3de1 | dc2100f491257901de5a01ffc2b588666a891151 | refs/heads/master | 2023-03-06T03:04:05.265372 | 2021-02-10T18:56:43 | 2021-02-10T18:56:43 | 258,569,528 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 165 | py | def shout_text():
fhand = open('mbox-short.txt')
count = 0
for line in fhand:
line = line.upper()
print(line)
shout_text() | [
"patrickjlorenz@gmail.com"
] | patrickjlorenz@gmail.com |
9b07e90479e6556a9f1310bbceee661ebf9051fc | 0107160f73c6f46a0c693f0aa8b2b22bb04aaa07 | /flex/redis.py | 8a0c1594fac0646ca5ab8f762a94f41024245212 | [
"MIT"
] | permissive | centergy/flex | 8a9054171a121671e09646a88259c947d0d87cc4 | 4fc11d3ad48e4b5016f53256015e3eed2157daae | refs/heads/master | 2022-12-13T06:01:09.561457 | 2018-08-22T20:32:34 | 2018-08-22T20:32:34 | 145,748,684 | 0 | 0 | MIT | 2022-12-08T00:45:07 | 2018-08-22T18:40:47 | Python | UTF-8 | Python | false | false | 1,958 | py | from flask import current_app
from threading import Lock
from flex.utils.module_loading import import_string
__all__ = ('RedisManager', 'redis')
class _Connector(object):
    # Lazily creates and caches one Redis client for a single Flask app.
    __slots__ = ('app', 'lock', '_client', 'config')
    def __init__(self, app, config):
        # app: the Flask application this connector serves.
        # config: the namespaced REDIS_* settings (see RedisManager.init_app).
        self.app = app
        self.config = config
        self._client = None
        self.lock = Lock()
    @property
    def client(self):
        """Return the Redis client, creating it on first access.

        The lock makes concurrent first accesses from multiple threads
        safe; subsequent accesses return the cached instance.
        """
        with self.lock:
            if self._client is None:
                cls = self.config.CLIENT_CLASS
                if isinstance(cls, str):
                    # CLIENT_CLASS may be a dotted path such as
                    # 'redis.StrictRedis'; resolve it to the class object.
                    cls = import_string(cls)
                self._client = cls.from_url(
                    self.config.URL,
                    **self.config.CLIENT_OPTIONS
                )
            return self._client
class RedisManager(object):
    """Flask extension that proxies attribute/item access to a Redis client.

    Configuration is read from app.config under the REDIS_ prefix
    (URL, CLIENT_CLASS, CLIENT_OPTIONS); see default_config for defaults.
    """
    __slots__ = ('_app', )
    config_prefix = 'REDIS_'
    default_config = dict(
        url='redis://localhost:6379/0',
        client_class='redis.StrictRedis',
        client_options={}
    )
    def __init__(self, app=None):
        # App may be bound now or later via init_app() (app-factory pattern).
        self._app = None
        if app is not None:
            self.init_app(app)
            self._app = app
    @property
    def _redis_client(self):
        """Resolve the Redis client for the active app.

        Raises RuntimeError when init_app() was never called on that app
        (no 'redis' entry in app.extensions).
        """
        try:
            return self._get_app().extensions['redis'].client
        except KeyError:
            raise RuntimeError('Redis not setup on app.')
    def _get_app(self, app=None):
        """Helper method that implements the logic to look up an application."""
        if app is not None:
            return app
        if current_app:
            return current_app
        if self._app is not None:
            return self._app
        raise RuntimeError(
            'Application not registered on cache instance and no application'\
            'bound to current context'
        )
    def init_app(self, app, **kwargs):
        # Merge defaults into the app's REDIS_* namespace and attach the
        # lazy connector to app.extensions.
        config = app.config.namespace(self.config_prefix)
        config.setdefaults(self.default_config)
        app.extensions['redis'] = _Connector(app, config)
    def __getattr__(self, name):
        # Any unknown attribute (get, set, pipeline, ...) is forwarded to
        # the underlying Redis client.
        return getattr(self._redis_client, name)
    def __getitem__(self, name):
        return self._redis_client[name]
    def __setitem__(self, name, value):
        self._redis_client[name] = value
    def __delitem__(self, name):
        del self._redis_client[name]
redis = RedisManager() | [
"davidmkyalo@gmail.com"
] | davidmkyalo@gmail.com |
29700dc76b4ec32c57dff0cffda4ac1f36999be3 | 64ee5dc0176fe48ac4f020a984a27ebb498f1730 | /ex21.Extra1.py | d0a794d75b169800a60db676a300c3cfad83263b | [] | no_license | Grey-EightyPercent/Python-Learning | 942b8120641e4e8315a2524ecd21e50fbba125f7 | f59d6b6b5d04ba281d0800fbaf822364ba07dc9e | refs/heads/master | 2020-08-24T15:08:43.357776 | 2019-10-27T15:30:46 | 2019-10-27T15:30:46 | 216,851,832 | 0 | 0 | null | 2019-10-27T15:30:47 | 2019-10-22T15:48:57 | null | UTF-8 | Python | false | false | 1,059 | py | def a_power(a, power):
print(f"The {power} times of {a} is {a} ** {power}") # This is only a statement without computation!!!
return a ** power # This line is to do the real computation and return the value.
def rate_of_increase(n, n_1):
    """Print the YoY growth formula and return n_1 relative to n, in percent."""
    print(f"The YoY rate of increase is {n_1} / {n} * 100 %")
    ratio = n_1 / n
    return ratio * 100
# Exercise: compute two yearly revenues (2**3 and 2**2) via the helper
# functions, then derive the YoY growth rate between them.
revenue_3 = a_power(2, 3) # assign the result of the function to the variable.
revenue_2 = a_power(2, 2)
growth_rate = rate_of_increase(revenue_2, revenue_3)
print(f"The 3rd-year revenue is {revenue_3}")
print(f"The 2nd-year revenue is {revenue_2}")
print(f"The YoY growth rate from 2nd year to 3rd year is {growth_rate} % .")
# Below is to breakdown the functions to formulas
# Helping you understand what is "inside out"
revenue_3_b = 2 ** 3
revenue_2_b = 2 ** 2
growth_rate_b = (revenue_3_b / revenue_2_b) * 100
print(f"The 3rd-year revenue is {revenue_3_b}")
print(f"The 2nd-year revenue is {revenue_2_b}")
print(f"The YoY growth rate from 2nd year to 3rd year is {growth_rate_b} % .")
| [
"noreply@github.com"
] | noreply@github.com |
d50343783b27e292f1460cef1df6f3dd1d572ccd | 22295cda10cf11472fee987093e0b245f6f96ef3 | /lan/test060_billy_write_min_behav_performance_switching.py | 982b3c0f8591dee5c7fdf3d3db7240bef557e3c5 | [] | no_license | sjara/jaratest | aecb9e3bcc1ff91db35e7cd551c0f4f3da0b690a | 09bf2c76bd5bf45191a2c37c14171ae1e8902c4b | refs/heads/master | 2023-08-11T09:55:17.684814 | 2023-08-03T22:03:31 | 2023-08-03T22:03:31 | 63,100,718 | 2 | 5 | null | 2023-04-11T18:14:08 | 2016-07-11T20:43:04 | Python | UTF-8 | Python | false | false | 5,161 | py | '''
finds all behavior sessions in allcells that have more than a minimum percentage correct in performance for all frequencies in the Switching Task. The only argument is the mouse name
Billy Walker
'''
from jaratoolbox import loadbehavior
from jaratoolbox import settings
import os
import numpy as np
import sys
import importlib
mouseName = str(sys.argv[1]) #the first argument is the mouse name to tell the script which allcells file to use
allcellsFileName = 'allcells_'+mouseName
sys.path.append(settings.ALLCELLS_PATH)
allcells = importlib.import_module(allcellsFileName)
minPerf = 0.60 #the minimum performance or percentage correct of the end frequencies
minCorrectPerBlock = 50 #the minimum number of correct trials in each block
minNumBlocks = 3 #the minimum number of blocks with at least minCorrectPerBlock number of correct trials
subject = allcells.cellDB[0].animalName
behavSession = ''
ephysSession = ''
#nameOfOutputFile = nameOfFile + '_' + subject
numOfCells = len(allcells.cellDB) #number of cells that were clustered on all sessions clustered
ephysRootDir = settings.EPHYS_PATH
#experimenter = 'santiago'
paradigm = '2afc'
outputDir = '/home/languo/data/ephys/'+subject
nameOfFile = 'minPerformance'
finalOutputDir = outputDir+'/'+subject+'_stats'
minPerfList = []
try:
text_file = open("%s/%s.txt" % (finalOutputDir,nameOfFile), "r+") #open a text file to read and write in
text_file.readline()
minPerfList=text_file.read().split()
except:
text_file = open("%s/%s.txt" % (finalOutputDir,nameOfFile), "w") #open a text file to read and write in
text_file.write("minimum performance percentage: %s\n" % minPerf)
for cellID in range(0,numOfCells):
oneCell = allcells.cellDB[cellID]
if (behavSession != oneCell.behavSession):
subject = oneCell.animalName
behavSession = oneCell.behavSession
ephysSession = oneCell.ephysSession
ephysRoot = os.path.join(ephysRootDir,subject)
if (behavSession in minPerfList): #if it is already in the list, dont add it again
continue
# -- Load Behavior Data --
behaviorFilename = loadbehavior.path_to_behavior_data(subject,paradigm,behavSession)
bdata = loadbehavior.BehaviorData(behaviorFilename)
numberOfTrials = len(bdata['choice'])
correct = bdata['outcome']==bdata.labels['outcome']['correct']
incorrect = bdata['outcome']==bdata.labels['outcome']['error']
possibleFreq = np.unique(bdata['targetFrequency'])
firstFreq = bdata['targetFrequency'] == possibleFreq[0]
lastFreq = bdata['targetFrequency'] == possibleFreq[2]
correctFirst = sum(correct & firstFreq)
correctLast = sum(correct & lastFreq)
incorrectFirst = sum(incorrect & firstFreq)
incorrectLast = sum(incorrect & lastFreq)
firstPerf = float(correctFirst)/(correctFirst+incorrectFirst)
lastPerf = float(correctLast)/(correctLast+incorrectLast)
middleFreq = bdata['targetFrequency'] == possibleFreq[1]
highBlock = bdata['currentBlock'] == bdata.labels['currentBlock']['high_boundary']
lowBlock = bdata['currentBlock'] == bdata.labels['currentBlock']['low_boundary']
middleFreqHighBlock = middleFreq & highBlock
middleFreqLowBlock = middleFreq & lowBlock
correctMidHigh = sum(middleFreqHighBlock & correct)
incorrectMidHigh = sum(middleFreqHighBlock & incorrect)
correctMidLow = sum(middleFreqLowBlock & correct)
incorrectMidLow = sum(middleFreqLowBlock & incorrect)
highBlPerf = float(correctMidHigh)/(correctMidHigh+incorrectMidHigh)
lowBlPerf = float(correctMidLow)/(correctMidLow+incorrectMidLow)
blocks = bdata['currentBlock']
highBlock = bdata.labels['currentBlock']['high_boundary']
lowBlock = bdata.labels['currentBlock']['low_boundary']
goodBlockCount = 0 #Keeps track of the number of good blocks
badBlockCheck = False #Checks if there in a block in the first 3 blocks that does not pass the requirements
curBlock = blocks[0]
curTrial = 0
startTrial = 0
endTrial = 0
while (curTrial < (len(blocks)-1)):
startTrial = curTrial
while ((curTrial < (len(blocks)-1)) & (blocks[curTrial] == curBlock)):
curTrial += 1
endTrial = curTrial #finds the end of the current block
curBlock = blocks[curTrial]
if(sum(correct[startTrial:(endTrial+1)]) >= minCorrectPerBlock): #counts all the correct trials in the current block and counts it as good if it has the min num of correct trials
goodBlockCount += 1
else:
if (goodBlockCount < minNumBlocks):
badBlockCheck = True
break
if ((firstPerf >= minPerf) & (lastPerf >= minPerf) & (highBlPerf >= minPerf) & (lowBlPerf >= minPerf) & (goodBlockCount >= minNumBlocks) & (not badBlockCheck)):
text_file.write("\n%s" % behavSession)
text_file.close()
print 'finished min behavior performance check'
| [
"lan.guo14@gmail.com"
] | lan.guo14@gmail.com |
e40d567bc106d63bf2fb606e47fcee7c00ae7b38 | 1b8f6104616803e893dc54f7bad3b7f7a58e2fc1 | /ALGO_V2/binary_search/addition.py | 906b20ca4a1210d39c8d21e3a28bded3072fd643 | [] | no_license | hanrick2000/DSAL | bbb1af525b9e56105c2f6c2b5e20af8211729608 | ae8fba686ea94ceb05085ae8323b16a636afad57 | refs/heads/master | 2022-08-24T19:25:52.691731 | 2020-05-26T04:09:49 | 2020-05-26T04:09:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,224 | py | # 三步翻转法及相关练习题
# Follow-ups: the matrix-search problem, fast exponentiation, and the
# Euclidean (repeated-division) algorithm.
# Recover a rotated sorted array, e.g.:
#   [4,5,1,2,3]
#   [4, 5] [1, 2, 3]    split at the rotation point
#   [5, 4], [3,2,1]     reverse each half
#   [1,2,3,4,5]         reverse the whole array
# https://www.lintcode.com/problem/recover-rotated-sorted-array/description
def recoverRotatedSortedArray(nums):
    """Print the rotated sorted array restored to ascending order; return -1."""
    # Locate the rotation point: the first index whose predecessor is larger.
    pivot = len(nums)
    for idx in range(1, len(nums)):
        if nums[idx - 1] > nums[idx]:
            pivot = idx
            break
    # Three-step reversal: reverse each half, then reverse the concatenation.
    left_half = nums[:pivot][::-1]
    right_half = nums[pivot:][::-1]
    print((left_half + right_half)[::-1])
    return -1
# Aim for an elegant solution: use the three-step reversal with an O(log n)
# helper that finds the minimum. Using an O(n) scan or Python's min() here
# would lose points in an interview.
#
# Reverse clearly deserves its own function; it can be implemented last.
#
# The three reversals form the main routine — write them on the whiteboard
# first to show the layered structure of your thinking.
#
# Complexity: O(log n) + 2 * O(n), essentially the theoretical minimum O(n).
class Solution:
    # NOTE: the triple-quoted strings before each method are the original
    # author's lintcode-style parameter notes (they precede the def, so they
    # are plain expression statements, not real docstrings); kept in place.
    """
    @param nums: An integer array
    @return: nothing
    """
    def recoverRotatedSortedArray(self, nums):
        # Three-step reversal: reverse [0, min-1], reverse [min, end], then
        # reverse the whole array — restores ascending order in place.
        minIndex = self.findMinimum(nums)
        if minIndex == 0:
            return
        start, end = 0, len(nums) - 1
        self.rotateArray(nums, start, minIndex - 1)
        self.rotateArray(nums, minIndex, end)
        self.rotateArray(nums, start, end)
    """
    @param nums: An integer array
    @return: The index to the left most minimum element
    """
    def findMinimum(self, nums):
        if nums is None or len(nums) < 2:
            return 0
        left, right = 0, len(nums) - 1
        while left < right:
            mid = left + (right - left) // 2
            if nums[mid] > nums[right]:
                left = mid + 1
            elif nums[mid] < nums[right]:
                right = mid
            else:
                # nums[mid] == nums[right]: cannot tell which side holds the
                # minimum, so shrink the window by one (duplicate handling).
                right = right - 1
        return left
    # if there is repetitive numbers
    # Binary search for the minimum of a rotated sorted array assumes there
    # are NO duplicates! On input like 1 1 1 1 1 1 1 1 1 0 1 1 1 1 a plain
    # binary search can look in the wrong place.
    def find_split(self, nums):
        # DO NOT use binary search!
        # Binary Search does not work on this prob
        if nums is None or len(nums) < 2:
            return 0
        for i in range(1, len(nums)):
            if nums[i] < nums[i - 1]:
                return i
        # return i = len()-1 if it's already a sorted array
        return i
    """
    @param nums: An integer array
    @param left: The start index into the array, inclusive
    @param right: The end index into the array, inclusive
    @return: nothing
    """
    def rotateArray(self, nums, left, right):
        # Reverse nums[left..right] in place by swapping from both ends.
        while left <= right:
            nums[left], nums[right] = nums[right], nums[left]
            left += 1
            right -= 1
# https://www.lintcode.com/problem/rotate-string/description
# https://www.lintcode.com/problem/greatest-common-divisor/description
# Algorithm introduction
# The Euclidean algorithm (repeated division) finds the greatest common
# divisor: divide the larger number by the smaller, then keep dividing the
# previous divisor by the remainder until the remainder is 0; the last
# divisor is the GCD.
def gcd(big, small):
    """Greatest common divisor via the iterative Euclidean algorithm."""
    while small != 0:
        big, small = small, big % small
    return big
# Basic principle
# Goal: compute x**n.
# Since x^n = x^(n/2) * x^(n/2), once x^(n/2) is known, x^n follows in O(1)
# instead of looping over the remaining n/2 multiplications.
# Likewise x^(n/4) yields x^(n/2), and so on recursively.
# ...
# An originally O(n) problem is therefore solved in O(log n).
def power(x, n):
    """Compute x**n (n >= 0) by recursive squaring: O(log n) multiplications."""
    if n == 0:
        return 1
    half = power(x, n // 2)
    # Odd exponents need one extra factor of x.
    return half * half if n % 2 == 0 else half * half * x
# Note:
# Avoid duplicate work: when computing x^(n/2) * x^(n/2), evaluate x^(n/2)
# once, store it in tmp, and return tmp * tmp;
# when n is odd, remember to multiply by one extra factor of x.
# Non-recursive version
def power(x, n):
    """Iterative fast exponentiation: multiply in the squared base for each set bit of n."""
    result, square = 1, x
    while n > 0:
        if n % 2 == 1:
            result *= square
        square *= square
        n //= 2
    return result
# The non-recursive version uses the same principle as the recursive one,
# just in a different evaluation order: recursion enters at the big problem
# and splits downward, while the loop builds the answer up from small pieces.
# Printing `base` and `ans` on each iteration helps clarify the idea.
#
# Master both versions. The recursive one is easier to grasp, and its
# O(log n) depth will not overflow the stack — but an interviewer may well
# ask you to write the iterative version to raise the difficulty.
| [
"herongrong2011@gmail.com"
] | herongrong2011@gmail.com |
6daf55c53a0b148bb2e242b24a9fcebce1cf5235 | 3ac2be5bf0cd5aca45a75bc13ac2aadabe71d5e1 | /rnn_regressor.py | b4086177c2d5e61a71b5316e5796486fda7b281a | [] | no_license | TzuChiehHung/DataAnalysis | 4cc6009cd83cd80064253e2da2e2842d16880ca2 | 6c028978814f3f0e5dd6a23dce41d104e5bd9df5 | refs/heads/master | 2020-04-16T11:13:19.431454 | 2019-02-24T09:54:51 | 2019-02-24T10:03:45 | 165,528,100 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,203 | py | """
View more, visit my tutorial page: https://morvanzhou.github.io/tutorials/
My Youtube Channel: https://www.youtube.com/user/MorvanZhou
Dependencies:
torch: 0.4
matplotlib
numpy
"""
# %%
import torch
from torch import nn
import numpy as np
import matplotlib.pyplot as plt
from libs.models import SimpleRNN
# torch.manual_seed(1) # reproducible
# %% Hyper Parameters
TIME_STEP = 10 # rnn time step
INPUT_SIZE = 1 # rnn input size
LR = 0.02 # learning rate
# %% show data
steps = np.linspace(0, np.pi*2, 100, dtype=np.float32) # float32 for converting torch FloatTensor
x_np = np.sin(steps)
y_np = np.cos(steps)
plt.figure(1)
plt.plot(steps, y_np, 'r-', label='target (cos)')
plt.plot(steps, x_np, 'b-', label='input (sin)')
plt.legend(loc='best')
# plt.show()
# %% define model
model = SimpleRNN()
print(model)
# %% train
optimizer = torch.optim.Adam(model.parameters(), lr=LR) # optimize all rnn parameters
loss_func = nn.MSELoss()
h_state = None # for initial hidden state
plt.figure(2, figsize=(12, 5))
plt.ion() # continuously plot
for step in range(100):
start, end = step * np.pi, (step+1)*np.pi # time range
# use sin predicts cos
steps = np.linspace(start, end, TIME_STEP, dtype=np.float32, endpoint=False) # float32 for converting torch FloatTensor
x_np = np.sin(steps)
y_np = np.cos(steps)
x = torch.from_numpy(x_np[np.newaxis, :, np.newaxis]) # shape (batch, time_step, input_size)
y = torch.from_numpy(y_np[np.newaxis, :, np.newaxis])
prediction, h_state = model(x, h_state) # rnn model output
# !! next step is important !!
h_state = h_state.data # repack the hidden state, break the connection from last iteration
loss = loss_func(prediction, y) # calculate loss
optimizer.zero_grad() # clear gradients for this training step
loss.backward() # backpropagation, compute gradients
optimizer.step() # apply gradients
# plotting
plt.plot(steps, y_np.flatten(), 'r-')
plt.plot(steps, prediction.data.numpy().flatten(), 'b-')
plt.draw(); plt.pause(0.05)
plt.ioff()
plt.show()
| [
"hungtc@solab.me.ntu.edu.tw"
] | hungtc@solab.me.ntu.edu.tw |
a6bb7565046d1167d9e9197922d226f094cd2067 | acbed396f16d61c9e0366b0a9f0b84dc42c7126a | /test.py | 0f4d38160db3b281cce2c025a5f2b1feb286acb0 | [] | no_license | MarckK/yahtzee-kata | 1db30222960cb27010e521e886867ee23f73fd92 | ea22723cc2f179eafa6d8e1f4402261bf33f8f31 | refs/heads/master | 2020-04-30T00:50:02.138494 | 2019-03-19T08:15:28 | 2019-03-19T08:15:28 | 176,512,576 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 942 | py | from yahtzee import *
# Pytest suite for the yahtzee scoring module (roll, multiples_score,
# score_numbers, straight_score, full_house_score, yahtzee_score, score).
def test_six_dice_rolled():
    # roll(n) must produce exactly n dice.
    rolls = roll(6)
    assert len(rolls) == 6
def test_pair_score():
    # Pair of 4s scores 4 + 4 = 8.
    rolls = [3, 3, 3, 4, 4]
    assert multiples_score(rolls, 2) == 8
def test_score_numbers():
    # Sum of all dice showing the chosen number (two 4s -> 8).
    rolls = [1, 1, 2, 4, 4]
    num = 4
    assert score_numbers(rolls, num) == 8
def test_triples_score():
    # Three 3s score 3 * 3 = 9.
    rolls = [3, 3, 3, 4, 5]
    assert multiples_score(rolls, 3) == 9
def test_four_kind_score():
    # Four 2s score 4 * 2 = 8.
    rolls = [2, 2, 2, 2, 5]
    assert multiples_score(rolls, 4) == 8
def test_straight_score():
    # Straight scores the sum of the dice (1+2+3+4+5 = 15).
    rolls = [1, 2, 3, 4, 5]
    assert straight_score(rolls) == 15
def test_full_house_score():
    # Full house scores the sum of the dice (1+1+2+2+2 = 8).
    rolls = [1, 1, 2, 2, 2]
    assert full_house_score(rolls) == 8
def test_yahtzee_score():
    # Yahtzee (five of a kind) is a flat 50.
    rolls = [1, 1, 1, 1, 1]
    assert yahtzee_score(rolls) == 50
# NOTE(review): `pytest` is referenced here but no `import pytest` is
# visible in this module — presumably re-exported via `from yahtzee
# import *`; confirm, otherwise this decorator raises NameError.
@pytest.mark.parametrize("rolls, category", [([1, 1, 1, 1, 1], "Yahtzee")])
def test_score(rolls, category):
    assert score(rolls, category) == 50
| [
"karadelamarck@gmail.com"
] | karadelamarck@gmail.com |
26dd8909bb3153b364140142b7d61195eced387e | f299c9abe06bcecad2ace17923d92dc9cc9ba7d1 | /tests/test_hello.py | 993e266b1e7edaad6e5258bb18e04105188b957a | [] | no_license | jofas/travis_lab | fba04f869412231eb15ce3560a94be26d3badd05 | 7a028ec05d9ddbe13b6208eefb7d64acebcfc6f6 | refs/heads/master | 2020-08-26T13:07:11.377507 | 2020-01-13T11:10:22 | 2020-01-13T11:10:22 | 217,020,530 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | from hello import hello
def test_hello():
    # hello() must return the exact greeting string.
    assert hello() == "How are you, world?"
| [
"jonas@fc-web.de"
] | jonas@fc-web.de |
b0ad8f4c4236fcd0b4bbf10115bc868e1de27859 | 790426ccb2e751be93ae9aec04b3ab053b07bd5c | /Panama/panama.py | 1de9a6b27dcc6e2e48db829f9535916cf43bbb28 | [] | no_license | falkirks/panama | b2a273a3bde29d715316b3e409bc12d2918edbf9 | bd4b852c2d1c20ff2cb65cb532a8b2a6e176efd9 | refs/heads/main | 2023-01-24T18:46:16.735644 | 2020-12-12T00:16:15 | 2020-12-12T00:16:15 | 311,744,038 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,262 | py | import zmq
import time
import sys
import json
import threading
import argparse
import subprocess
import pickle
import zlib
# Set to true for verbose execution
debug = True
# ZeroMQ context allows socket creation
context = zmq.Context()
# The clients are stored with each client ID as a key to a
# boolean. The bool represents whether or not the client is auditing.
clients = {}
# Audit port is where the leader broadcasts audit requests
# to all of the clients. In PUB/SUB pattern where leader is PUB
audit_port = "5557"
# Flag for which version of panama to run
is_leader = True
# The IP address to connect to
ip_address = "127.0.0.1"
def get_audit_log():
    """Read the CamFlow audit log.

    Returns:
        The full contents of /tmp/audit.log as a string, or None when the
        log cannot be read (missing file, permission error, undecodable
        bytes). Callers treat None as "no data" (see run_follower).
    """
    try:
        with open("/tmp/audit.log") as audit_log:
            return audit_log.read()
    except (OSError, UnicodeDecodeError):
        # Narrowed from a bare `except:` so that real bugs (e.g. NameError,
        # KeyboardInterrupt) are no longer silently swallowed.
        return None
def submit_prov(counter):
    """Feed the first *counter* collected audit logs into SPADE.

    Runs the SPADE control client once per audits/audit<N>.log file,
    N = 0..counter-1, blocking on each invocation.

    NOTE(review): the command runs under sudo with shell=True and is built
    by string concatenation; prov_count is an int here so it is safe, but
    any future attacker-controlled segment would be shell-injectable.
    """
    for prov_count in range(counter):
        cmd = "sudo /home/vagrant/SPADE/bin/spade control | add reporter"\
            + " Camflow inputLog=/home/vagrant/audits/audit"\
            + str(prov_count) + ".log"
        ps = subprocess.Popen(cmd,shell=True,stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
        # communicate() waits for the process to finish; the captured
        # output is currently unused.
        output = ps.communicate()[0]
# This function sits and waits on the connect port
# any message that comes to the leader is processed here
# It is run in a new thread
def process_follower_msgs():
    """Leader-side REP loop handling follower requests (run in a daemon thread).

    Two message topics are served:
      * "connect": assign the follower a fresh numeric ID and tell it the
        audit PUB port.
      * "audit": record the follower's auditing state; when log data is
        attached, decompress it to audits/audit<ID>.log and, once every
        registered follower has reported, hand the logs to SPADE.

    Reads the module globals connect_port, audit_port and clients.
    NOTE(review): pickle.loads on data received from the network is unsafe
    against untrusted peers — acceptable only inside a trusted cluster.
    """
    context = zmq.Context.instance()
    # This counter is how followers get unique IDs
    # each follower gets current val than it is incremented
    follower_counter = 0
    # Track followers who completed/started audits
    audit_counter = 0
    start_counter = 0
    # Binds to REP socket
    socket = context.socket(zmq.REP)
    socket.bind("tcp://*:%s" % connect_port)
    # Do this until program is closed
    while True:
        # Wait for next request from client
        message = pickle.loads(socket.recv())
        if(debug): print("Received request: ", message)
        # Messages will have a topic key indicating what it is for
        if(message["topic"] == "connect"): # New follower trying to connect
            # Grab counter value and increment
            new_id = str(follower_counter)
            follower_counter += 1
            # Construct message to send back, telling the follower its
            # ID and which port to connect for audit requests
            message = {"ID": new_id, "audit_port": audit_port}
            message = pickle.dumps(message)
            socket.send(message)
            # Add client to dict and continue
            clients[str(new_id)] = False
            if(debug): print("assigned ", str(new_id))
        elif (message["topic"] == "audit"): # Follower changing audit state
            clients[message["ID"]] = message["current_state"]
            if("data" in message.keys() and message["data"] is not None):
                # Follower sent a zlib-compressed UTF-8 audit log.
                with open("audits/audit" + str(message["ID"]) + ".log", "w+") as new_log:
                    new_log.write(zlib.decompress(message["data"]).decode("utf-8"))
                audit_counter += 1
                # All followers reported -> submit provenance to SPADE.
                if (audit_counter == follower_counter):
                    submit_prov(audit_counter)
                    audit_counter = 0
            if (message["current_state"] == True):
                start_counter += 1
                if (start_counter == follower_counter):
                    print("All Auditing!")
                    start_counter = 0
            socket.send_string("ACK")
        else:
            # Unknown topic: ignored. NOTE(review): no reply is sent here,
            # which would wedge the REQ/REP exchange — confirm unreachable.
            pass
def run_leader(connect_port):
    """Leader mode: serve follower messages in a background thread, bind the
    audit PUB socket, and drive audits via a small interactive CLI.

    CLI commands: s(tart) broadcast START, e(nd) broadcast END,
    q(uit) broadcast STOP and exit, h(elp), d(ebug) print clients.
    """
    # context creates sockets
    context = zmq.Context.instance()
    # Audit port is where the leader broadcasts audit requests
    # to all of the clients. In PUB/SUB pattern where leader is PUB
    # (local variable; shadows the module-level audit_port of the same value)
    audit_port = "5557"
    # Run follower process function as daemon so it will stop when
    # program ends
    ids = threading.Thread(target=process_follower_msgs, daemon=True)
    ids.start()
    # Bind to audit socket
    audit_socket = context.socket(zmq.PUB)
    audit_socket.bind("tcp://*:%s" % audit_port)
    # Simple CLI to interact with leader
    print("Type help for options")
    while True:
        val = input("> ")
        if(val == "quit" or val =="q"):
            audit_socket.send_string("STOP")
            quit()
        elif(val == "help" or val == "h"):
            print("type s(tart) to begin an audit")
            print("Type q(uit) to close")
        elif(val == "start" or val == "s"):
            if(debug): print("starting audit")
            audit_socket.send_string("START")
        elif(val == "end" or val == "e"):
            if(debug): print("stopping audit")
            audit_socket.send_string("END")
        elif(val =="d" or val == "debug"):
            print(clients)
def run_follower(connect_port):
    """Follower mode: register with the leader, then obey broadcast commands.

    Registers over REQ/REP on connect_port to obtain an ID and the audit
    PUB port, then subscribes and reacts to START (enable CamFlow auditing),
    END (disable CamFlow, ship the compressed log back) and STOP (exit).
    Reads module globals ip_address and debug.
    """
    # ZeroMQ context allows socket creation
    context = zmq.Context.instance()
    # Connect to leader socket
    if(debug): print("Connecting to leader...")
    socket = context.socket(zmq.REQ)
    socket.connect("tcp://" + ip_address + ":%s" % connect_port)
    # Attempt to register with the leader
    if(debug): print("Sending connect request...")
    connect_message = {"topic":"connect"}
    socket.send(pickle.dumps(connect_message))
    # Get the reply with ID and audit port
    client_info = socket.recv()
    client_info = pickle.loads(client_info)
    if(debug): print("Received ID [", client_info["ID"], "]")
    # Connect to the audit port received from leader
    audit_socket = context.socket(zmq.SUB)
    audit_socket.connect("tcp://" + ip_address + ":%s" % client_info["audit_port"])
    audit_socket.setsockopt(zmq.SUBSCRIBE, b'')
    # Until the program ends wait for messages from leader
    while True:
        # Block on audit socket
        audit_message = audit_socket.recv()
        if(debug): print(audit_message)
        if (audit_message == b'STOP'): # If leader is killing program break out of loop
            quit()
        elif (audit_message == b"START"): # Start an audit
            if(debug): print("starting audit")
            #TODO start CamFlow
            cf =subprocess.run(["camflow", "-e", "true"])
            cf =subprocess.run(["camflow", "-a", "true"])
            # Tell leader camflow has started
            audit_start = {"topic":"audit","ID":client_info["ID"], "current_state": True}
            socket.send(pickle.dumps(audit_start))
            # MUST receive ack for this socket pattern
            ack = socket.recv()
            if(debug): print(ack)
        elif (audit_message == b"END"):
            if(debug):print("stopping audit")
            # stop CamFlow
            cf =subprocess.run(["camflow", "-e", "false"])
            cf =subprocess.run(["camflow", "-a", "false"])
            if(debug):print("audit stopped")
            log_data = get_audit_log()
            if(log_data is not None):
                # Ship the log compressed; leader decompresses in
                # process_follower_msgs.
                audit_msg ={"topic":"audit", "ID":client_info["ID"],\
                    "current_state": False, "data":zlib.compress(log_data.encode("utf-8"))}
                audit_msg = pickle.dumps(audit_msg)
                socket.send(audit_msg)
            else:
                print("No log file")
                # Prepare and send message for leader indicating state change
                audit_stop = {"topic":"audit","ID":client_info["ID"],\
                    "current_state": False, "data": None}
                socket.send(pickle.dumps(audit_stop))
            # MUST receive ack for this socket pattern
            ack = socket.recv()
            if(debug):print(ack)
if __name__ == "__main__":
    # Entry point: load the JSON config; if it names a leader to contact,
    # run as a follower, otherwise act as the leader.
    # This port is how the follower / leader talk
    connect_port = "5556"
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', required = True)
    args = parser.parse_args()
    config_filepath = args.config
    with open(config_filepath) as config_file:
        config_info = json.loads(config_file.read())
    if("leader_ip" in config_info.keys()):
        # A "leader_ip" key switches this node into follower mode; these
        # assignments rebind the module-level globals read by run_follower.
        ip_address = config_info["leader_ip"]
        is_leader = False
    while True:
        if(is_leader):
            run_leader(connect_port)
        else:
            run_follower(connect_port)
| [
"joe.wonsil@gmail.com"
] | joe.wonsil@gmail.com |
21af297d149f201fe26a46544168bf6259c049d2 | 036f944553de4af4ada1648357e2282eb6dd2fb7 | /twilike/twilike/wsgi.py | b6870d17166a42d975fd2ae3755fc9f0e75012ac | [] | no_license | Techteam-Course/twilike | 36d2b3996412b153e498f9f15eee1f10670a937a | 24f753138598d05389ff8d4962506e577b01db8b | refs/heads/master | 2023-04-27T09:36:44.690356 | 2019-10-04T03:39:20 | 2019-10-04T03:39:20 | 212,729,545 | 0 | 0 | null | 2023-04-21T20:38:21 | 2019-10-04T03:34:30 | Python | UTF-8 | Python | false | false | 391 | py | """
WSGI config for twilike project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'twilike.settings')
application = get_wsgi_application()
| [
"takuya.miraidenshi@gmail.com"
] | takuya.miraidenshi@gmail.com |
67c6ca7aa747b61e4265c76b5ee7d154068c47f2 | 445169ee33a144cea51e0817df5446420b4f0634 | /tests/python/relay/test_auto_scheduler_layout_rewrite_networks.py | 95f1177da024d0ee57ce922a147336b570e952d7 | [
"Zlib",
"MIT",
"Apache-2.0",
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference",
"Unlicense"
] | permissive | schell/tvm | c70babb90daffb1ee1b01884934b47636d18b6e1 | e8752c95ffe24f79a68d764c8815de3af8feeed9 | refs/heads/main | 2023-06-21T00:25:58.311555 | 2021-03-29T16:39:22 | 2021-03-29T16:39:22 | 352,715,575 | 0 | 0 | Apache-2.0 | 2021-03-29T16:44:57 | 2021-03-29T16:44:57 | null | UTF-8 | Python | false | false | 6,443 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Test layout rewrite support for whole neural networks"""
import tempfile
import numpy as np
import tvm
from tvm import relay, auto_scheduler
from tvm.contrib import graph_runtime
import tvm.testing
def get_np_array(var, dtype):
    """Return a standard-normal array shaped like the relay var's type annotation."""
    shape = tuple(int(dim) for dim in var.type_annotation.shape)
    return np.random.randn(*shape).astype(dtype)
def get_relay_conv2d(
    outc=32,
    inc=32,
    height=14,
    width=14,
    kh=3,
    kw=3,
    batch=1,
    pad=0,
    stride=1,
    dilation=1,
    layout="NHWC",
):
    """Build a relay module containing a single conv2d plus random inputs.

    `layout` selects NHWC (kernel HWIO) or NCHW (kernel OIHW); the remaining
    arguments describe the convolution geometry.

    Returns:
        (mod, data, weight): the IRModule and matching random numpy arrays.

    Raises:
        ValueError: for an unsupported `layout` (previously an unknown layout
            fell through and crashed later with a NameError on `kernel_layout`).
    """
    dtype = "float32"
    if layout == "NHWC":
        kernel_layout = "HWIO"
        d = relay.var("data", shape=(batch, height, width, inc), dtype=dtype)
        w = relay.var("weight", shape=(kh, kw, inc, outc), dtype=dtype)
    elif layout == "NCHW":
        kernel_layout = "OIHW"
        d = relay.var("data", shape=(batch, inc, height, width), dtype=dtype)
        w = relay.var("weight", shape=(outc, inc, kh, kw), dtype=dtype)
    else:
        raise ValueError("Unsupported layout: %s" % layout)
    y = relay.nn.conv2d(
        d,
        w,
        padding=pad,
        kernel_size=(kh, kw),
        strides=(stride, stride),
        dilation=(dilation, dilation),
        channels=outc,
        groups=1,
        data_layout=layout,
        kernel_layout=kernel_layout,
    )
    mod = tvm.IRModule()
    mod["main"] = relay.Function([d, w], y)
    data, weight = get_np_array(d, dtype), get_np_array(w, dtype)
    return mod, data, weight
def get_relay_conv3d(
    outc=8,
    inc=8,
    depth=8,
    height=7,
    width=7,
    kd=1,
    kh=1,
    kw=1,
    batch=1,
    pad=0,
    stride=1,
    dilation=1,
    layout="NDHWC",
):
    """Build a relay module containing a single conv3d plus random inputs.

    `layout` selects NDHWC (kernel DHWIO) or NCDHW (kernel OIDHW); the
    remaining arguments describe the convolution geometry.

    Returns:
        (mod, data, weight): the IRModule and matching random numpy arrays.

    Raises:
        ValueError: for an unsupported `layout` (previously an unknown layout
            fell through and crashed later with a NameError on `kernel_layout`).
    """
    dtype = "float32"
    if layout == "NDHWC":
        kernel_layout = "DHWIO"
        d = relay.var("data", shape=(batch, depth, height, width, inc), dtype=dtype)
        w = relay.var("weight", shape=(kd, kh, kw, inc, outc), dtype=dtype)
    elif layout == "NCDHW":
        kernel_layout = "OIDHW"
        d = relay.var("data", shape=(batch, inc, depth, height, width), dtype=dtype)
        w = relay.var("weight", shape=(outc, inc, kd, kh, kw), dtype=dtype)
    else:
        raise ValueError("Unsupported layout: %s" % layout)
    y = relay.nn.conv3d(
        d,
        w,
        padding=pad,
        kernel_size=(kd, kh, kw),
        strides=(stride, stride, stride),
        dilation=(dilation, dilation, dilation),
        channels=outc,
        groups=1,
        data_layout=layout,
        kernel_layout=kernel_layout,
    )
    mod = tvm.IRModule()
    mod["main"] = relay.Function([d, w], y)
    data, weight = get_np_array(d, dtype), get_np_array(w, dtype)
    return mod, data, weight
def get_relay_dense(m=128, n=128, k=128):
    """Build a relay module computing dense((m, k) x (n, k)) plus random inputs."""
    dtype = "float32"
    data_var = relay.var("data", shape=(m, k), dtype=dtype)
    weight_var = relay.var("weight", shape=(n, k), dtype=dtype)
    out = relay.nn.dense(data_var, weight_var, units=n)
    mod = tvm.IRModule()
    mod["main"] = relay.Function([data_var, weight_var], out)
    # Generate the data array first, then the weight, as before.
    return mod, get_np_array(data_var, dtype), get_np_array(weight_var, dtype)
def get_relay_batchmm(batch=4, m=128, n=128, k=128):
    """Build a relay module computing batch_matmul((batch, m, k) x (batch, n, k))."""
    dtype = "float32"
    lhs = relay.var("data", shape=(batch, m, k), dtype=dtype)
    rhs = relay.var("weight", shape=(batch, n, k), dtype=dtype)
    product = relay.nn.batch_matmul(lhs, rhs)
    mod = tvm.IRModule()
    mod["main"] = relay.Function([lhs, rhs], product)
    # Generate the data array first, then the weight, as before.
    return mod, get_np_array(lhs, dtype), get_np_array(rhs, dtype)
def tune_and_check(mod, data, weight):
    """Tune `mod` with the auto-scheduler (a single trial), build it with the
    tuned log applied (layout rewrite enabled), and verify its output against
    an untuned opt_level=0 build of the same module.

    Args:
        mod: relay IRModule whose "main" takes (data, weight).
        data: numpy array bound to the "data" var at run time.
        weight: numpy array bound as a compile-time parameter.
    """
    # Extract tasks from a relay program
    target = tvm.target.Target("llvm")
    tasks, task_weights = auto_scheduler.extract_tasks(
        mod, target=target, params={"weight": weight}
    )
    # Tuning records go to a throwaway file that lives only for this call.
    with tempfile.NamedTemporaryFile() as fp:
        log_file = fp.name
        # Tune tasks
        tuner = auto_scheduler.TaskScheduler(tasks, task_weights)
        tune_option = auto_scheduler.TuningOptions(
            num_measure_trials=1,
            num_measures_per_round=1,
            builder=auto_scheduler.LocalBuilder(timeout=60),
            measure_callbacks=[auto_scheduler.RecordToFile(log_file)],
        )
        # Random sketch policy keeps the one-trial search cheap for CI.
        tuner.tune(tune_option, search_policy="sketch.random")
        # Compile
        with auto_scheduler.ApplyHistoryBest(log_file):
            with tvm.transform.PassContext(
                opt_level=3,
                config={"relay.backend.use_auto_scheduler": True},
            ):
                lib = relay.build(mod, target=target, params={"weight": weight})
        # Compile without auto-scheduler for correctness check
        with tvm.transform.PassContext(opt_level=0):
            lib2 = relay.build(mod, target=target, params={"weight": weight})
        # Helper: run a compiled lib on CPU and fetch its first output.
        def get_output(data, lib):
            dev = tvm.cpu()
            module = graph_runtime.GraphModule(lib["default"](dev))
            module.set_input("data", data)
            module.run()
            return module.get_output(0).asnumpy()
        # Check correctness
        actual_output = get_output(data, lib)
        expected_output = get_output(data, lib2)
        tvm.testing.assert_allclose(actual_output, expected_output, rtol=1e-4, atol=1e-4)
def test_conv2d():
    # Plain conv2d with a 1x1 kernel (avoids the winograd path).
    mod, data, weight = get_relay_conv2d(kh=1, kw=1)
    tune_and_check(mod, data, weight)
def test_conv2d_winograd():
    # 3x3 kernel, so the auto-scheduler may select a winograd implementation.
    mod, data, weight = get_relay_conv2d(kh=3, kw=3)
    tune_and_check(mod, data, weight)
def test_conv3d():
    # Default 1x1x1 conv3d geometry.
    mod, data, weight = get_relay_conv3d()
    tune_and_check(mod, data, weight)
def test_dense():
    # Dense (matmul) layout rewrite.
    mod, data, weight = get_relay_dense()
    tune_and_check(mod, data, weight)
def test_batch_matmul():
    # Batched matmul layout rewrite.
    mod, data, weight = get_relay_batchmm()
    tune_and_check(mod, data, weight)
if __name__ == "__main__":
    # Run every layout-rewrite end-to-end check when invoked as a script.
    test_conv2d()
    test_conv2d_winograd()
    test_conv3d()
    test_dense()
    test_batch_matmul()
| [
"noreply@github.com"
] | noreply@github.com |
1d6c691b4ae9be1cd732ff7fad48aa58b9108a82 | 1bf4b908de736df9a5ef3315a24532cf00acd39d | /crawler/service/models/zjld/base.py | b6c95b6010334ba445605480aff8165bcf18e706 | [] | no_license | jshliu/crawler | 43aa9c90b9e1cc36e5d1d381e5d5001a70dc22c8 | 6c6d707ec176e9fcfbc61577a57af27163fb0468 | refs/heads/develop | 2021-06-18T18:48:44.657153 | 2019-11-09T09:03:19 | 2019-11-09T09:03:19 | 219,108,030 | 0 | 1 | null | 2021-06-10T22:12:04 | 2019-11-02T05:25:43 | Python | UTF-8 | Python | false | false | 4,707 | py | # -*- coding: utf-8 -*-
import logging
import copy
import time
from uuid import uuid1
from datetime import datetime
from context.context import Context
unix_time = Context().get("datetimeutil.unix_time")
ModelBase = Context().get("ModelBase")
CassandraQueryApi = Context().get("CassandraQueryApi")
import_logger = logging.getLogger("crawler.import")
class ContentModel(ModelBase):
    """
    Base Cassandra-backed model for crawled content (Python 2 codebase).

    ``FIELDS`` maps every column to a default value; the defaults double as
    "empty" sentinels for :meth:`is_empty`.  Subclasses must implement
    :meth:`find_dup` so :meth:`_import_cassandra` can dedup/merge rows.
    """
    TYPE = "base.content"
    # NOTE(review): these defaults are evaluated once at class-definition
    # time (uuid1()/now()/time() are shared sentinels, not per-instance
    # values); is_empty() relies on comparing against exactly these objects.
    FIELDS = {
        "id": uuid1(),
        "source": u"",
        "origin_source": u"",
        "pubtime": datetime.utcfromtimestamp(0),
        "crtime": datetime.now(),
        "crtime_int": int(time.time() * 1000000),
        "province": u"",
        "city": u"",
        "district": u"",
        "comment": {},
        "tag": "",
        "producer_id": 0,
        "category": u"",
        "application": u"",
    }
    def __init__(self, dct={}, authority=None):
        # ``dct`` is only read here, so the shared mutable default is harmless.
        if not isinstance(dct, dict):
            raise TypeError
        for key in self.FIELDS.iterkeys():
            '''
            New model id is None, do not set id field.
            Old model id is not None, should set id field.
            '''
            if key == "id" and dct.get(key) is None:
                continue
            self.set_field(key, dct.get(key))
    def __setitem__(self, key, value):
        # Route item assignment through set_field for uniform handling.
        return self.set_field(key, value)
    def set_field(self, field, value):
        """Assign ``field`` via the ModelBase item protocol."""
        ret = super(ContentModel, self).__setitem__(field, value)
        # NOTE(review): both branches end the method identically; the guard
        # below currently has no effect beyond an early return.
        if (value is None) or self.is_empty(field):
            return ret
    def is_empty(self, field):
        """True when ``field`` still holds its FIELDS default.

        Booleans and the subclass-only "price"/"original_price" fields are
        never considered empty.
        """
        defvalue = self.FIELDS[field]
        if isinstance(defvalue, bool):
            return False
        if field in ["price", "original_price"]:
            return False
        return self[field] == defvalue
    def _import_cassandra(self, keyspace, column_family):
        """Insert or update this row in Cassandra.

        Returns True when a write happened, False when the existing duplicate
        was identical and the write was skipped.
        """
        dup = self.find_dup()
        if dup:
            # Reuse the duplicate's id and fold its stored values into ours.
            self["id"] = dup["id"]
            self.merge(dup)
            if not self.equals(dup):
                self.save_cassandra(keyspace, column_family)
                # BUG FIX: this log line used to sit *after* ``return True``
                # and was unreachable, so updates were never logged.
                import_logger.info("UPDATED cassandra model - %s", self)
                return True
            else:
                import_logger.info("SKIPPED cassandra model - %s", self)
                return False
        else:
            self.save_cassandra(keyspace, column_family)
            import_logger.info("INSERTED cassandra model - %s", self)
            return True
    def save_cassandra(self, keyspace, column_family):
        """Persist all fields with an INSERT (a Cassandra upsert), assigning
        a fresh id when none is set yet."""
        self["id"] = self.get("id") if self.get("id") else self.new_id()
        cql = "INSERT INTO %s (%s) VALUES (%s)" \
            % (
                column_family,
                reduce(
                    lambda x, y: x + ", " + y,
                    self.keys()
                ),
                reduce(
                    lambda x, y: x + ", " + y,
                    ["%s" for x in self.keys()]
                )
            )
        CassandraQueryApi(keyspace).save(cql, self.values())
        return self
    def new_id(self):
        """Generate a new time-based UUID for this row."""
        return uuid1()
    def on_import(self):
        # NOTE(review): ``_import`` is not defined on this class (only
        # ``_import_cassandra`` is) -- presumably supplied by ModelBase or a
        # subclass; confirm before invoking on_import directly.
        self._import()
    def export(self):
        """Return a plain dict of the fields."""
        dct = dict(self)
        # NOTE(review): pop/re-insert of "id" is effectively a no-op here;
        # possibly a relic of an older key rename -- confirm before removing.
        dct["id"] = dct.pop("id")
        return dct
    def find_dup(self):
        """Return an existing duplicate model, or a falsy value when none.

        Must be overridden by subclasses.
        """
        # BUG FIX: was ``raise NotImplemented`` -- NotImplemented is a
        # singleton, not an exception class, so raising it produced a
        # TypeError instead of the intended NotImplementedError.
        raise NotImplementedError
    def merge(self, item):
        '''
        item: existing item in database
        '''
        for field in self.FIELDS.iterkeys():
            if field in ["id"]:
                continue
            overwrite = True
            value = self.merge_value(field, item[field], overwrite)
            self.set_field(field, value)
    def merge_value(self, field, value, overwrite):
        '''
        value: existing value, ***DO NOT MODIFY***
        overwrite: whether should value overwrite self[field]
        '''
        return value if overwrite else self[field]
    def equals(self, item):
        """Field-wise equality: same class plus equal FIELDS values, with
        datetimes compared at unix-timestamp precision."""
        if self.__class__ != item.__class__:
            return False
        for k in self.FIELDS.keys():
            if k not in ["_id", "created", "updated"]: # TODO: authorities
                if isinstance(self[k], datetime):
                    # drop precise
                    if unix_time(self[k]) != unix_time(item[k]):
                        return False
                elif self[k] != item[k]:
                    return False
        return True
    def __unicode__(self):
        return u"%s(%s)" % (self.TYPE, self['id'])
    def __str__(self):
        return "%s(%s)" % (self.TYPE, self['id'])
| [
"lioujiasheng@outlook.com"
] | lioujiasheng@outlook.com |
2f6956dd6f187273f31b75d5d6429b5d5d23c030 | 7a13a9def50e3d87d74f7d3a2b990cd9bc1acda1 | /accounts/admin.py | 1ae86e995a6ecd33ad7fd7b61e36b1ee99444204 | [] | no_license | anandrajB/speedy-scanner | a97bfe16feef483db9e2fe77a2b1639e1dea8707 | fd5d4fd7b3ba600d975ae2aaf73ae81e1d0e3632 | refs/heads/master | 2023-08-31T09:07:46.802433 | 2021-09-20T12:21:22 | 2021-09-20T12:21:22 | 374,634,888 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,063 | py | from django.contrib import admin
from django import forms
from django.contrib import admin
from django.contrib.auth.models import Group
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from .models import MyUser, Profile, File, Batch
class UserCreationForm(forms.ModelForm):
    """A form for creating new users. Includes all the required
    fields, plus a repeated password."""
    password1 = forms.CharField(label="Password", widget=forms.PasswordInput)
    password2 = forms.CharField(
        label="Password confirmation", widget=forms.PasswordInput
    )
    class Meta:
        model = MyUser
        fields = ("email", "phone")
    def clean_password2(self):
        # Check that the two password entries match
        password1 = self.cleaned_data.get("password1")
        password2 = self.cleaned_data.get("password2")
        if password1 and password2 and password1 != password2:
            raise forms.ValidationError("Passwords don't match")
        return password2
    def save(self, commit=True):
        # Save the provided password in hashed format
        # (set_password hashes; the raw password is never stored).
        user = super().save(commit=False)
        user.set_password(self.cleaned_data["password1"])
        if commit:
            user.save()
        return user
class UserChangeForm(forms.ModelForm):
    """A form for updating users. Includes all the fields on
    the user, but replaces the password field with admin's
    password hash display field.
    """
    password = ReadOnlyPasswordHashField()
    class Meta:
        model = MyUser
        fields = ("email", "phone", "password", "is_active", "is_admin")
    def clean_password(self):
        # Always return the initial hash: the field is read-only, so any
        # submitted value is ignored and password changes go through the
        # dedicated admin password form instead.
        return self.initial["password"]
class UserAdmin(BaseUserAdmin):
    """Admin for the custom MyUser model (email login, no username)."""
    # The forms to add and change user instances
    form = UserChangeForm
    add_form = UserCreationForm
    # The fields to be used in displaying the User model.
    # These override the definitions on the base UserAdmin
    # that reference specific fields on auth.User.
    list_display = ("email", "phone", "is_admin")
    list_filter = ("is_admin",)
    fieldsets = (
        (None, {"fields": ("email", "password")}),
        ("Personal info", {"fields": ("phone",)}),
        ("Permissions", {"fields": ("is_active", "is_admin",)}),
    )
    # add_fieldsets is not a standard ModelAdmin attribute. UserAdmin
    # overrides get_fieldsets to use this attribute when creating a user.
    add_fieldsets = (
        (
            None,
            {
                "classes": ("wide",),
                "fields": ("email", "phone", "password1", "password2"),
            },
        ),
    )
    search_fields = ("email",)
    ordering = ("email",)
    # No groups/permissions UI, so nothing to show horizontally.
    filter_horizontal = ()
admin.site.register(MyUser, UserAdmin)
# ... and, since we're not using Django's built-in permissions,
# unregister the Group model from admin.
admin.site.unregister(Group)
admin.site.register(Profile)
admin.site.register(Batch)
admin.site.register(File)
| [
"anand98.ar@gmail.com"
] | anand98.ar@gmail.com |
6f79aa7a0e57d1eca2ccf3df40682b9dca539f7d | 6a97624f416a0a0faf18bf19c721977c58e6e0e3 | /tests/rules/test_extreme_connections.py | d4913451746f240f5fe412bde136c11c504e7a97 | [] | no_license | adimian/squid-report | 392d781a973644e777011423aafbe136fa346d8c | 8f1dfafca905383b576d48035f042eaf0e6c1056 | refs/heads/master | 2022-07-12T17:43:23.223632 | 2022-07-05T08:34:28 | 2022-07-05T08:34:28 | 254,040,297 | 0 | 0 | null | 2022-07-05T08:34:30 | 2020-04-08T09:16:41 | Python | UTF-8 | Python | false | false | 352 | py | from squidreport.rules.extreme_connections import ExtremeConnectionsRule
def test_extreme_connections_rule_triggered(config_with_log_dir):
    """Evaluating the rule on the 'extreme_connections' fixture logs should
    emit exactly one message whose context reports a single positive hit.

    `config_with_log_dir` is a pytest fixture factory that builds a config
    pointed at the named fixture log directory.
    """
    rule = ExtremeConnectionsRule(
        config=config_with_log_dir("extreme_connections")
    )
    rule.evaluate()
    assert len(rule.messages) == 1
    assert "nb_positives=1" in rule.messages[0].context
| [
"eric@adimian.com"
] | eric@adimian.com |
3d838033d15386a3eba79d8ff6c914677e51f87f | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/59/usersdata/158/47953/submittedfiles/testes.py | 79ed55e189435778a26b71a91b9e7d1d21f2ea6a | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 109 | py | # -*- coding: utf-8 -*-
#COMECE AQUI ABAIXO
r=int(input('raio do circulo:'))
p=3,1415
area=p*r*r
print(area)
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
0766d5ff3638af0d66fdff05bdf2debf70f1ffef | 7e5990e661f27d1b3a7f75e6462aaefc6843e8a8 | /vpworkspace/tests/pathExistsTest.py | 93df5b4ebbb595c8ec6612de1f58dc04c8f14f77 | [] | no_license | garthur/vp_workspace | 0117e082b25655bfb72f6fcb9c602d35d9a413ba | 14c842db4ea876f098dcd86e465ac8494cb42104 | refs/heads/master | 2021-01-19T08:46:14.716876 | 2015-08-09T01:59:00 | 2015-08-09T01:59:00 | 30,545,687 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,576 | py | # VP Workspace Window
# path checks to make sure the installation is safe
import os
import sys
PROJDIR_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(PROJDIR_PATH)
PACKAGEDIR_PATH = os.path.dirname(PROJDIR_PATH)
def checkAllFiles():
    """Assert that every expected package/project file exists on disk,
    printing progress as it goes (Python 2 print statements)."""
    def checkProjectFolder():
        # top level file check
        print "Checking project directory..."
        assert(os.path.exists(PROJDIR_PATH+os.sep+"settings.txt"))
        assert(os.path.exists(PROJDIR_PATH+os.sep+"__init__.py"))
        assert(os.path.exists(PROJDIR_PATH+os.sep+"VPWorkspaceWindow.py"))
    def checkAssetsFolder():
        # crawl the assets folder for the help text files
        # eventually, image files will be placed here for logos etc.
        print ">>> Checking assets folder...",
        addonToPath = PROJDIR_PATH+os.sep+"assets"+os.sep
        allHelpPaths = ["genHelp.txt", "vectorHelp.txt", "polyHelp.txt",
                        "matrixHelp.txt"]
        assert(all([os.path.exists(addonToPath+helpPath)
                    for helpPath in allHelpPaths]))
        print "Passed"
    def checkGraphicsFolder():
        # crawl the graphics folder for the graphics startup files
        # these files are taken directly from CMU 15-112
        print ">>> Checking graphics folder...",
        addonToPath = PROJDIR_PATH+os.sep+"graphics"+os.sep
        graphicsPaths = ["eventBasedAnimationClass.py"]
        assert(all([os.path.exists(addonToPath+graphicsPath)
                    for graphicsPath in graphicsPaths]))
        print "Passed"
    def checkSourceFolder():
        # crawl the source folder for the backend class files
        # that I wrote to implement some data types
        print ">>> Checking source folder...",
        addonToPath = PROJDIR_PATH+os.sep+"source"+os.sep
        assert(os.path.exists(addonToPath+"__init__.py"))
        sourcePaths = ["matrixClass.py", "polynomialClass.py",
                       "vectorClass.py"]
        assert(all([os.path.exists(addonToPath+sourcePath)
                    for sourcePath in sourcePaths]))
        print "Passed"
    def checkTestFolder():
        # Still a stub: prints but performs no assertions yet.
        print ">>> Checking test folder...",
        # NOTE(review): "text" below looks like a typo for "tests"; harmless
        # today because the variable is never used.
        addonToPath = PROJDIR_PATH+os.sep+"text"+os.sep
        print "Passed"
        pass
    # Package-level files are checked inline before the per-folder checkers run.
    readmePath, tkinterColorChartPath = "README.md", "tkinterColorChart.png"
    docsPath, licensePath = "docs", "LICENSE"
    print "Checking package directory...",
    assert(os.path.exists(PACKAGEDIR_PATH+os.sep+docsPath))
    assert(os.path.exists(PACKAGEDIR_PATH+os.sep+licensePath))
    assert(os.path.exists(PACKAGEDIR_PATH+os.sep+readmePath))
    assert(os.path.exists(PACKAGEDIR_PATH+os.sep+tkinterColorChartPath))
    assert(os.path.exists(PROJDIR_PATH))
    print "Passed"
    checkProjectFolder()
    checkAssetsFolder()
    checkGraphicsFolder()
    checkSourceFolder()
    checkTestFolder()
    print "All files present."
# Run the checks at import time.
checkAllFiles()
"garthur@andrew.cmu.edu"
] | garthur@andrew.cmu.edu |
ea091533dcda73cd11b30f072f03f4b409685d30 | 2222b9d377f654edbcb61e38e54826d14af098dd | /simpledjangoapi/simpledjangoapi/settings.py | 3161c41b176fd71115fd7c6a1d5464982c2e8f7f | [] | no_license | crromano/simple-django-api | 45c64e5ae530f3290fcda99213e377d248a2eaa0 | fcebd68c4f664f28cb61614836040f4f993e9adb | refs/heads/master | 2020-04-11T21:29:12.852328 | 2018-12-17T10:02:35 | 2018-12-17T10:02:35 | 162,107,200 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,113 | py | """
Django settings for simpledjangoapi project.
Generated by 'django-admin startproject' using Django 2.0.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control -- load it from the
# environment and rotate it before any production deployment.
SECRET_KEY = 'c@##30f=+uny8xvfd7#s21!mr1(ia!^0)e%2qvdmp_pp+f90ow'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'simpledjangoapi.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'simpledjangoapi.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
"christianroman.1996@gmail.com"
] | christianroman.1996@gmail.com |
872f48c5e32eedd5720a4a1bb697e91306aea7b0 | d7524c4bb97eae8a08931cb76f93a3634743cd6f | /tr_nonrecurse.py | 9655c728be5c674a22334643595c0ea1c6b4c7d8 | [] | no_license | digimutts/xyz | fad4b1ad26d3df7cd5b00b5a6a4ada62b9c5eb87 | c4956ea2a3f3dfc3511a8cad624142f53fdcb993 | refs/heads/master | 2020-06-19T03:07:17.499707 | 2016-11-28T00:46:16 | 2016-11-28T00:46:16 | 74,923,578 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,111 | py | import stackll
import queuell
class TreeNonRecursive:
"""
Non-recursive tree methods
"""
def __init__(self):
pass
def pre_nonrecursive(self, node):
# go to each leaf, put vals on a stack
stack = stackll.StackLL()
stack.push(node)
pre = []
while not stack.is_empty():
node = stack.pop()
if node is None:
continue
pre.append(node.val)
# go all the way to right, then left
# (stack will return these in reverse)
stack.push(node.right)
stack.push(node.left)
return pre
def post_nonrecursive(self, node):
stack = stackll.StackLL()
stack.push(node)
post = []
while not stack.is_empty():
node = stack.pop()
if node is None:
continue
# go all the way to right, then left
# (stack will return these in reverse)
stack.push(node.right)
stack.push(node.left)
post.append(node.val)
return post
| [
"mpetermangis@gmail.com"
] | mpetermangis@gmail.com |
56b7b34e0989e087d131074aaf141981cabc9f1a | 49a4138a1a2619b260a6b620303a9a690a56fe64 | /question/tests/test_question_view.py | 8e749d2f2975e5e1fb11ed62953cdb8f79c47b06 | [] | no_license | jithin0000/pscbackend | b080701333a4fc75034b919f50e6a9c53b45c49b | 863c697e0dfd5e4574f5bf51b112457a5679b089 | refs/heads/master | 2023-07-17T04:31:47.050236 | 2021-08-30T12:01:17 | 2021-08-30T12:01:17 | 380,978,985 | 0 | 0 | null | 2021-08-30T12:01:18 | 2021-06-28T09:37:28 | Python | UTF-8 | Python | false | false | 5,537 | py | from django.urls import reverse
from rest_framework.test import APITestCase
from agent.models import Agent
from customauth.models import MyUser
from django.utils import timezone
from rest_framework.authtoken.models import Token
from question.models import Question, Option
class TestQuestionCreate(APITestCase):
    """CRUD API tests for the question endpoints (create/update/detail/
    delete/list), exercised with token-authenticated agent/admin users."""
    agent = None
    valid_data = None
    def setUp(self) -> None:
        # NOTE(review): the tests fetch tokens via Token.objects.get, so a
        # token is presumably auto-created for each user (e.g. by a
        # post_save signal) -- confirm in the accounts app.
        user = MyUser.objects.create_user(
            email="agent@gmail.com", role="AGENT",
            password="newpassword"
        )
        self.agent = Agent.objects.create(
            name="agent", user=user,
            phone_number="1234567890",
            address_state="kerala",
            address_city="thrissur",
            address_pin="680511",
        )
        self.valid_data = {
            "text": "first question",
            "answer": "option id",
            'options': [
                {'text': "first option"},
                {'text': "second option"}
            ]
        }
        # Created for its side effect only (the ADMIN user must exist for
        # the 403 test below); the local name is unused.
        admin = MyUser.objects.create_user(
            email="admin@gmail.com", role="ADMIN",
            password="newpassword"
        )
    def test_question_create_return_401(self):
        """ test user is authenticated """
        # No credentials attached, so the endpoint must reject the request.
        response = self.client.post(
            reverse('create_question'), data=self.valid_data, format="json")
        assert response.status_code == 401
    def test_question_create_return_403(self):
        """ return 403 if not admin """
        # NOTE(review): this authenticates with the ADMIN token yet expects
        # 403, so the docstring ("if not admin") and the setup disagree --
        # verify which role the permission class actually allows.
        token = Token.objects.get(user__email="admin@gmail.com")
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
        response = self.client.post(
            reverse('create_question'), data=self.valid_data, format="json")
        assert response.status_code == 403
    def test_question_create_return_201(self):
        """test question created with options """
        token = Token.objects.get(user__email="agent@gmail.com")
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
        response = self.client.post(
            reverse('create_question'), data=self.valid_data, format="json")
        assert response.status_code == 201
        # Both nested options should have been persisted with the question.
        assert Question.objects.first().options.count() == 2
    # ================= Question update view ================
    def test_question_update_return_200(self):
        """test question updated with options """
        token = Token.objects.get(user__email="agent@gmail.com")
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
        q = Question.objects.create(text="first question ", created_by=self.agent,
                                    answer="answer"
                                    )
        a = Option.objects.create(text="first", question=q)
        b = Option.objects.create(text="second", question=q)
        update_data = {
            "text": "updated text", "answer": "updated answer",
            "options": [
                {"text": a.text},
                {"text": "another option"}
            ]
        }
        response = self.client.put(
            reverse('update_question', kwargs={'pk': 1}), data=update_data, format="json")
        assert response.status_code == 200
        # The new option supplied in the payload should now exist.
        assert Option.objects.get(text="another option") is not None
    # ================= Question detail view ================
    def test_question_details_return_200(self):
        """test question detail """
        token = Token.objects.get(user__email="agent@gmail.com")
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
        q = Question.objects.create(text="first question ", created_by=self.agent,
                                    answer="answer"
                                    )
        Option.objects.create(text="first", question=q)
        Option.objects.create(text="second", question=q)
        response = self.client.get(
            reverse('detail_question', kwargs={'pk': 1}), format="json")
        assert response.status_code == 200
    # ================= Question delete view ================
    def test_question_delete_return_204(self):
        """test question delete """
        token = Token.objects.get(user__email="agent@gmail.com")
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
        q = Question.objects.create(text="first question ", created_by=self.agent,
                                    answer="answer"
                                    )
        Option.objects.create(text="first", question=q)
        Option.objects.create(text="second", question=q)
        response = self.client.delete(
            reverse('delete_question', kwargs={'pk': 1}), format="json")
        assert response.status_code == 204
        assert Question.objects.count() == 0
    # ================= Question list view ================
    def test_question_delete_return_200(self):
        """test question delete """
        # NOTE(review): despite its name/docstring, this test exercises the
        # LIST endpoint -- consider renaming to test_question_list_return_200.
        token = Token.objects.get(user__email="agent@gmail.com")
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
        q = Question.objects.create(text="first question ", created_by=self.agent,
                                    answer="answer"
                                    )
        Option.objects.create(text="first", question=q)
        Option.objects.create(text="second", question=q)
        response = self.client.get(
            reverse('list_question'), format="json")
        assert response.status_code == 200
| [
"jithinb@psecure.net"
] | jithinb@psecure.net |
ff836f4acdf91ab0b53f1a0a4795269cad1fb13a | 819d41503963012deb79cafeebbf42976ff9dc1d | /learn_python_the_hard_way/projects/skeleton/setup.py | 8b55a3bef540a0e251125c584bebea03a6175ac4 | [] | no_license | cheeyeo/learn_python_the_hard_way | db79a7ebe657c4f4033dd9693ea3b4db4526d7a8 | 2333d97f10a687174dea31691d77e87d839428a1 | refs/heads/master | 2020-04-16T18:52:17.012251 | 2019-01-15T11:22:45 | 2019-01-15T11:22:45 | 165,838,035 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 388 | py | try:
from setuptools import setup
except ImportError:
from disutils.core import setup
config = {
'description': 'My project',
'author': 'My name',
'url': 'URL to get it',
'download_url': 'where to download',
'author_email': 'My email',
'version': '0.1',
'install_requires': ['nose'],
'packages': ['NAME'],
'scripts': [],
'name': 'projectname'
}
setup(**config)
| [
"chee@compose.io"
] | chee@compose.io |
0a2a6e6a68e79bebbef374d63bfd4e57a41093db | eb87c8b1ce8591d207643d3924b7939228f1a4fe | /conformance_suite/test_assign_test_var.py | b3a5f3ec6ae764f29359d631f46cf82e492d26f7 | [] | no_license | brownplt/insta-model | 06543b43dde89913c219d476ced0f51a439add7b | 85e2c794ec4b1befa19ecb85f2c8d2509ec8cf42 | refs/heads/main | 2023-08-30T19:06:58.083150 | 2023-05-03T18:53:58 | 2023-05-10T22:29:18 | 387,500,638 | 5 | 0 | null | 2022-04-23T23:06:52 | 2021-07-19T14:53:09 | Racket | UTF-8 | Python | false | false | 414 | py | # test_assign_test_var.py
# This should pass.
from typing import Optional
def f(x: Optional[int]) -> int:
    # Keep the explicit `x = 1` assignment: this conformance case checks that
    # assignment inside the None-branch narrows Optional[int] to int (it
    # mirrors the codestr reproduced in the comment below).
    if x is None:
        x = 1
    return x
# def test_assign_test_var(self):
# codestr = """
# from typing import Optional
# def f(x: Optional[int]) -> int:
# if x is None:
# x = 1
# return x
# """
# self.compile(codestr, modname="foo")
| [
"lukuangchen1024@gmail.com"
] | lukuangchen1024@gmail.com |
66ed49839670fcbbeb2cabbb65905fc1d3efe428 | d9dcf8d1298bc8a84f173e7defac4eae28d45076 | /weatherbot/nlu_model.py | e0368f55e094a6a8349b2d6374f4bb60ac38c10e | [] | no_license | mdiegog/python-tutorials | 724423ff0ceb7d2ae7dca95294229083a0235561 | f2b336443836062295719008f58ed4d6665e5abe | refs/heads/master | 2022-12-09T18:08:10.072659 | 2020-02-08T10:42:05 | 2020-02-08T10:42:05 | 141,159,360 | 0 | 0 | null | 2022-12-08T01:14:30 | 2018-07-16T15:41:08 | HTML | UTF-8 | Python | false | false | 758 | py | from rasa_nlu.training_data import load_data
from rasa_nlu import config
from rasa_nlu.model import Trainer
from rasa_nlu.model import Metadata, Interpreter
def train_nlu(data, configs, model_dir):
    """Train a Rasa NLU model from the `data` examples using the `configs`
    pipeline file, persisting it under `model_dir` as 'weathernlu'."""
    training_data = load_data(data)
    trainer = Trainer(config.load(configs))
    trainer.train(training_data)
    model_directory=trainer.persist(model_dir, fixed_model_name = 'weathernlu')
def run_nlu():
    """Load the persisted 'weathernlu' model and print the parse of a sample query."""
    interpreter = Interpreter.load('./models/nlu/default/weathernlu')
    #print(interpreter.parse(u"I am planning my holiday to Barcelona. I wonder what is the weather out there."))
    print(interpreter.parse(u"weather in Barcelona"))
if __name__ == '__main__':
    # Train first, then immediately exercise the freshly persisted model.
    train_nlu('./data/data.json', 'config_spacy.json', './models/nlu')
    run_nlu()
| [
"mdiegog@gmail.com"
] | mdiegog@gmail.com |
818aa3abf6f0f26c357550965b482be18aa0a2b7 | 4ac57cc07c50d1cc4dbf4894b77783fa03a8c7b1 | /4-case-study-sunlight-in-austin/9_daily_hours_of_clear_sky.py | 127206faced7ca4e185d3d0c5346c054b778c6ed | [] | no_license | OCulzac/pandas-foundations | 905fa778beee5e9d8210716abcc06eeeaf02b8b9 | f13e7270dfcbb661da7a2fa3f26b4001df5eadc9 | refs/heads/master | 2020-05-19T10:44:58.816172 | 2019-05-05T04:09:45 | 2019-05-05T04:09:45 | 184,977,356 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,090 | py | """ Daily hours of clear sky
In a previous exercise, you analyzed the 'sky_condition' column to explore the difference in temperature on sunny days compared to overcast days. Recall that a 'sky_condition' of 'CLR' represents a sunny day. In this exercise, you will explore sunny days in greater detail. Specifically, you will use a box plot to visualize the fraction of days that are sunny.
The 'sky_condition' column is recorded hourly. Your job is to resample this column appropriately such that you can extract the number of sunny hours in a day and the number of total hours. Then, you can divide the number of sunny hours by the number of total hours, and generate a box plot of the resulting fraction.
As before, df_clean is available for you in the workspace.
Instructions 1/3
Get the cases in df_clean where the sky is clear. That is, when 'sky_condition' equals 'CLR', assigning to is_sky_clear.
Resample is_sky_clear by day, assigning to resampled. """
# Using df_clean, when is sky_condition 'CLR'?
is_sky_clear = df_clean['sky_condition'].str.contains('CLR')
# Resample is_sky_clear by day
resampled = is_sky_clear.resample('D')
# See the result
print(resampled)
""" Instructions 2/3
35 XP
2
3
Calculate the number of measured sunny hours per day as the sum of resampled, assigning to sunny_hours.
Calculate the total number of measured hours per day as the count of resampled, assigning to total_hours.
Calculate the fraction of hours per day that were sunny as the ratio of sunny hours to total hours.
"""
# From previous step
is_sky_clear = df_clean['sky_condition'] == 'CLR'
resampled = is_sky_clear.resample('D')
# Calculate the number of sunny hours per day
sunny_hours = resampled.sum()
# Calculate the number of measured hours per day
total_hours = resampled.count()
# Calculate the fraction of hours per day that were sunny
sunny_fraction = sunny_hours / total_hours
""" Instructions 3/3
30 XP
3
Draw a box plot of sunny_fraction using .plot() with kind set to `'box'``. """
# Make a box plot of sunny_fraction
sunny_fraction.plot(kind='box')
plt.show() | [
"oronculzac@gmail.com"
] | oronculzac@gmail.com |
6a61977905407424d34e33ee7362f78a4514ed82 | e40773f61ac0adfab65637777bd86fd822c8f40d | /DP/204_Count_Primes.py | ec273af4ef15eff7bd665f0daa2d3c4dffe8e68b | [] | no_license | jerrycmh/leetcode | 3afadb09237efc85c6d9d0591b811004837b4510 | ffdf946c7f90ab7feb122b0bdb2b1fcfe07a39f2 | refs/heads/master | 2020-04-16T12:24:27.823566 | 2019-03-27T23:06:49 | 2019-03-27T23:06:49 | 165,577,809 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 388 | py | class Solution:
def countPrimes(self, n: int) -> int:
if n < 3 : return 0
is_prime = [True for _ in range(n)]
is_prime[0], is_prime[1] = False, False
counter = 0
for i in range(2, n):
if is_prime[i]:
counter += 1
for p in range(i*2, n, i): is_prime[p] = False
return counter | [
"jerrycuimh@gmail.com"
] | jerrycuimh@gmail.com |
35fdadd7ebbbab7f4ca8f47f270b11ed2e27a01f | 2c841737ed0597884a2bd5e6d3fb411fdfcae22f | /Optimal Control/indirect_single_shooting.py | 7549e6790a6b6dae3018bbd2ac961d18dc4881f3 | [] | no_license | bams/microhydrodynamics | b6d043ec7ac18da1cc1125bdb869f12373beea5a | e34b5216831f519bc73b7f654a70ad1fe25dfc57 | refs/heads/master | 2023-02-23T15:22:28.820274 | 2021-01-29T02:47:35 | 2021-01-29T02:47:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,280 | py | #Solves the optimal control problem via indirect methods
"""
Created on 2020-06-26
@author: Michael Astwood
"""
from casadi import *
import numpy as NP
import matplotlib.pyplot as pl
# Define variables and common expressions
u1=MX.sym('u_1')
u2=MX.sym('u_2')
u=vertcat(u1,u2) # control params = angular speed of active particle
theta1=MX.sym('theta_1')
theta2=MX.sym('theta_2')
theta=vertcat(theta1,theta2)
y1=MX.sym('y_1')
y2=MX.sym('y_2')
y=vertcat(y1,y2) # passive params = position of passive particle
x11=cos(theta1)
x12=sin(theta1)
x21=cos(theta2)
x22=sin(theta2)
x1=vertcat(x11,x12)
x2=vertcat(x21,x22) # explicit position of active particles
v11=-u1*sin(theta1)
v12=u1*cos(theta1)
v21=-u2*sin(theta2)
v22=u2*cos(theta2)
v1=vertcat(v11,v12)
v2=vertcat(v21,v22) # explicit velocity of active particles
L= dot(v1,v1)+dot(v2,v2) # cost function (proportional to energy expended)
ydot=6*pi*(v1+dot(y-x1,v1)*(y-x1)/dot(y-x1,y-x1))/sqrt(dot(y-x1,y-x1)) \
+ 6*pi*(v2+dot(y-x2,v2)*(y-x2)/dot(y-x2,y-x2))/sqrt(dot(y-x2,y-x2)) #stokeslet
X=vertcat(theta1,theta2,y1,y2) # configuration coordinates
Xdot=vertcat(u,ydot) # control system
lam=MX.sym('lambda',4) # lagrange multipliers
H=dot(lam,Xdot)+L # Hamiltonian
ldot=-gradient(H,X)
print("Hamiltonian: ", H)
| [
"mastwood101@gmail.com"
] | mastwood101@gmail.com |
494729e6f2f30c78583ca65070a1387032401821 | 2b86301d5ad3fecaa5a300cabfe6b4dfc82b78ed | /venv/Lib/site-packages/cassiopeia/transformers/championmastery.py | c45ee0d705e5dc57e8ccf720ebd5f8d5dd952cb4 | [
"MIT"
] | permissive | sserrot/champion_relationships | 72823bbe73e15973007e032470d7efdf72af3be0 | 91315d6b7f6e7e678d9f8083b4b3e63574e97d2b | refs/heads/master | 2022-12-21T05:15:36.780768 | 2021-12-05T15:19:09 | 2021-12-05T15:19:09 | 71,414,425 | 1 | 2 | MIT | 2022-12-18T07:42:59 | 2016-10-20T01:35:56 | Python | UTF-8 | Python | false | false | 1,956 | py | from typing import Type, TypeVar
from copy import deepcopy
from datapipelines import DataTransformer, PipelineContext
from ..core.championmastery import ChampionMasteryData, ChampionMasteryListData, ChampionMastery, ChampionMasteries
from ..dto.championmastery import ChampionMasteryDto, ChampionMasteryListDto
T = TypeVar("T")
F = TypeVar("F")
class ChampionMasteryTransformer(DataTransformer):
@DataTransformer.dispatch
def transform(self, target_type: Type[T], value: F, context: PipelineContext = None) -> T:
pass
# Dto to Data
@transform.register(ChampionMasteryDto, ChampionMasteryData)
def champion_mastery_dto_to_data(self, value: ChampionMasteryDto, context: PipelineContext = None) -> ChampionMasteryData:
return ChampionMasteryData(**value)
@transform.register(ChampionMasteryListDto, ChampionMasteryListData)
def champion_mastery_list_dto_to_data(self, value: ChampionMasteryListDto, context: PipelineContext = None) -> ChampionMasteryListData:
data = deepcopy(value)
data["masteries"] = [self.champion_mastery_dto_to_data(c) for c in data["masteries"]]
for c in data["masteries"]:
c(region=data["region"])
data = data["masteries"]
return ChampionMasteryListData(data, region=value["region"], summoner_id=value["summonerId"])
# Data to Core
#@transform.register(ChampionMasteryData, ChampionMastery)
def champion_mastery_data_to_core(self, value: ChampionMasteryData, context: PipelineContext = None) -> ChampionMastery:
return ChampionMastery.from_data(value)
#@transform.register(ChampionMasteryListData, ChampionMasteries)
def champion_mastery_list_data_to_core(self, value: ChampionMasteryListData, context: PipelineContext = None) -> ChampionMasteries:
return ChampionMasteries.from_data(*[self.champion_mastery_data_to_core(cm) for cm in value], region=value.region, summoner=value.summoner_id)
| [
"sserrot@users.noreply.github.com"
] | sserrot@users.noreply.github.com |
02360eed4d2bfd52ab5976db85a1d8bbf9bc3b3b | ef5b3cb4f8fdda2e15f17e4ad67d9f807ed5b1ce | /TuShu/data/main.py | b55b4489723657fe6a59e91f385181a0fca0e485 | [] | no_license | ruguo7425/MinFlim | f959e815fe3214af79385a088729c5ceaef8e92a | 499facf079335662eaf2944bf6dfc3790923ff32 | refs/heads/master | 2020-03-21T00:44:12.966717 | 2018-06-19T15:08:45 | 2018-06-19T15:08:45 | 137,907,507 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 74 | py | from scrapy import cmdline
cmdline.execute('scrapy crawl tushu'.split())
| [
"429450464@qq.com"
] | 429450464@qq.com |
1ffc2a1d2288588fce2b8a7abd07f6eba875ac06 | 5d7aa3904c68ec11638411625bc0bdd4f122e17d | /biShe/zuFang/manage.py | 603ec80c0290ac8403c13969067b12dbd08150a2 | [] | no_license | long0247li/biShe | 83b679dafefcc7618bd176f089e9b0a556bd0a1d | a9f10c9931860b0d635da4e8d11772d58ead260a | refs/heads/master | 2023-05-04T19:55:32.049924 | 2021-02-01T05:27:05 | 2021-02-01T05:27:05 | 373,456,951 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 630 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'zuFang.settings.dev')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"long0247li@126.com"
] | long0247li@126.com |
84698399bba3ed777ae8b9ce528f12c82d80c1a7 | fe38908bcd872d40e0ad48bf515c6c5a5d5d2fc0 | /Rascunhos/Lição 10 - Listas/stooges2.py | 53e2045a396214d54ae109e4154aadbe6930d74c | [] | no_license | marangoni/nanodegree | 046f3b2b1b6ce15293a30ffca04e486a35eba757 | cfd098b143fe94b57e8c0dc34b3284f95ab2da75 | refs/heads/master | 2021-09-13T12:06:36.093597 | 2018-04-29T19:51:26 | 2018-04-29T19:51:26 | 111,462,656 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | # We defined:
stooges = ['Moe','Larry','Curly']
# but in some Stooges films, Curly was
# replaced by Shemp.
# Write one line of code that changes
# the value of stooges to be:
['Moe','Larry','Shemp']
# but does not create a new List
# object.
stooges[2] = "Shemp"
print stooges
| [
"oliveiralcm@gmail.com"
] | oliveiralcm@gmail.com |
0de784e99e83fe4ae38333468a61891d3692a180 | cc0fb935a7e452e89de67c05995a4d8c5678a2dd | /jaheself.py | 6eea4f1617b7902f8a4b6a6fa9e72f6a29c75748 | [
"Apache-2.0"
] | permissive | croqconia/Keju | 2af601527c0caf859a3befa4550c1e159b461ecc | 98efc67ce2a9c0a7c1f1305e7b94184e989fc8cf | refs/heads/master | 2021-09-01T02:55:31.049824 | 2017-12-24T12:09:41 | 2017-12-24T12:09:41 | 115,259,530 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 178,537 | py | # -*- coding: utf-8 -*-
import PUY
from PUY.lib.curve.ttypes import *
from datetime import datetime
import time, random, sys, ast, re, os, io, json, subprocess, threading, string, codecs, requests, client, ctypes, urllib, urllib2, urllib3, wikipedia
from bs4 import BeautifulSoup
from urllib import urlopen
import requests
import tempfile
import profile
import client
import urllib
import urllib2
from gtts import gTTS
from io import StringIO
from threading import Thread
#from gtts import gTTS
from googletrans import Translator
#JANGAN LUPA => sudo pip install bs4 => sudo pip install BeautifulSoup => sudo pip install urllib => sudo pip install requests => sudo pip install gTTS
cl = PUY.LINE()
cl.login(qr=True)
cl.loginResult()
print "\n[LOGIN SUCCESS o(〃^▽^〃)o]"
reload(sys)
sys.setdefaultencoding('utf-8')
helpmsg ="""╠═════¢σммαи∂ вσт══════
╠-> google (text)
╠-> playstore (text)
╠-> Profileig (username)
╠-> wikipedia (text)
╠-> idline (text)
╠-> ytsearch (text)
╠-> Time
╠-> image (text)
╠-> runtime
╠-> Restart
╠-> lirik (text)
╠-> Mention
╠-> setpoint on/off
╠-> viewlastseen
╠-> protect on/off
╠-> qr on/off
╠-> invite on/off
╠-> Cancel on/off
╠-> Simisimi:on/off
╠-> Read on/off
╠-> Getinfo @
╠-> Getcontact @
╠-> Ulti @
╠-> speed
╠-> Friendlist
╠-> id@en
╠-> en@id
╠-> id@jp\n 「тαмвαнαи」\n╠-> help protect\n╠-> help self\n╠-> help set\n╠-> help grup\n╠-> help translate
╚═════════════════"""
helppro ="""
╠════¢σммαи∂ ρяσтΣ¢т═════
╠➩ protect on/off
╠➩ qr on/off
╠➩ invite on/off
╠➩ cancel on/off
╚═════════════════"""
helpself ="""
╠══════ʝ¢σммαи∂ ՏΣℓf═══════
╠➩Me
╠➩Myname:
╠➩Mybio:
╠➩Mypict
╠➩Mycover
╠➩My copy @
╠➩My backup
╠➩Getgroup image
╠➩Getmid @
╠➩Getprofile @
╠➩Getinfo @
╠➩Getname @
╠➩Getbio @
╠➩Getpict @
╠➩Getcover @
╠➩Mention
╠➩setpoint on/off
╠➩viewlastseen
╠➩Micadd @
╠➩Micdel @
╚═════════════════"""
helpset ="""
╠══════¢σммαи∂ ՏΣт═══════
╠->contact on/off
╠->autojoin on/off
╠->auto leave on/off
╠->autoadd on/off
╠->like friend
╠->link on
╠->respon on/off
╠->read on/off
╠->simisimi on/off
╚═════════════════"""
helpgrup ="""
╠══════¢σммαи∂ ցяυρ═══════
╠->Link on
╠->Url
╠->Cancel
╠->Gcreator
╠->Kick @
╠->Ulti @
╠->Gname:
╠->Gbroadcast:
╠->Cbroadcast:
╠->Infogrup
╠->Gruplist
╠->Friendlist
╠->Blacklist
╠->Ban @
╠->Unban @
╠->Clearban
╠->Banlist
╠->Contact ban
╠->Midban
╚═════════════════"""
helptranslate ="""
╠═════¢σммαи∂ тяαиՏℓαтΣ══════
╠->Id@en<
╠->En@id<
╠->Id@jp<
╠->Jp@id<
╠->Id@th<
╠->Th@id<
╠->Id@ar<
╠->Ar@id<
╠->Id@ko<
╠->Ko@id<
╠->Say-id<
╠->Say-en<
╠->Say-jp<
╚═════════════════"""
KAC=[cl]
mid = cl.getProfile().mid
Bots=[mid]
admin=["ufb890ffb1c03c4d49b16027968daf9cb"]
wait = {
"likeOn":False,
"alwayRead":False,
"detectMention":True,
"kickMention":False,
"steal":True,
'pap':{},
'invite':{},
"spam":{},
'contact':False,
'autoJoin':True,
'autoCancel':{"on":False,"members":5},
'leaveRoom':True,
'timeline':False,
'autoAdd':True,
'message':"""тerima Kasih Sudah Menambahkan Aku Jadi Teman
≫ Aku Ga Jawab PM Karna aq Cuma Bot Protect ≪
≫ UtaiteTeam BOT PROTECT ≪
Ready:
≫ bot protect ≪
≫ SelfBot ≪
ṡȗƿƿȏяṭєԀ ɞʏ:
☆ Utaite BOT PROTECT ☆
☆ ꀎ꓄ꍏꀤ꓄ꍟ τΣΔм ᏰᏫᎿ ☆
☆ DΔRꀘ ꅏꀤ꓄ꉓꃅ TΣΔM ☆
☆ Generasi Kickers Killers ☆
Minat? Silahkan PM!
Follow instagram
instagram.com/croqconiq_""",
"lang":"JP",
"comment":"",
"commentOn":False,
"commentBlack":{},
"wblack":False,
"dblack":False,
"clock":False,
"cNames":"",
"cNames":"",
"blacklist":{},
"wblacklist":False,
"dblacklist":False,
"protect":False,
"cancelprotect":False,
"inviteprotect":False,
"linkprotect":False,
}
wait2 = {
"readPoint":{},
"readMember":{},
"setTime":{},
"ROM":{}
}
mimic = {
"copy":False,
"copy2":False,
"status":False,
"target":{}
}
settings = {
"simiSimi":{}
}
res = {
'num':{},
'us':{},
'au':{},
}
setTime = {}
setTime = wait2['setTime']
mulai = time.time()
contact = cl.getProfile()
backup = cl.getProfile()
backup.displayName = contact.displayName
backup.statusMessage = contact.statusMessage
backup.pictureStatus = contact.pictureStatus
def restart_program():
python = sys.executable
os.execl(python, python, * sys.argv)
def post_content(self, urls, data=None, files=None):
return self._session.post(urls, headers=self._headers, data=data, files=files)
def upload_tempimage(client):
'''
Upload a picture of a kitten. We don't ship one, so get creative!
'''
config = {
'album': album,
'name': 'bot auto upload',
'title': 'bot auto upload',
'description': 'bot auto upload'
}
print("Uploading image... ")
image = client.upload_from_path(image_path, config=config, anon=False)
print("Done")
print()
return image
def download_page(url):
version = (3,0)
cur_version = sys.version_info
if cur_version >= version: #If the Current Version of Python is 3.0 or above
import urllib,request #urllib library for Extracting web pages
try:
headers = {}
headers['User-Agent'] = "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36"
req = urllib,request.Request(url, headers = headers)
resp = urllib,request.urlopen(req)
respData = str(resp.read())
return respData
except Exception as e:
print(str(e))
else: #If the Current Version of Python is 2.x
import urllib2
try:
headers = {}
headers['User-Agent'] = "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17"
req = urllib2.Request(url, headers = headers)
response = urllib2.urlopen(req)
page = response.read()
return page
except:
return"Page Not found"
#Finding 'Next Image' from the given raw page
def _images_get_next_item(s):
start_line = s.find('rg_di')
if start_line == -1: #If no links are found then give an error!
end_quote = 0
link = "no_links"
return link, end_quote
else:
start_line = s.find('"class="rg_meta"')
start_content = s.find('"ou"',start_line+90)
end_content = s.find(',"ow"',start_content-90)
content_raw = str(s[start_content+6:end_content-1])
return content_raw, end_content
def sendAudio(self, to, path):
objectId = self.sendMessage(to=to, text=None, contentType = 3).id
files = {
'file': open(path, 'rb'),
}
params = {
'name': 'media',
'oid': objectId,
'size': len(open(path, 'rb').read()),
'type': 'audio',
'ver': '1.0',
}
data = {
'params': json.dumps(params)
}
r = self.server.postContent(self.server.LINE_OBS_DOMAIN + '/talk/m/upload.nhn', data=data, files=files)
if r.status_code != 201:
raise Exception('Upload audio failure.')
return True
def sendAudio(self, to_, path):
M = Message(to=to_,contentType = 3)
M.contentMetadata = None
M.contentPreview = None
M_id = self.Talk.client.sendMessage(0,M).id
files = {
'file': open(path, 'rb'),
}
params = {
'name': 'media',
'oid': M_id,
'size': len(open(path, 'rb').read()),
'type': 'audio',
'ver': '1.0',
}
data = {
'params': json.dumps(params)
}
r = self.post_content('https://os.line.naver.jp/talk/m/upload.nhn', data=data, files=files)
if r.status_code != 201:
raise Exception('Upload image failure.')
return True
def sendAudioWithURL(self, to_, url):
path = 'pythonLiness.data'
r = requests.get(url, stream=True)
if r.status_code == 200:
with open(path, 'w') as f:
shutil.copyfileobj(r.raw, f)
else:
raise Exception('Download Audio failure.')
try:
self.sendAudio(to_, path)
except Exception as e:
raise e
def sendVoice(self, to_, path):
M = Message(to=to_, text=None, contentType = 3)
M.contentPreview = None
M_id = self._client.sendMessage(0,M).id
files = {
'file': open(path, 'rb'),
}
params = {
'name': 'voice_message',
'oid': M_id,
'size': len(open(path, 'rb').read()),
'type': 'audio',
'ver': '1.0',
}
data = {
'params': json.dumps(params)
}
r = self.post_content('https://os.line.naver.jp/talk/m/upload.nhn', data=data, files=files)
if r.status_code != 201:
raise Exception('Upload voice failure.')
return True
#Getting all links with the help of '_images_get_next_image'
def _images_get_all_items(page):
items = []
while True:
item, end_content = _images_get_next_item(page)
if item == "no_links":
break
else:
items.append(item) #Append all the links in the list named 'Links'
time.sleep(0.1) #Timer could be used to slow down the request for image downloads
page = page[end_content:]
return items
def download_page(url):
version = (3,0)
cur_version = sys.version_info
if cur_version >= version: #If the Current Version of Python is 3.0 or above
import urllib,request #urllib library for Extracting web pages
try:
headers = {}
headers['User-Agent'] = "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36"
req = urllib,request.Request(url, headers = headers)
resp = urllib,request.urlopen(req)
respData = str(resp.read())
return respData
except Exception as e:
print(str(e))
else: #If the Current Version of Python is 2.x
import urllib2
try:
headers = {}
headers['User-Agent'] = "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17"
req = urllib2.Request(url, headers = headers)
response = urllib2.urlopen(req)
page = response.read()
return page
except:
return"Page Not found"
def upload_tempimage(client):
'''
Upload a picture of a kitten. We don't ship one, so get creative!
'''
config = {
'album': album,
'name': 'bot auto upload',
'title': 'bot auto upload',
'description': 'bot auto upload'
}
print("Uploading image... ")
image = client.upload_from_path(image_path, config=config, anon=False)
print("Done")
print()
def summon(to, nama):
aa = ""
bb = ""
strt = int(14)
akh = int(14)
nm = nama
for mm in nm:
akh = akh + 2
aa += """{"S":"""+json.dumps(str(strt))+""","E":"""+json.dumps(str(akh))+""","M":"""+json.dumps(mm)+"},"""
strt = strt + 6
akh = akh + 4
bb += "\xe2\x95\xa0 @x \n"
aa = (aa[:int(len(aa)-1)])
msg = Message()
msg.to = to
msg.text = "\xe2\x95\x94\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\n"+bb+"\xe2\x95\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90"
msg.contentMetadata ={'MENTION':'{"MENTIONEES":['+aa+']}','EMTVER':'4'}
print "[Command] Tag All"
try:
cl.sendMessage(msg)
except Exception as error:
print error
def waktu(secs):
mins, secs = divmod(secs,60)
hours, mins = divmod(mins,60)
return '%02d Jam %02d Menit %02d Detik' % (hours, mins, secs)
def cms(string, commands): #/XXX, >XXX, ;XXX, ^XXX, %XXX, $XXX...
tex = ["+","@","/",">",";","^","%","$","^","サテラ:","サテラ:","サテラ:","サテラ:"]
for texX in tex:
for command in commands:
if string ==command:
return True
return False
def sendMessage(to, text, contentMetadata={}, contentType=0):
mes = Message()
mes.to, mes.from_ = to, profile.mid
mes.text = text
mes.contentType, mes.contentMetadata = contentType, contentMetadata
if to not in messageReq:
messageReq[to] = -1
messageReq[to] += 1
def sendMessage(to, text, contentMetadata={}, contentType=0):
mes = Message()
mes.to, mes.from_ = to, profile.mid
mes.text = text
mes.contentType, mes.contentMetadata = contentType, contentMetadata
if to not in messageReq:
messageReq[to] = -1
messageReq[to] += 1
def sendText(self, Tomid, text):
msg = Message()
msg.to = Tomid
msg.text = text
return self.Talk.client.sendMessage(0, msg)
def sendImage(self, to_, path):
M = Message(to=to_,contentType = 1)
M.contentMetadata = None
M.contentPreview = None
M_id = self.Talk.client.sendMessage(M).id
files = {
'file': open(path, 'rb'),
}
params = {
'name': 'media',
'oid': M_id,
'size': len(open(path, 'rb').read()),
'type': 'image',
'ver': '1.0',
}
data = {
'params': json.dumps(params)
}
r = self.client.post_content('https://os.line.naver.jp/talk/m/upload.nhn', data=data, files=files)
if r.status_code != 201:
raise Exception('Upload image failure.')
#r.content
return True
def sendImageWithURL(self, to_, url):
path = '%s/pythonLine-%i.data' % (tempfile.gettempdir(), randint(0, 9))
r = requests.get(url, stream=True)
if r.status_code == 200:
with open(path, 'w') as f:
shutil.copyfileobj(r.raw, f)
else:
raise Exception('Download image failure.')
try:
self.sendImage(to_, path)
except Exception as e:
raise e
def bot(op):
try:
if op.type == 0:
return
if op.type == 5:
if wait["autoAdd"] == True:
cl.findAndAddContactsByMid(op.param1)
if (wait["message"] in [""," ","\n",None]):
pass
else:
cl.sendText(op.param1,str(wait["message"]))
if op.type == 26:
msg = op.message
if msg.from_ in mimic["target"] and mimic["status"] == True and mimic["target"][msg.from_] == True:
text = msg.text
if text is not None:
cl.sendText(msg.to,text)
if op.type == 19:
if mid in op.param3:
wait["blacklist"][op.param2] = True
if op.type == 22:
if wait["leaveRoom"] == True:
cl.leaveRoom(op.param1)
if op.type == 24:
if wait["leaveRoom"] == True:
cl.leaveRoom(op.param1)
if op.type == 26:
msg = op.message
if msg.toType == 0:
msg.to = msg.from_
if msg.from_ == mid:
if "join:" in msg.text:
list_ = msg.text.split(":")
try:
cl.acceptGroupInvitationByTicket(list_[1],list_[2])
G = cl.getGroup(list_[1])
G.preventJoinByTicket = True
cl.updateGroup(G)
except:
cl.sendText(msg.to,"error")
if msg.toType == 1:
if wait["leaveRoom"] == True:
cl.leaveRoom(msg.to)
if msg.contentType == 16:
url = msg.contentMetadata["postEndUrl"]
cl.like(url[25:58], url[66:], likeType=1001)
# if op.type == 26:
# msg = op.message
# if msg.from_ in mimic["target"] and mimic["status"] == True and mimic["target"][msg.from_] == True:
# text = msg.text
# if text is not None:
# cl.sendText(msg.to,text)
if op.type == 26:
msg = op.message
if msg.to in settings["simiSimi"]:
if settings["simiSimi"][msg.to] == True:
if msg.text is not None:
text = msg.text
r = requests.get("http://api.ntcorp.us/chatbot/v1/?text=" + text.replace(" ","+") + "&key=beta1.nt")
data = r.text
data = json.loads(data)
if data['status'] == 200:
if data['result']['result'] == 100:
cl.sendText(msg.to, "{яeՏρσи Sιмι}\n" + data['result']['response'].encode('utf-8'))
if 'MENTION' in msg.contentMetadata.keys() != None:
if wait["detectMention"] == True:
contact = cl.getContact(msg.from_)
cName = contact.displayName
balas = ["sibuk..Ngapain tag", cName + " what? Lagi sibuk..Ngapain seh ngetag", cName + " Kenapa? pc aja klo penting ", cName + " kenapa? Gw sibuk ngapain tag", cName + " Ada Perlu apa? jgn tag doang", cName + "ya? Kenapa? Sibuk gw jan tag tag","Hmm? Sibuk", cName + "ngapain tag? Sibuk gw "]
ret_ = "{αυтσ яεꌗρσи∂}\n" + random.choice(balas)
name = re.findall(r'@(\w+)', msg.text)
mention = ast.literal_eval(msg.contentMetadata['MENTION'])
mentionees = mention['MENTIONEES']
for mention in mentionees:
if mention['M'] in Bots:
cl.sendText(msg.to,ret_)
break
if 'MENTION' in msg.contentMetadata.keys() != None:
if wait["kickMention"] == True:
contact = cl.getContact(msg.from_)
cName = contact.displayName
balas = ["",cName + " Ngapain Ngetag?, ", cName + " Kenapa Tag saya?, " + cName + "?", "Ada Perlu apa, " + cName + "?","Tag doang tidak perlu., ", "Tersummon -_-, "]
ret_ = "{αυтσ яεꌗρσи∂}\n" + random.choice(balas)
name = re.findall(r'@(\w+)', msg.text)
mention = ast.literal_eval(msg.contentMetadata['MENTION'])
mentionees = mention['MENTIONEES']
for mention in mentionees:
if mention['M'] in Bots:
cl.sendText(msg.to,ret_)
cl.kickoutFromGroup(msg.to,[msg.from_])
break
if msg.contentType == 13:
if wait['invite'] == True:
_name = msg.contentMetadata["displayName"]
invite = msg.contentMetadata["mid"]
groups = cl.getGroup(msg.to)
pending = groups.invitee
targets = []
for s in groups.members:
if _name in s.displayName:
cl.sendText(msg.to, _name + " Berada DiGrup Ini")
else:
targets.append(invite)
if targets == []:
pass
else:
for target in targets:
try:
cl.findAndAddContactsByMid(target)
cl.inviteIntoGroup(msg.to,[target])
cl.sendText(msg.to,"Invite " + _name)
wait['invite'] = False
break
except:
cl.sendText(msg.to,"Error")
wait['invite'] = False
break
#if msg.contentType == 13:
# if wait["steal"] == True:
# _name = msg.contentMetadata["displayName"]
# copy = msg.contentMetadata["mid"]
# groups = cl.getGroup(msg.to)
# pending = groups.invitee
# targets = []
# for s in groups.members:
# if _name in s.displayName:
# print "[Target] Stealed"
# break
# else:
# targets.append(copy)
# if targets == []:
# pass
# else:
# for target in targets:
# try:
# cl.findAndAddContactsByMid(target)
# contact = cl.getContact(target)
# cu = cl.channel.getCover(target)
# path = str(cu)
# image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
# cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nMid :\n" + msg.contentMetadata["mid"] + "\n\nBio :\n" + contact.statusMessage)
# cl.sendText(msg.to,"Profile Picture " + contact.displayName)
# cl.sendImageWithURL(msg.to,image)
# cl.sendText(msg.to,"Cover " + contact.displayName)
# cl.sendImageWithURL(msg.to,path)
# wait["steal"] = False
# break
# except:
# pass
if wait["alwayRead"] == True:
if msg.toType == 0:
cl.sendChatChecked(msg.from_,msg.id)
else:
cl.sendChatChecked(msg.to,msg.id)
if op.type == 25:
msg = op.message
if msg.contentType == 13:
if wait["wblack"] == True:
if msg.contentMetadata["mid"] in wait["commentBlack"]:
cl.sendText(msg.to,"In Blacklist")
wait["wblack"] = False
else:
wait["commentBlack"][msg.contentMetadata["mid"]] = True
wait["wblack"] = False
cl.sendText(msg.to,"Nothing")
elif wait["dblack"] == True:
if msg.contentMetadata["mid"] in wait["commentBlack"]:
del wait["commentBlack"][msg.contentMetadata["mid"]]
cl.sendText(msg.to,"Done")
wait["dblack"] = False
else:
wait["dblack"] = False
cl.sendText(msg.to,"Not in Blacklist")
elif wait["wblacklist"] == True:
if msg.contentMetadata["mid"] in wait["blacklist"]:
cl.sendText(msg.to,"In Blacklist")
wait["wblacklist"] = False
else:
wait["blacklist"][msg.contentMetadata["mid"]] = True
wait["wblacklist"] = False
cl.sendText(msg.to,"Done")
elif wait["dblacklist"] == True:
if msg.contentMetadata["mid"] in wait["blacklist"]:
del wait["blacklist"][msg.contentMetadata["mid"]]
cl.sendText(msg.to,"Done")
wait["dblacklist"] = False
else:
wait["dblacklist"] = False
cl.sendText(msg.to,"Done")
elif wait["contact"] == True:
msg.contentType = 0
cl.sendText(msg.to,msg.contentMetadata["mid"])
if 'displayName' in msg.contentMetadata:
contact = cl.getContact(msg.contentMetadata["mid"])
try:
cu = cl.channel.getCover(msg.contentMetadata["mid"])
except:
cu = ""
cl.sendText(msg.to,"[displayName]:\n" + msg.contentMetadata["displayName"] + "\n[mid]:\n" + msg.contentMetadata["mid"] + "\n[statusMessage]:\n" + contact.statusMessage + "\n[pictureStatus]:\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n[coverURL]:\n" + str(cu))
else:
contact = cl.getContact(msg.contentMetadata["mid"])
try:
cu = cl.channel.getCover(msg.contentMetadata["mid"])
except:
cu = ""
cl.sendText(msg.to,"[displayName]:\n" + contact.displayName + "\n[mid]:\n" + msg.contentMetadata["mid"] + "\n[statusMessage]:\n" + contact.statusMessage + "\n[pictureStatus]:\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n[coverURL]:\n" + str(cu))
elif msg.contentType == 16:
if wait["timeline"] == True:
msg.contentType = 0
if wait["lang"] == "JP":
msg.text = "menempatkan URL\n" + msg.contentMetadata["postEndUrl"]
else:
msg.text = msg.contentMetadata["postEndUrl"]
cl.sendText(msg.to,msg.text)
elif msg.text is None:
return
elif msg.text.lower() == 'help':
if wait["lang"] == "JP":
cl.sendText(msg.to,helpmsg)
else:
cl.sendText(msg.to,helpmsg)
elif msg.text.lower() == 'help protect':
if wait["lang"] == "JP":
cl.sendText(msg.to,helppro)
else:
cl.sendText(msg.to,helppro)
elif msg.text.lower() == 'help self':
if wait["lang"] == "JP":
cl.sendText(msg.to,helpself)
else:
cl.sendText(msg.to,helpself)
elif msg.text.lower() == 'help grup':
if wait["lang"] == "JP":
cl.sendText(msg.to,helpgrup)
else:
cl.sendText(msg.to,helpgrup)
elif msg.text.lower() == 'help set':
if wait["lang"] == "JP":
cl.sendText(msg.to,helpset)
else:
cl.sendText(msg.to,helpset)
elif msg.text.lower() == 'help translate':
if wait["lang"] == "JP":
cl.sendText(msg.to,helptranslate)
else:
cl.sendText(msg.to,helptranslate)
elif msg.text in ["Sp","Speed","speed"]:
start = time.time()
cl.sendText(msg.to, "{ωαιт ρяσᎦᏋꌗ}")
elapsed_time = time.time() - start
cl.sendText(msg.to, "%sseconds" % (elapsed_time))
elif msg.text == ".Speed":
cl.sendText(msg.to,"{ωαιт ρяσᎦᏋꌗ}")
start = time.time()
for i in range(3000):
1+1
elsp = time.time() - start
cl.sendText(msg.to,"%s/Detikี" % (elsp))
elif msg.text.lower() == 'crash':
msg.contentType = 13
msg.contentMetadata = {'mid': "ufb890ffb1c03c4d49b16027968daf9cb',"}
cl.sendMessage(msg)
elif msg.text.lower() == 'me':
msg.contentType = 13
msg.contentMetadata = {'mid': mid}
cl.sendMessage(msg)
elif ".fb" in msg.text:
a = msg.text.replace(".fb","")
b = urllib.quote(a)
cl.sendText(msg.to,"「 Mencari 」\n" "Type:Mencari Info\nStatus: Proses")
cl.sendText(msg.to, "https://www.facebook.com" + b)
cl.sendText(msg.to,"「 Mencari 」\n" "Type:Mencari Info\nStatus: Sukses")
elif "zodiak " in msg.text:
tanggal = msg.text.replace("zodiak ","")
r=requests.get('https://script.google.com/macros/exec?service=AKfycbw7gKzP-WYV2F5mc9RaR7yE3Ve1yN91Tjs91hp_jHSE02dSv9w&nama=ervan&tanggal='+tanggal)
data=r.text
data=json.loads(data)
lahir = data["data"]["lahir"]
usia = data["data"]["usia"]
ultah = data["data"]["ultah"]
zodiak = data["data"]["zodiak"]
cl.sendText(msg.to,"Tanggal Lahir: "+lahir+"\n\nUsia: "+usia+"\n\nUltah: "+ultah+"\n\nZodiak: "+zodiak)
#========================== FOR COMMAND BOT STARTING =============================#
elif msg.text.lower() == 'contact on':
if wait["contact"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ɕσηϯαɕϯ ςεϯ ϯσ ση")
else:
cl.sendText(msg.to,"ɕσηϯαɕϯ ςεϯ ϯσ ση")
else:
wait["contact"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"ɕσηϯαɕϯ ςεϯ ϯσ ση")
else:
cl.sendText(msg.to,"ɕσηϯαɕϯ ςεϯ ϯσ ση")
elif msg.text.lower() == 'contact off':
if wait["contact"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ɕσηϯαɕϯ ςεϯ ϯσ σƒƒ")
else:
cl.sendText(msg.to,"ɕσηϯαɕϯ αʆɾεαδψ σƒƒ")
else:
wait["contact"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"ɕσηϯαɕϯ ςεϯ ϯσ σƒƒ")
else:
cl.sendText(msg.to,"ɕσηϯαɕϯ αʆɾεαδψ σƒƒ")
elif msg.text.lower() == 'sambut on':
if wait["sambut"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ɕρΣиуαмвυтαи σи")
else:
cl.sendText(msg.to,"ɕɕρΣиуαмвυтαи σи")
else:
wait["sambut"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"ɕɕρΣиуαмвυтαи σи")
else:
cl.sendText(msg.to,"ɕɕρΣиуαмвυтαи σи")
elif msg.text.lower() == 'sambut off':
if wait["sambut"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ɕɕρΣиуαмвυтαи σƒƒ")
else:
cl.sendText(msg.to,"ɕɕɕρΣиуαмвυтαи σƒƒ")
else:
wait["sambut"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"ɕɕɕρΣиуαмвυтαи σƒƒ")
else:
cl.sendText(msg.to,"ɕɕɕρΣиуαмвυтαи σƒƒ")
      # --- Enable-side toggles. Pattern: report if the flag is already set,
      # otherwise set it and confirm. NOTE(review): several reply strings say
      # "Already On" even on the branch that just enabled the flag, and the
      # "Protecion" typo is in the original user-facing text -- both left
      # untouched here because they are runtime strings.
      elif msg.text.lower() == 'protect on':
        if wait["protect"] == True:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Protecion Already On")
          else:
            cl.sendText(msg.to,"Protecion Already On")
        else:
          wait["protect"] = True
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Protecion Already On")
          else:
            cl.sendText(msg.to,"Protecion Already On")
      # Enable QR / invite-link protection.
      elif msg.text.lower() == 'qr on':
        if wait["linkprotect"] == True:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Protection Qr already On")
          else:
            cl.sendText(msg.to,"Protection Qr already On")
        else:
          wait["linkprotect"] = True
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Protection Qr already On")
          else:
            cl.sendText(msg.to,"Protection Qr already On")
      # Enable invite protection.
      elif msg.text.lower() == 'invite on':
        if wait["inviteprotect"] == True:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Protection Invite already On")
          else:
            cl.sendText(msg.to,"Protection Invite already On")
        else:
          wait["inviteprotect"] = True
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"ρяσтє¢тισи ιиνιтє ѕєт тσ σи")
          else:
            cl.sendText(msg.to,"ρяσтє¢тισи ιиνιтє αℓяєα∂у σи")
      # Enable cancel protection (flag only set here; consumer is elsewhere
      # in the file -- TODO confirm against the event handler that reads it).
      elif msg.text.lower() == 'cancel on':
        if wait["cancelprotect"] == True:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"¢αи¢єℓ ρяσтє¢тισи ѕєт тσ σи")
          else:
            cl.sendText(msg.to,"¢αи¢єℓ ρяσтє¢тισи αℓяєα∂у σи")
        else:
          wait["cancelprotect"] = True
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"¢αи¢єℓ ρяσтє¢тισи ѕєт тσ σи")
          else:
            cl.sendText(msg.to,"¢αи¢єℓ ρяσтє¢тισи αℓяєα∂у σи")
      # Enable auto-join (bot auto-accepts group invites).
      elif msg.text.lower() == 'autojoin on':
        if wait["autoJoin"] == True:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"αυтσʝσιи ѕєт тσ σи")
          else:
            cl.sendText(msg.to,"αυтσʝσιи αℓяєα∂у σи")
        else:
          wait["autoJoin"] = True
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"αυтσʝσιи ѕєт тσ σи")
          else:
            cl.sendText(msg.to,"αυтσʝσιи αℓяєα∂у σи")
      # Disable auto-join.
      elif msg.text.lower() == 'autojoin off':
        if wait["autoJoin"] == False:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"αυтσʝσιи ѕєт тσ σff")
          else:
            cl.sendText(msg.to,"αυтσʝσιи αℓяєα∂у σff")
        else:
          wait["autoJoin"] = False
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"αυтσʝσιи ѕєт тσ σff")
          else:
            cl.sendText(msg.to,"αυтσʝσιи αℓяєα∂у σff")
      # --- Disable-side toggles; mirror of the enable branches above.
      elif msg.text.lower() == 'protect off':
        if wait["protect"] == False:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Protection already Off")
          else:
            cl.sendText(msg.to,"Protection already Off")
        else:
          wait["protect"] = False
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"ρяσтє¢тισи ѕєт тσ σff")
          else:
            cl.sendText(msg.to,"ρяσтє¢тισи αℓяєα∂у σff")
      # Disable QR / invite-link protection.
      elif msg.text.lower() == 'qr off':
        if wait["linkprotect"] == False:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Protection Qr already off")
          else:
            cl.sendText(msg.to,"Protection Qr already off")
        else:
          wait["linkprotect"] = False
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Protection Qr already Off")
          else:
            cl.sendText(msg.to,"Protection Qr already Off")
      # Disable invite protection. NOTE(review): trigger is "invit off"
      # (missing 'e'), inconsistent with "invite on" above.
      elif msg.text.lower() == 'invit off':
        if wait["inviteprotect"] == False:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Protection Invite already Off")
          else:
            cl.sendText(msg.to,"Protection Invite already Off")
        else:
          wait["inviteprotect"] = False
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Protection Invite already Off")
          else:
            cl.sendText(msg.to,"Protection Invite already Off")
      # Disable cancel protection.
      elif msg.text.lower() == 'cancel off':
        if wait["cancelprotect"] == False:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Protection Cancel already Off")
          else:
            cl.sendText(msg.to,"Protection Cancel already Off")
        else:
          wait["cancelprotect"] = False
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Protection Cancel already Off")
          else:
            cl.sendText(msg.to,"Protection Cancel already Off")
elif "Grup cancel:" in msg.text:
try:
strnum = msg.text.replace("Grup cancel:","")
if strnum == "off":
wait["autoCancel"]["on"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Itu off undangan ditolak??\nSilakan kirim dengan menentukan jumlah orang ketika Anda menghidupkan")
else:
cl.sendText(msg.to,"Off undangan ditolak??Sebutkan jumlah terbuka ketika Anda ingin mengirim")
else:
num = int(strnum)
wait["autoCancel"]["on"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,strnum + "Kelompok berikut yang diundang akan ditolak secara otomatis")
else:
cl.sendText(msg.to,strnum + "The team declined to create the following automatic invitation")
except:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Nilai tidak benar")
else:
cl.sendText(msg.to,"Weird value")
      # Auto-leave chat rooms on/off, and timeline "share" on/off -- same
      # toggle pattern as the protection commands above.
      elif msg.text.lower() == 'autoleave on':
        if wait["leaveRoom"] == True:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Auto Leave room set to on")
          else:
            cl.sendText(msg.to,"Auto Leave room already on")
        else:
          wait["leaveRoom"] = True
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Auto Leave room set to on")
          else:
            cl.sendText(msg.to,"Auto Leave room already on")
      elif msg.text.lower() == 'autoleave off':
        if wait["leaveRoom"] == False:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Auto Leave room set to off")
          else:
            cl.sendText(msg.to,"Auto Leave room already off")
        else:
          wait["leaveRoom"] = False
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Auto Leave room set to off")
          else:
            cl.sendText(msg.to,"Auto Leave room already off")
      elif msg.text.lower() == 'share on':
        if wait["timeline"] == True:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Share set to on")
          else:
            cl.sendText(msg.to,"Share already on")
        else:
          wait["timeline"] = True
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Share set to on")
          else:
            cl.sendText(msg.to,"Share already on")
      elif msg.text.lower() == 'share off':
        if wait["timeline"] == False:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Share set to off")
          else:
            cl.sendText(msg.to,"Share already off")
        else:
          wait["timeline"] = False
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Share set to off")
          else:
            cl.sendText(msg.to,"Share already off")
      # "status" -- dump every feature flag as a text report, then send the
      # bot's own contact card (contentType 13 = contact message).
      elif msg.text.lower() == 'status':
        md = ""
        if wait["contact"] == True: md+="Contact:on \n"
        else: md+="Contact:off\n"
        if wait["autoJoin"] == True: md+="Auto Join:on \n"
        else: md +="Auto Join:off\n"
        if wait["autoCancel"]["on"] == True:md+="Auto cancel:" + str(wait["autoCancel"]["members"]) + "\n"
        else: md+= "Group cancel:off \n"
        if wait["leaveRoom"] == True: md+="Auto leave:on \n"
        else: md+="Auto leave:off \n"
        if wait["timeline"] == True: md+="Share:on \n"
        else:md+="Share:off \n"
        if wait["autoAdd"] == True: md+="Auto add:on \n"
        else:md+="Auto add:off \n"
        if wait["protect"] == True: md+="Protect:on \n"
        else:md+="Protect:off \n"
        if wait["linkprotect"] == True: md+="Link Protect:on \n"
        else:md+="Link Protect:off \n"
        if wait["inviteprotect"] == True: md+="Invitation Protect:on \n"
        else:md+="Invitation Protect:off \n"
        if wait["cancelprotect"] == True: md+="Cancel Protect:on \n"
        else:md+="Cancel Protect:off \n"
        cl.sendText(msg.to,md)
        # Reusing the incoming msg object as an outgoing contact message.
        msg.contentType = 13
        msg.contentMetadata = {'mid': mid}
        cl.sendMessage(msg)
      # "creator" -- send the hard-coded creator's contact card from both
      # clients (cl and kk are presumably two bot accounts -- TODO confirm).
      elif cms(msg.text,["creator","Creator"]):
        msg.contentType = 13
        msg.contentMetadata = {'mid': "ufb890ffb1c03c4d49b16027968daf9cb"}
        cl.sendMessage(msg)
        kk.sendMessage(msg)
      # Auto-add-friend toggle (same pattern as earlier toggles).
      elif msg.text.lower() == 'autoadd on':
        if wait["autoAdd"] == True:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Auto add set to on")
          else:
            cl.sendText(msg.to,"Auto add already on")
        else:
          wait["autoAdd"] = True
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Auto add set to on")
          else:
            cl.sendText(msg.to,"Auto add already on")
      elif msg.text.lower() == 'autoadd off':
        if wait["autoAdd"] == False:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Auto add set to off")
          else:
            cl.sendText(msg.to,"Auto add already off")
        else:
          wait["autoAdd"] = False
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Auto add set to off")
          else:
            cl.sendText(msg.to,"Auto add already off")
      # "Pesan set:<text>" -- set the auto-reply message.
      elif "Pesan set:" in msg.text:
        wait["message"] = msg.text.replace("Pesan set:","")
        cl.sendText(msg.to,"We changed the message")
      # "pesan cek" -- show the current auto-reply message.
      elif msg.text.lower() == 'pesan cek':
        if wait["lang"] == "JP":
          cl.sendText(msg.to,"Pesan tambahan otomatis telah ditetapkan sebagai berikut \n\n" + wait["message"])
        else:
          cl.sendText(msg.to,"Pesan tambahan otomatis telah ditetapkan sebagai berikut \n\n" + wait["message"])
      # "Come Set:<text>" -- set the auto-comment text, rejecting blank input.
      elif "Come Set:" in msg.text:
        c = msg.text.replace("Come Set:","")
        if c in [""," ","\n",None]:
          cl.sendText(msg.to,"Merupakan string yang tidak bisa diubah")
        else:
          wait["comment"] = c
          cl.sendText(msg.to,"Ini telah diubah\n\n" + c)
      # Auto-comment on timeline posts: on / off / show current text.
      elif msg.text in ["Com on","Com:on","Comment on"]:
        if wait["commentOn"] == True:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Aku berada di")
          else:
            cl.sendText(msg.to,"To open")
        else:
          wait["commentOn"] = True
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Comment Actived")
          else:
            cl.sendText(msg.to,"Comment Has Been Active")
      elif msg.text in ["Come off"]:
        if wait["commentOn"] == False:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Hal ini sudah off")
          else:
            cl.sendText(msg.to,"It is already turned off")
        else:
          wait["commentOn"] = False
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"Off")
          else:
            cl.sendText(msg.to,"To turn off")
      elif msg.text in ["Com","Comment"]:
        cl.sendText(msg.to,"Auto komentar saat ini telah ditetapkan sebagai berikut:??\n\n" + str(wait["comment"]))
      # Comment blacklist management: the next contact message received is
      # added/removed (handled elsewhere via the wblack/dblack flags).
      elif msg.text in ["Com Bl"]:
        wait["wblack"] = True
        cl.sendText(msg.to,"Please send contacts from the person you want to add to the blacklist")
      elif msg.text in ["Com hapus Bl"]:
        wait["dblack"] = True
        cl.sendText(msg.to,"Please send contacts from the person you want to add from the blacklist")
      # "Com Bl cek" -- list blacklisted users by display name.
      elif msg.text in ["Com Bl cek"]:
        if wait["commentBlack"] == {}:
          cl.sendText(msg.to,"Nothing in the blacklist")
        else:
          cl.sendText(msg.to,"The following is a blacklist")
          mc = ""
          for mi_d in wait["commentBlack"]:
            mc += "・" +cl.getContact(mi_d).displayName + "\n"
          cl.sendText(msg.to,mc)
      # "jam on/off" -- clock-in-display-name feature: appends the current
      # time to the profile name (a background updater presumably keeps it
      # fresh -- TODO confirm).
      elif msg.text.lower() == 'jam on':
        if wait["clock"] == True:
          cl.sendText(msg.to,"Jam already on")
        else:
          wait["clock"] = True
          now2 = datetime.now()
          nowT = datetime.strftime(now2,"?%H:%M?")
          profile = cl.getProfile()
          profile.displayName = wait["cName"] + nowT
          cl.updateProfile(profile)
          cl.sendText(msg.to,"Jam set on")
      elif msg.text.lower() == 'jam off':
        if wait["clock"] == False:
          cl.sendText(msg.to,"Jam already off")
        else:
          wait["clock"] = False
          cl.sendText(msg.to,"Jam set off")
      # "Jam say:<name>" -- set the base display name used by the clock.
      # len(n.decode("utf-8")) counts characters, not bytes (Python 2 str).
      elif "Jam say:" in msg.text:
        n = msg.text.replace("Jam say:","")
        if len(n.decode("utf-8")) > 30:
          cl.sendText(msg.to,"terlalu lama")
        else:
          wait["cName"] = n
          cl.sendText(msg.to,"Nama Jam Berubah menjadi:" + n)
      # "update" -- force-refresh the clock name immediately.
      elif msg.text.lower() == 'update':
        if wait["clock"] == True:
          now2 = datetime.now()
          nowT = datetime.strftime(now2,"?%H:%M?")
          profile = cl.getProfile()
          profile.displayName = wait["cName"] + nowT
          cl.updateProfile(profile)
          cl.sendText(msg.to,"Diperbarui")
        else:
          cl.sendText(msg.to,"Silahkan Aktifkan Jam")
      # "Image <query>" -- scrape Google Images and send one random result.
      # download_page / _images_get_all_items are helpers defined elsewhere.
      elif "Image " in msg.text:
        search = msg.text.replace("Image ","")
        url = 'https://www.google.com/search?espv=2&biw=1366&bih=667&tbm=isch&oq=kuc&aqs=mobile-gws-lite.0.0l5&q=' + search
        raw_html = (download_page(url))
        items = []
        items = items + (_images_get_all_items(raw_html))
        path = random.choice(items)
        print path
        try:
          cl.sendImageWithURL(msg.to,path)
        except:
          pass
#========================== FOR COMMAND BOT FINISHED =============================#
elif "Spam change:" in msg.text:
if msg.toType == 2:
wait["spam"] = msg.text.replace("Spam change:","")
cl.sendText(msg.to,"spam changed")
elif "Spam add:" in msg.text:
if msg.toType == 2:
wait["spam"] = msg.text.replace("Spam add:","")
if wait["lang"] == "JP":
cl.sendText(msg.to,"spam changed")
else:
cl.sendText(msg.to,"Done")
elif "Spam:" in msg.text:
if msg.toType == 2:
strnum = msg.text.replace("Spam:","")
num = int(strnum)
for var in range(0,num):
cl.sendText(msg.to, wait["spam"])
#=====================================
elif "Spamm " in msg.text:
if msg.toType == 2:
bctxt = msg.text.replace("Spamm ", "")
t = cl.getAllContactIds()
t = 500
while(t):
cl.sendText(msg.to, (bctxt))
t-=1
#==============================================
elif msg.text in ["Hehehe","Hehe","He","hehehe","hehe","he"]:
msg.contentType = 7
msg.text = None
msg.contentMetadata = {
"STKID": "10",
"STKPKGID": "1317325",
"STKVER": "100" }
cl.sendMessage(msg)
cl.sendMessage(msg)
#==============================================
elif "Spamcontact @" in msg.text:
_name = msg.text.replace("Spamcontact @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(msg.to, "Done")
print " Spammed !"
#==============================================================================#
elif msg.text in ["Invite"]:
wait["invite"] = True
cl.sendText(msg.to,"Send Contact")
elif msg.text in ["Steal contact"]:
wait["contact"] = True
cl.sendText(msg.to,"Send Contact")
elif msg.text in ["Like:me","Like me"]: #Semua Bot Ngelike Status Akun Utama
print "[Command]Like executed"
cl.sendText(msg.to,"Like Status Owner")
try:
likeme()
except:
pass
elif msg.text in ["Like:friend","Like friend"]: #Semua Bot Ngelike Status Teman
print "[Command]Like executed"
cl.sendText(msg.to,"Like Status Teman")
try:
likefriend()
except:
pass
elif msg.text in ["Like:on","Like on"]:
if wait["likeOn"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Done")
else:
wait["likeOn"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"Already")
elif msg.text in ["Like off","Like:off"]:
if wait["likeOn"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Done")
else:
wait["likeOn"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Already")
elif msg.text in ["Simisimi on","Simisimi:on"]:
settings["simiSimi"][msg.to] = True
cl.sendText(msg.to,"Simi mode On")
elif msg.text in ["Simisimi off","Simisimi:off"]:
settings["simiSimi"][msg.to] = False
cl.sendText(msg.to,"Simi mode Off")
elif msg.text in ["Autoread on","Read on"]:
wait['alwayRead'] = True
cl.sendText(msg.to,"Auto read On")
elif msg.text in ["Autoread off","Read off"]:
wait['alwayRead'] = False
cl.sendText(msg.to,"Auto read Off")
elif msg.text in ["Respontag on","Autorespon:on","Respon on","Respon:on"]:
wait["detectMention"] = True
cl.sendText(msg.to,"Auto respon tag On")
elif msg.text in ["Respontag off","Autorespon:off","Respon off","Respon:off"]:
wait["detectMention"] = False
cl.sendText(msg.to,"Auto respon tag Off")
elif msg.text in ["Kicktag on","Autokick:on","Responkick on","Responkick:on"]:
wait["kickMention"] = True
cl.sendText(msg.to,"Auto Kick tag ON")
elif msg.text in ["Kicktag off","Autokick:off","Responkick off","Responkick:off"]:
wait["kickMention"] = False
cl.sendText(msg.to,"Auto Kick tag OFF")
elif "Time" in msg.text:
if msg.toType == 2:
cl.sendText(msg.to,datetime.today().strftime('%H:%M:%S'))
#==============================================================================#
elif "Clearall" in msg.text:
if msg.toType == 2:
if msg.toType == 2:
print "ok"
_name = msg.text.replace("Clearall","")
gs = cl.getGroup(msg.to)
gs = cl.getGroup(msg.to)
gs = cl.getGroup(msg.to)
cl.sendText(msg.to,"Group Cleared.")
targets = []
for g in gs.members:
if _name in g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found.")
cl.sendText(msg.to,"Not found.")
else:
for target in targets:
try:
klist=[cl,cl,cl]
kicker=random.choice(klist)
kicker.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
cl.sendText(msg.to,"Group cleanse")
cl.sendText(msg.to,"Group cleanse")
elif ("Kick " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"] [0] ["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
cl.kickoutFromGroup(msg.to,[target])
except:
cl.sendText(msg.to,"Error")
elif ("Ulti " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"] [0] ["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
cl.kickoutFromGroup(msg.to,[target])
cl.inviteIntoGroup(msg.to,[target])
cl.cancelGroupInvitation(msg.to,[target])
except:
cl.sendText(msg.to,"Terima Kasih sudah berkunjung...")
elif "Nk " in msg.text:
if msg.from_ in admin:
nk0 = msg.text.replace("Nk ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
cl.sendText(msg.to,"Bubye")
elif "Kick: " in msg.text:
midd = msg.text.replace("Kick: ","")
cl.kickoutFromGroup(msg.to,[midd])
elif 'invite ' in msg.text.lower():
key = msg.text[-33:]
cl.findAndAddContactsByMid(key)
cl.inviteIntoGroup(msg.to, [key])
contact = cl.getContact(key)
elif "Mid @" in msg.text:
if msg.from_ in admin:
_name = msg.text.replace("Mid @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
random.choice(KAC).sendText(msg.to, g.mid)
else:
pass
elif msg.text.lower() == 'cancel':
if msg.toType == 2:
group = cl.getGroup(msg.to)
if group.invitee is not None:
gInviMids = [contact.mid for contact in group.invitee]
cl.cancelGroupInvitation(msg.to, gInviMids)
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Tidak ada undangan")
else:
cl.sendText(msg.to,"Invitan tidak ada")
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Tidak ada undangan")
else:
cl.sendText(msg.to,"Invitan tidak ada")
      # "link on/off" -- allow/deny joining the group via its ticket URL
      # (preventJoinByTicket False means the link works).
      elif msg.text.lower() == 'link on':
        if msg.toType == 2:
          group = cl.getGroup(msg.to)
          group.preventJoinByTicket = False
          cl.updateGroup(group)
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"URL open")
          else:
            cl.sendText(msg.to,"URL open")
        else:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"It can not be used outside the group")
          else:
            cl.sendText(msg.to,"Can not be used for groups other than")
      elif msg.text.lower() == 'link off':
        if msg.toType == 2:
          group = cl.getGroup(msg.to)
          group.preventJoinByTicket = True
          cl.updateGroup(group)
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"URL close")
          else:
            cl.sendText(msg.to,"URL close")
        else:
          if wait["lang"] == "JP":
            cl.sendText(msg.to,"It can not be used outside the group")
          else:
            cl.sendText(msg.to,"Can not be used for groups other than")
      # "Url"/"Gurl" -- enable the link if blocked, then post the join URL.
      elif msg.text in ["Url","Gurl"]:
        if msg.toType == 2:
          g = cl.getGroup(msg.to)
          if g.preventJoinByTicket == True:
            g.preventJoinByTicket = False
            cl.updateGroup(g)
          gurl = cl.reissueGroupTicket(msg.to)
          cl.sendText(msg.to,"line://ti/g/" + gurl)
elif "Gcreator" == msg.text:
try:
group = cl.getGroup(msg.to)
GS = group.creator.mid
M = Message()
M.to = msg.to
M.contentType = 13
M.contentMetadata = {'mid': GS}
cl.sendMessage(M)
except:
W = group.members[0].mid
M = Message()
M.to = msg.to
M.contentType = 13
M.contentMetadata = {'mid': W}
cl.sendMessage(M)
cl.sendText(msg.to,"Creator Grup")
elif msg.text.lower() == 'invite:gcreator':
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
gcmid = ginfo.creator.mid
except:
gcmid = "Error"
if wait["lang"] == "JP":
cl.inviteIntoGroup(msg.to,[gcmid])
else:
cl.inviteIntoGroup(msg.to,[gcmid])
elif ("Gname: " in msg.text):
if msg.toType == 2:
X = cl.getGroup(msg.to)
X.name = msg.text.replace("Gname: ","")
cl.updateGroup(X)
elif msg.text.lower() == 'infogrup':
group = cl.getGroup(msg.to)
try:
gCreator = group.creator.displayName
except:
gCreator = "Error"
md = "[Nama Grup : ]\n" + group.name + "\n\n[Id Grup : ]\n" + group.id + "\n\n[Pembuat Grup :]\n" + gCreator + "\n\n[Gambar Grup : ]\nhttp://dl.profile.line-cdn.net/" + group.pictureStatus
if group.preventJoinByTicket is False: md += "\n\nKode Url : Diizinkan"
else: md += "\n\nKode Url : Diblokir"
if group.invitee is None: md += "\nJumlah Member : " + str(len(group.members)) + " Orang" + "\nUndangan Yang Belum Diterima : 0 Orang"
else: md += "\nJumlah Member : " + str(len(group.members)) + " Orang" + "\nUndangan Yang Belum Diterima : " + str(len(group.invitee)) + " Orang"
cl.sendText(msg.to,md)
elif msg.text.lower() == 'grup id':
gid = cl.getGroupIdsJoined()
h = ""
for i in gid:
h += "[%s]:%s\n" % (cl.getGroup(i).name,i)
cl.sendText(msg.to,h)
#==============================================================================#
elif msg.text in ["Glist"]:
gid = cl.getGroupIdsJoined()
h = ""
for i in gid:
h += "%s\n" % (cl.getGroup(i).name +" ? ["+str(len(cl.getGroup(i).members))+"]")
cl.sendText(msg.to,"-- List Groups --\n\n"+ h +"\nTotal groups =" +" ["+str(len(gid))+"]")
elif msg.text.lower() == 'gcancel':
gid = cl.getGroupIdsInvited()
for i in gid:
cl.rejectGroupInvitation(i)
if wait["lang"] == "JP":
cl.sendText(msg.to,"Aku menolak semua undangan")
else:
cl.sendText(msg.to,"He declined all invitations")
elif "Auto add" in msg.text:
thisgroup = cl.getGroups([msg.to])
Mids = [contact.mid for contact in thisgroup[0].members]
mi_d = Mids[:33]
cl.findAndAddContactsByMids(mi_d)
cl.sendText(msg.to,"Berhasil add semua")
elif "@bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
cl.leaveGroup(msg.to)
except:
pass
#==============================================================================#
elif "mention" == msg.text.lower():
group = cl.getGroup(msg.to)
nama = [contact.mid for contact in group.members]
nm1, nm2, nm3, nm4, nm5, jml = [], [], [], [], [], len(nama)
if jml <= 100:
summon(msg.to, nama)
if jml > 100 and jml < 200:
for i in range(0, 99):
nm1 += [nama[i]]
summon(msg.to, nm1)
for j in range(100, len(nama)-1):
nm2 += [nama[j]]
summon(msg.to, nm2)
if jml > 200 and jml < 500:
for i in range(0, 99):
nm1 += [nama[i]]
summon(msg.to, nm1)
for j in range(100, 199):
nm2 += [nama[j]]
summon(msg.to, nm2)
for k in range(200, 299):
nm3 += [nama[k]]
summon(msg.to, nm3)
for l in range(300, 399):
nm4 += [nama[l]]
summon(msg.to, nm4)
for m in range(400, len(nama)-1):
nm5 += [nama[m]]
summon(msg.to, nm5)
if jml > 500:
print "Terlalu Banyak Men 500+"
cnt = Message()
cnt.text = "Jumlah:\n" + str(jml) + " Members"
cnt.to = msg.to
cl.sendMessage(cnt)
elif "setpoint on" == msg.text.lower():
if msg.to in wait2['readPoint']:
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
del wait2['setTime'][msg.to]
except:
pass
wait2['readPoint'][msg.to] = msg.id
wait2['readMember'][msg.to] = ""
wait2['setTime'][msg.to] = datetime.now().strftime('%H:%M:%S')
wait2['ROM'][msg.to] = {}
with open('sider.json', 'w') as fp:
json.dump(wait2, fp, sort_keys=True, indent=4)
cl.sendText(msg.to,"Setpoint already on")
else:
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
del wait2['setTime'][msg.to]
except:
pass
wait2['readPoint'][msg.to] = msg.id
wait2['readMember'][msg.to] = ""
wait2['setTime'][msg.to] = datetime.now().strftime('%H:%M:%S')
wait2['ROM'][msg.to] = {}
with open('sider.json', 'w') as fp:
json.dump(wait2, fp, sort_keys=True, indent=4)
cl.sendText(msg.to, "Keciduk lu sider:\n" + datetime.now().strftime('%H:%M:%S'))
print wait2
elif "setpoint off" == msg.text.lower():
if msg.to not in wait2['readPoint']:
cl.sendText(msg.to,"Setpoint already off")
else:
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
del wait2['setTime'][msg.to]
except:
pass
cl.sendText(msg.to, "Delete reading point:\n" + datetime.now().strftime('%H:%M:%S'))
elif "viewlastseen" == msg.text.lower():
if msg.to in wait2['readPoint']:
if wait2["ROM"][msg.to].items() == []:
cl.sendText(msg.to, "Keciduk lo sider:\nNone")
else:
chiya = []
for rom in wait2["ROM"][msg.to].items():
chiya.append(rom[1])
cmem = cl.getContacts(chiya)
zx = ""
zxc = ""
zx2 = []
xpesan = ''
for x in range(len(cmem)):
xname = str(cmem[x].displayName)
pesan = ''
pesan2 = pesan+"@a\n"
xlen = str(len(zxc)+len(xpesan))
xlen2 = str(len(zxc)+len(pesan2)+len(xpesan)-1)
zx = {'S':xlen, 'E':xlen2, 'M':cmem[x].mid}
zx2.append(zx)
zxc += pesan2
msg.contentType = 0
print zxc
msg.text = xpesan+ zxc + "Keciduk kalian para sider\nBefore: %s\nAfter: %s"%(wait2['setTime'][msg.to],datetime.now().strftime('%H:%M:%S'))
lol ={'MENTION':str('{"MENTIONEES":'+json.dumps(zx2).replace(' ','')+'}')}
print lol
msg.contentMetadata = lol
try:
cl.sendMessage(msg)
except Exception as error:
print error
pass
else:
cl.sendText(msg.to, "Lurking has not been set.")
elif "Gbroadcast: " in msg.text:
bc = msg.text.replace("Gbroadcast: ","")
gid = cl.getGroupIdsJoined()
for i in gid:
cl.sendText(i, bc)
elif "Cbroadcast: " in msg.text:
bc = msg.text.replace("Cbroadcast: ","")
gid = cl.getAllContactIds()
for i in gid:
cl.sendText(i, bc)
elif "Spam change: " in msg.text:
wait["spam"] = msg.text.replace("Spam change: ","")
cl.sendText(msg.to,"spam changed")
elif "Spam add: " in msg.text:
wait["spam"] = msg.text.replace("Spam add: ","")
if wait["lang"] == "JP":
cl.sendText(msg.to,"spam changed")
else:
cl.sendText(msg.to,"Done")
elif "Spam: " in msg.text:
strnum = msg.text.replace("Spam: ","")
num = int(strnum)
for var in range(0,num):
cl.sendText(msg.to, wait["spam"])
elif "Spamtag @" in msg.text:
_name = msg.text.replace("Spamtag @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
xname = g.displayName
xlen = str(len(xname)+1)
msg.contentType = 0
msg.text = "@"+xname+" "
msg.contentMetadata ={'MENTION':'{"MENTIONEES":[{"S":"0","E":'+json.dumps(xlen)+',"M":'+json.dumps(g.mid)+'}]}','EMTVER':'4'}
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
else:
pass
                elif "Spam " in msg.text:
                    # "Spam on N text"  -> send `text` N separate times.
                    # "Spam off N text" -> send one message of N repeated lines.
                    # Admin-only command.
                    if msg.from_ in admin:
                        txt = msg.text.split(" ")
                        jmlh = int(txt[2])
                        teks = msg.text.replace("Spam "+str(txt[1])+" "+str(jmlh)+" ","")
                        tulisan = jmlh * (teks+"\n")
                        #LordJahe
                        if txt[1] == "on":
                            if jmlh <= 100000:
                                for x in range(jmlh):
                                    cl.sendText(msg.to, teks)
                            else:
                                cl.sendText(msg.to, "Kelebihan Batas!")
                        elif txt[1] == "off":
                            if jmlh <= 100000:
                                cl.sendText(msg.to, tulisan)
                            else:
                                cl.sendText(msg.to, "Kelebihan Batas!")
                elif msg.text in ["Kibar"]:
                        # Promotional blast: sends several hard-coded contact
                        # cards (mids) and banner texts to the current chat.
                        msg.contentType = 13
                        msg.contentMetadata = {'mid': 'u4a9665f867e11e24f54846c4b95a1049'}
                        cl.sendText(msg.to,"🔥 Halo kami numpang play digc kalian 🔥")
                        cl.sendText(msg.to,"✩WE ARE TEAM ALLIANCE✩")
                        cl.sendMessage(msg)
                        cl.sendText(msg.to,"Support by clan:")
                        msg.contentType = 13
                        msg.contentMetadata = {'mid': 'u56c3fcc7264524ad73824066e7de7bf0'}
                        cl.sendMessage(msg)
                        msg.contentType = 13
                        msg.contentMetadata = {'mid': 'udfaf13d67a0456a7ef5dd4fb962e65a2'}
                        cl.sendMessage(msg)
                        msg.contentType = 13
                        msg.contentMetadata = {'mid': 'u1d88685d9e9047f647491f34e2ab7fd8'}
                        cl.sendMessage(msg)
                        msg.contentType = 13
                        msg.contentMetadata = {'mid': 'u673793beec75aba0765877029dbc8486'}
                        cl.sendMessage(msg)
                        msg.contentType = 13
                        msg.contentMetadata = {'mid': 'ua55d405bb0d73c9dd6e81dab7b52fe9e'}
                        cl.sendMessage(msg)
                        msg.contentType = 13
                        msg.contentMetadata = {'mid': 'u0d6cea88459fe35757b4831fe3be0420'}
                        cl.sendMessage(msg)
                        msg.contentType = 13
                        msg.contentMetadata = {'mid': 'u32bf137182e76543854a2068c814cb9c'}
                        cl.sendMessage(msg)
                        msg.contentType = 13
                        msg.contentMetadata = {'mid': 'u495fefba2d03365d85fcf068c4bec416'}
                        cl.sendMessage(msg)
                        cl.sendText(msg.to,"🚬 BERANI KIBAR, BERANI REMED 🚬")
                        cl.sendText(msg.to,"°~NICE TO MEET YOU°~")
                elif ("Micadd " in msg.text):
                    # Add each @-mentioned user to the mimic target set.
                    # NOTE(review): eval() on message metadata executes
                    # attacker-controllable text — json.loads would be safer; verify.
                    targets = []
                    key = eval(msg.contentMetadata["MENTION"])
                    key["MENTIONEES"][0]["M"]
                    for x in key["MENTIONEES"]:
                        targets.append(x["M"])
                    for target in targets:
                        try:
                            mimic["target"][target] = True
                            cl.sendText(msg.to,"Target ditambahkan!")
                            break
                        except:
                            cl.sendText(msg.to,"Fail !")
                            break
                elif ("Micdel " in msg.text):
                    # Remove each @-mentioned user from the mimic target set.
                    targets = []
                    key = eval(msg.contentMetadata["MENTION"])
                    key["MENTIONEES"][0]["M"]
                    for x in key["MENTIONEES"]:
                        targets.append(x["M"])
                    for target in targets:
                        try:
                            del mimic["target"][target]
                            cl.sendText(msg.to,"Target dihapuskan!")
                            break
                        except:
                            cl.sendText(msg.to,"Fail !")
                            break
                elif msg.text in ["Miclist"]:
                        # List display names of all current mimic targets.
                        if mimic["target"] == {}:
                            cl.sendText(msg.to,"nothing")
                        else:
                            mc = "Target mimic user\n"
                            for mi_d in mimic["target"]:
                                mc += "?? "+cl.getContact(mi_d).displayName + "\n"
                            cl.sendText(msg.to,mc)
                elif "Mimic target " in msg.text:
                    # Choose whose messages are mimicked ("me" or "target");
                    # only effective while mimic copy mode is enabled.
                    if mimic["copy"] == True:
                        siapa = msg.text.replace("Mimic target ","")
                        if siapa.rstrip(' ') == "me":
                            mimic["copy2"] = "me"
                            cl.sendText(msg.to,"Mimic change to me")
                        elif siapa.rstrip(' ') == "target":
                            mimic["copy2"] = "target"
                            cl.sendText(msg.to,"Mimic change to target")
                        else:
                            cl.sendText(msg.to,"I dont know")
                elif "Mimic " in msg.text:
                    # Toggle mimic (auto-reply) mode on/off.
                    cmd = msg.text.replace("Mimic ","")
                    if cmd == "on":
                        if mimic["status"] == False:
                            mimic["status"] = True
                            cl.sendText(msg.to,"Reply Message on")
                        else:
                            cl.sendText(msg.to,"Sudah on")
                    elif cmd == "off":
                        if mimic["status"] == True:
                            mimic["status"] = False
                            cl.sendText(msg.to,"Reply Message off")
                        else:
                            cl.sendText(msg.to,"Sudah off")
                elif "Setimage: " in msg.text:
                    # Store an image URL to be posted later by "Pap".
                    wait["pap"] = msg.text.replace("Setimage: ","")
                    cl.sendText(msg.to, "Pap telah di Set")
                elif msg.text in ["Papimage","Papim","Pap"]:
                    # Post the stored image URL as an image message.
                    cl.sendImageWithURL(msg.to,wait["pap"])
                elif "Setvideo: " in msg.text:
                    # Store a video URL (shares the same "pap" slot as Setimage).
                    wait["pap"] = msg.text.replace("Setvideo: ","")
                    cl.sendText(msg.to,"Video Has Ben Set To")
                elif msg.text in ["Papvideo","Papvid"]:
                    cl.sendVideoWithURL(msg.to,wait["pap"])
                elif "TL:" in msg.text:
                    # Post the text to the bot's timeline and reply with the post URL
                    # (group chats only).
                    if msg.toType == 2:
                        tl_text = msg.text.replace("TL:","")
                        cl.sendText(msg.to,"line://home/post?userMid="+mid+"&postId="+cl.new_post(tl_text)["result"]["post"]["postInfo"]["postId"])
                #==============================================================================#
                elif msg.text.lower() == 'mymid':
                    # Reply with the bot account's own mid.
                    cl.sendText(msg.to,mid)
                elif "Timeline: " in msg.text:
                    # Same as "TL:" but works in any chat type.
                    tl_text = msg.text.replace("Timeline: ","")
                    cl.sendText(msg.to,"line://home/post?userMid="+mid+"&postId="+cl.new_post(tl_text)["result"]["post"]["postInfo"]["postId"])
                elif "Myname: " in msg.text:
                    # Change the bot's display name.
                    string = msg.text.replace("Myname: ","")
                    # Length guard is effectively a no-op (limit is astronomically large).
                    if len(string.decode('utf-8')) <= 10000000000:
                        profile = cl.getProfile()
                        profile.displayName = string
                        cl.updateProfile(profile)
                        cl.sendText(msg.to,"Changed " + string + "")
                elif "Mybio: " in msg.text:
                    # Change the bot's status message (bio).
                    string = msg.text.replace("Mybio: ","")
                    if len(string.decode('utf-8')) <= 10000000000:
                        profile = cl.getProfile()
                        profile.statusMessage = string
                        cl.updateProfile(profile)
                        cl.sendText(msg.to,"Changed " + string)
                elif msg.text in ["Myname"]:
                    # Show the bot's current display name.
                    h = cl.getContact(mid)
                    cl.sendText(msg.to,"===[DisplayName]===\n" + h.displayName)
                elif msg.text in ["Mybio"]:
                    # Show the bot's current bio.
                    h = cl.getContact(mid)
                    cl.sendText(msg.to,"===[StatusMessage]===\n" + h.statusMessage)
                elif msg.text in ["Mypict"]:
                    # Send the bot's profile picture as an image.
                    h = cl.getContact(mid)
                    cl.sendImageWithURL(msg.to,"http://dl.profile.line-cdn.net/" + h.pictureStatus)
                elif msg.text in ["Myvid"]:
                    # Send the bot's profile picture URL as a video message.
                    h = cl.getContact(mid)
                    cl.sendVideoWithURL(msg.to,"http://dl.profile.line-cdn.net/" + h.pictureStatus)
                elif msg.text in ["Urlpict"]:
                    # Send the bot's profile-picture URL as plain text.
                    h = cl.getContact(mid)
                    cl.sendText(msg.to,"http://dl.profile.line-cdn.net/" + h.pictureStatus)
                elif msg.text in ["Mycover"]:
                    # Send the bot's cover (header) image.
                    h = cl.getContact(mid)
                    cu = cl.channel.getCover(mid)
                    path = str(cu)
                    cl.sendImageWithURL(msg.to, path)
                elif msg.text in ["Urlcover"]:
                    # Send the bot's cover image URL as text.
                    h = cl.getContact(mid)
                    cu = cl.channel.getCover(mid)
                    path = str(cu)
                    cl.sendText(msg.to, path)
                elif "Getmid @" in msg.text:
                    # Reply with the mid of the group member whose display
                    # name exactly matches the text after "@".
                    _name = msg.text.replace("Getmid @","")
                    _nametarget = _name.rstrip(' ')
                    gs = cl.getGroup(msg.to)
                    for g in gs.members:
                        if _nametarget == g.displayName:
                            cl.sendText(msg.to, g.mid)
                        else:
                            pass
                elif "Getinfo" in msg.text:
                    # Full profile dump of the first @-mentioned user.
                    # NOTE(review): eval() on message metadata — untrusted input; verify.
                    key = eval(msg.contentMetadata["MENTION"])
                    key1 = key["MENTIONEES"][0]["M"]
                    contact = cl.getContact(key1)
                    cu = cl.channel.getCover(key1)
                    try:
                        cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nMid :\n" + contact.mid + "\n\nBio :\n" + contact.statusMessage + "\n\nProfile Picture :\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n\nHeader :\n" + str(cu))
                    except:
                        cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nMid :\n" + contact.mid + "\n\nBio :\n" + contact.statusMessage + "\n\nProfile Picture :\n" + str(cu))
                elif "Getbio" in msg.text:
                    # Bio of the first @-mentioned user.
                    key = eval(msg.contentMetadata["MENTION"])
                    key1 = key["MENTIONEES"][0]["M"]
                    contact = cl.getContact(key1)
                    cu = cl.channel.getCover(key1)
                    # try/except arms are identical here (copy-paste from Getinfo).
                    try:
                        cl.sendText(msg.to, "===[StatusMessage]===\n" + contact.statusMessage)
                    except:
                        cl.sendText(msg.to, "===[StatusMessage]===\n" + contact.statusMessage)
                elif "Getname" in msg.text:
                    # Display name of the first @-mentioned user.
                    key = eval(msg.contentMetadata["MENTION"])
                    key1 = key["MENTIONEES"][0]["M"]
                    contact = cl.getContact(key1)
                    cu = cl.channel.getCover(key1)
                    try:
                        cl.sendText(msg.to, "===[DisplayName]===\n" + contact.displayName)
                    except:
                        cl.sendText(msg.to, "===[DisplayName]===\n" + contact.displayName)
                elif "Getprofile" in msg.text:
                    # Name, bio, avatar and cover of the first @-mentioned user.
                    key = eval(msg.contentMetadata["MENTION"])
                    key1 = key["MENTIONEES"][0]["M"]
                    contact = cl.getContact(key1)
                    cu = cl.channel.getCover(key1)
                    path = str(cu)
                    image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
                    try:
                        cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nBio :\n" + contact.statusMessage)
                        cl.sendText(msg.to,"Profile Picture " + contact.displayName)
                        cl.sendImageWithURL(msg.to,image)
                        cl.sendText(msg.to,"Cover " + contact.displayName)
                        cl.sendImageWithURL(msg.to,path)
                    except:
                        pass
                elif "Getcontact" in msg.text:
                    # Forward the mentioned user's contact card.
                    key = eval(msg.contentMetadata["MENTION"])
                    key1 = key["MENTIONEES"][0]["M"]
                    mmid = cl.getContact(key1)
                    msg.contentType = 13
                    msg.contentMetadata = {"mid": key1}
                    cl.sendMessage(msg)
                elif "Getpict @" in msg.text:
                    # Send the named member's avatar as an image.
                    print "[Command]dp executing"
                    _name = msg.text.replace("Getpict @","")
                    _nametarget = _name.rstrip(' ')
                    gs = cl.getGroup(msg.to)
                    targets = []
                    for g in gs.members:
                        if _nametarget == g.displayName:
                            targets.append(g.mid)
                    if targets == []:
                        cl.sendText(msg.to,"Contact not found")
                    else:
                        for target in targets:
                            try:
                                contact = cl.getContact(target)
                                path = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
                                cl.sendImageWithURL(msg.to, path)
                            except Exception as e:
                                raise e
                    print "[Command]dp executed"
                elif "Getvid @" in msg.text:
                    # Same lookup, but posts the avatar URL as a video message.
                    print "[Command]dp executing"
                    _name = msg.text.replace("Getvid @","")
                    _nametarget = _name.rstrip(' ')
                    gs = cl.getGroup(msg.to)
                    targets = []
                    for g in gs.members:
                        if _nametarget == g.displayName:
                            targets.append(g.mid)
                    if targets == []:
                        cl.sendText(msg.to,"Contact not found")
                    else:
                        for target in targets:
                            try:
                                contact = cl.getContact(target)
                                path = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
                                cl.sendVideoWithURL(msg.to, path)
                            except Exception as e:
                                raise e
                    print "[Command]dp executed"
                elif "Picturl @" in msg.text:
                    # Same lookup, but replies with the avatar URL as text.
                    print "[Command]dp executing"
                    _name = msg.text.replace("Picturl @","")
                    _nametarget = _name.rstrip(' ')
                    gs = cl.getGroup(msg.to)
                    targets = []
                    for g in gs.members:
                        if _nametarget == g.displayName:
                            targets.append(g.mid)
                    if targets == []:
                        cl.sendText(msg.to,"Contact not found")
                    else:
                        for target in targets:
                            try:
                                contact = cl.getContact(target)
                                path = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
                                cl.sendText(msg.to, path)
                            except Exception as e:
                                raise e
                    print "[Command]dp executed"
                elif "Getcover @" in msg.text:
                    # Send the named member's cover (header) image.
                    print "[Command]cover executing"
                    _name = msg.text.replace("Getcover @","")
                    _nametarget = _name.rstrip(' ')
                    gs = cl.getGroup(msg.to)
                    targets = []
                    for g in gs.members:
                        if _nametarget == g.displayName:
                            targets.append(g.mid)
                    if targets == []:
                        cl.sendText(msg.to,"Contact not found")
                    else:
                        for target in targets:
                            try:
                                contact = cl.getContact(target)
                                cu = cl.channel.getCover(target)
                                path = str(cu)
                                cl.sendImageWithURL(msg.to, path)
                            except Exception as e:
                                raise e
                    print "[Command]cover executed"
                elif "Coverurl @" in msg.text:
                    # Same lookup, but replies with the cover URL as text.
                    print "[Command]cover executing"
                    _name = msg.text.replace("Coverurl @","")
                    _nametarget = _name.rstrip(' ')
                    gs = cl.getGroup(msg.to)
                    targets = []
                    for g in gs.members:
                        if _nametarget == g.displayName:
                            targets.append(g.mid)
                    if targets == []:
                        cl.sendText(msg.to,"Contact not found")
                    else:
                        for target in targets:
                            try:
                                contact = cl.getContact(target)
                                cu = cl.channel.getCover(target)
                                path = str(cu)
                                cl.sendText(msg.to, path)
                            except Exception as e:
                                raise e
                    print "[Command]cover executed"
                elif "Getgrup image" in msg.text:
                    # Send the current group's icon as an image.
                    group = cl.getGroup(msg.to)
                    path = "http://dl.profile.line-cdn.net/" + group.pictureStatus
                    cl.sendImageWithURL(msg.to,path)
                elif "Urlgrup image" in msg.text:
                    # Send the current group's icon URL as text.
                    group = cl.getGroup(msg.to)
                    path = "http://dl.profile.line-cdn.net/" + group.pictureStatus
                    cl.sendText(msg.to,path)
                elif "Mycopy @" in msg.text:
                    # Clone the named member's profile onto the bot account.
                    print "[COPY] Ok"
                    _name = msg.text.replace("Mycopy @","")
                    _nametarget = _name.rstrip(' ')
                    gs = cl.getGroup(msg.to)
                    targets = []
                    for g in gs.members:
                        if _nametarget == g.displayName:
                            targets.append(g.mid)
                    if targets == []:
                        cl.sendText(msg.to, "Not Found...")
                    else:
                        for target in targets:
                            try:
                                cl.CloneContactProfile(target)
                                cl.sendText(msg.to, "Copied.")
                            except Exception as e:
                                print e
                elif msg.text in ["Mybackup","mybackup"]:
                    # Restore the bot's profile from the saved `backup` snapshot.
                    try:
                        cl.updateDisplayPicture(backup.pictureStatus)
                        cl.updateProfile(backup)
                        cl.sendText(msg.to, "Refreshed.")
                    except Exception as e:
                        cl.sendText(msg.to, str(e))
#==============================================================================#
                elif "Fancytext: " in msg.text:
                    # Send the text via the client's "blinking text" helper.
                    txt = msg.text.replace("Fancytext: ", "")
                    cl.kedapkedip(msg.to,txt)
                    print "[Command] Kedapkedip"
                elif "kedapkedip " in msg.text.lower():
                    # Wrap the text in special Unicode tag bytes that render as
                    # flashing text in the LINE client.
                    txt = msg.text.replace("kedapkedip ", "")
                    t1 = "\xf4\x80\xb0\x82\xf4\x80\xb0\x82\xf4\x80\xb0\x82\xf4\x80\xb0\x82\xf4\x80\xa0\x81\xf4\x80\xa0\x81\xf4\x80\xa0\x81"
                    t2 = "\xf4\x80\x82\xb3\xf4\x8f\xbf\xbf"
                    cl.sendText(msg.to, t1 + txt + t2)
elif "Translate-id " in msg.text:
isi = msg.text.replace("Tr-id ","")
translator = Translator()
hasil = translator.translate(isi, dest='id')
A = hasil.text
A = A.encode('utf-8')
cl.sendText(msg.to, A)
elif "Translate-en " in msg.text:
isi = msg.text.replace("Tr-en ","")
translator = Translator()
hasil = translator.translate(isi, dest='en')
A = hasil.text
A = A.encode('utf-8')
cl.sendText(msg.to, A)
elif "Translate-ar" in msg.text:
isi = msg.text.replace("Tr-ar ","")
translator = Translator()
hasil = translator.translate(isi, dest='ar')
A = hasil.text
A = A.encode('utf-8')
cl.sendText(msg.to, A)
elif "Translate-jp" in msg.text:
isi = msg.text.replace("Tr-jp ","")
translator = Translator()
hasil = translator.translate(isi, dest='ja')
A = hasil.text
A = A.encode('utf-8')
cl.sendText(msg.to, A)
elif "Translate-ko" in msg.text:
isi = msg.text.replace("Tr-ko ","")
translator = Translator()
hasil = translator.translate(isi, dest='ko')
A = hasil.text
A = A.encode('utf-8')
cl.sendText(msg.to, A)
                # --- Translation by scraping the Google Translate mobile page ---
                # Each branch: build the m.translate URL, fetch it, and pull the
                # first text after the 'class="t0">' marker out of the HTML.
                elif "Id@en" in msg.text:
                    # Indonesian -> English.
                    bahasa_awal = 'id'
                    bahasa_tujuan = 'en'
                    kata = msg.text.replace("Id@en ","")
                    url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
                    agent = {'User-Agent':'Mozilla/5.0'}
                    cari_hasil = 'class="t0">'
                    request = urllib2.Request(url, headers=agent)
                    page = urllib2.urlopen(request).read()
                    result = page[page.find(cari_hasil)+len(cari_hasil):]
                    result = result.split("<")[0]
                    cl.sendText(msg.to,"**FROM ID**\n" + "" + kata + "\n**TO ENGLISH**\n" + "" + result + "\n**SUKSES**")
                elif "En@id" in msg.text:
                    # English -> Indonesian.
                    bahasa_awal = 'en'
                    bahasa_tujuan = 'id'
                    kata = msg.text.replace("En@id ","")
                    url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
                    agent = {'User-Agent':'Mozilla/5.0'}
                    cari_hasil = 'class="t0">'
                    request = urllib2.Request(url, headers=agent)
                    page = urllib2.urlopen(request).read()
                    result = page[page.find(cari_hasil)+len(cari_hasil):]
                    result = result.split("<")[0]
                    cl.sendText(msg.to,"**FROM EN**\n" + "" + kata + "\n**TO ID**\n" + "" + result + "\n**SUKSES**")
                elif "Id@jp" in msg.text:
                    # Indonesian -> Japanese.
                    bahasa_awal = 'id'
                    bahasa_tujuan = 'ja'
                    kata = msg.text.replace("Id@jp ","")
                    url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
                    agent = {'User-Agent':'Mozilla/5.0'}
                    cari_hasil = 'class="t0">'
                    request = urllib2.Request(url, headers=agent)
                    page = urllib2.urlopen(request).read()
                    result = page[page.find(cari_hasil)+len(cari_hasil):]
                    result = result.split("<")[0]
                    cl.sendText(msg.to,"**FROM ID**\n" + "" + kata + "\n**TO JP**\n" + "" + result + "\n**SUKSES**")
                elif "Jp@id" in msg.text:
                    # Japanese -> Indonesian.
                    bahasa_awal = 'ja'
                    bahasa_tujuan = 'id'
                    kata = msg.text.replace("Jp@id ","")
                    url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
                    agent = {'User-Agent':'Mozilla/5.0'}
                    cari_hasil = 'class="t0">'
                    request = urllib2.Request(url, headers=agent)
                    page = urllib2.urlopen(request).read()
                    result = page[page.find(cari_hasil)+len(cari_hasil):]
                    result = result.split("<")[0]
                    cl.sendText(msg.to,"----FROM JP----\n" + "" + kata + "\n----TO ID----\n" + "" + result + "\n------SUKSES-----")
                elif "Id@th" in msg.text:
                    # Indonesian -> Thai.
                    bahasa_awal = 'id'
                    bahasa_tujuan = 'th'
                    kata = msg.text.replace("Id@th ","")
                    url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
                    agent = {'User-Agent':'Mozilla/5.0'}
                    cari_hasil = 'class="t0">'
                    request = urllib2.Request(url, headers=agent)
                    page = urllib2.urlopen(request).read()
                    result = page[page.find(cari_hasil)+len(cari_hasil):]
                    result = result.split("<")[0]
                    cl.sendText(msg.to,"----FROM ID----\n" + "" + kata + "\n----TO TH----\n" + "" + result + "\n------SUKSES-----")
                elif "Th@id" in msg.text:
                    # Thai -> Indonesian.
                    bahasa_awal = 'th'
                    bahasa_tujuan = 'id'
                    kata = msg.text.replace("Th@id ","")
                    url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
                    agent = {'User-Agent':'Mozilla/5.0'}
                    cari_hasil = 'class="t0">'
                    request = urllib2.Request(url, headers=agent)
                    page = urllib2.urlopen(request).read()
                    result = page[page.find(cari_hasil)+len(cari_hasil):]
                    result = result.split("<")[0]
                    cl.sendText(msg.to,"----FROM TH----\n" + "" + kata + "\n----TO ID----\n" + "" + result + "\n------SUKSES-----")
                # NOTE(review): duplicate "Id@jp" branch — unreachable dead code;
                # the identical test above always matches first.
                elif "Id@jp" in msg.text:
                    bahasa_awal = 'id'
                    bahasa_tujuan = 'ja'
                    kata = msg.text.replace("Id@jp ","")
                    url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
                    agent = {'User-Agent':'Mozilla/5.0'}
                    cari_hasil = 'class="t0">'
                    request = urllib2.Request(url, headers=agent)
                    page = urllib2.urlopen(request).read()
                    result = page[page.find(cari_hasil)+len(cari_hasil):]
                    result = result.split("<")[0]
                    cl.sendText(msg.to,"----FROM ID----\n" + "" + kata + "\n----TO JP----\n" + "" + result + "\n------SUKSES-----")
                elif "Id@ar" in msg.text:
                    # Indonesian -> Arabic.
                    bahasa_awal = 'id'
                    bahasa_tujuan = 'ar'
                    kata = msg.text.replace("Id@ar ","")
                    url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
                    agent = {'User-Agent':'Mozilla/5.0'}
                    cari_hasil = 'class="t0">'
                    request = urllib2.Request(url, headers=agent)
                    page = urllib2.urlopen(request).read()
                    result = page[page.find(cari_hasil)+len(cari_hasil):]
                    result = result.split("<")[0]
                    cl.sendText(msg.to,"----FROM ID----\n" + "" + kata + "\n----TO AR----\n" + "" + result + "\n------SUKSES-----")
                elif "Ar@id" in msg.text:
                    # Arabic -> Indonesian.
                    bahasa_awal = 'ar'
                    bahasa_tujuan = 'id'
                    kata = msg.text.replace("Ar@id ","")
                    url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
                    agent = {'User-Agent':'Mozilla/5.0'}
                    cari_hasil = 'class="t0">'
                    request = urllib2.Request(url, headers=agent)
                    page = urllib2.urlopen(request).read()
                    result = page[page.find(cari_hasil)+len(cari_hasil):]
                    result = result.split("<")[0]
                    cl.sendText(msg.to,"----FROM AR----\n" + "" + kata + "\n----TO ID----\n" + "" + result + "\n------SUKSES-----")
                elif "Id@ko" in msg.text:
                    # Indonesian -> Korean.
                    bahasa_awal = 'id'
                    bahasa_tujuan = 'ko'
                    kata = msg.text.replace("Id@ko ","")
                    url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
                    agent = {'User-Agent':'Mozilla/5.0'}
                    cari_hasil = 'class="t0">'
                    request = urllib2.Request(url, headers=agent)
                    page = urllib2.urlopen(request).read()
                    result = page[page.find(cari_hasil)+len(cari_hasil):]
                    result = result.split("<")[0]
                    cl.sendText(msg.to,"----FROM ID----\n" + "" + kata + "\n----TO KO----\n" + "" + result + "\n------SUKSES-----")
                elif "Ko@id" in msg.text:
                    # Korean -> Indonesian.
                    bahasa_awal = 'ko'
                    bahasa_tujuan = 'id'
                    kata = msg.text.replace("Ko@id ","")
                    url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
                    agent = {'User-Agent':'Mozilla/5.0'}
                    cari_hasil = 'class="t0">'
                    request = urllib2.Request(url, headers=agent)
                    page = urllib2.urlopen(request).read()
                    result = page[page.find(cari_hasil)+len(cari_hasil):]
                    result = result.split("<")[0]
                    cl.sendText(msg.to,"----FROM KO----\n" + "" + kata + "\n----TO ID----\n" + "" + result + "\n------SUKSES-----")
                elif msg.text.lower() == 'welcome':
                    # Greet the group by name and speak the greeting via gTTS.
                    ginfo = cl.getGroup(msg.to)
                    cl.sendText(msg.to," Di Grup " + str(ginfo.name))
                    jawaban1 = ("Selamat Datang Di Grup " + str(ginfo.name))
                    cl.sendText(msg.to,"Owner Grup " + str(ginfo.name) + " :\n" + ginfo.creator.displayName )
                    tts = gTTS(text=jawaban1, lang='id')
                    tts.save('tts.mp3')
                    cl.sendAudio(msg.to,'tts.mp3')
                elif "Say-id " in msg.text:
                    # Text-to-speech in Indonesian.
                    say = msg.text.replace("Say-id ","")
                    lang = 'id'
                    tts = gTTS(text=say, lang=lang)
                    tts.save("hasil.mp3")
                    cl.sendAudio(msg.to,"hasil.mp3")
                elif "Say-en " in msg.text:
                    # Text-to-speech in English.
                    say = msg.text.replace("Say-en ","")
                    lang = 'en'
                    tts = gTTS(text=say, lang=lang)
                    tts.save("hasil.mp3")
                    cl.sendAudio(msg.to,"hasil.mp3")
                elif "Say-jp " in msg.text:
                    # Text-to-speech in Japanese.
                    say = msg.text.replace("Say-jp ","")
                    lang = 'ja'
                    tts = gTTS(text=say, lang=lang)
                    tts.save("hasil.mp3")
                    cl.sendAudio(msg.to,"hasil.mp3")
                elif "Say-ar " in msg.text:
                    # Text-to-speech in Arabic.
                    say = msg.text.replace("Say-ar ","")
                    lang = 'ar'
                    tts = gTTS(text=say, lang=lang)
                    tts.save("hasil.mp3")
                    cl.sendAudio(msg.to,"hasil.mp3")
                elif "Say-ko " in msg.text:
                    # Text-to-speech in Korean.
                    say = msg.text.replace("Say-ko ","")
                    lang = 'ko'
                    tts = gTTS(text=say, lang=lang)
                    tts.save("hasil.mp3")
                    cl.sendAudio(msg.to,"hasil.mp3")
                elif "Kapan " in msg.text:
                    # "When ...?" — answer with a random canned reply, spoken.
                    tanya = msg.text.replace("Kapan ","")
                    jawab = ("kapan kapan","besok","satu abad lagi","Hari ini","Tahun depan","Minggu depan","Bulan depan","Sebentar lagi")
                    jawaban = random.choice(jawab)
                    tts = gTTS(text=jawaban, lang='id')
                    tts.save('tts.mp3')
                    cl.sendAudio(msg.to,'tts.mp3')
                elif "Apakah " in msg.text:
                    # "Is/does ...?" — random yes/no/maybe answer, spoken.
                    tanya = msg.text.replace("Apakah ","")
                    jawab = ("Ya","Tidak","Mungkin","Bisa jadi")
                    jawaban = random.choice(jawab)
                    tts = gTTS(text=jawaban, lang='id')
                    tts.save('tts.mp3')
                    cl.sendAudio(msg.to,'tts.mp3')
                elif 'Youtubemp4 ' in msg.text:
                    # Scrape YouTube search results and send the first hit as video.
                    try:
                        textToSearch = (msg.text).replace('Youtubemp4 ', "").strip()
                        query = urllib.quote(textToSearch)
                        url = "https://www.youtube.com/results?search_query=" + query
                        response = urllib2.urlopen(url)
                        html = response.read()
                        soup = BeautifulSoup(html, "html.parser")
                        results = soup.find(attrs={'class': 'yt-uix-tile-link'})
                        ght = ('https://www.youtube.com' + results['href'])
                        cl.sendVideoWithURL(msg.to, ght)
                    except:
                        cl.sendText(msg.to, "Could not find it")
                elif "ytsearch " in msg.text:
                    # List YouTube search result titles + URLs (playlists skipped).
                    query = msg.text.replace("ytsearch ","")
                    with requests.session() as s:
                        s.headers['user-agent'] = 'Mozilla/5.0'
                        url = 'http://www.youtube.com/results'
                        params = {'search_query': query}
                        r = s.get(url, params=params)
                        soup = BeautifulSoup(r.content, 'html5lib')
                        hasil = ""
                        for a in soup.select('.yt-lockup-title > a[title]'):
                            if '&list=' not in a['href']:
                                hasil += ''.join((a['title'],'\nUrl : http://www.youtube.com' + a['href'],'\n\n'))
                    cl.sendText(msg.to,hasil)
                    print '[Command] Youtube Search'
elif "Lirik " in msg.text:
try:
songname = msg.text.lower().replace("Lirik ","")
params = {'songname': songname}
r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
hasil = 'Lirik Lagu ('
hasil += song[0]
hasil += ')\n\n'
hasil += song[5]
cl.sendText(msg.to, hasil)
except Exception as wak:
cl.sendText(msg.to, str(wak))
elif "Wikipedia " in msg.text:
try:
wiki = msg.text.lower().replace("Wikipedia ","")
wikipedia.set_lang("id")
pesan="Title ("
pesan+=wikipedia.page(wiki).title
pesan+=")\n\n"
pesan+=wikipedia.summary(wiki, sentences=1)
pesan+="\n"
pesan+=wikipedia.page(wiki).url
cl.sendText(msg.to, pesan)
except:
try:
pesan="Over Text Limit! Please Click link\n"
pesan+=wikipedia.page(wiki).url
cl.sendText(msg.to, pesan)
except Exception as e:
cl.sendText(msg.to, str(e))
elif "Music " in msg.text:
try:
songname = msg.text.lower().replace("Music ","")
params = {'songname': songname}
r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
hasil = 'This is Your Music\n'
hasil += 'Judul : ' + song[0]
hasil += '\nDurasi : ' + song[1]
hasil += '\nLink Download : ' + song[4]
cl.sendText(msg.to, hasil)
cl.sendText(msg.to, "Please Wait for audio...")
cl.sendAudioWithURL(msg.to, song[4])
except Exception as njer:
cl.sendText(msg.to, str(njer))
                elif "Image " in msg.text:
                    # Google image search; sends one random result image.
                    # Relies on helpers download_page / _images_get_all_items
                    # defined elsewhere in this file.
                    search = msg.text.replace("Image ","")
                    url = 'https://www.google.com/search?espv=2&biw=1366&bih=667&tbm=isch&oq=kuc&aqs=mobile-gws-lite.0.0l5&q=' + search
                    raw_html = (download_page(url))
                    items = []
                    items = items + (_images_get_all_items(raw_html))
                    path = random.choice(items)
                    print path
                    try:
                        cl.sendImageWithURL(msg.to,path)
                    except:
                        pass
                elif "Profileig " in msg.text:
                    # Instagram profile summary via the public ?__a=1 endpoint.
                    try:
                        instagram = msg.text.replace("Profileig ","")
                        response = requests.get("https://www.instagram.com/"+instagram+"?__a=1")
                        data = response.json()
                        namaIG = str(data['user']['full_name'])
                        bioIG = str(data['user']['biography'])
                        mediaIG = str(data['user']['media']['count'])
                        verifIG = str(data['user']['is_verified'])
                        usernameIG = str(data['user']['username'])
                        followerIG = str(data['user']['followed_by']['count'])
                        profileIG = data['user']['profile_pic_url_hd']
                        privateIG = str(data['user']['is_private'])
                        followIG = str(data['user']['follows']['count'])
                        link = "LinkNya: " + "https://www.instagram.com/" + instagram
                        text = "Name : "+namaIG+"\nUsername : "+usernameIG+"\nBiography : "+bioIG+"\nFollowerNya : "+followerIG+"\nFollowingNya : "+followIG+"\nPost : "+mediaIG+"\nVerified : "+verifIG+"\nPrivate : "+privateIG+"" "\n" + link
                        cl.sendText(msg.to, str(text))
                    except Exception as e:
                        cl.sendText(msg.to, str(e))
                elif "Checkdate " in msg.text:
                    # Birth-date info (age, next birthday, zodiac) from a
                    # Google Apps Script web service.
                    tanggal = msg.text.replace("Checkdate ","")
                    r=requests.get('https://script.google.com/macros/exec?service=AKfycbw7gKzP-WYV2F5mc9RaR7yE3Ve1yN91Tjs91hp_jHSE02dSv9w&nama=ervan&tanggal='+tanggal)
                    data=r.text
                    data=json.loads(data)
                    lahir = data["data"]["lahir"]
                    usia = data["data"]["usia"]
                    ultah = data["data"]["ultah"]
                    zodiak = data["data"]["zodiak"]
                    cl.sendText(msg.to,"============ I N F O R M A S I ============\n"+"Date Of Birth : "+lahir+"\nAge : "+usia+"\nUltah : "+ultah+"\nZodiak : "+zodiak+"\n============ I N F O R M A S I ============")
elif msg.text in ["Kalender","Time","Waktu"]:
timeNow = datetime.now()
timeHours = datetime.strftime(timeNow,"(%H:%M)")
day = ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday","Friday", "Saturday"]
hari = ["Minggu", "Senin", "Selasa", "Rabu", "Kamis", "Jumat", "Sabtu"]
bulan = ["Januari", "Februari", "Maret", "April", "Mei", "Juni", "Juli", "Agustus", "September", "Oktober", "November", "Desember"]
inihari = datetime.today()
hr = inihari.strftime('%A')
bln = inihari.strftime('%m')
for i in range(len(day)):
if hr == day[i]: hasil = hari[i]
for k in range(0, len(bulan)):
if bln == str(k): blan = bulan[k-1]
rst = hasil + ", " + inihari.strftime('%d') + " - " + blan + " - " + inihari.strftime('%Y') + "\nJam : [ " + inihari.strftime('%H:%M:%S') + " ]"
cl.sendText(msg.to, rst)
#==============================================================================#
                elif msg.text.lower() == 'ifconfig':
                    # Reply with the host's network interface configuration.
                    botKernel = subprocess.Popen(["ifconfig"], stdout=subprocess.PIPE).communicate()[0]
                    cl.sendText(msg.to, botKernel + "\n\n===SERVER INFO NetStat===")
                elif msg.text.lower() == 'system':
                    # Reply with disk usage (df -h).
                    botKernel = subprocess.Popen(["df","-h"], stdout=subprocess.PIPE).communicate()[0]
                    cl.sendText(msg.to, botKernel + "\n\n===SERVER INFO SYSTEM===")
                elif msg.text.lower() == 'kernel':
                    # Reply with kernel/uname info.
                    botKernel = subprocess.Popen(["uname","-srvmpio"], stdout=subprocess.PIPE).communicate()[0]
                    cl.sendText(msg.to, botKernel + "\n\n===SERVER INFO KERNEL===")
                elif msg.text.lower() == 'cpu':
                    # Reply with /proc/cpuinfo contents.
                    botKernel = subprocess.Popen(["cat","/proc/cpuinfo"], stdout=subprocess.PIPE).communicate()[0]
                    cl.sendText(msg.to, botKernel + "\n\n===SERVER INFO CPU===")
                elif "Restart" in msg.text:
                    # Announce and then restart the bot process
                    # (restart_program is defined elsewhere in this file).
                    print "[Command]Restart"
                    try:
                        cl.sendText(msg.to,"Sedang Restart...")
                        cl.sendText(msg.to,"Restart Success(╯3╰)")
                        restart_program()
                    except:
                        cl.sendText(msg.to,"Please wait")
                        restart_program()
                        pass
                elif "Turn off" in msg.text:
                    # Terminate the bot process.
                    try:
                        import sys
                        sys.exit()
                    except:
                        # sys.exit raises SystemExit, which this swallows —
                        # NOTE(review): as written this likely does NOT exit; verify.
                        pass
                elif msg.text.lower() == 'runtime':
                    # Report uptime since `mulai` using the waktu() formatter.
                    eltime = time.time() - mulai
                    van = "Bot aktif selama "+waktu(eltime)
                    cl.sendText(msg.to,van)
#================================ PUY SCRIPT STARTED ==============================================#
                elif "google " in msg.text:
                    # Reply with a google.com URL built from the quoted query.
                    a = msg.text.replace("google ","")
                    b = urllib.quote(a)
                    cl.sendText(msg.to,"Sedang Mencari kak...")
                    cl.sendText(msg.to, "https://www.google.com/" + b)
                    cl.sendText(msg.to,"Ketemu ψ(`∇´)ψ ^")
                elif cms(msg.text,["/creator","Creator"]):
                    # Send the bot creator's contact card.
                    msg.contentType = 13
                    msg.contentMetadata = {'mid': "ufb890ffb1c03c4d49b16027968daf9cb"}
                    cl.sendMessage(msg)
                #elif msg.text in ["puy"]:
                    #cl.sendText(msg.to,"Puy here")
                   # cl.sendText(msg.to,"Puy here")
                   # kk.sendText(msg.to,"Puy here")
                   # cl.sendText(msg.to,"Hadir semua puy!")
elif msg.text in ["Masuk","...","Join kuy"]: #Panggil Semua Bot
if msg.from_ in admin:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
kk.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
cl.updateGroup(G)
print "Semua Sudah Lengkap"
elif msg.text in ["Puy join"]:
if msg.from_ in admin:
x = ki.getGroup(msg.to)
x.preventJoinByTicket = False
ki.updateGroup(x)
invsend = 0
Ti = ki.reissueGroupTicket(msg.to)
cl.acceptGroupInvitationByTicket(msg.to,Ti)
G = ki.getGroup(msg.to)
G.preventJoinByTicket = True
ki.updateGroup(G)
Ticket = ki.reissueGroupTicket(msg.to)
elif "Clone " in msg.text:
copy0 = msg.text.replace("Clone ","")
copy1 = copy0.lstrip()
copy2 = copy1.replace("@","")
copy3 = copy2.rstrip()
_name = copy3
group = cl.getGroup(msg.to)
for contact in group.members:
cname = cl.getContact(contact.mid).displayName
if cname == _name:
cl.CloneContactProfile(contact.mid)
cl.sendText(msg.to, "Berhasil (°∀°)b")
else:
pass
elif "friendpp: " in msg.text:
if msg.from_ in admin:
suf = msg.text.replace('friendpp: ','')
gid = cl.getAllContactIds()
for i in gid:
h = cl.getContact(i).displayName
gna = cl.getContact(i)
if h == suf:
cl.sendImageWithURL(msg.to,"http://dl.profile.line.naver.jp/"+ gna.pictureStatus)
elif "Checkmid: " in msg.text:
saya = msg.text.replace("Checkmid: ","")
msg.contentType = 13
msg.contentMetadata = {"mid":saya}
cl.sendMessage(msg)
contact = cl.getContact(saya)
cu = cl.channel.getCover(saya)
path = str(cu)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
try:
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nBio :\n" + contact.statusMessage)
cl.sendText(msg.to,"Profile Picture " + contact.displayName)
cl.sendImageWithURL(msg.to,image)
cl.sendText(msg.to,"Cover " + contact.displayName)
cl.sendImageWithURL(msg.to,path)
except:
pass
elif "Checkid: " in msg.text:
saya = msg.text.replace("Checkid: ","")
gid = cl.getGroupIdsJoined()
for i in gid:
h = cl.getGroup(i).id
group = cl.getGroup(i)
if h == saya:
try:
creator = group.creator.mid
msg.contentType = 13
msg.contentMetadata = {'mid': creator}
md = "Nama Grup :\n" + group.name + "\n\nID Grup :\n" + group.id
if group.preventJoinByTicket is False: md += "\n\nKode Url : Diizinkan"
else: md += "\n\nKode Url : Diblokir"
if group.invitee is None: md += "\nJumlah Member : " + str(len(group.members)) + " Orang" + "\nUndangan Yang Belum Diterima : 0 Orang"
else: md += "\nJumlah Member : " + str(len(group.members)) + " Orang" + "\nUndangan Yang Belum Diterima : " + str(len(group.invitee)) + " Orang"
cl.sendText(msg.to,md)
cl.sendMessage(msg)
cl.sendImageWithURL(msg.to,"http://dl.profile.line.naver.jp/"+ group.pictureStatus)
except:
creator = "Error"
elif msg.text in ["Friendlist"]:
contactlist = cl.getAllContactIds()
kontak = cl.getContacts(contactlist)
num=1
msgs="═════════List Friend═════════"
for ids in kontak:
msgs+="\n[%i] %s" % (num, ids.displayName)
num=(num+1)
msgs+="\n═════════List Friend═════════\n\nTotal Friend : %i" % len(kontak)
cl.sendText(msg.to, msgs)
elif msg.text in ["Memlist"]:
kontak = cl.getGroup(msg.to)
group = kontak.members
num=1
msgs="═════════List Member═════════-"
for ids in group:
msgs+="\n[%i] %s" % (num, ids.displayName)
num=(num+1)
msgs+="\n═════════List Member═════════\n\nTotal Members : %i" % len(group)
cl.sendText(msg.to, msgs)
elif "Friendinfo: " in msg.text:
saya = msg.text.replace('Friendinfo: ','')
gid = cl.getAllContactIds()
for i in gid:
h = cl.getContact(i).displayName
contact = cl.getContact(i)
cu = cl.channel.getCover(i)
path = str(cu)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
if h == saya:
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nBio :\n" + contact.statusMessage)
cl.sendText(msg.to,"Profile Picture " + contact.displayName)
cl.sendImageWithURL(msg.to,image)
cl.sendText(msg.to,"Cover " + contact.displayName)
cl.sendImageWithURL(msg.to,path)
elif "Friendpict: " in msg.text:
saya = msg.text.replace('Friendpict: ','')
gid = cl.getAllContactIds()
for i in gid:
h = cl.getContact(i).displayName
gna = cl.getContact(i)
if h == saya:
cl.sendImageWithURL(msg.to,"http://dl.profile.line.naver.jp/"+ gna.pictureStatus)
elif msg.text in ["Friendlistmid"]:
gruplist = cl.getAllContactIds()
kontak = cl.getContacts(gruplist)
num=1
msgs="═════════ʆίςϯ ƒɾίεηδʍίδ═════════"
for ids in kontak:
msgs+="\n[%i] %s" % (num, ids.mid)
num=(num+1)
msgs+="\n═════════ʆίςϯ ƒɾίεηδʍίδ═════════\n\nTotal Friend : %i" % len(kontak)
cl.sendText(msg.to, msgs)
elif msg.text in ["Blocklist"]:
blockedlist = cl.getBlockedContactIds()
kontak = cl.getContacts(blockedlist)
num=1
msgs="═════════List Blocked═════════"
for ids in kontak:
msgs+="\n[%i] %s" % (num, ids.displayName)
num=(num+1)
msgs+="\n═════════List Blocked═════════\n\nTotal Blocked : %i" % len(kontak)
cl.sendText(msg.to, msgs)
elif msg.text in ["Gruplist"]:
gruplist = cl.getGroupIdsJoined()
kontak = cl.getGroups(gruplist)
num=1
msgs="═════════List Grup═════════"
for ids in kontak:
msgs+="\n[%i] %s" % (num, ids.name)
num=(num+1)
msgs+="\n═════════List Grup═════════\n\nTotal Grup : %i" % len(kontak)
cl.sendText(msg.to, msgs)
elif msg.text in ["Gruplistmid"]:
gruplist = cl.getGroupIdsJoined()
kontak = cl.getGroups(gruplist)
num=1
msgs="═════════List GrupMid═════════"
for ids in kontak:
msgs+="\n[%i] %s" % (num, ids.id)
num=(num+1)
msgs+="\n═════════List GrupMid═════════\n\nTotal Grup : %i" % len(kontak)
cl.sendText(msg.to, msgs)
elif "Grupimage: " in msg.text:
saya = msg.text.replace('Grupimage: ','')
gid = cl.getGroupIdsJoined()
for i in gid:
h = cl.getGroup(i).name
gna = cl.getGroup(i)
if h == saya:
cl.sendImageWithURL(msg.to,"http://dl.profile.line.naver.jp/"+ gna.pictureStatus)
elif "Grupname" in msg.text:
saya = msg.text.replace('Grupname','')
gid = cl.getGroup(msg.to)
cl.sendText(msg.to, "[Nama Grup : ]\n" + gid.name)
elif "Grupid" in msg.text:
saya = msg.text.replace('Grupid','')
gid = cl.getGroup(msg.to)
cl.sendText(msg.to, "[ID Grup : ]\n" + gid.id)
elif "Grupinfo: " in msg.text:
saya = msg.text.replace('Grupinfo: ','')
gid = cl.getGroupIdsJoined()
for i in gid:
h = cl.getGroup(i).name
group = cl.getGroup(i)
if h == saya:
try:
creator = group.creator.mid
msg.contentType = 13
msg.contentMetadata = {'mid': creator}
md = "Nama Grup :\n" + group.name + "\n\nID Grup :\n" + group.id
if group.preventJoinByTicket is False: md += "\n\nKode Url : Diizinkan"
else: md += "\n\nKode Url : Diblokir"
if group.invitee is None: md += "\nJumlah Member : " + str(len(group.members)) + " Orang" + "\nUndangan Yang Belum Diterima : 0 Orang"
else: md += "\nJumlah Member : " + str(len(group.members)) + " Orang" + "\nUndangan Yang Belum Diterima : " + str(len(group.invitee)) + " Orang"
cl.sendText(msg.to,md)
cl.sendMessage(msg)
cl.sendImageWithURL(msg.to,"http://dl.profile.line.naver.jp/"+ group.pictureStatus)
except:
creator = "Error"
elif "Spamtag @" in msg.text:
_name = msg.text.replace("Spamtag @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
xname = g.displayName
xlen = str(len(xname)+1)
msg.contentType = 0
msg.text = "@"+xname+" "
msg.contentMetadata ={'MENTION':'{"MENTIONEES":[{"S":"0","E":'+json.dumps(xlen)+',"M":'+json.dumps(g.mid)+'}]}','EMTVER':'4'}
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
print "Spamtag Berhasil."
elif "/Spamcontact @" in msg.text:
_name = msg.text.replace("Spamcontact @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
msg.contentType = 13
msg.contentMetadata = {'mid': "ufb890ffb1c03c4d49b16027968daf9cb',"}
cl.sendText(g.mid,"Spam")
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendText(msg.to, "Done")
print " Spammed !"
elif "playstore " in msg.text.lower():
tob = msg.text.lower().replace("playstore ","")
cl.sendText(msg.to,"Sedang Mencari kak...")
cl.sendText(msg.to,"Title : "+tob+"\nSource : Google Play\nLinknya : https://play.google.com/store/search?q=" + tob)
cl.sendText(msg.to,"Ketemu ψ(`∇´)ψ ^")
elif 'wikipedia ' in msg.text.lower():
try:
wiki = msg.text.lower().replace("wikipedia ","")
wikipedia.set_lang("id")
pesan="Title ("
pesan+=wikipedia.page(wiki).title
pesan+=")\n\n"
pesan+=wikipedia.summary(wiki, sentences=3)
pesan+="\n"
pesan+=wikipedia.page(wiki).url
cl.sendText(msg.to, pesan)
except:
try:
pesan="Teks nya kepanjangan! ketik link dibawah aja\n"
pesan+=wikipedia.page(wiki).url
cl.sendText(msg.to, pesan)
except Exception as e:
cl.sendText(msg.to, str(e))
elif "say " in msg.text.lower():
say = msg.text.lower().replace("say ","")
lang = 'id'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif msg.text in ["spam gift 25"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': 'ae3d9165-fab2-4e70-859b-c14a9d4137c4',
'PRDTYPE': 'THEME',
'MSGTPL': '8'}
msg.text = None
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
elif msg.text in ["Gcreator:inv"]:
if msg.from_ in admin:
ginfo = cl.getGroup(msg.to)
gCreator = ginfo.creator.mid
try:
cl.findAndAddContactsByMid(gCreator)
cl.inviteIntoGroup(msg.to,[gCreator])
print "success inv gCreator"
except:
pass
elif msg.text in ["Gcreator:kick"]:
if msg.from_ in admin:
ginfo = cl.getGroup(msg.to)
gCreator = ginfo.creator.mid
try:
cl.findAndAddContactsByMid(gCreator)
cl.kickoutFromGroup(msg.to,[gCreator])
print "success inv gCreator"
except:
pass
elif msg.text in ["Quote1"]:
if msg.from_ in admin:
cl.sendText(msg.to,"Jangan pernah menganggap dirimu hebat tetaplah rendah hati, karena dengan seperti itu rezeki akan menghampiri kita.\nDunia ini terlalu indah bila hanya dihabiskan untuk membenci seseorang, akan menjadi indah bila saling melengkapi.\nJangan menunggu seseorang untuk menyelesaikan masalahmu, jadilah dewasa dengan menyelesaikannya sendiri segera.\nJika kita mensyukuri apa yang kita miliki, kita telah selangkah sampai pada kebahagiaan.")
elif msg.text in ["Quote2"]:
if msg.from_ in admin:
cl.sendText(msg.to,"Orang yang tulus mencintaimu adalah orang yang akan membuat kamu mengeluarkan air mata bahagia \nUntuk membuat sebuah senyuman itu tidaklah mudah, tapi untuk membuat satu tangisan sangatlah mudah\nHidup ini tak akan pernah lepas dari cinta, karna kita lahir dan di besarkan karena cinta \nKebersamaan itu penting bukan hanya menjaga silahturahmi, tapi harus menghasilkan sesuatu yang lebih baik")
elif msg.text in ["Quote3"]:
if msg.from_ in admin:
cl.sendText(msg.to,"Dengan kekuranganmu kamu juga melihat, siapa sebenarnya yang menerimamu apa adanya, bukan ada apanya \nMasa lalu tak perlu dilupakan, tapi jadikan itu semua pelajaran untuk masa depanmu \nKeyakinan akan menciptakan kenyataan yang sebenarnya \nCintailah orang tua dengan sunguh-sungguh, karena cinta orang tua tak pernah menghadirkan airmata kesedihan")
elif msg.text in ["Quote4"]:
if msg.from_ in admin:
cl.sendText(msg.to,"Masalalu bukan untuk diratapi terus, masalalu ada untuk kita jadikan pelajaran berharga \nKebahagiaan datang dari hati yang tulus, bukan dari senyum yang dipaksakan.\nSelalu ada pelajaran dibalik setiap kejadian. Jangan anggap masalah sebagai musibah, tapi carilah hikmah dan jangan menyerah.\nKetika luka buatmu tak mampu berkata, sebuah pelukan akan buatmu temukan tawa. Karena pelukan mampu berkata tanpa perlu bersuara.")
elif msg.text in ["Quote5"]:
if msg.from_ in admin:
cl.sendText(msg.to,"Sahabat sejati sulit tuk ditemukan. Jangan sia-siakan sahabat yang ada dalam hidupmu.\nHati itu memang hanya ada satu. Maka jika memang ingin memberi dengan utuh, ya berilah pada satu orang juga; bukan beberapa. \nHanya orang kecil yang berupaya mengecilkan Anda agar dia merasa besar. Sabarlah. Perilaku orang kecil memang begitu.")
elif msg.text in ["Quote6"]:
if msg.from_ in admin:
cl.sendText(msg.to,"Pada suatu titik,\nKamu hanya perlu\nmembiarkan pergi\napa yang kamu\n pikir harus terjadi\n dan nikmatilah apa yang sedang terjadi")
elif msg.text in ["Quote7"]:
if msg.from_ in admin:
cl.sendText(msg.to,"hal indah akan terjadi\nbila kamu menjauhkan\ndiri dari pikiran negatif")
elif msg.text in ["Quote8"]:
if msg.from_ in admin:
cl.sendText(msg.to,"kita punya lebih banyak\nalasan untuk bersyukur\n daripada mengeluh")
elif msg.text in ["Quote9"]:
if msg.from_ in admin:
cl.sendText(msg.to,"kesungguh-sungguhan\ntidak pernah tidak berujung\nseperti berjalan dipantai")
elif msg.text in ["Quote10"]:
if msg.from_ in admin:
cl.sendText(msg.to,"Hidup akan terasa lebih ringan\nketika kamu tertawa :)")
elif msg.text in ["Woy","woy","Woi","woi","bot","Bot","Quote Spesial","Quote"]:
quote = ['Memang kenangan sulit untuk dilupakan, tetapi \nbukan menjadikan itu sebagai alasan untuk terus \nterpuruk.\n~Kakashi Hatake','Sekalipun jika kita tidak bisa bahagia, \nkita harus bisa selalu bersemangat.\n~Naruto Uzumaki','Harap ku hanya satu, dengarkan nasihat dari ibumu.\n~Minato Namikaze','Makin lama waktu yg kamu habiskan bersama seseorang,\nmakin kuat hubungan antara kalian berdua\n~Orochimaru','Hidup hanya sekali, \ntak perlu memilih jalan yang tak mungkin.\n~Sarutobi Hiruzen','Saat kau mengenal kasih sayang, \nkau juga menanggung resiko kebencian.\n~Uchiha Itachi','Mereka yang bersedia memaafkan diri mereka sendiri\ndan berani menerima kenyataanlah yang disebut Kuat!\n~Uchiha Itachi','Jika dia tak bisa menerima keburukanmu, \ndia tak pantas tuk mendapatkan yang terbaik darimu.\n~Obito Uchiha','Tanpa kerja keras dan konsistensi yang luar biasa, \nmimpi selamanya hanya akan menjadi sebuah mimpi\n~Rock Lee','Ada tiga hal yang tetap dalam hidup:\nperubahan, pilihan, dan prinsip\n~Jiraiya','Tulislah hal terbaik yang kau dengar.\nHafalkan hal terbaik yang kau tulis.\nBicaralah dengan hal terbaik yang kau hafal.\n~Kakashi Hatake','Jangan bergantung kepada siapapun di dunia ini,\nbahkan bayanganmu sendiri akan meninggalkanmu\ndidalam kegelapan.\n~Sasuke Uchiha','Senyuman yang paling indah adalah senyuman yang lahir\nbukan dari keterpaksaan,\ntetapi lahir dari dasar hati\n~Naruto Uzumaki','Kita harus percaya tidak ada yang sia-sia\ndari apa yang kita lakukan dalam proses pencapaian tujuan\n~Madara Uchiha','Setiap impian dan keinginan\nitu butuh perjuangan dan pengorbanan\n~Madara Uchiha','Rasa sayang tidak akan membuatmu bisa memaafkan\nseseorang semudah itu\n~Nagato','Tanpa arah dan tujuan\ntidak ada gunanya kau hidup di dunia ini\n~Might Guy','Manusia itu sama dengan Pedang!\nKalau tak diasah maka dia akan tumpul\n~Orochimaru','Manusia tak kan pernah bisa menang\ndari rasa kesepian\n~Gaara','Lubang di hatimu adalah kekosongan\nyang akan diisi oleh orang 
lain\n~Naruto Uzumaki','Penyesalan itu hal yang lucu.\nitu akan datang setelah fakta berlalu\n~Obito Uchiha','Menderita agar anaknya bisa hidup lebih baik\nadalah tugas seorang ayah\n~Minato Namikaze','Aku harus percaya pada diriku sendiri,\npercaya bahwa aku adalah orang yang mereka percaya\n~Naruto Uzumaki','Seni itu adalah sesuatu yang rapuh,\nyang menghilang dalam sekejap\n~Deidara','Seni adalah ledakan\n~Deidara(tukang bom)','Kau lemah, kenapa kau lemah???\nSoalnya kurang sesuatu, yaitu kebencian\n~Itachi Uchiha','Kau gagal tetapi masih bisa mampu bangkit kembali,\nkarena itu menurutku arti dari kuat yang sebenarnya\~Neji Hyuga','Seseorang akan menjadi kuat\napabila melindungi seseorang yang dicintainya\n~Haku','Jalan hidup seorang murid adalah warisan dan estimasi dari sang guru\n~Jiraiya','Ketika seseorang terluka,\nmereka akan belajar untuk membeci\n~Jiraiya','Tidak semua mimpi dan harapan akan terwujud sesuai dengan keinginan kita\n~Orochimaru','Untuk mencapai tujuan akhirmu,\nkamu harus bersabar\n~Obito Uchiha','Lelaki manjadi semakin kuat saat ditolak\n~Jiraiya','Keyakinan lebih baik daripada rencanan apapun\n~Nagato','Teman itu adalah orang yang menyelamatkan dari neraka yang bernama kesepian\n~Naruto Uzumaki','Aku pernah kehilangan semuanya,\naku tak mau lagi melihat teman yang berharga bagiku tewas di hadapanku\n~Sasuke Uchiha','Satu-satunya yang harus dilakukan orang tua adalah mempercayai anaknya.\nitu saja… Dan dari situlah nilai sesungguhnya\n~Minato Namikaze','Aku tidak takut mati..\ntapi aku takut jika tidak bisa melakukan apa apa..\naku rela mati demi dia\n~Sakura Haruno','Kebohongan juga membuat kau tak bisa tahu siapa dirimu\n~Itachi Uchiha','Jangan pernah takut mengungkapkan perasaanmu pada orang lain\n~Asuma Sarutobi','Senyummu yang menyelamatkanku.\nItulah mengapa aku tidak takut mati untuk melindungimu.\nKarena Aku Mencintaimu\n~Hinata Hyuga','Bisakah kau mendengarkan ketika\nseseorang menuangkan perasaan dari hatinya\n~Sakura 
Haruno','Memaafkan Adalah Kunci\nUntuk memutuskan Rantai Kebencian\n~Jiraiya']
psn = random.choice(quote)
cl.sendText(msg.to,psn)
elif 'lirik ' in msg.text.lower():
try:
songname = msg.text.lower().replace('lirik ','')
params = {'songname': songname}
r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
hasil = 'Lirik Lagu ('
hasil += song[0]
hasil += ')\n\n'
hasil += song[5]
cl.sendText(msg.to, hasil)
except Exception as wak:
cl.sendText(msg.to, str(wak))
elif "Getcover @" in msg.text:
print "[Command]dp executing"
_name = msg.text.replace("Getcover @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Contact not found")
else:
for target in targets:
try:
contact = cl.getContact(target)
cu = cl.channel.getCover(target)
path = str(cu)
cl.sendImageWithURL(msg.to, path)
except:
pass
print "[Command]dp executed"
elif "idline: " in msg.text:
msgg = msg.text.replace('idline: ','')
conn = cl.findContactsByUserid(msgg)
if True:
msg.contentType = 13
msg.contentMetadata = {'mid': conn.mid}
cl.sendText(msg.to,"http://line.me/ti/p/~" + msgg)
cl.sendMessage(msg)
elif "reinvite" in msg.text.split():
if msg.toType == 2:
group = cl.getGroup(msg.to)
if group.invitee is not None:
try:
grCans = [contact.mid for contact in group.invitee]
cl.findAndAddContactByMid(msg.to, grCans)
cl.cancelGroupInvitation(msg.to, grCans)
cl.inviteIntoGroup(msg.to, grCans)
except Exception as error:
print error
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"No Invited")
else:
cl.sendText(msg.to,"Error")
else:
pass
elif msg.text.lower() == 'runtime':
eltime = time.time() - mulai
van = "Bot sudah berjalan selama ╮(╯▽╰)╭ "+waktu(eltime)
cl.sendText(msg.to,van)
elif msg.text in ["Restart"]:
cl.sendText(msg.to, "Bot has been restarted")
restart_program()
print "@Restart"
elif msg.text in ["time"]:
timeNow = datetime.now()
timeHours = datetime.strftime(timeNow,"(%H:%M)")
day = ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday","Friday", "Saturday"]
hari = ["Minggu", "Senin", "Selasa", "Rabu", "Kamis", "Jumat", "Sabtu"]
bulan = ["Januari", "Februari", "Maret", "April", "Mei", "Juni", "Juli", "Agustus", "September", "Oktober", "November", "Desember"]
inihari = datetime.today()
hr = inihari.strftime('%A')
bln = inihari.strftime('%m')
for i in range(len(day)):
if hr == day[i]: hasil = hari[i]
for k in range(0, len(bulan)):
if bln == str(k): blan = bulan[k-1]
rst = hasil + ", " + inihari.strftime('%d') + " - " + blan + " - " + inihari.strftime('%Y') + "\nJam : [ " + inihari.strftime('%H:%M:%S') + " ]"
client.sendText(msg.to, rst)
elif "image " in msg.text:
search = msg.text.replace("image ","")
url = 'https://www.google.com/search?espv=2&biw=1366&bih=667&tbm=isch&oq=kuc&aqs=mobile-gws-lite.0.0l5&q=' + search
raw_html = (download_page(url))
items = []
items = items + (_images_get_all_items(raw_html))
path = random.choice(items)
print path
try:
cl.sendImageWithURL(msg.to,path)
except:
pass
elif 'instagram ' in msg.text.lower():
try:
instagram = msg.text.lower().replace("instagram ","")
html = requests.get('https://www.instagram.com/' + instagram + '/?')
soup = BeautifulSoup(html.text, 'html5lib')
data = soup.find_all('meta', attrs={'property':'og:description'})
text = data[0].get('content').split()
data1 = soup.find_all('meta', attrs={'property':'og:image'})
text1 = data1[0].get('content').split()
user = "Name: " + text[-2] + "\n"
user1 = "Username: " + text[-1] + "\n"
followers = "Followers: " + text[0] + "\n"
following = "Following: " + text[2] + "\n"
post = "Post: " + text[4] + "\n"
link = "Link: " + "https://www.instagram.com/" + instagram
detail = "**INSTAGRAM INFO USER**\n"
details = "\n**INSTAGRAM INFO USER**"
cl.sendText(msg.to, detail + user + user1 + followers + following + post + link + details)
cl.sendImageWithURL(msg.to, text1[0])
except Exception as njer:
cl.sendText(msg.to, str(njer))
elif msg.text in ["Attack"]:
msg.contentType = 13
msg.contentMetadata = {'mid': "ufb890ffb1c03c4d49b16027968daf9cb',"}
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
cl.sendMessage(msg)
elif msg.text.lower() == '.....':
msg.contentType = 13
msg.contentMetadata = {'mid': "ufb890ffb1c03c4d49b16027968daf9cb',"}
cl.sendMessage(msg)
cl.sendText(msg.to,"Sepi bat dah(╥_╥)")
#=================================PUY SCRIPT FINISHED =============================================#
elif "Ban @" in msg.text:
if msg.toType == 2:
_name = msg.text.replace("Ban @","")
_nametarget = _name.rstrip()
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,_nametarget + " Not Found")
else:
for target in targets:
try:
wait["blacklist"][target] = True
cl.sendText(msg.to,_nametarget + " Succes Add to Blacklist")
except:
cl.sendText(msg.to,"Error")
elif "Unban @" in msg.text:
if msg.toType == 2:
_name = msg.text.replace("Unban @","")
_nametarget = _name.rstrip()
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,_nametarget + " Not Found")
else:
for target in targets:
try:
del wait["blacklist"][target]
cl.sendText(msg.to,_nametarget + " Delete From Blacklist")
except:
cl.sendText(msg.to,_nametarget + " Not In Blacklist")
elif "Ban:" in msg.text:
nk0 = msg.text.replace("Ban:","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
wait["blacklist"][target] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,_name + " Succes Add to Blacklist")
except:
cl.sendText(msg.to,"Error")
elif "Unban:" in msg.text:
nk0 = msg.text.replace("Unban:","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
del wait["blacklist"][target]
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,_name + " Delete From Blacklist")
except:
cl.sendText(msg.to,_name + " Not In Blacklist")
elif msg.text in ["Clear"]:
wait["blacklist"] = {}
cl.sendText(msg.to,"Blacklist Telah Dibersihkan")
elif msg.text in ["Ban:on"]:
wait["wblacklist"] = True
cl.sendText(msg.to,"Send Contact")
elif msg.text in ["Unban:on"]:
wait["dblacklist"] = True
cl.sendText(msg.to,"Send Contact")
elif msg.text in ["Banlist"]:
if wait["blacklist"] == {}:
cl.sendText(msg.to,"Tidak Ada Blacklist")
else:
cl.sendText(msg.to,"Daftar Banlist")
num=1
msgs="*Blacklist*"
for mi_d in wait["blacklist"]:
msgs+="\n[%i] %s" % (num, cl.getContact(mi_d).displayName)
num=(num+1)
msgs+="\n*Blacklist*\n\nTotal Blacklist : %i" % len(wait["blacklist"])
cl.sendText(msg.to, msgs)
elif msg.text in ["Conban","Contactban","Contact ban"]:
if wait["blacklist"] == {}:
cl.sendText(msg.to,"Tidak Ada Blacklist")
else:
cl.sendText(msg.to,"Daftar Blacklist")
h = ""
for i in wait["blacklist"]:
h = cl.getContact(i)
M = Message()
M.to = msg.to
M.contentType = 13
M.contentMetadata = {'mid': i}
cl.sendMessage(M)
elif msg.text in ["Midban","Mid ban"]:
if msg.toType == 2:
group = cl.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.members]
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, gMembMids)
num=1
cocoa = "══════════List Blacklist═════════"
for mm in matched_list:
cocoa+="\n[%i] %s" % (num, mm)
num=(num+1)
cocoa+="\n═════════List Blacklist═════════\n\nTotal Blacklist : %i" % len(matched_list)
cl.sendText(msg.to,cocoa)
elif msg.text.lower() == 'scan blacklist':
if msg.toType == 2:
group = cl.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.members]
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, gMembMids)
if matched_list == []:
cl.sendText(msg.to,"Tidak ada Daftar Blacklist")
return
for jj in matched_list:
try:
cl.kickoutFromGroup(msg.to,[jj])
print (msg.to,[jj])
except:
pass
#==============================================#
if op.type == 17:
if op.param2 not in Bots:
if op.param2 in Bots:
pass
if wait["protect"] == True:
if wait["blacklist"][op.param2] == True:
try:
cl.kickoutFromGroup(op.param1,[op.param2])
G = cl.getGroup(op.param1)
G.preventJoinByTicket = True
cl.updateGroup(G)
except:
try:
cl.kickoutFromGroup(op.param1,[op.param2])
G = cl.getGroup(op.param1)
G.preventJoinByTicket = True
cl.updateGroup(G)
except:
pass
if op.type == 19:
if op.param2 not in Bots:
if op.param2 in Bots:
pass
elif wait["protect"] == True:
wait ["blacklist"][op.param2] = True
cl.kickoutFromGroup(op.param1,[op.param2])
cl.inviteIntoGroup(op.param1,[op.param2])
if op.type == 13:
if op.param2 not in Bots:
if op.param2 in Bots:
pass
elif wait["inviteprotect"] == True:
wait ["blacklist"][op.param2] = True
cl.kickoutFromGroup(op.param1,[op.param2])
if op.param2 not in Bots:
if op.param2 in Bots:
pass
elif wait["inviteprotect"] == True:
wait ["blacklist"][op.param2] = True
cl.cancelGroupInvitation(op.param1,[op.param3])
if op.param2 not in Bots:
if op.param2 in Bots:
pass
elif wait["cancelprotect"] == True:
wait ["blacklist"][op.param2] = True
cl.cancelGroupInvitation(op.param1,[op.param3])
if op.type == 11:
if op.param2 not in Bots:
if op.param2 in Bots:
pass
elif wait["linkprotect"] == True:
wait ["blacklist"][op.param2] = True
G = cl.getGroup(op.param1)
G.preventJoinByTicket = True
cl.updateGroup(G)
cl.kickoutFromGroup(op.param1,[op.param2])
# if op.type == 5:
# if wait["autoAdd"] == True:
# if (wait["message"] in [""," ","\n",None]):
# pass
# else:
# cl.sendText(op.param1,str(wait["message"]))
if op.type == 11:
if wait["linkprotect"] == True:
if op.param2 not in Bots:
G = cl.getGroup(op.param1)
G.preventJoinByTicket = True
cl.kickoutFromGroup(op.param1,[op.param3])
cl.updateGroup(G)
if op.type == 17:
if op.param2 in Bots:
return
ginfo = cl.getGroup(op.param1)
random.choice(KAC).sendText(op.param1,"Selamat Datang di Indomaret.. Selamat belanja..")
print "MEMBER HAS JOIN THE GROUP"
if op.type == 15:
if op.param2 in Bots:
return
random.choice(KAC).sendText(op.param1,"terima kasih sudah berkunjung di indomaret..")
print "MEMBER HAS LEFT THE GROUP"
#------------------------------------------------------------------------------#
if op.type == 55:
try:
if op.param1 in wait2['readPoint']:
if op.param2 in wait2['readMember'][op.param1]:
pass
else:
wait2['readMember'][op.param1] += op.param2
wait2['ROM'][op.param1][op.param2] = op.param2
with open('sider.json', 'w') as fp:
json.dump(wait2, fp, sort_keys=True, indent=4)
else:
pass
except:
pass
if op.type == 59:
print op
except Exception as error:
print error
def autolike():
    """Background worker: poll the timeline feed and auto-like/auto-comment.

    Loops forever, scanning the most recent activity page. For every post
    not yet liked it sends a like (type 1001) when ``wait["likeOn"]`` is
    enabled, and a comment when ``wait["commentOn"]`` is enabled and the
    author is not in ``wait["commentBlack"]``. After 50 failed polls the
    worker terminates its thread via ``sys.exit(0)``.

    Relies on module-level globals: ``cl`` (LINE client) and ``wait``
    (feature-flag dict).
    """
    count = 1
    while True:
        try:
            for posts in cl.activity(1)["result"]["posts"]:
                if posts["postInfo"]["liked"] is False:
                    if wait["likeOn"] == True:
                        cl.like(posts["userInfo"]["writerMid"], posts["postInfo"]["postId"], 1001)
                        print("Like")
                    if wait["commentOn"] == True:
                        # Skip authors the operator has blacklisted for comments.
                        if posts["userInfo"]["writerMid"] not in wait["commentBlack"]:
                            cl.comment(posts["userInfo"]["writerMid"], posts["postInfo"]["postId"], wait["comment"])
        except Exception:
            # Was a bare `except:` which also swallowed KeyboardInterrupt/SystemExit.
            # Count consecutive failures; too many means the session is dead.
            count += 1
            if count == 50:
                sys.exit(0)
# Launch the auto-like worker in the background; marked as a daemon so the
# interpreter can exit without waiting for it.
thread2 = threading.Thread(target=autolike)
thread2.setDaemon(True)
thread2.start()
def likefriend():
    """Like the 20 most recent timeline posts that are not yet liked.

    Fetches the activity feed once and walks its first 20 entries, sending
    a like (type 1001) for each un-liked post. Already-liked posts are
    announced and followed by a short delay. Uses the module-level LINE
    client ``cl``.
    """
    # Fetch the feed a single time; the original re-downloaded all 20
    # entries on every iteration just to index one of them.
    hasil = cl.activity(limit=20)
    for zx in range(0, 20):
        post = hasil['result']['posts'][zx]
        if not post['postInfo']['liked']:
            try:
                cl.like(post['userInfo']['mid'], post['postInfo']['postId'], likeType=1001)
                print("Like")
            except Exception:
                # Best-effort: ignore failures on individual posts.
                pass
        else:
            print("Already Liked Om")
            time.sleep(0.60)
def likeme():
    """Like this account's own recent timeline posts.

    Scans the first 20 activity-feed entries and sends a like (type 1002)
    for each un-liked post whose author mid matches the module-level
    ``mid`` collection. Already-liked posts only print a notice. Uses the
    module-level LINE client ``cl``.
    """
    # Fetch the feed once; the original refetched all 20 entries per loop.
    hasil = cl.activity(limit=20)
    for zx in range(0, 20):
        post = hasil['result']['posts'][zx]
        if not post['postInfo']['liked']:
            if post['userInfo']['mid'] in mid:
                try:
                    cl.like(post['userInfo']['mid'], post['postInfo']['postId'], likeType=1002)
                    print("Like")
                except Exception:
                    # Best-effort: ignore failures on individual posts.
                    pass
        else:
            print("Status Sudah di Like Om")
# Main long-poll loop: continuously fetch pending operations from the LINE
# server and dispatch each one to bot(), advancing the local revision cursor.
while True:
    try:
        Ops = cl.fetchOps(cl.Poll.rev, 5)
    except EOFError:
        raise Exception("It might be wrong revision\n" + str(cl.Poll.rev))
    for Op in Ops:
        # End-of-stream marker carries no payload; skip it.
        if Op.type == OpType.END_OF_OPERATION:
            continue
        cl.Poll.rev = max(cl.Poll.rev, Op.revision)
        bot(Op)
| [
"noreply@github.com"
] | noreply@github.com |
bdeef222539d05e8198b4ec62cdb482012b69652 | cd08794c5ccdae4f0c9260ba537c2c2f11f5658b | /jq/jq_spider.py | 4513d09576491b776bc6dc299148bed6696ad441 | [] | no_license | Biking0/spider_project | d0b1d5443070240d8c28d8db470de78323f7134c | 8c8c874cea90684f255011e4ecf03aa9cd10a0f0 | refs/heads/master | 2022-01-28T07:53:26.128321 | 2019-06-01T08:17:19 | 2019-06-01T08:17:19 | 189,704,345 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,038 | py | # coding=utf-8
import time, logging, requests, json, urllib, re, redis, random, sys, traceback, settings
from datetime import datetime, timedelta
from urllib.parse import urlparse, parse_qs, urlencode
# from urlparse import urlparse, parse_qs
from lxml import etree
from fake_useragent import UserAgent
from utils.genProxy import genProxy
# from utils.set_invalid import set_invalid
from utils.process_cookies.cookies_generator import get_cookies
import urllib3
from utils import pubUtil, timeUtil, dataUtil
# from utils.mac_address import get_mac_address
# Basic logging configuration: INFO level to stderr with timestamps.
logging.basicConfig(
    # filename='jq-spider-api.log', filemode="w",
    level=logging.INFO,
    format="[%(asctime)s] %(name)s:%(levelname)s: %(message)s"
)
class JQSpider:
    """Scraper for Jetstar (carrier JQ/JX) economy fare availability.

    Pulls origin/destination/date tasks from a central task service (or a
    local task helper), queries the Jetstar booking site, parses prices
    and flight times out of the result HTML, and pushes parsed items plus
    "no flight" invalidation markers back to a collection API, emitting a
    heartbeat roughly once per minute.
    """

    def __init__(self, name, num, proxy, local):
        # name: worker name reported in heartbeats; num: worker number;
        # local: truthy -> read tasks via pubUtil.get_task instead of HTTP.
        # NOTE(review): `proxy` is currently unused -- proxy handling is
        # commented out below and in spider_worker.
        # self.ua = UserAgent()
        self.city_airport = self._city_airport()  # airport code -> city name
        self.now = 0  # timestamp of the last heartbeat
        self.session = requests.session()
        self.start_url = 'https://booking.jetstar.com/sg/zh/booking/search-flights?'
        self.task = []  # pending "invalid" markers to report upstream
        self.name = name
        self.num = num
        # self.dynamic = True if dynamic else False
        # self.proxy = True if proxy and not dynamic else False
        self.buffer = []  # parsed fare items waiting to be pushed in batches
        self.st_time = timeUtil.change_to_int('07:30:00')
        self.en_time = timeUtil.change_to_int('22:00:00')
        # self.genProxy = genProxy() if self.proxy else ''
        self.genProxy = genProxy()
        self.cookie_time = 0
        # self.refreshCookies()
        self.item_num = 0  # items pushed since the last heartbeat
        self.db = redis.Redis('116.196.83.53', port=6379, db=1)  # shared cookie cache
        self.version = 1.4
        self.ip_sleep = 0  # consecutive HTTP-403 counter used for backoff
        self.local = local
        urllib3.disable_warnings()

    def refreshCookies(self):
        """Obtain fresh anti-bot cookies (bm_sz / ak_bmsc) and install them
        on the shared requests session.

        NOTE(review): if the redis queue is non-empty, `content` stays None
        and json.loads(None) in the finally block will raise -- confirm the
        intended fallback (the lpop branch is commented out).
        """
        content = None
        try:
            if self.db.llen('jq_cookies') <= 0:
                content = get_cookies()
            # else:
            #     content = self.db.lpop('jq_cookies')
        except Exception as e:
            # print('55', e)
            content = get_cookies()
        finally:
            dict_cookies = json.loads(content)
            self.bm_sz = 'bm_sz=' + dict_cookies.get('bm_sz')
            self.ak_bmsc = 'ak_bmsc=' + dict_cookies.get('ak_bmsc')
            # self.ASP_NET_SessionId = 'ASP.NET_SessionId=' + dict_cookies.get('ASP.NET_SessionId')
            # print 'bmsz', self.bm_sz
            # print 'ak_bmsc', self.ak_bmsc
            # print 'ASP.NET_SessionId', self.ASP_NET_SessionId
            logging.info('got new cookie')
            # dict_cookies.pop('ASP.NET_SessionId')
            final_cookies = requests.utils.cookiejar_from_dict(dict_cookies, cookiejar=None, overwrite=True)
            self.session.cookies.update(final_cookies)

    # build search URLs from tasks
    @property
    def start_request(self):
        """Endless generator of [search_url, invalid_marker] tasks.

        Each upstream task ("FROM-TO:YYYYMMDD:day_count") is expanded into
        one search URL per day, paired with the marker used to invalidate
        that route/date when no flights are found.
        """
        result_iter = None
        # keep asking for tasks forever
        while True:
            # if not timeUtil.time_is_valid(self.st_time, self.en_time):
            #     logging.info('Waiting to 07:30:00.....')
            #     time.sleep(5 * 60)
            #     continue
            # data_api = 'http://dx.redis.jiaoan100.com/buddha/gettask?carrier=JX'
            data_api = 'http://task.jiaoan100.com/buddha/gettask?carrier=jx'
            try:
                if self.local:
                    if not result_iter:
                        result_iter = pubUtil.get_task('JQ', days=10)
                    result = next(result_iter)
                else:
                    result = json.loads(requests.get(data_api, timeout=60).text).get('data')
            except Exception as e:
                logging.error(e)
                result = None
            if result is None:
                logging.info('Date is None!')
                logging.info('Waiting...')
                time.sleep(16)
                continue
            # task format: "FROM-TO:YYYYMMDD:day_count"
            airports, _day, day_num = result[0].split(':')
            # day_num='1'
            # print('airports, _day, day_num',airports, _day, day_num)
            FROM, TO = airports.split('-')
            # FROM, TO = ('DAD', 'HKG')
            _day = re.sub(r'(\d{4})(\d{2})(\d{2})', r'\1-\2-\3', _day)
            days = self._get_dates(_day, day_num)
            # print(days)
            # days = ['2019-01-11', '2019-01-12', '2019-01-13']
            for day in days:
                # FROM, TO, day = 'RGN', 'SIN', '2019-01-17'
                query = urlencode({
                    'origin1': FROM,
                    'destination1': TO,
                    # 'flight-type': '1',
                    'departuredate1': day,
                    'adults': str(settings.ADULT_NUM),
                    'children': '0',
                    'infants': '0',
                })
                print(query)
                # set_invalid('JX', FROM, TO, day)
                total_url = self.start_url + query
                # marker reported upstream when no flight exists for this date
                invalid = {
                    'date': day.replace('-', ''),
                    'depAirport': FROM,
                    'arrAirport': TO,
                    'mins': settings.INVALID_TIME
                }
                # total_url = 'https://www.jetstar.com/au/en/home?origin=CBR&destination=HNL&flight-type=1&selected-departure-date=02-02-2019&adult=1&flexible=1¤cy=AUD'
                # yield total_url,invalid
                yield [total_url, invalid]

    # fetch the search-results page
    def spider_worker(self, task):
        """Fetch and parse one [url, invalid] task.

        Issues the search request without following redirects, then fetches
        the select-flights page the site would redirect to. On HTTP 403 it
        refreshes cookies and backs off after 5 consecutive denials.
        """
        url = task[0]
        invalid = task[1]
        # recover FROM/TO from the url query string
        result = parse_qs(urlparse(url).query)
        FROM = result.get('origin1')[0]
        TO = result.get('destination1')[0]
        response = None
        # try:
        # NOTE(review): the hard-coded cookies below look like debug
        # leftovers; self.bm_sz / self.ak_bmsc from refreshCookies are not
        # used in the headers (the 'cookie' entries are commented out).
        bm_sz = 'bm_sz=8FDDAD1500BB3181E007312084B74DA7~QAAQj+I+Fzus4t5nAQAACobLVLvxdiuzn063pNBFkTVgOPQsHzs06YJZFARyCeRdJ4OW1yMTQ6YZZ2KvYv0RGyJrd7irytTbRAKy4DPJf2FR3bV2+Jbl6azq9ffviB7OT/4PCwV+Wo5KWStfFY4PYePeDAdpwHNyJvDddWXmScoVlyjZu6iFn+ff9reRbCd4'
        ak_bmsc = 'ak_bmsc=C0F93DC841F28198100D2E40067EDBAC173EE28F6F5A0000E2AA3E5C93B0C105~plmMZfVTVea4qlzoPlFKLl0JkkWVWIzJCizVuAJtNbqiAz1q3I+qfoNCCCkFwTFwPMYcyf72MggquEHzDTExDlhBtlHUp/QpM2HxFAVbkUFlV2ruGnUAg2KOvSRDs9Krfoci21iS98FZKfl/xaWQKABFi08wDORmmu/KsdJrsvDF7rsacdDGvjm/cZoh41w+zkYmrrBN5StLBRwL4e4vuTFOTYgerIGpxGAEqOEz4wxwKKrLVePd3D7tXDrY/fkHsp'
        session = 'ASP.NET_SessionId=ekkha1fufcilv3fhdgbmricf'
        # bm_sz = self.bm_sz
        # ak_bmsc = self.ak_bmsc
        ua = UserAgent()
        # NOTE(review): the referer strings below contain '¤cy=AUD' --
        # likely a mojibake of '&currency=AUD'; confirm before relying on it.
        headers_302 = {
            # 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.186 Safari/537.36',
            'User-Agent': ua.random,
            # self.ua.random,
            # 'referer': ('https://www.google.com/travel/clk/f?t=%s' % int(time.time() * 1000)),
            'referer': 'https://www.jetstar.com/au/en/home?origin=SYD&destination=NRT&flight-type=1&selected-departure-date=01-02-2019&adult=1&flexible=1¤cy=AUD',
            # 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
            # 'accept-encoding': 'gzip, deflate, br',
            # 'accept-language': 'zh-CN,zh;q=0.9',
            # 'cookie': bm_sz + ';' + ak_bmsc
        }
        # print 'headers_302', headers_302
        headers_data = {
            # 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.186 Safari/537.36',
            'User-Agent': ua.random,
            # self.ua.random,
            # 'referer': ('https://www.google.com/travel/clk/f?t=%s' % int(time.time() * 1000)),
            'referer': 'https://www.jetstar.com/au/en/home?origin=SYD&destination=NRT&flight-type=1&selected-departure-date=01-02-2019&adult=1&flexible=1¤cy=AUD',
            # 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
            # 'accept-encoding': 'gzip, deflate, br',
            # 'accept-language': 'zh-CN,zh;q=0.9',
            # 'cookie': self.bm_sz + ';' + self.ak_bmsc + ';' + self.ASP_NET_SessionId
        }
        # if self.dynamic:
        #     (proxies, new_headers) = self.genProxy.genProxy()
        #     headers.update(new_headers)
        # elif self.proxy:
        #     ip_port = self.genProxy.getHttpProxy()
        #     proxies = {"http": "http://" + ip_port, "https": "https://" + ip_port}
        # else:
        #     proxies=''
        # url = 'https://booking.jetstar.com/au/en/booking/search-flights?origin1=SYD&destination1=NRT&departuredate1=2019-02-01&adults=1&children=0&infants=0&AutoSubmit=Y¤cy=AUD'
        # url = 'https://booking.jetstar.com/au/en/booking/search-flights?origin1=SYD&destination1=BNK&departuredate1=2019-02-02&adults=1&children=0&infants=0&AutoSubmit=Y¤cy=AUD'
        # url = 'https://www.jetstar.com/au/en/home?origin=SYD&destination=BNK&flight-type=2&selected-departure-date=07-02-2019&adult=1&flexible=1¤cy=AUD'
        #
        # ip_port = genProxy().getHttpProxy()
        # ip_port='ZLy5cF:XkzCmz@181.177.84.107:9852'
        # ip_port = 'lum-customer-zhanghua-zone-static-country-us:latr6o1y65t3@zproxy.lum-superproxy.io:22225'
        # proxies = {"http": "http://" + ip_port, "https": "https://" + ip_port}
        # proxies = {"https": "https://" + ip_port}
        response = self.session.get(url, headers=headers_302, timeout=30, verify=False,
                                    allow_redirects=False)
        # response = self.session.get(url, headers=headers_302, timeout=100, verify=False)
        # print('130', response)
        # url_302 = 'https://booking.jetstar.com/cn/zh/booking/select-flights'
        url_302 = 'https://booking.jetstar.com/au/en/booking/select-flights'
        proxies = {'http': 'http://localhost:8080'}
        response = self.session.get(url_302, headers=headers_data, timeout=30, verify=False,
                                    allow_redirects=False)
        if response.status_code == 403:
            logging.info('Access Denied!')
            self.refreshCookies()
            self.ip_sleep += 1
            if self.ip_sleep > 5:
                logging.info('# sleep 60s')
                time.sleep(60)
                self.ip_sleep = 0
            return
        self.ip_sleep = 0
        # print('134', response)
        self.parse(response.text, FROM, TO, invalid)
        # except IndexError:
        #     if response.content.lower().find('<title>access denied</title>') != -1:
        #         logging.info('Access Denied!')
        #         self.refreshCookies()
        #         # if not self.dynamic and not self.proxy:
        #         #     self.genProxy.getHttpProxy(True)
        #         self.spider_worker(url)
        #         return
        #     # traceback.print_exc()
        #     logging.info(url)
        #     logging.info("%s->%s no data,passed" % (FROM, TO))
        # except Exception as e:
        #     # logging.info("%s->%s,%s,%s failed,try again" % (FROM, TO, 'requests.exceptions.Timeout',url))
        #     # traceback.print_exc()
        #     print e
        #     if not self.dynamic and self.proxy:
        #         self.genProxy.getHttpProxy(True)
        #     # self.refreshCookies()
        #     self.spider_worker(url)

    # parse the result page
    def parse(self, response, FROM, TO, invalid):
        """Extract economy fares from the select-flights HTML.

        For each direct flight found, builds an item dict and hands it to
        process_item. When the route/date has no flights at all, queues
        the `invalid` marker instead.
        """
        # logging.info('success get data!')
        from_city = self.city_airport.get(FROM, FROM)
        to_city = self.city_airport.get(TO, TO)
        html = etree.HTML(response)
        # f = open('123.txt', 'w')
        # f.write(html)
        # f.close()
        # try:
        currency = html.xpath('//div[@id="datalayer-data"]/@data-currency-code')[0]
        # currency = html.xpath('//div[@id="datalayer-data"]/@data-currency-code')
        # except Exception as e:
        #     print e
        # print html.xpath('//div')[0].text
        # print 226, html.xpath('//div[@id="tab-economy-SYD-NRT"]')
        try:
            eco_div = html.xpath('//div[@id="economy-%s-%s"]' % (FROM, TO))[0]
        except:
            # page layout not recognised (or no economy tab) -- give up quietly
            return
        # print 'eco: ', eco_div
        display_div = eco_div.xpath('.//div[@class=" display-currency-%s"]/div[@class="row"]' % currency)[1:]
        # row = [leg for leg in display_div if not leg.xpath('.//div[contains(@class, "fare__details--leg-1")]')]
        row = [leg for leg in display_div]
        for div in row:
            # skip rows that cannot be parsed (e.g. sold-out flights)
            try:
                item = dict()
                # try:
                seats = div.xpath('.//span[@class="hot-fare"]')
                if len(seats) == 0:
                    maxSeats = 9
                else:
                    maxSeats = seats[0].text.split(' ')[0]
                # no flights on this route/date
                flight_info = div.xpath('.//div[@class="price-select__button"]/input/@data-price-breakdown')
                if len(flight_info) == 0:
                    logging.info('# no flight')
                    self.task.append(invalid)
                    return
                dataPrice = div.xpath('.//div[@class="price-select__button"]/input/@data-price-breakdown')[0]
                dataPriceJson = json.loads(dataPrice)
                # NOTE(review): adultPrice is computed twice -- the second,
                # unrounded assignment wins; also '//' floor-divides the
                # price, dropping cents. Confirm both are intentional.
                adultPrice = round(float(dataPriceJson.get('TotalAmountDue')), 2) // settings.ADULT_NUM
                adultPrice = float(dataPriceJson.get('TotalAmountDue')) // settings.ADULT_NUM
                netFare = round(float(dataPriceJson.get('TotalFare')), 2) // settings.ADULT_NUM
                depTime = div.xpath('.//div[@class="price-select__button"]/input/@data-departure-time')[0]  # departure time
                arrTime = div.xpath('.//div[@class="price-select__button"]/input/@data-arrival-time')[0]  # arrival time
                flightNumber = div.xpath('.//div[@class="price-select__button"]/input/@data-flightnumber')[0]  # flight number
                # connecting flight (leg separator present) -- skip
                if '-' in flightNumber:
                    logging.info('# is change')
                    continue
                timegroup_str = div.xpath('.//div[@class="price-select__button"]/input/@id')[0]
                timegroup = re.findall(r'(\d{2}/\d{2}/\d{4} \d{2}:\d{2})', timegroup_str)
                depTimeStamp = time.mktime(time.strptime(timegroup[0], "%m/%d/%Y %H:%M")).__int__()  # departure timestamp
                arrTimeStamp = time.mktime(time.strptime(timegroup[1], "%m/%d/%Y %H:%M")).__int__()  # arrival timestamp
                item.update(dict(
                    adultPrice=adultPrice,
                    netFare=netFare,
                    depTime=depTimeStamp,
                    arrTime=arrTimeStamp,
                    flightNumber=flightNumber,
                    depAirport=FROM,  # departure airport
                    arrAirport=TO,  # arrival airport
                    cabin='ECO',
                    currency=currency,
                    fromCity=from_city,
                    toCity=to_city,
                    maxSeats=maxSeats,
                    isChange=1,
                    segments='[]',
                    getTime=time.mktime(datetime.now().timetuple()).__int__(),
                ))
                item.update(dict(
                    adultTax=item["adultPrice"] - item["netFare"],  # tax = total - net fare
                    carrier=item["flightNumber"][:2],
                ))
                # except Exception as e:
                #     adultPrice = 0
                #     netFare = 0
                #     maxSeats = 0
                #     flightNumberTag = \
                #         div.xpath(
                #             './/div[contains(@class, "flight-info__flightNubmer")]/div[@class="medium-11"]/strong')[0]
                #     flightNumber = flightNumberTag.text
                #     depTimeTag = div.xpath('.//strong[@class="depaturestation"]')[0]
                #     arrTimeTag = div.xpath('.//strong[@class="arrivalstation"]')[0]
                #     depTimeContent = re.split(r'[\s\,\;\n\t]+', depTimeTag.text)
                #     arrTimeContent = re.split(r'[\s\,\;\n\t]+', arrTimeTag.text)
                #     depDateStr = ' '.join(depTimeContent[1:-1])
                #     arrDateStr = ' '.join(arrTimeContent[1:-1])
                #     depTimeStamp = time.mktime(time.strptime(depDateStr, "%A %d %B %Y %I:%M%p")).__int__()
                #     arrTimeStamp = time.mktime(time.strptime(arrDateStr, "%A %d %B %Y %I:%M%p")).__int__()
                #     print e
                #     continue
                # finally:
                #     print(item)
                self.process_item(item)
            except:
                print(FROM + '-->' + TO)
                traceback.print_exc()

    # push parsed items upstream
    def process_item(self, item):
        """Buffer a parsed item; flush buffered items and pending invalid
        markers in batches of 5, and heartbeat at most once per minute."""
        self.buffer.append(item)
        if len(self.buffer) >= 5:
            # # test endpoint
            # url = '%scarrier=%s' % (settings.PUSH_DATA_URL_TEST, item["carrier"])
            # # production endpoint
            # # url = '%scarrier=%s' % (settings.PUSH_DATA_URL, item["carrier"])
            # data = {
            #     "action": "add",
            #     "data": self.buffer
            #
            # }
            # response = requests.post(url, data=json.dumps(data), timeout=2 * 60, verify=False)
            # logging.info("%s,%s" % (response.content, len(self.buffer)))
            url = dataUtil.get_random_url(settings.PUSH_DATA_URL)
            add_success = pubUtil.addData('add', self.buffer, url, self.name, 'JQ')
            self.item_num += len(self.buffer)
            if add_success:
                self.buffer = []
            invalid_success = pubUtil.invalidData('invalid', self.task, url + 'carrier=%s' % 'JQ', self.name)
            if invalid_success:
                self.task = []
        # heartbeat: report items/minute at most once every 60 seconds
        run_time = time.time()
        if run_time - self.now >= 60:
            permins = self.item_num
            self.item_num = 0
            print(pubUtil.heartbeat('%s' % (self.name),
                                    'jq', '%s' % self.num, permins, self.version))
            self.now = run_time

    # airport-code -> city-name mapping
    @staticmethod
    def _city_airport():
        """Fetch the JQ airport-to-city mapping from the data service."""
        api = 'http://dx.jiaoan100.com/br/portcity?carrier=JQ'
        response = requests.get(api)
        return json.loads(response.text).get('data')

    @staticmethod
    def _get_dates(day, num):
        """Return `num` consecutive ISO dates starting at `day` ('YYYY-MM-DD')."""
        start_day = datetime.strptime(day, '%Y-%m-%d')
        dates = []
        # num = 1
        for _day in range(int(num)):
            dates.append((start_day + timedelta(_day)).strftime('%Y-%m-%d'))
        return dates

    def run(self):
        """Main loop: consume tasks forever, logging any per-task failure."""
        for url in self.start_request:
            try:
                self.spider_worker(url)
            except:
                traceback.print_exc()
                pass
# No-argument smoke test with hard-coded parameters.
# NOTE(review): this runs at import time (no __main__ guard -- the guarded
# entry point below is commented out). Also note `num` is defined but the
# literal '1' is passed, and `dynamic` fills the constructor's `local`
# parameter -- confirm both are intentional.
name = 'hyn-test'
num = 1
proxy = False
dynamic = False
run = JQSpider(name, '1', proxy, dynamic)
run.run()
# if __name__ == '__main__':
# import sys, os
#
# argv = sys.argv
# # os.system('mitmdump -s ./mitmproxy_js/addons.py')
# name = argv[1]
# num = argv[2] if len(argv) > 2 else 1
# proxy = argv[3] if len(argv) > 3 else False
# local = argv[4] if len(argv) > 4 else False
#
# if local:
# if local.split('=')[0] == 'local':
# local = 1
# else:
# local = 0
# else:
# local = 0
# # dynamic = argv[4] if len(argv) > 4 else False
# # jq = JQSpider(name=argv[1], num=num, proxy=proxy, dynamic=dynamic)
# jq = JQSpider(name=argv[1], num=num, proxy=proxy, local=local)
# jq.run()
| [
"1342831695@qq.com"
] | 1342831695@qq.com |
f6020f775a01768385d784eb92e9f1acf3eb17a4 | c5172b80895b3f98dbb37cbe80cf6bdc19906856 | /hotel/migrations/0021_auto_20210401_1730.py | f403837f9359125395950115301e3ffbfe67d8eb | [] | no_license | ayemyatmm/hotel-booking | be24b9444bcf4db8f86fc15dea3c8ff1ce0eddaf | be6f234a08cd97b59848d380e2a4c4c205468a28 | refs/heads/master | 2023-04-19T17:01:13.051335 | 2021-04-21T09:58:20 | 2021-04-21T09:58:20 | 352,900,988 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | # Generated by Django 3.1.7 on 2021-04-01 08:30
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make Booking.diff an optional DateField (null and blank allowed)."""

    dependencies = [
        ('hotel', '0020_auto_20210401_1729'),
    ]

    operations = [
        migrations.AlterField(
            model_name='booking',
            name='diff',
            field=models.DateField(blank=True, null=True),
        ),
    ]
| [
"ayemyatmyatphyo@mm.cybermissions.co.jp"
] | ayemyatmyatphyo@mm.cybermissions.co.jp |
2077471a9bca26eefc530c73bc6a28cc5ea812b7 | e739a794577d2bf312394b6ae921798607fffa08 | /bookings/migrations/0001_initial.py | fc8ad8a3a866d189a4ac66fe41ddc6d01d15ff2e | [] | no_license | zaheerkzz/to-infinity | a05ffe2fd7c48b0b2309d934117ef90c1d4e2b09 | a266482b8ea8a1a12ec83d8c65b9c4505f581e7f | refs/heads/master | 2023-09-02T00:29:57.486269 | 2021-11-16T23:28:24 | 2021-11-16T23:28:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,754 | py | # Generated by Django 3.1.4 on 2020-12-28 18:59
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the bookings app.

    Creates Booking (keyed by booking_ref), Trip, Passenger and
    BookingLineItem, then adds the Booking.trip foreign key.
    """

    initial = True

    dependencies = [
        ('profiles', '0004_userprofile_default_passport_no'),
        ('products', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Booking',
            fields=[
                ('booking_ref', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
                ('booking_total', models.DecimalField(decimal_places=2, default=0, max_digits=10)),
                ('status', models.CharField(choices=[('OPENED', 'Open'), ('COMPLETE', 'Complete'), ('CANCELLED', 'Cancelled')], default='OPENED', max_length=10)),
                ('contact_number', models.CharField(blank=True, max_length=20, null=True)),
                ('date_completed', models.DateTimeField(blank=True, null=True)),
                ('original_bag', models.TextField(default='')),
                ('stripe_pid', models.CharField(default='', max_length=254)),
                ('lead_passenger', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='bookings', to='profiles.userprofile')),
            ],
        ),
        migrations.CreateModel(
            name='Trip',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateTimeField()),
                ('seats_available', models.IntegerField(editable=False)),
                ('trip_ref', models.CharField(blank=True, max_length=32, null=True)),
                ('destination', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='trips', to='products.destination')),
            ],
            options={
                'ordering': ['date'],
            },
        ),
        migrations.CreateModel(
            name='Passenger',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=20)),
                ('last_name', models.CharField(max_length=20)),
                ('email', models.EmailField(max_length=254)),
                ('passport_no', models.CharField(max_length=12)),
                ('is_leaduser', models.BooleanField(default=False, editable=False)),
                ('booking', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='passengers', to='bookings.booking')),
                ('trip_addons', models.ManyToManyField(blank=True, to='products.AddOn')),
                ('trip_insurance', models.ManyToManyField(to='products.Insurance')),
            ],
        ),
        migrations.CreateModel(
            name='BookingLineItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('quantity', models.IntegerField(default=0)),
                ('line_total', models.DecimalField(decimal_places=2, editable=False, max_digits=8)),
                ('booking', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='bookingitems', to='bookings.booking')),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='products.product')),
            ],
        ),
        # Added after CreateModel('Booking') because Trip is created later
        # in the same migration.
        migrations.AddField(
            model_name='booking',
            name='trip',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='bookings', to='bookings.trip'),
        ),
    ]
| [
"frances.deboo@gmail.com"
] | frances.deboo@gmail.com |
604f5e80c36cb223093158ec656c6176b07e9792 | fe83c75ac4de3041ffba6d33459aade1abd3d575 | /CSCI 127/assignment 51.py | 0c44704a4d9756eebd876c5b1e0b6ccc2d0310ef | [] | no_license | JessicaDeMota/CSCI-127 | d09f42c60117ae3b0351cc9959eb5d0b2f62510d | a9a65b6d16934c0c8bd66ca949ddd8387cda87b7 | refs/heads/master | 2021-07-03T07:44:24.572626 | 2021-01-21T05:52:18 | 2021-01-21T05:52:18 | 216,917,987 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 205 | py | #Jessica De Mota Munoz
#jessica.demotamunoz86@myhunter.cuny.edu
#November 21 2019
# Count $s0 up from 0 in steps of 2 until it reaches 20.
ADDI $s0, $zero, 0          # s0 = 0  (accumulator)
ADDI $s1, $zero, 2          # s1 = 2  (step)
ADDI $s2, $zero, 20         # s2 = 20 (limit)
AGAIN: ADD $s0, $s0, $s1    # s0 += s1 -- ADD, not ADDI: the third operand is a register, not an immediate
BEQ $s0, $s2, DONE          # exit once s0 == 20
J AGAIN
DONE:
| [
"noreply@github.com"
] | noreply@github.com |
b2b17925f635b96dc6c465e89d83bdeb73198efb | 7b822beff0f8d423ad0470995aaf323a5de02404 | /django/mysite/polls/migrations/0001_initial.py | d5fc0e3cd24ec9a77e57454ae3c3f38beafac2d3 | [] | no_license | qing-long/python | 0e73fced964f0eda9476b6fd1f5fa7b49f367fb9 | fb97aacb59caa5bdd791bae9c87f5214da4dd656 | refs/heads/master | 2020-05-19T21:08:47.688711 | 2019-08-07T01:25:03 | 2019-08-07T01:25:03 | 185,217,111 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,165 | py | # Generated by Django 2.1.2 on 2018-10-26 12:27
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the polls app: Question and Choice, with
    Choice.question added as a CASCADE foreign key."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Choice',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('choice_text', models.CharField(max_length=120)),
                ('votes', models.IntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('question_text', models.CharField(max_length=120)),
                ('pub_date', models.DateTimeField(verbose_name='date published')),
            ],
        ),
        migrations.AddField(
            model_name='choice',
            name='question',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='polls.Question'),
        ),
    ]
| [
"lansheng@forchange.tech"
] | lansheng@forchange.tech |
852e59d11cbc9d71578cb3ea6e88aec4026a4b51 | 38fa179fcc17068c105aa9a687aae7bccc525f32 | /train.py | 4f6d4ee3f7a9075528d47b7c56438f6ce16ab31b | [] | no_license | miazn98/Math32 | 3dd025a01dad393ad021778276c2128c21c2e4d8 | 92f51edb2024d907ef93cfdd7d0094255ff03b64 | refs/heads/master | 2022-04-09T15:43:46.602482 | 2020-03-06T22:22:51 | 2020-03-06T22:22:51 | 243,124,567 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,938 | py | from torch import optim, save
from torch.nn import CrossEntropyLoss
from torch.utils.data import DataLoader
import matplotlib.pyplot as plt
from os import path, mkdir
from torchvision import transforms
def train(model, train_ds, val_ds, train_opts, transform_ds=None, exp_dir=None):
    """
    Fits a categorization model on the provided data

    Arguments
    ---------
    model: (A pytorch module), the categorization model to train
    train_ds: (TensorDataset), the examples (images and labels) in the training set
    val_ds: (TensorDataset), the examples (images and labels) in the validation set
    train_opts: (dict), the training schedule. Read the assignment handout
                for the keys and values expected in train_opts
    transform_ds: unused by this function; accepted for API compatibility
                  (TODO confirm intended use)
    exp_dir: (string), a directory where the model checkpoints should be saved (optional)
    """
    train_dl = DataLoader(train_ds, train_opts["batch_size"], shuffle=True)
    # NOTE(review): train_transform is created but never used below.
    train_transform = DataLoader(train_ds, train_opts["batch_size"], shuffle=True)
    val_dl = DataLoader(val_ds, train_opts["batch_size"] * 2, shuffle=False)
    print(train_dl)

    num_tr = train_ds.tensors[0].size(0)
    num_val = val_ds.tensors[0].size(0)
    print(f"Training on {num_tr} and validating on {num_val} examples")

    # we will use stochastic gradient descent
    optimizer = optim.SGD(
        model.parameters(),
        lr=train_opts["lr"],
        momentum=train_opts["momentum"],
        weight_decay=train_opts["weight_decay"]
    )
    # step-decay learning-rate schedule
    lr_scheduler = optim.lr_scheduler.StepLR(
        optimizer=optimizer,
        step_size=train_opts["step_size"],
        gamma=train_opts["gamma"]
    )

    # the loss function of choice for most image categorization tasks
    # is categorical cross-entropy
    criterion = CrossEntropyLoss()

    # track the training metrics
    epoch_loss_tr = []
    epoch_acc_tr = []
    epoch_loss_val = []
    epoch_acc_val = []

    num_epochs = train_opts["num_epochs"]
    for epoch in range(num_epochs):
        # training phase
        model.train()
        tr_loss, train_acc = fit(epoch, model, train_dl, criterion, optimizer, lr_scheduler)
        train_acc = train_acc/num_tr  # correct-count -> fraction
        epoch_loss_tr.append(tr_loss)
        epoch_acc_tr.append(train_acc)

        # validation phase (no optimizer passed -> fit performs no updates)
        model.eval()
        val_loss, val_acc = fit(epoch, model, val_dl, criterion)
        val_acc = val_acc/num_val
        epoch_loss_val.append(val_loss)
        epoch_acc_val.append(val_acc)

        # it is always good to report the training metrics at the end of every epoch
        print(f"[{epoch + 1}/{num_epochs}: tr_loss {tr_loss:.4} val_loss {val_loss:.4} "
              f"t_acc {train_acc:.2%} val_acc {val_acc:.2%}]")

        # save model checkpoint if exp_dir is specified
        # (note: the checkpoint file name contains a space, e.g. "checkpoint _1.pt")
        if exp_dir:
            if path.exists(exp_dir):
                save(model.state_dict(), path.join(exp_dir, f"checkpoint _{epoch + 1}.pt"))
            else:
                try:
                    mkdir(exp_dir)
                    save(model.state_dict(), path.join(exp_dir, f"checkpoint _{epoch + 1}.pt"))
                except FileNotFoundError:
                    # parent of exp_dir does not exist; skip checkpointing
                    pass

    # plot the training metrics at the end of training
    plot(epoch_loss_tr, epoch_acc_tr, epoch_loss_val, epoch_acc_val)
def fit(epoch, model, data_loader, criterion, optimizer=None, scheduler=None):
    """
    Executes a training (or validation) epoch

    epoch: (int), the training epoch. This parameter is used by the learning rate scheduler
    model: (a pytorch module), the categorization model being trained
    data_loader: (DataLoader), the training or validation set
    criterion: (CrossEntropy) for this task. The objective function
    optimizer: (SGD) for this task. The optimization function (optional).
               When omitted, no backprop/updates happen (validation mode)
    scheduler: (StepLR) for this schedule. The learning rate scheduler (optional)

    Return
    ------
    epoch_loss: (float), the average loss on the given set for the epoch
    epoch_acc: (int), the NUMBER of correctly classified examples (the
               caller divides by the dataset size to get the accuracy)
    """
    epoch_loss = epoch_acc = 0
    for mini_x, mini_y in data_loader:
        # print(mini_x.size())
        pred = model(mini_x).squeeze()
        # print(pred.size())
        # print(mini_y.size())
        # l2 = 0
        # for param in model.parameters():
        #     if l2 is None:
        #         l2 = param.norm(1)
        #     else:
        #         l2 = l2 + param.norm(1)
        loss = criterion(pred, mini_y)
        epoch_loss += loss.item()
        # count of correct predictions in this mini-batch
        epoch_acc += mini_y.eq(pred.argmax(dim=1)).sum().item()
        if optimizer:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            # NOTE(review): called once per mini-batch with an explicit
            # epoch argument (deprecated StepLR signature); effectively
            # idempotent within an epoch -- confirm intended behavior.
            scheduler.step(epoch)

    epoch_loss = epoch_loss / len(data_loader)
    return epoch_loss, epoch_acc
def plot(loss_tr, acc_tr, loss_val, acc_val):
    """
    plots the training metrics

    Arguments
    ---------
    loss_tr: (list), the average epoch loss on the training set for each epoch
    acc_tr: (list), the epoch categorization accuracy on the training set for each epoch
    loss_val: (list), the average epoch loss on the validation set for each epoch
    acc_val: (list), the epoch categorization accuracy on the validation set for each epoch
    """
    figure, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 6))
    n = [i + 1 for i in range(len(loss_tr))]  # 1-based epoch numbers for the x-axis
    # express accuracies as percentages
    acc_tr = [x * 100 for x in acc_tr]
    acc_val = [x * 100 for x in acc_val]

    # left panel: loss curves
    ax1.plot(n, loss_tr, 'bs-', markersize=3, label="train")
    ax1.plot(n, loss_val, 'rs-', markersize=3, label="val")
    ax1.legend(loc="upper right")
    ax1.set_title("Losses")
    ax1.set_ylabel("Loss")
    ax1.set_xlabel("Epoch")

    # right panel: accuracy curves
    ax2.plot(n, acc_tr, 'bo-', markersize=3, label="train")
    ax2.plot(n, acc_val, 'ro-', markersize=3, label="val")
    ax2.legend(loc="upper right")
    ax2.set_title("Accuracy")
    ax2.set_ylabel("Accuracy (%)")
    ax2.set_xlabel("Epoch")
| [
"noreply@github.com"
] | noreply@github.com |
999bdcdf184cccb92c7e255eb8f9f08ead6fe222 | 1d955e8d9be78d877d972a3aad822ab7f870d1d9 | /clarifai/json_reader.py | 7ea2134b7c7938061d9b1d0cc5ff7c22162214c8 | [] | no_license | jason-wong-9/TellMeMore | d68c589f73410f84907d3ac57fde6faa620eb65e | 5130c7669494444f13de328be5e97957c74dd018 | refs/heads/master | 2020-06-14T03:54:38.843262 | 2016-12-04T04:17:58 | 2016-12-04T04:17:58 | 75,496,710 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 440 | py | import json
import operator
from pprint import pprint
# Load a Clarifai API response, sort its concepts by confidence
# (descending) and write the sorted (name, value) pairs to kv.json.
with open('data.json') as data_file:
    data = json.load(data_file)

# Concept list of the first output: [{'name': ..., 'value': ...}, ...]
array = data["outputs"][0]["data"]["concepts"]

# name -> confidence value
_dict = {i['name']: i['value'] for i in array}

sorted_x = sorted(_dict.items(), key=operator.itemgetter(1), reverse=True)

# Open in text mode: json.dump() writes str, which a binary-mode ('wb')
# file object rejects under Python 3.
with open('kv.json', 'w') as outfile:
    json.dump(sorted_x, outfile, indent=4)
| [
"alexonej@gmail.com"
] | alexonej@gmail.com |
a850df4143ac34046c10d724431d1e9be5a09cd3 | 387485543b44cdd4586fab5b40c1450b962c7f73 | /thinkstats/hazard_function.py | fa2d7a36807992c4032ba72df46b5d298779e6d3 | [
"Apache-2.0"
] | permissive | eschmidt42/thinkstats | 482dcfac65fb30b3f61e01b308920e9818285c18 | ad15dfd102b504acf2781260e6c4da9ae844e181 | refs/heads/master | 2023-04-15T03:03:45.144166 | 2022-02-27T09:01:09 | 2022-02-27T09:01:09 | 247,499,425 | 0 | 0 | Apache-2.0 | 2023-04-12T05:58:58 | 2020-03-15T15:56:48 | Jupyter Notebook | UTF-8 | Python | false | false | 1,880 | py | # AUTOGENERATED! DO NOT EDIT! File to edit: notebooks/04_hazard_function.ipynb (unless otherwise specified).
__all__ = ['estimate_hazard_fun']
# Cell
import matplotlib.pyplot as plt
import matplotlib as mpl
import numpy as np
from scipy import stats
from functools import partial
import statsmodels.formula.api as smf
import pandas as pd
# Cell
def estimate_hazard_fun(df: pd.DataFrame, dep_var: str, idp_var: str, fill: int = 0):
    """Estimate the hazard and survival functions of the binary event `dep_var`
    over the independent variable `idp_var`.

    Parameters
    ----------
    df : DataFrame with at least the columns `dep_var` (binary 0/1) and `idp_var`.
    dep_var : name of the binary event column (1 = event occurred).
    idp_var : name of the "time"-like column the hazard is computed over.
    fill : value substituted for counts missing on one side of the outer join.

    Returns
    -------
    DataFrame with one row per distinct `idp_var` value, carrying the per-value
    counts plus the derived `hazard_function`, `survival_function`, `cdf`,
    `event_frequency`, `sf_int` and `expected_survival` columns.
    """
    assert df[dep_var].nunique() == 2
    df0 = df.loc[df[dep_var]==0, [idp_var]]
    df1 = df.loc[df[dep_var]==1, [idp_var]]
    n = len(df)
    print(f"dep_var '{dep_var}' shares: 0 = {len(df0)/n}, 1 = {len(df1)/n}")

    def count_and_rename(_df):
        # Frequency of each idp_var value as a two-column frame: [idp_var, "count"].
        return _df.groupby(idp_var).size().reset_index().rename(columns={0: "count"})

    hist0 = count_and_rename(df0)
    hist1 = count_and_rename(df1)
    # Outer-join the two histograms so values seen in only one group survive;
    # the overlapping "count" columns become count_0 (no event) / count_1 (event).
    hf = pd.DataFrame(hist0).join(hist1.set_index(idp_var), on=idp_var, how="outer", lsuffix="_0", rsuffix="_1")
    # BUG FIX: sort by the caller-supplied independent variable, not the
    # hard-coded column "age" (which raised KeyError for any other column name).
    hf.sort_values(idp_var, ascending=True, inplace=True)
    # Values present in only one group leave NaN counts after the outer join.
    hf.fillna(fill, inplace=True)
    # Risk set: everyone before the first value, then subtract all earlier exits.
    hf["survivors"] = [n] + list(n - (hf["count_0"] + hf["count_1"]).cumsum().values[:-1])
    hf["hazard_function"] = hf["count_1"] / hf["survivors"]
    hf["survival_function"] = (1-hf["hazard_function"]).cumprod()
    hf["cdf"] = 1. - hf["survival_function"]
    hf["event_frequency"] = hf["count_1"] / (hf["count_0"] + hf["count_1"])
    # Assumes idp_var takes at least two distinct, evenly spaced values;
    # dt is the (constant) grid spacing used to integrate the survival curve.
    dt = np.diff(hf[idp_var])[0]
    hf["sf_int"] = [hf["survival_function"].values[_i:].sum()*dt for _i in range(len(hf))]
    hf["expected_survival"] = hf["sf_int"] / hf["survival_function"]
    return hf
"11818904+eschmidt42@users.noreply.github.com"
] | 11818904+eschmidt42@users.noreply.github.com |
ccdb0c30a8b56af6595a2ea1e7306c14dd805c10 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/HUAWEI-TRNG-MIB.py | a33b84f6643f628a5a51e2194e7f39fc3a47f759 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 18,263 | py | #
# PySNMP MIB module HUAWEI-TRNG-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HUAWEI-TRNG-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:49:07 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion", "ValueRangeConstraint")
hwDatacomm, = mibBuilder.importSymbols("HUAWEI-MIB", "hwDatacomm")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
Counter64, MibIdentifier, Counter32, iso, NotificationType, Gauge32, Bits, ObjectIdentity, Unsigned32, IpAddress, Integer32, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "MibIdentifier", "Counter32", "iso", "NotificationType", "Gauge32", "Bits", "ObjectIdentity", "Unsigned32", "IpAddress", "Integer32", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity")
TextualConvention, DisplayString, TruthValue, DateAndTime, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "TruthValue", "DateAndTime", "RowStatus")
# --- Module identity -------------------------------------------------------
# Huawei time-range (TRNG) MIB, rooted at hwDatacomm.13 (1.3.6.1.4.1.2011.5.25.13).
# NOTE: this file is pysmi auto-generated; the statements below register MIB
# objects with the builder and must keep their original order.
hwTRNG = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13))
hwTRNG.setRevisions(('2011-03-22 00:00', '2003-04-11 00:00',))
# Revision descriptions are only supported on newer pysmi builder versions.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: hwTRNG.setRevisionsDescriptions(('V1.01, modified the description of hwTimerangePeriodicEndTimes. modified the description of hwTrngCreateTimerangeTable, hwTrngAbsoluteTable and hwTrngPeriodicTable . modified the errors of the MIB file. modified the description of leaves. modified the datatype definition and the format of the MIB script.', 'V1.00, initial revision of this MIB module.',))
if mibBuilder.loadTexts: hwTRNG.setLastUpdated('201103220000Z')
if mibBuilder.loadTexts: hwTRNG.setOrganization('Huawei Technologies Co.,Ltd.')
if mibBuilder.loadTexts: hwTRNG.setContactInfo("Huawei Industrial Base Bantian, Longgang Shenzhen 518129 People's Republic of China Website: http://www.huawei.com Email: support@huawei.com ")
if mibBuilder.loadTexts: hwTRNG.setDescription('The mib is used for configuring time range. When configuring the ACL rule, if you need to specify the time for the ACL rule to take effect, you need to run this command to configure a time range before specifying the time. After that, you can specify the time for an ACL to take effect by referring the time range name when configuring the ACL rule.')
hwTRNGMibObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1))
# --- hwTrngCreateTimerangeTable: one row per named time range --------------
# Indexed by hwTrngIndex; rows are created/deleted via hwTrngCreateRowStatus.
hwTrngCreateTimerangeTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 1), )
if mibBuilder.loadTexts: hwTrngCreateTimerangeTable.setStatus('current')
if mibBuilder.loadTexts: hwTrngCreateTimerangeTable.setDescription('Describes a time range. When configuring an ACL rule, set its effective time. To do so, configurate a time range first. After the configuration, the effective time is specified by referencing the time range when an ACL rule is being configured. An ACL time range can be a relative time range and an absolute time range. The index of this table is hwTrngIndex. ')
hwTrngCreateTimerangeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 1, 1), ).setIndexNames((0, "HUAWEI-TRNG-MIB", "hwTrngIndex"))
if mibBuilder.loadTexts: hwTrngCreateTimerangeEntry.setStatus('current')
if mibBuilder.loadTexts: hwTrngCreateTimerangeEntry.setDescription('Describes a time range. When configuring an ACL rule, set its effective time. To do so, configurate a time range first. After the configuration, the effective time is specified by referencing the time range when an ACL rule is being configured. An ACL time range can be a relative time range and an absolute time range. The index of this entry is hwTrngIndex. ')
hwTrngIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256)))
if mibBuilder.loadTexts: hwTrngIndex.setStatus('current')
if mibBuilder.loadTexts: hwTrngIndex.setDescription('Uniquely identifies a time range. Range: 1-256 ')
hwTrngName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 1, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTrngName.setStatus('current')
if mibBuilder.loadTexts: hwTrngName.setDescription('Indicates the character string of a time range name. It is used to identify different time ranges. The character string consists of 1-32 characters of letters and digits. No other characters can be included. ')
hwTrngValidFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 1, 1, 3), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwTrngValidFlag.setStatus('current')
if mibBuilder.loadTexts: hwTrngValidFlag.setDescription('Describes whether the current time range is valid, that is, whether the current time is within the specified time range. Options: 1. true(1) -if the current time is within the specified time range, the value is true(1), which indicates validity. 2. false(2) -if the current time is not within the specified time range, the value is false(2), which indicates invalidity. ')
hwTrngCreateRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 1, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTrngCreateRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwTrngCreateRowStatus.setDescription('Indicates the row status. Options: 1. active(1) -when this leaf is queried, the value is fixed to active(1). 2. createAndGo(4) -add a time range 3. destroy(6) -delete a time range It is used for adding or deleting a time range. To add a time range, you must bind hwTrngName and set hwTrngCreateRowStatus to createAndGo(4). To delete a time range, set hwTrngCreateRowStatus to destroy(6). When this leaf is queried, the value is fixed to active(1). ')
# --- hwTrngAbsoluteTable: absolute (dated start/end) time ranges -----------
# Indexed by (hwTrngAbsoluteNameIndex, hwTrngAbsoluteSubIndex).
hwTrngAbsoluteTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 2), )
if mibBuilder.loadTexts: hwTrngAbsoluteTable.setStatus('current')
if mibBuilder.loadTexts: hwTrngAbsoluteTable.setDescription('Describes an absolute time range. An absolute time range refers to the time range without a period. The time range is active from the specified start time and date to the end time and date. Otherwise, the time range is inactive. The indexes of this table are hwTrngAbsoluteNameIndex and hwTrngAbsoluteSubIndex. ')
hwTrngAbsoluteEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 2, 1), ).setIndexNames((0, "HUAWEI-TRNG-MIB", "hwTrngAbsoluteNameIndex"), (0, "HUAWEI-TRNG-MIB", "hwTrngAbsoluteSubIndex"))
if mibBuilder.loadTexts: hwTrngAbsoluteEntry.setStatus('current')
if mibBuilder.loadTexts: hwTrngAbsoluteEntry.setDescription('Describes an absolute time range. An absolute time range refers to the time range without a period. The time range is active from the specified start time and date to the end time and date. Otherwise, the time range is inactive. The indexes of this entry are hwTrngAbsoluteNameIndex and hwTrngAbsoluteSubIndex. ')
hwTrngAbsoluteNameIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256)))
if mibBuilder.loadTexts: hwTrngAbsoluteNameIndex.setStatus('current')
if mibBuilder.loadTexts: hwTrngAbsoluteNameIndex.setDescription('Uniquely identifies a time range. Range: 1-256 The specified time range must be created in hwTrngCreateTimerangeTable. ')
hwTrngAbsoluteSubIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 12)))
if mibBuilder.loadTexts: hwTrngAbsoluteSubIndex.setStatus('current')
if mibBuilder.loadTexts: hwTrngAbsoluteSubIndex.setDescription('Uniquely identifies an absolute time range. Range: 1-12 ')
hwTimerangeAbsoluteStartTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 2, 1, 3), DateAndTime()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTimerangeAbsoluteStartTime.setStatus('current')
if mibBuilder.loadTexts: hwTimerangeAbsoluteStartTime.setDescription('Indicates the start time of an absolute time range. It is an 8-byte hexadecimal numeral, where, the first two bytes indicate the year, the third byte indicates the month, the fourth byte indicates the day, the fifth byte indicates the hour, the six byte indicates the minute, and the seventh and eighth digits are reserved, which are filled in 0. For example, if the start time is 2010-1-10,8:10, the value is presented as 0x07 0xDA 0x01 0x0A 0x08 0x0A 0x00 0x00. The time range that the device can identify is 1970/01/01 00:00-2099/12/31 23:59. Therefore, the time value must be within the time range. ')
hwTimerangeAbsoluteEndTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 2, 1, 4), DateAndTime()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTimerangeAbsoluteEndTime.setStatus('current')
if mibBuilder.loadTexts: hwTimerangeAbsoluteEndTime.setDescription('Indicates the end time of an absolute time range. The format is the same as that of hwTrngAbsoluteStartTime. The value of the end time must be larger than that of the start time. If the value is not specified, the system uses 2099/12/31 23:59 by default. ')
hwTimerangeAbsolueRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 2, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTimerangeAbsolueRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwTimerangeAbsolueRowStatus.setDescription('Indicates the row status. Options: 1. active(1) -when this leaf is queried, the value is fixed to active(1). 2. createAndGo(4) -add an absolute time range 3. destroy(6) -delete an absolute time range It is used for adding or deleting an absolute time range. To add an absolute time range, enter hwTrngAbsoluteStartTime and set hwTrngAbsolueRowStatus to createAndGo(4). hwTrngAbsoluteEndTime is optional. To delete an absolute time range, set hwTrngAbsolueRowStatus to destroy(6). When this leaf is queried, the value is fixed to active(1). ')
# --- hwTrngPeriodicTable: periodic (weekly recurring) time ranges ----------
# Indexed by (hwTrngPeriodicNameIndex, hwTrngPeriodicSubIndex).
hwTrngPeriodicTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 3), )
if mibBuilder.loadTexts: hwTrngPeriodicTable.setStatus('current')
if mibBuilder.loadTexts: hwTrngPeriodicTable.setDescription('Describes a relative time range. A relative time range refers to the time range with a period. When a time range is already created, only the specific time is specified but the date is set to a day from Monday to Sunday. The time range is active at the specified time and date. Otherwise, the time range is inactive. The indexes of this table are hwTrngPeriodicNameIndex and hwTrngPeriodicSubIndex. ')
hwTrngPeriodicEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 3, 1), ).setIndexNames((0, "HUAWEI-TRNG-MIB", "hwTrngPeriodicNameIndex"), (0, "HUAWEI-TRNG-MIB", "hwTrngPeriodicSubIndex"))
if mibBuilder.loadTexts: hwTrngPeriodicEntry.setStatus('current')
if mibBuilder.loadTexts: hwTrngPeriodicEntry.setDescription('Describes a relative time range. A relative time range refers to the time range with a period. When a time range is already created, only the specific time is specified but the date is set to a day from Monday to Sunday. The time range is active at the specified time and date. Otherwise, the time range is inactive. The indexes of this entry are hwTrngPeriodicNameIndex and hwTrngPeriodicSubIndex. ')
hwTrngPeriodicNameIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256)))
if mibBuilder.loadTexts: hwTrngPeriodicNameIndex.setStatus('current')
if mibBuilder.loadTexts: hwTrngPeriodicNameIndex.setDescription('Uniquely identifies a relative time range. Range: 1-256 The specified time range must be created in hwTrngCreateTimerangeTable. ')
hwTrngPeriodicSubIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 32)))
if mibBuilder.loadTexts: hwTrngPeriodicSubIndex.setStatus('current')
if mibBuilder.loadTexts: hwTrngPeriodicSubIndex.setDescription('Uniquely identifies a relative time range. Range: 1-32 ')
hwTrngPeriodicDayofWeek = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 3, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 127))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTrngPeriodicDayofWeek.setStatus('current')
if mibBuilder.loadTexts: hwTrngPeriodicDayofWeek.setDescription('Indicates the day of week within the periodic time range. The values are as follows: Sunday: 0x01 Monday: 0x02 Tuesday: 0x04 Wednesday: 0x08 Thursday: 0x10 Friday: 0x20 Saturday: 0x40 If the value is set to Sunday and Monday, perform the | operation to the values of Sunday and Monday, and the value is 0x03, and so on. ')
hwTimerangePeriodicStartTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 3, 1, 4), DateAndTime()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTimerangePeriodicStartTime.setStatus('current')
if mibBuilder.loadTexts: hwTimerangePeriodicStartTime.setDescription('Indicates the start time of a periodic time range. The format is the same as that of hwTrngAbsoluteStartTime in hwTrngAbsoluteTable. The periodic time needs only the hour and minute values, and thus only the fifth and sixth bytes are used, where, the fifth byte indicates the hour value of the start time and the sixth byte indicates the minute value. Other bytes are reserved and are filled in 0. For example, if the start time is 08:30, the value is presented as 0x00 0x00 0x00 0x00 0x08 0x1E 0x00 0x00. The time must be from 00:00 to 24:00. ')
hwTimerangePeriodicEndTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 3, 1, 5), DateAndTime()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTimerangePeriodicEndTime.setStatus('current')
if mibBuilder.loadTexts: hwTimerangePeriodicEndTime.setDescription('Indicates the end time of a periodic time range. The format is the same as that of hwTrngPeriodicStartTime. The value of the end time must be larger than that of the start time. The value must be from 00:00 to 24:00. The 7th byte is used only in the case of 23: 59: 60 to indicate the time 24: 00. ')
hwTimerangePeriodicRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 3, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTimerangePeriodicRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwTimerangePeriodicRowStatus.setDescription('Indicates the row status. Options: 1. active(1) -when this leaf is queried, the value is fixed to active(1). 2. createAndGo(4) -add a relative time range 3. destroy(6) -delete a relative time range It is used for adding or deleting a relative time range. To add a relative time range, enter hwTrngPeriodicStartTime and hwTrngPeriodicEndTime, and set hwTrngPeriodicRowStatus to createAndGo(4). To delete a relative time range, set hwTrngAbsolueRowStatus to destroy(6). When this leaf is queried, the value is fixed to active(1). ')
# --- Conformance and compliance statements ---------------------------------
hwTRNGMibConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 3))
hwTRNGMibCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 3, 1))
hwTRNGMibCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 3, 1, 1)).setObjects(("HUAWEI-TRNG-MIB", "hwTRNGGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwTRNGMibCompliance = hwTRNGMibCompliance.setStatus('current')
if mibBuilder.loadTexts: hwTRNGMibCompliance.setDescription('The compliance statement for entities which implement the Huawei Time-range MIB.')
hwTRNGMibGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 3, 2))
hwTRNGGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 3, 2, 1)).setObjects(("HUAWEI-TRNG-MIB", "hwTrngName"), ("HUAWEI-TRNG-MIB", "hwTrngValidFlag"), ("HUAWEI-TRNG-MIB", "hwTrngCreateRowStatus"), ("HUAWEI-TRNG-MIB", "hwTimerangeAbsoluteStartTime"), ("HUAWEI-TRNG-MIB", "hwTimerangeAbsoluteEndTime"), ("HUAWEI-TRNG-MIB", "hwTimerangeAbsolueRowStatus"), ("HUAWEI-TRNG-MIB", "hwTrngPeriodicDayofWeek"), ("HUAWEI-TRNG-MIB", "hwTimerangePeriodicStartTime"), ("HUAWEI-TRNG-MIB", "hwTimerangePeriodicEndTime"), ("HUAWEI-TRNG-MIB", "hwTimerangePeriodicRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwTRNGGroup = hwTRNGGroup.setStatus('current')
if mibBuilder.loadTexts: hwTRNGGroup.setDescription('A collection of objects providing mandatory time-range information.')
# --- Export every defined symbol to the MIB builder ------------------------
mibBuilder.exportSymbols("HUAWEI-TRNG-MIB", hwTrngAbsoluteSubIndex=hwTrngAbsoluteSubIndex, hwTrngCreateRowStatus=hwTrngCreateRowStatus, hwTrngPeriodicNameIndex=hwTrngPeriodicNameIndex, hwTRNGMibGroups=hwTRNGMibGroups, hwTrngAbsoluteNameIndex=hwTrngAbsoluteNameIndex, hwTrngIndex=hwTrngIndex, hwTimerangeAbsoluteStartTime=hwTimerangeAbsoluteStartTime, PYSNMP_MODULE_ID=hwTRNG, hwTrngPeriodicTable=hwTrngPeriodicTable, hwTrngAbsoluteEntry=hwTrngAbsoluteEntry, hwTRNG=hwTRNG, hwTRNGMibConformance=hwTRNGMibConformance, hwTrngPeriodicSubIndex=hwTrngPeriodicSubIndex, hwTrngPeriodicEntry=hwTrngPeriodicEntry, hwTrngValidFlag=hwTrngValidFlag, hwTrngPeriodicDayofWeek=hwTrngPeriodicDayofWeek, hwTRNGMibCompliance=hwTRNGMibCompliance, hwTrngCreateTimerangeTable=hwTrngCreateTimerangeTable, hwTrngName=hwTrngName, hwTimerangeAbsoluteEndTime=hwTimerangeAbsoluteEndTime, hwTimerangePeriodicEndTime=hwTimerangePeriodicEndTime, hwTRNGMibCompliances=hwTRNGMibCompliances, hwTimerangePeriodicRowStatus=hwTimerangePeriodicRowStatus, hwTRNGMibObjects=hwTRNGMibObjects, hwTrngCreateTimerangeEntry=hwTrngCreateTimerangeEntry, hwTRNGGroup=hwTRNGGroup, hwTimerangeAbsolueRowStatus=hwTimerangeAbsolueRowStatus, hwTrngAbsoluteTable=hwTrngAbsoluteTable, hwTimerangePeriodicStartTime=hwTimerangePeriodicStartTime)
| [
"dcwangmit01@gmail.com"
] | dcwangmit01@gmail.com |
f5783295b9da75775129f98dd946468103cef8d6 | 8ce88df2976d07fd287d1c96771694f4f9fd6c9a | /exe103.py | de36ffae44b067077fea47aa6bd13b69f2728a67 | [] | no_license | gabialeixo/python-exercises | 9974b9b2e15f92c7f58fb32c0cc0abdd5015a34c | f62f7ba49eb77fc16a37058f8974153491b565bb | refs/heads/master | 2023-01-24T09:23:12.387968 | 2020-12-07T17:31:07 | 2020-12-07T17:31:07 | 305,712,203 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 564 | py | #Faça um programa que tenha uma função chamada ficha(), que receba dois parâmetros opcionais: o nome de um jogador e quantos
#gols ele marcou. O programa deverá ser capaz de mostrar a ficha do jogador, mesmo que algum dado não tenha sido informado
#corretamente.
def ficha(nome='<desconhecido>', gols=0):
    """Print a player's scorecard; both the name and goal count are optional."""
    linha = 'O jogador {} fez {} gol(s) no campeonato.'.format(nome, gols)
    print(linha)
# Read the player's name and goal count from the console.
# (input() already returns str, so the original str(...) wrappers were redundant.)
n = input('Nome do jogador: ')
g = input('Saldo de gols: ')
# Keep the goal count only when the user typed a non-negative integer string.
if g.isnumeric():
    g = int(g)
else:
    g = 0
# Fall back to the default name when the user typed nothing (or only spaces).
if n.strip() == '':
    ficha(gols=g)
else:
    ficha(n, g)
"gabealeixo13@gmail.com"
] | gabealeixo13@gmail.com |
a8256707da5d8d31589335c85ecba4682757d5d6 | 274c001d19e598e2b95fee07e480d780efc3486f | /cogs/info.py | 2cb8baf8e1054c36c295f74a308c6e7f7fdd330d | [] | no_license | Ha3kerDevs/Ha3ker-s-Utilities | 050d3e12e64d1bd5da6fba45876476202bf74422 | dbdf06a60febbfd691545c31ebaaae6565b4b3be | refs/heads/main | 2023-07-13T00:53:39.381996 | 2021-08-28T12:17:28 | 2021-08-28T12:17:28 | 339,258,810 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,118 | py | from datetime import datetime
from typing import Optional
from cogs.utils import config
import platform
import discord, time
import datetime
import psutil
from discord import Embed, Member
from discord.ext import commands
# Module import timestamp; not referenced elsewhere in this file's visible
# code (botinfo derives uptime from client.launch_time instead).
start_time = time.time()
class Information(commands.Cog):
    """Cog exposing informational commands: user lookup and bot status."""

    def __init__(self, client):
        self.client = client
        # Prime psutil's CPU sampling so a later cpu_percent(interval=None)
        # call returns a value measured since this point rather than 0.0.
        psutil.cpu_percent()

    @commands.command(
        name="whois",
        description="Check your or a person's server information.",
        usage='[user]'
    )
    async def whois(self, ctx, target: Optional[Member]):
        """Show an embed with a member's profile; defaults to the invoker."""
        target = target or ctx.author
        roles = [role for role in target.roles]
        embed = Embed(title="User information",
                      colour=target.colour,
                      timestamp=datetime.datetime.utcnow())
        embed.set_thumbnail(url=target.avatar_url)
        embed.set_footer(text=f"ID: {target.id}")
        embed.add_field(name="Name", value=str(target), inline=True)
        embed.add_field(name="Guild/Server name", value=target.display_name, inline=True)
        # roles[1:] skips the implicit @everyone role that is always first.
        embed.add_field(name=f"Roles [{len(roles[1:])}]", value=" ".join([role.mention for role in roles[1:]]), inline=False)
        # config.statuses maps discord status enums to display strings.
        embed.add_field(name="Status",
                        value=f"Desktop Status: {config.statuses[target.desktop_status]}\n"
                              f"Mobile Status: {config.statuses[target.mobile_status]}\n"
                              f"Browser Status: {config.statuses[target.web_status]}",
                        inline=False
                        )
        embed.add_field(name="Registered", value=target.created_at.strftime("%B %d, %Y %I:%M %p UTC"), inline=False)
        embed.add_field(name="Joined", value=target.joined_at.strftime("%B %d, %Y %I:%M %p UTC"), inline=False)
        embed.add_field(name="Bot?", value=target.bot, inline=True)
        embed.add_field(name="Boosted?", value=bool(target.premium_since), inline=True)
        await ctx.send(embed=embed)

    @commands.command(pass_context=True)
    @commands.cooldown(1, 3, commands.BucketType.user)
    async def botinfo(self, ctx):
        """Show an embed with the bot's uptime, resource usage and versions."""
        # NOTE(review): assumes the bot object sets `launch_time` at startup
        # elsewhere in the project — confirm against the main bot module.
        delta_uptime = datetime.datetime.utcnow() - self.client.launch_time
        hours, remainder = divmod(int(delta_uptime.total_seconds()), 3600)
        minutes, seconds = divmod(remainder, 60)
        days, hours = divmod(hours, 24)
        cpu = psutil.cpu_percent(interval=None)
        ram = psutil.virtual_memory().percent
        embed = discord.Embed(description="_This bot is only designed for Ha3ker's Skyland._",
                              colour=0x9CDFFF)
        embed.set_author(name=f"{self.client.user.name}", icon_url=self.client.user.avatar_url)
        embed.add_field(name="Developer", value="TheHa3ker#3080",inline=True)
        embed.add_field(name="Version", value=config.Version,inline=True)
        embed.add_field(name="Usage",
                        value=f"```CPU Usage: {cpu}%\nRAM Usage: {ram}%```", inline=False)
        embed.add_field(name="Version",
                        value=f"```Python: v{platform.python_version()}\ndiscord.py: v{discord.__version__}```", inline=False)
        embed.set_footer(
            text=f"Powered by {config.tm} | Uptime: {days}d, {hours}h, {minutes}m, {seconds}s")
        await ctx.send(embed=embed)
def setup(client):
    # Extension entry point: discord.py calls this when the cog is loaded
    # via client.load_extension(), registering the Information cog.
    client.add_cog(Information(client))
"master2pvpgaming@gmail.com"
] | master2pvpgaming@gmail.com |
0c0963b7d6d4e135b8aa570366dc9bceeb5404b1 | 9bd0695c89f7ae58c06b9bc43b41d8de4ad21cb4 | /aml-scripts/test-endpoint.py | 6c0f2634868a0883d3811985333ba1779b5f63df | [] | no_license | colbyford/tf-food-demo | 984ac14600c3b30f17d82edcf3ded6044b24ef1f | 82b54259301cec3442b6595bd4e57453403e2da9 | refs/heads/master | 2020-11-25T23:44:37.205140 | 2019-12-09T16:13:22 | 2019-12-09T16:13:22 | 228,892,905 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,263 | py | # Copyright (c) 2019 Microsoft
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
import json
import os
import click
import requests
from azureml.core import Model, Workspace
from azureml.core.authentication import AzureCliAuthentication
@click.command()
@click.option("-n", "--service-name", type=str,
              help="The name of the service to be tested")
@click.option("--data-path", '-d', type=str,
              default="./code/score/sample_data.json",
              help="The path to the data file to send to be tested.")
@click.option("--auth-enabled", "-a", is_flag=True)
def test_response(service_name, data_path, auth_enabled):
    # Smoke-test a deployed AzureML web service by POSTing sample data to its
    # scoring URI and asserting an HTTP 200 reply.
    # (No docstring on purpose: click would surface it as the command help.)
    # Authenticate with the Azure CLI's cached credentials and load the
    # workspace definition from the local config file.
    cli_auth = AzureCliAuthentication()
    ws = Workspace.from_config(auth=cli_auth)
    # Request payload read from the sample-data JSON file.
    with open(data_path, "r") as file_obj:
        data = json.load(file_obj)
    service = ws.webservices[service_name]
    headers = {}
    if auth_enabled:
        # Key-based auth: use the service's first key as a bearer token.
        auth_key = service.get_keys()[0]
        headers = {"Authorization": "Bearer {0}".format(auth_key)}
    response = requests.post(service.scoring_uri, json=data, headers=headers)
    # Parsing doubles as a check that the body is valid JSON.
    # NOTE(review): response_data is otherwise unused — consider asserting
    # on its contents as well.
    response_data = json.loads(response.content)
    assert response.status_code == 200
if __name__ == "__main__":
    # Click parses the CLI options from sys.argv.
    test_response()
| [
"erikzwi@microsoft.com"
] | erikzwi@microsoft.com |
663f5bdb39aa63f44cedcaac933aec08ffb6e5c4 | 967b70e4c5db22469382fc877441be5ef29c1f4c | /tf_venv/bin/wheel | 8c5d449b49bbc4513e525a495f6d296dec2608f3 | [] | no_license | Prakhar0409/Body-Pose-Estimation | 04ea14e7d8ab865b25e702ffaf957623c82bd001 | b1bf86a2f327cec1a175c42539e105543a13e67f | refs/heads/master | 2021-08-22T04:33:50.218705 | 2017-11-29T08:32:50 | 2017-11-29T08:32:50 | 112,444,672 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 251 | #!/home/prakhar0409/random/body_pose/tf_venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from wheel.tool import main
if __name__ == '__main__':
    # Generated console-script entry point: strip an optional
    # '-script.py'/'-script.pyw'/'.exe' suffix from argv[0] so the command
    # name looks bare, then exit with wheel's CLI return value.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"prakhar0409@gmail.com"
] | prakhar0409@gmail.com | |
360997885329835ace27fb7874e496bda3e3fd3b | 5aebe67a1f8fd6004a9be435a019f85a3036cb59 | /convolution.py | 573d0e9297623d004713c20d4da054db608d2c8a | [] | no_license | sudhigk/FIP | e3354927b79d5d1f6cb909a09690b17eae1f7727 | fb79c24093e74842b0731156969f1b46cc83c34c | refs/heads/master | 2020-07-29T02:40:24.117410 | 2019-11-29T04:51:08 | 2019-11-29T04:51:08 | 209,636,330 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,482 | py | # convolution using mask
import cv2
import numpy as np
# Demo: "full" 2-D convolution of a grayscale image with an averaging mask,
# implemented with explicit loops (educational, not optimized).
img1 = cv2.imread('images/test.jpg', cv2.IMREAD_GRAYSCALE)
(d1, d2) = img1.shape
print(d1, d2)

# 3x3 box-blur mask: every coefficient is 1/9.
md1 = 3
md2 = 3
mask = np.full((md1, md2), 1.0 / 9.0)

# Output size of a full convolution: input + mask - 1 along each axis.
# BUG FIX: the row count must use the mask's row count md1; the original
# used md2 for both axes, which only worked because the mask is square.
nd1 = d1 + md1 - 1
nd2 = d2 + md2 - 1
otpt = np.zeros((nd1, nd2, 3), np.uint8)

# Two resizable preview windows, stacked vertically.
w1 = 'input'
w2 = 'output'
cv2.namedWindow(w1, cv2.WINDOW_FREERATIO)
cv2.resizeWindow(w1, 400, 400)
cv2.moveWindow(w1, 0, 0)
cv2.namedWindow(w2, cv2.WINDOW_FREERATIO)
cv2.resizeWindow(w2, 400, 400)
cv2.moveWindow(w2, 0, 400)

# Echo the mask coefficients.
for i in range(0, md1):
    for j in range(0, md2):
        print(mask[i, j])

# Direct convolution: out[i, j] = sum_m sum_n mask[m, n] * img[i-m, j-n],
# restricted to indices inside the image (i.e. implicit zero padding).
for i in range(0, nd1):
    for j in range(0, nd2):
        s = 0.0
        for m in range(0, md1):
            if 0 <= (i - m) < d1:
                for n in range(0, md2):
                    if 0 <= (j - n) < d2:
                        s += float(mask[m, n]) * float(img1[i - m, j - n])
        # Broadcasting writes the same intensity into all three channels.
        otpt[i, j] = int(s)

cv2.imshow(w1, img1)
cv2.imshow(w2, otpt)
cv2.waitKey(0)
cv2.destroyAllWindows()
cv2.destroyAllWindows() | [
"gksudhee6@gmail.com"
] | gksudhee6@gmail.com |
4ce5e00e70fde8e3af4c219e04e984a1a7838166 | 309aa3c26b4d52cfc6e9aab16aabab6d92063497 | /apiv1/urls.py | fb2d5ab74d02e7cb6753d323f17e8938341d1a7a | [] | no_license | suzupro2019/Deeparture | af1baba090a8ec6832dd5bbb5994a59b91d43c59 | 74b4de5bc2fc07adb0ade0207ac623997b5a0689 | refs/heads/master | 2020-09-19T14:01:53.121386 | 2020-03-11T10:24:37 | 2020-03-11T10:24:37 | 224,233,355 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | from django.urls import path, include
from rest_framework import routers
from apiv1 import views
router = routers.SimpleRouter()
router.register('projects', views.ProjectViewSet)
app_name = 'apiv1'
urlpatterns = [
path('chordprog/', views.ChordProgressionGenerateAPIView.as_view()),
path('', include(router.urls))
]
| [
"ne290008@senshu-u.jp"
] | ne290008@senshu-u.jp |
cc6895b8b702d18633c777f02493a8fe29b851f5 | 05263538c3ad0f577cdbbdb9bac87dcf450230ce | /alexa/ask-sdk/ask_sdk_model/dialog/elicit_slot_directive.py | 837a97063497b6119b45f42914b105a8118715ce | [] | no_license | blairharper/ISS-GoogleMap-project | cea027324fc675a9a309b5277de99fc0265dcb80 | 3df119036b454a0bb219af2d703195f4154a2471 | refs/heads/master | 2020-03-21T16:47:21.046174 | 2018-10-24T08:05:57 | 2018-10-24T08:05:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,761 | py | # coding: utf-8
#
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file
# except in compliance with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
# the specific language governing permissions and limitations under the License.
#
import pprint
import re # noqa: F401
import six
import typing
from enum import Enum
from ask_sdk_model.directive import Directive
if typing.TYPE_CHECKING:
from typing import Dict, List, Optional
from datetime import datetime
from ask_sdk_model.intent import Intent
class ElicitSlotDirective(Directive):
    """
    Dialog directive asking Alexa to elicit a value for a specific slot.

    NOTE: This class is auto generated.
    Do not edit the class manually.

    :type updated_intent: (optional) ask_sdk_model.intent.Intent
    :type slot_to_elicit: (optional) str
    """
    # Attribute name -> declared type, used by the SDK's deserializer.
    deserialized_types = {
        'object_type': 'str',
        'updated_intent': 'ask_sdk_model.intent.Intent',
        'slot_to_elicit': 'str'
    }
    # Attribute name -> JSON key used on the wire.
    attribute_map = {
        'object_type': 'type',
        'updated_intent': 'updatedIntent',
        'slot_to_elicit': 'slotToElicit'
    }
    def __init__(self, updated_intent=None, slot_to_elicit=None):  # noqa: E501
        # type: (Optional[Intent], Optional[str]) -> None
        """
        :type updated_intent: (optional) ask_sdk_model.intent.Intent
        :type slot_to_elicit: (optional) str
        """
        # Fixed discriminator identifying this directive type on the wire.
        self.__discriminator_value = "Dialog.ElicitSlot"
        self.object_type = self.__discriminator_value
        super(ElicitSlotDirective, self).__init__(object_type=self.__discriminator_value)  # noqa: E501
        self.updated_intent = updated_intent
        self.slot_to_elicit = slot_to_elicit
    def to_dict(self):
        # type: () -> Dict[str, object]
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.deserialized_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize list elements (models and enums).
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else
                    x.value if isinstance(x, Enum) else x,
                    value
                ))
            elif isinstance(value, Enum):
                result[attr] = value.value
            elif hasattr(value, "to_dict"):
                # Nested model object.
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialize dict values (models and enums).
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else
                    (item[0], item[1].value)
                    if isinstance(item[1], Enum) else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        # type: () -> str
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        # type: () -> str
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        # type: (object) -> bool
        """Returns true if both objects are equal"""
        if not isinstance(other, ElicitSlotDirective):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        # type: (object) -> bool
        """Returns true if both objects are not equal"""
        return not self == other
| [
"blair.harper@gmail.com"
] | blair.harper@gmail.com |
8b14a6c891a3dd046ffa57d48e202a1663099457 | 7460c55aedff0cc032d39ce6ce3781a3ec68102b | /8.py | d88615d24e2a7e12d7bb02eb6ad393f3d4803920 | [] | no_license | momoka0122y/sozojyoho_2022_test_submitted_code | 1f8e55d483c340ff62262abd935aff5fb8139628 | bd304647a39a9c7ff1f0b27268071bcb5ae08b65 | refs/heads/master | 2023-07-15T19:26:54.101003 | 2021-09-02T04:06:00 | 2021-09-02T04:06:00 | 402,284,164 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | py | import re
import math
f = open('data/infections2.txt', 'r')
listt = [s for s in f.readlines()]
data = listt[0]
# print(listt[0])
split_list = list(map(int, re.split('[:]', data)))
math.log()
n = len(split_list)
max_a = float("-inf")
for s in range(n-30):
| [
"momoka.my6@gmail.com"
] | momoka.my6@gmail.com |
5d8916a215340d65be153bdfb406612f73906979 | ce0b49ae0daf38691e657c14a38a46a8e131b7a9 | /set1/challenge3.py | 32b4153e38d552dd465b63ec73f52f686fa15443 | [] | no_license | Nbarnes1/cryptopals | d344156ee714705bf90942669e681c89d4ab2d2a | 1f0b7fc47152da0636945201707a621e06bbaa13 | refs/heads/master | 2021-06-15T02:42:30.958219 | 2021-04-01T22:12:39 | 2021-04-01T22:12:39 | 171,950,028 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 305 | py | from binascii import *
from challenge2 import fixedXOR
ciphertext = "1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736"
result = fixedXOR(ciphertext, "c"*len(ciphertext))
print (result)
print (result[2:])
print (bytes.fromhex(result[2:]))
print (b2a_uu(bytes.fromhex(result[2:])))
| [
"nbarnes@hotmail.com"
] | nbarnes@hotmail.com |
6f0ba3d11ea0ff417f52bc4d1e609f0f442421c9 | 130215e73cd45824fc5b7b2bc85949ce03115f20 | /py/netmod_kar2.py | 4479417117ce2a4aa307bbe3d9f7777fdceeeb82 | [] | no_license | felicitygong/MINLPinstances | 062634bf709a782a860234ec2daa7e6bf374371e | 1cd9c799c5758baa0818394c07adea84659c064c | refs/heads/master | 2022-12-06T11:58:14.141832 | 2022-12-01T17:17:35 | 2022-12-01T17:17:35 | 119,295,560 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 66,023 | py | # MINLP written by GAMS Convert at 11/10/17 15:35:22
#
# Equation counts
# Total E G L N X C B
# 667 43 0 624 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 457 321 136 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 1849 1845 4 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.x2 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x3 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x4 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x5 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x6 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x7 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x8 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x9 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x10 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x11 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x12 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x13 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x14 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x15 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x16 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x17 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x18 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x19 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x20 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x21 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x22 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x23 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x24 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x25 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x26 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x27 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x28 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x29 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x30 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x31 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x32 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x33 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x34 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x35 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x36 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x37 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x38 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x39 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x40 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x41 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x42 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x43 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x44 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x45 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x46 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x47 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x48 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x49 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x50 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x51 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x52 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x53 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x54 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x55 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x56 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x57 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x58 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x59 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x60 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x61 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x62 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x63 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x64 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x65 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x66 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x67 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x68 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x69 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x70 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x71 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x72 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x73 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x74 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x75 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x76 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x77 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x78 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x79 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x80 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x81 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x82 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x83 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x84 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x85 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x86 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x87 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x88 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x89 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x90 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x91 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x92 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x93 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x94 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x95 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x96 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x97 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x98 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x99 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x100 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x101 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x102 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x103 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x104 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x105 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x106 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x107 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x108 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x109 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x110 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x111 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x112 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x113 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x114 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x115 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x116 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x117 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x118 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x119 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x120 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x121 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x122 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x123 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x124 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x125 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x126 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x127 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x128 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x129 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x130 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x131 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x132 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x133 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x134 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x135 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x136 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x137 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x138 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x139 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x140 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x141 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x142 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x143 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x144 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x145 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x146 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x147 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x148 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x149 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x150 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x151 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x152 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x153 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x154 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x155 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x156 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x157 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x158 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x159 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x160 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x161 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x162 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x163 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x164 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x165 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x166 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x167 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x168 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x169 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x170 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x171 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x172 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x173 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x174 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x175 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x176 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x177 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x178 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x179 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x180 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x181 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x182 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x183 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x184 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x185 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x186 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x187 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x188 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x189 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x190 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x191 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x192 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x193 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x194 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x195 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x196 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x197 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x198 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x199 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x200 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x201 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x202 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x203 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x204 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x205 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x206 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x207 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x208 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x209 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x210 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x211 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x212 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x213 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x214 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x215 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x216 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x217 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x218 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x219 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x220 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x221 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x222 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x223 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x224 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x225 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x226 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x227 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x228 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x229 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x230 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x231 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x232 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x233 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x234 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x235 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x236 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x237 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x238 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x239 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x240 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x241 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x242 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x243 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x244 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x245 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x246 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x247 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x248 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x249 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x250 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x251 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x252 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x253 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x254 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x255 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x256 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x257 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x258 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x259 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x260 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x261 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x262 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x263 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x264 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x265 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x266 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x267 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x268 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x269 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x270 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x271 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x272 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x273 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x274 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x275 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x276 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x277 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x278 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x279 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x280 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x281 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x282 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x283 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x284 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x285 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x286 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x287 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x288 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x289 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x290 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x291 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x292 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x293 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x294 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x295 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x296 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x297 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x298 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x299 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x300 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x301 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x302 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x303 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x304 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x305 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x306 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x307 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x308 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x309 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x310 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x311 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x312 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x313 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x314 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x315 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x316 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x317 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x318 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x319 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x320 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x321 = Var(within=Reals,bounds=(None,None),initialize=0)
m.b322 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b323 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b324 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b325 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b326 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b327 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b328 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b329 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b330 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b331 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b332 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b333 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b334 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b335 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b336 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b337 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b338 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b339 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b340 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b341 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b342 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b343 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b344 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b345 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b346 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b347 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b348 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b349 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b350 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b351 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b352 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b353 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b354 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b355 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b356 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b357 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b358 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b359 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b360 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b361 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b362 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b363 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b364 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b365 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b366 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b367 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b368 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b369 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b370 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b371 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b372 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b373 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b374 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b375 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b376 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b377 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b378 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b379 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b380 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b381 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b382 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b383 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b384 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b385 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b386 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b387 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b388 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b389 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b390 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b391 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b392 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b393 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b394 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b395 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b396 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b397 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b398 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b399 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b400 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b401 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b402 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b403 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b404 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b405 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b406 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b407 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b408 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b409 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b410 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b411 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b412 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b413 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b414 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b415 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b416 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b417 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b418 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b419 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b420 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b421 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b422 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b423 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b424 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b425 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b426 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b427 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b428 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b429 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b430 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b431 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b432 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b433 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b434 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b435 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b436 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b437 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b438 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b439 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b440 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b441 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b442 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b443 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b444 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b445 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b446 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b447 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b448 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b449 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b450 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b451 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b452 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b453 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b454 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b455 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b456 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b457 = Var(within=Binary,bounds=(0,1),initialize=0)
m.obj = Objective(expr=-(-(0.00641025641025641*m.x6)**2 - (0.00641025641025641*m.x7)**2 - (0.00641025641025641*m.x8)**2
- (0.00641025641025641*m.x9)**2) - 0.0128205128205128*m.x2 - 0.0128205128205128*m.x3
- 0.0128205128205128*m.x4 - 0.0128205128205128*m.x5, sense=minimize)
m.c2 = Constraint(expr= m.b322 + m.b323 + m.b324 + m.b325 == 1)
m.c3 = Constraint(expr= m.b326 + m.b327 + m.b328 + m.b329 == 1)
m.c4 = Constraint(expr= m.b330 + m.b331 + m.b332 + m.b333 == 1)
m.c5 = Constraint(expr= m.b334 + m.b335 + m.b336 + m.b337 == 1)
m.c6 = Constraint(expr= m.b338 + m.b339 + m.b340 + m.b341 == 1)
m.c7 = Constraint(expr= m.b342 + m.b343 + m.b344 + m.b345 == 1)
m.c8 = Constraint(expr= m.b346 + m.b347 + m.b348 + m.b349 == 1)
m.c9 = Constraint(expr= m.b350 + m.b351 + m.b352 + m.b353 == 1)
m.c10 = Constraint(expr= m.b354 + m.b355 + m.b356 + m.b357 == 1)
m.c11 = Constraint(expr= m.b358 + m.b359 + m.b360 + m.b361 == 1)
m.c12 = Constraint(expr= m.b362 + m.b363 + m.b364 + m.b365 == 1)
m.c13 = Constraint(expr= m.b366 + m.b367 + m.b368 + m.b369 == 1)
m.c14 = Constraint(expr= m.b370 + m.b371 + m.b372 + m.b373 == 1)
m.c15 = Constraint(expr= m.b374 + m.b375 + m.b376 + m.b377 == 1)
m.c16 = Constraint(expr= m.b378 + m.b379 + m.b380 + m.b381 == 1)
m.c17 = Constraint(expr= m.b382 + m.b383 + m.b384 + m.b385 == 1)
m.c18 = Constraint(expr= m.b386 + m.b387 + m.b388 + m.b389 == 1)
m.c19 = Constraint(expr= m.b390 + m.b391 + m.b392 + m.b393 == 1)
m.c20 = Constraint(expr= m.b394 + m.b395 + m.b396 + m.b397 == 1)
m.c21 = Constraint(expr= m.b398 + m.b399 + m.b400 + m.b401 == 1)
m.c22 = Constraint(expr= m.b402 + m.b403 + m.b404 + m.b405 == 1)
m.c23 = Constraint(expr= m.b406 + m.b407 + m.b408 + m.b409 == 1)
m.c24 = Constraint(expr= m.b410 + m.b411 + m.b412 + m.b413 == 1)
m.c25 = Constraint(expr= m.b414 + m.b415 + m.b416 + m.b417 == 1)
m.c26 = Constraint(expr= m.b418 + m.b419 + m.b420 + m.b421 == 1)
m.c27 = Constraint(expr= m.b422 + m.b423 + m.b424 + m.b425 == 1)
m.c28 = Constraint(expr= m.b426 + m.b427 + m.b428 + m.b429 == 1)
m.c29 = Constraint(expr= m.b430 + m.b431 + m.b432 + m.b433 == 1)
m.c30 = Constraint(expr= m.b434 + m.b435 + m.b436 + m.b437 == 1)
m.c31 = Constraint(expr= m.b438 + m.b439 + m.b440 + m.b441 == 1)
m.c32 = Constraint(expr= m.b442 + m.b443 + m.b444 + m.b445 == 1)
m.c33 = Constraint(expr= m.b446 + m.b447 + m.b448 + m.b449 == 1)
m.c34 = Constraint(expr= m.b450 + m.b451 + m.b452 + m.b453 == 1)
m.c35 = Constraint(expr= m.b454 + m.b455 + m.b456 + m.b457 == 1)
m.c36 = Constraint(expr= m.x10 - m.b322 <= 0)
m.c37 = Constraint(expr= m.x11 - m.b323 <= 0)
m.c38 = Constraint(expr= m.x12 - m.b324 <= 0)
m.c39 = Constraint(expr= m.x13 - m.b325 <= 0)
m.c40 = Constraint(expr= m.x14 - m.b322 <= 0)
m.c41 = Constraint(expr= m.x15 - m.b323 <= 0)
m.c42 = Constraint(expr= m.x16 - m.b324 <= 0)
m.c43 = Constraint(expr= m.x17 - m.b325 <= 0)
m.c44 = Constraint(expr= m.x18 - m.b322 <= 0)
m.c45 = Constraint(expr= m.x19 - m.b323 <= 0)
m.c46 = Constraint(expr= m.x20 - m.b324 <= 0)
m.c47 = Constraint(expr= m.x21 - m.b325 <= 0)
m.c48 = Constraint(expr= m.x22 - m.b322 <= 0)
m.c49 = Constraint(expr= m.x23 - m.b323 <= 0)
m.c50 = Constraint(expr= m.x24 - m.b324 <= 0)
m.c51 = Constraint(expr= m.x25 - m.b325 <= 0)
m.c52 = Constraint(expr= m.x26 - m.b322 <= 0)
m.c53 = Constraint(expr= m.x27 - m.b323 <= 0)
m.c54 = Constraint(expr= m.x28 - m.b324 <= 0)
m.c55 = Constraint(expr= m.x29 - m.b325 <= 0)
m.c56 = Constraint(expr= m.x30 - m.b322 <= 0)
m.c57 = Constraint(expr= m.x31 - m.b323 <= 0)
m.c58 = Constraint(expr= m.x32 - m.b324 <= 0)
m.c59 = Constraint(expr= m.x33 - m.b325 <= 0)
m.c60 = Constraint(expr= m.x34 - m.b322 <= 0)
m.c61 = Constraint(expr= m.x35 - m.b323 <= 0)
m.c62 = Constraint(expr= m.x36 - m.b324 <= 0)
m.c63 = Constraint(expr= m.x37 - m.b325 <= 0)
m.c64 = Constraint(expr= m.x38 - m.b322 <= 0)
m.c65 = Constraint(expr= m.x39 - m.b323 <= 0)
m.c66 = Constraint(expr= m.x40 - m.b324 <= 0)
m.c67 = Constraint(expr= m.x41 - m.b325 <= 0)
m.c68 = Constraint(expr= m.x42 - m.b322 <= 0)
m.c69 = Constraint(expr= m.x43 - m.b323 <= 0)
m.c70 = Constraint(expr= m.x44 - m.b324 <= 0)
m.c71 = Constraint(expr= m.x45 - m.b325 <= 0)
m.c72 = Constraint(expr= m.x46 - m.b322 <= 0)
m.c73 = Constraint(expr= m.x47 - m.b323 <= 0)
m.c74 = Constraint(expr= m.x48 - m.b324 <= 0)
m.c75 = Constraint(expr= m.x49 - m.b325 <= 0)
m.c76 = Constraint(expr= m.x50 - m.b322 <= 0)
m.c77 = Constraint(expr= m.x51 - m.b323 <= 0)
m.c78 = Constraint(expr= m.x52 - m.b324 <= 0)
m.c79 = Constraint(expr= m.x53 - m.b325 <= 0)
m.c80 = Constraint(expr= m.x54 - m.b322 <= 0)
m.c81 = Constraint(expr= m.x55 - m.b323 <= 0)
m.c82 = Constraint(expr= m.x56 - m.b324 <= 0)
m.c83 = Constraint(expr= m.x57 - m.b325 <= 0)
m.c84 = Constraint(expr= m.x58 - m.b322 <= 0)
m.c85 = Constraint(expr= m.x59 - m.b323 <= 0)
m.c86 = Constraint(expr= m.x60 - m.b324 <= 0)
m.c87 = Constraint(expr= m.x61 - m.b325 <= 0)
m.c88 = Constraint(expr= m.x62 - m.b322 <= 0)
m.c89 = Constraint(expr= m.x63 - m.b323 <= 0)
m.c90 = Constraint(expr= m.x64 - m.b324 <= 0)
m.c91 = Constraint(expr= m.x65 - m.b325 <= 0)
m.c92 = Constraint(expr= m.x66 - m.b322 <= 0)
m.c93 = Constraint(expr= m.x67 - m.b323 <= 0)
m.c94 = Constraint(expr= m.x68 - m.b324 <= 0)
m.c95 = Constraint(expr= m.x69 - m.b325 <= 0)
m.c96 = Constraint(expr= m.x70 - m.b322 <= 0)
m.c97 = Constraint(expr= m.x71 - m.b323 <= 0)
m.c98 = Constraint(expr= m.x72 - m.b324 <= 0)
m.c99 = Constraint(expr= m.x73 - m.b325 <= 0)
m.c100 = Constraint(expr= m.x74 - m.b322 <= 0)
m.c101 = Constraint(expr= m.x75 - m.b323 <= 0)
m.c102 = Constraint(expr= m.x76 - m.b324 <= 0)
m.c103 = Constraint(expr= m.x77 - m.b325 <= 0)
m.c104 = Constraint(expr= m.x78 - m.b330 <= 0)
m.c105 = Constraint(expr= m.x79 - m.b331 <= 0)
m.c106 = Constraint(expr= m.x80 - m.b332 <= 0)
m.c107 = Constraint(expr= m.x81 - m.b333 <= 0)
m.c108 = Constraint(expr= m.x82 - m.b330 <= 0)
m.c109 = Constraint(expr= m.x83 - m.b331 <= 0)
m.c110 = Constraint(expr= m.x84 - m.b332 <= 0)
m.c111 = Constraint(expr= m.x85 - m.b333 <= 0)
m.c112 = Constraint(expr= m.x86 - m.b330 <= 0)
m.c113 = Constraint(expr= m.x87 - m.b331 <= 0)
m.c114 = Constraint(expr= m.x88 - m.b332 <= 0)
m.c115 = Constraint(expr= m.x89 - m.b333 <= 0)
m.c116 = Constraint(expr= m.x90 - m.b330 <= 0)
m.c117 = Constraint(expr= m.x91 - m.b331 <= 0)
m.c118 = Constraint(expr= m.x92 - m.b332 <= 0)
m.c119 = Constraint(expr= m.x93 - m.b333 <= 0)
m.c120 = Constraint(expr= m.x94 - m.b330 <= 0)
m.c121 = Constraint(expr= m.x95 - m.b331 <= 0)
m.c122 = Constraint(expr= m.x96 - m.b332 <= 0)
m.c123 = Constraint(expr= m.x97 - m.b333 <= 0)
m.c124 = Constraint(expr= m.x98 - m.b330 <= 0)
m.c125 = Constraint(expr= m.x99 - m.b331 <= 0)
m.c126 = Constraint(expr= m.x100 - m.b332 <= 0)
m.c127 = Constraint(expr= m.x101 - m.b333 <= 0)
m.c128 = Constraint(expr= m.x102 - m.b330 <= 0)
m.c129 = Constraint(expr= m.x103 - m.b331 <= 0)
m.c130 = Constraint(expr= m.x104 - m.b332 <= 0)
m.c131 = Constraint(expr= m.x105 - m.b333 <= 0)
m.c132 = Constraint(expr= m.x106 - m.b330 <= 0)
m.c133 = Constraint(expr= m.x107 - m.b331 <= 0)
m.c134 = Constraint(expr= m.x108 - m.b332 <= 0)
m.c135 = Constraint(expr= m.x109 - m.b333 <= 0)
m.c136 = Constraint(expr= m.x110 - m.b330 <= 0)
m.c137 = Constraint(expr= m.x111 - m.b331 <= 0)
m.c138 = Constraint(expr= m.x112 - m.b332 <= 0)
m.c139 = Constraint(expr= m.x113 - m.b333 <= 0)
m.c140 = Constraint(expr= m.x114 - m.b330 <= 0)
m.c141 = Constraint(expr= m.x115 - m.b331 <= 0)
m.c142 = Constraint(expr= m.x116 - m.b332 <= 0)
m.c143 = Constraint(expr= m.x117 - m.b333 <= 0)
m.c144 = Constraint(expr= m.x118 - m.b330 <= 0)
m.c145 = Constraint(expr= m.x119 - m.b331 <= 0)
m.c146 = Constraint(expr= m.x120 - m.b332 <= 0)
m.c147 = Constraint(expr= m.x121 - m.b333 <= 0)
m.c148 = Constraint(expr= m.x122 - m.b334 <= 0)
m.c149 = Constraint(expr= m.x123 - m.b335 <= 0)
m.c150 = Constraint(expr= m.x124 - m.b336 <= 0)
m.c151 = Constraint(expr= m.x125 - m.b337 <= 0)
m.c152 = Constraint(expr= m.x126 - m.b334 <= 0)
m.c153 = Constraint(expr= m.x127 - m.b335 <= 0)
m.c154 = Constraint(expr= m.x128 - m.b336 <= 0)
m.c155 = Constraint(expr= m.x129 - m.b337 <= 0)
m.c156 = Constraint(expr= m.x130 - m.b338 <= 0)
m.c157 = Constraint(expr= m.x131 - m.b339 <= 0)
m.c158 = Constraint(expr= m.x132 - m.b340 <= 0)
m.c159 = Constraint(expr= m.x133 - m.b341 <= 0)
m.c160 = Constraint(expr= m.x134 - m.b342 <= 0)
m.c161 = Constraint(expr= m.x135 - m.b343 <= 0)
m.c162 = Constraint(expr= m.x136 - m.b344 <= 0)
m.c163 = Constraint(expr= m.x137 - m.b345 <= 0)
m.c164 = Constraint(expr= m.x138 - m.b342 <= 0)
m.c165 = Constraint(expr= m.x139 - m.b343 <= 0)
m.c166 = Constraint(expr= m.x140 - m.b344 <= 0)
m.c167 = Constraint(expr= m.x141 - m.b345 <= 0)
m.c168 = Constraint(expr= m.x142 - m.b342 <= 0)
m.c169 = Constraint(expr= m.x143 - m.b343 <= 0)
m.c170 = Constraint(expr= m.x144 - m.b344 <= 0)
m.c171 = Constraint(expr= m.x145 - m.b345 <= 0)
m.c172 = Constraint(expr= m.x146 - m.b346 <= 0)
m.c173 = Constraint(expr= m.x147 - m.b347 <= 0)
m.c174 = Constraint(expr= m.x148 - m.b348 <= 0)
m.c175 = Constraint(expr= m.x149 - m.b349 <= 0)
m.c176 = Constraint(expr= m.x150 - m.b346 <= 0)
m.c177 = Constraint(expr= m.x151 - m.b347 <= 0)
m.c178 = Constraint(expr= m.x152 - m.b348 <= 0)
m.c179 = Constraint(expr= m.x153 - m.b349 <= 0)
m.c180 = Constraint(expr= m.x154 - m.b346 <= 0)
m.c181 = Constraint(expr= m.x155 - m.b347 <= 0)
m.c182 = Constraint(expr= m.x156 - m.b348 <= 0)
m.c183 = Constraint(expr= m.x157 - m.b349 <= 0)
m.c184 = Constraint(expr= m.x158 - m.b346 <= 0)
m.c185 = Constraint(expr= m.x159 - m.b347 <= 0)
m.c186 = Constraint(expr= m.x160 - m.b348 <= 0)
m.c187 = Constraint(expr= m.x161 - m.b349 <= 0)
m.c188 = Constraint(expr= m.x162 - m.b350 <= 0)
m.c189 = Constraint(expr= m.x163 - m.b351 <= 0)
m.c190 = Constraint(expr= m.x164 - m.b352 <= 0)
m.c191 = Constraint(expr= m.x165 - m.b353 <= 0)
m.c192 = Constraint(expr= m.x166 - m.b350 <= 0)
m.c193 = Constraint(expr= m.x167 - m.b351 <= 0)
m.c194 = Constraint(expr= m.x168 - m.b352 <= 0)
m.c195 = Constraint(expr= m.x169 - m.b353 <= 0)
m.c196 = Constraint(expr= m.x170 - m.b354 <= 0)
m.c197 = Constraint(expr= m.x171 - m.b355 <= 0)
m.c198 = Constraint(expr= m.x172 - m.b356 <= 0)
m.c199 = Constraint(expr= m.x173 - m.b357 <= 0)
m.c200 = Constraint(expr= m.x174 - m.b354 <= 0)
m.c201 = Constraint(expr= m.x175 - m.b355 <= 0)
m.c202 = Constraint(expr= m.x176 - m.b356 <= 0)
m.c203 = Constraint(expr= m.x177 - m.b357 <= 0)
m.c204 = Constraint(expr= m.x178 - m.b354 <= 0)
m.c205 = Constraint(expr= m.x179 - m.b355 <= 0)
m.c206 = Constraint(expr= m.x180 - m.b356 <= 0)
m.c207 = Constraint(expr= m.x181 - m.b357 <= 0)
m.c208 = Constraint(expr= m.x182 - m.b354 <= 0)
m.c209 = Constraint(expr= m.x183 - m.b355 <= 0)
m.c210 = Constraint(expr= m.x184 - m.b356 <= 0)
m.c211 = Constraint(expr= m.x185 - m.b357 <= 0)
m.c212 = Constraint(expr= m.x186 - m.b362 <= 0)
m.c213 = Constraint(expr= m.x187 - m.b363 <= 0)
m.c214 = Constraint(expr= m.x188 - m.b364 <= 0)
m.c215 = Constraint(expr= m.x189 - m.b365 <= 0)
m.c216 = Constraint(expr= m.x190 - m.b366 <= 0)
m.c217 = Constraint(expr= m.x191 - m.b367 <= 0)
m.c218 = Constraint(expr= m.x192 - m.b368 <= 0)
m.c219 = Constraint(expr= m.x193 - m.b369 <= 0)
m.c220 = Constraint(expr= m.x194 - m.b366 <= 0)
m.c221 = Constraint(expr= m.x195 - m.b367 <= 0)
m.c222 = Constraint(expr= m.x196 - m.b368 <= 0)
m.c223 = Constraint(expr= m.x197 - m.b369 <= 0)
m.c224 = Constraint(expr= m.x198 - m.b366 <= 0)
m.c225 = Constraint(expr= m.x199 - m.b367 <= 0)
m.c226 = Constraint(expr= m.x200 - m.b368 <= 0)
m.c227 = Constraint(expr= m.x201 - m.b369 <= 0)
m.c228 = Constraint(expr= m.x202 - m.b370 <= 0)
m.c229 = Constraint(expr= m.x203 - m.b371 <= 0)
m.c230 = Constraint(expr= m.x204 - m.b372 <= 0)
m.c231 = Constraint(expr= m.x205 - m.b373 <= 0)
m.c232 = Constraint(expr= m.x206 - m.b370 <= 0)
m.c233 = Constraint(expr= m.x207 - m.b371 <= 0)
m.c234 = Constraint(expr= m.x208 - m.b372 <= 0)
m.c235 = Constraint(expr= m.x209 - m.b373 <= 0)
m.c236 = Constraint(expr= m.x210 - m.b370 <= 0)
m.c237 = Constraint(expr= m.x211 - m.b371 <= 0)
m.c238 = Constraint(expr= m.x212 - m.b372 <= 0)
m.c239 = Constraint(expr= m.x213 - m.b373 <= 0)
m.c240 = Constraint(expr= m.x214 - m.b370 <= 0)
m.c241 = Constraint(expr= m.x215 - m.b371 <= 0)
m.c242 = Constraint(expr= m.x216 - m.b372 <= 0)
m.c243 = Constraint(expr= m.x217 - m.b373 <= 0)
m.c244 = Constraint(expr= m.x218 - m.b374 <= 0)
m.c245 = Constraint(expr= m.x219 - m.b375 <= 0)
m.c246 = Constraint(expr= m.x220 - m.b376 <= 0)
m.c247 = Constraint(expr= m.x221 - m.b377 <= 0)
m.c248 = Constraint(expr= m.x222 - m.b374 <= 0)
m.c249 = Constraint(expr= m.x223 - m.b375 <= 0)
m.c250 = Constraint(expr= m.x224 - m.b376 <= 0)
m.c251 = Constraint(expr= m.x225 - m.b377 <= 0)
m.c252 = Constraint(expr= m.x226 - m.b374 <= 0)
m.c253 = Constraint(expr= m.x227 - m.b375 <= 0)
m.c254 = Constraint(expr= m.x228 - m.b376 <= 0)
m.c255 = Constraint(expr= m.x229 - m.b377 <= 0)
m.c256 = Constraint(expr= m.x230 - m.b378 <= 0)
m.c257 = Constraint(expr= m.x231 - m.b379 <= 0)
m.c258 = Constraint(expr= m.x232 - m.b380 <= 0)
m.c259 = Constraint(expr= m.x233 - m.b381 <= 0)
m.c260 = Constraint(expr= m.x234 - m.b378 <= 0)
m.c261 = Constraint(expr= m.x235 - m.b379 <= 0)
m.c262 = Constraint(expr= m.x236 - m.b380 <= 0)
m.c263 = Constraint(expr= m.x237 - m.b381 <= 0)
m.c264 = Constraint(expr= m.x238 - m.b382 <= 0)
m.c265 = Constraint(expr= m.x239 - m.b383 <= 0)
m.c266 = Constraint(expr= m.x240 - m.b384 <= 0)
m.c267 = Constraint(expr= m.x241 - m.b385 <= 0)
m.c268 = Constraint(expr= m.x242 - m.b382 <= 0)
m.c269 = Constraint(expr= m.x243 - m.b383 <= 0)
m.c270 = Constraint(expr= m.x244 - m.b384 <= 0)
m.c271 = Constraint(expr= m.x245 - m.b385 <= 0)
m.c272 = Constraint(expr= m.x246 - m.b386 <= 0)
m.c273 = Constraint(expr= m.x247 - m.b387 <= 0)
m.c274 = Constraint(expr= m.x248 - m.b388 <= 0)
m.c275 = Constraint(expr= m.x249 - m.b389 <= 0)
m.c276 = Constraint(expr= m.x250 - m.b390 <= 0)
m.c277 = Constraint(expr= m.x251 - m.b391 <= 0)
m.c278 = Constraint(expr= m.x252 - m.b392 <= 0)
m.c279 = Constraint(expr= m.x253 - m.b393 <= 0)
m.c280 = Constraint(expr= m.x254 - m.b390 <= 0)
m.c281 = Constraint(expr= m.x255 - m.b391 <= 0)
m.c282 = Constraint(expr= m.x256 - m.b392 <= 0)
m.c283 = Constraint(expr= m.x257 - m.b393 <= 0)
m.c284 = Constraint(expr= m.x258 - m.b390 <= 0)
m.c285 = Constraint(expr= m.x259 - m.b391 <= 0)
m.c286 = Constraint(expr= m.x260 - m.b392 <= 0)
m.c287 = Constraint(expr= m.x261 - m.b393 <= 0)
m.c288 = Constraint(expr= m.x262 - m.b394 <= 0)
m.c289 = Constraint(expr= m.x263 - m.b395 <= 0)
m.c290 = Constraint(expr= m.x264 - m.b396 <= 0)
m.c291 = Constraint(expr= m.x265 - m.b397 <= 0)
m.c292 = Constraint(expr= m.x266 - m.b394 <= 0)
m.c293 = Constraint(expr= m.x267 - m.b395 <= 0)
m.c294 = Constraint(expr= m.x268 - m.b396 <= 0)
m.c295 = Constraint(expr= m.x269 - m.b397 <= 0)
m.c296 = Constraint(expr= m.x270 - m.b402 <= 0)
m.c297 = Constraint(expr= m.x271 - m.b403 <= 0)
m.c298 = Constraint(expr= m.x272 - m.b404 <= 0)
m.c299 = Constraint(expr= m.x273 - m.b405 <= 0)
m.c300 = Constraint(expr= m.x274 - m.b402 <= 0)
m.c301 = Constraint(expr= m.x275 - m.b403 <= 0)
m.c302 = Constraint(expr= m.x276 - m.b404 <= 0)
m.c303 = Constraint(expr= m.x277 - m.b405 <= 0)
m.c304 = Constraint(expr= m.x278 - m.b406 <= 0)
m.c305 = Constraint(expr= m.x279 - m.b407 <= 0)
m.c306 = Constraint(expr= m.x280 - m.b408 <= 0)
m.c307 = Constraint(expr= m.x281 - m.b409 <= 0)
m.c308 = Constraint(expr= m.x282 - m.b410 <= 0)
m.c309 = Constraint(expr= m.x283 - m.b411 <= 0)
m.c310 = Constraint(expr= m.x284 - m.b412 <= 0)
m.c311 = Constraint(expr= m.x285 - m.b413 <= 0)
m.c312 = Constraint(expr= m.x286 - m.b414 <= 0)
m.c313 = Constraint(expr= m.x287 - m.b415 <= 0)
m.c314 = Constraint(expr= m.x288 - m.b416 <= 0)
m.c315 = Constraint(expr= m.x289 - m.b417 <= 0)
m.c316 = Constraint(expr= m.x290 - m.b414 <= 0)
m.c317 = Constraint(expr= m.x291 - m.b415 <= 0)
m.c318 = Constraint(expr= m.x292 - m.b416 <= 0)
m.c319 = Constraint(expr= m.x293 - m.b417 <= 0)
m.c320 = Constraint(expr= m.x294 - m.b426 <= 0)
m.c321 = Constraint(expr= m.x295 - m.b427 <= 0)
m.c322 = Constraint(expr= m.x296 - m.b428 <= 0)
m.c323 = Constraint(expr= m.x297 - m.b429 <= 0)
m.c324 = Constraint(expr= m.x298 - m.b426 <= 0)
m.c325 = Constraint(expr= m.x299 - m.b427 <= 0)
m.c326 = Constraint(expr= m.x300 - m.b428 <= 0)
m.c327 = Constraint(expr= m.x301 - m.b429 <= 0)
m.c328 = Constraint(expr= m.x302 - m.b430 <= 0)
m.c329 = Constraint(expr= m.x303 - m.b431 <= 0)
m.c330 = Constraint(expr= m.x304 - m.b432 <= 0)
m.c331 = Constraint(expr= m.x305 - m.b433 <= 0)
m.c332 = Constraint(expr= m.x306 - m.b430 <= 0)
m.c333 = Constraint(expr= m.x307 - m.b431 <= 0)
m.c334 = Constraint(expr= m.x308 - m.b432 <= 0)
m.c335 = Constraint(expr= m.x309 - m.b433 <= 0)
m.c336 = Constraint(expr= m.x310 - m.b442 <= 0)
m.c337 = Constraint(expr= m.x311 - m.b443 <= 0)
m.c338 = Constraint(expr= m.x312 - m.b444 <= 0)
m.c339 = Constraint(expr= m.x313 - m.b445 <= 0)
m.c340 = Constraint(expr= m.x314 - m.b442 <= 0)
m.c341 = Constraint(expr= m.x315 - m.b443 <= 0)
m.c342 = Constraint(expr= m.x316 - m.b444 <= 0)
m.c343 = Constraint(expr= m.x317 - m.b445 <= 0)
m.c344 = Constraint(expr= m.x318 - m.b454 <= 0)
m.c345 = Constraint(expr= m.x319 - m.b455 <= 0)
m.c346 = Constraint(expr= m.x320 - m.b456 <= 0)
m.c347 = Constraint(expr= m.x321 - m.b457 <= 0)
m.c348 = Constraint(expr= m.x10 - m.b330 <= 0)
m.c349 = Constraint(expr= m.x11 - m.b331 <= 0)
m.c350 = Constraint(expr= m.x12 - m.b332 <= 0)
m.c351 = Constraint(expr= m.x13 - m.b333 <= 0)
m.c352 = Constraint(expr= m.x14 - m.b346 <= 0)
m.c353 = Constraint(expr= m.x15 - m.b347 <= 0)
m.c354 = Constraint(expr= m.x16 - m.b348 <= 0)
m.c355 = Constraint(expr= m.x17 - m.b349 <= 0)
m.c356 = Constraint(expr= m.x18 - m.b350 <= 0)
m.c357 = Constraint(expr= m.x19 - m.b351 <= 0)
m.c358 = Constraint(expr= m.x20 - m.b352 <= 0)
m.c359 = Constraint(expr= m.x21 - m.b353 <= 0)
m.c360 = Constraint(expr= m.x22 - m.b354 <= 0)
m.c361 = Constraint(expr= m.x23 - m.b355 <= 0)
m.c362 = Constraint(expr= m.x24 - m.b356 <= 0)
m.c363 = Constraint(expr= m.x25 - m.b357 <= 0)
m.c364 = Constraint(expr= m.x26 - m.b358 <= 0)
m.c365 = Constraint(expr= m.x27 - m.b359 <= 0)
m.c366 = Constraint(expr= m.x28 - m.b360 <= 0)
m.c367 = Constraint(expr= m.x29 - m.b361 <= 0)
m.c368 = Constraint(expr= m.x30 - m.b374 <= 0)
m.c369 = Constraint(expr= m.x31 - m.b375 <= 0)
m.c370 = Constraint(expr= m.x32 - m.b376 <= 0)
m.c371 = Constraint(expr= m.x33 - m.b377 <= 0)
m.c372 = Constraint(expr= m.x34 - m.b378 <= 0)
m.c373 = Constraint(expr= m.x35 - m.b379 <= 0)
m.c374 = Constraint(expr= m.x36 - m.b380 <= 0)
m.c375 = Constraint(expr= m.x37 - m.b381 <= 0)
m.c376 = Constraint(expr= m.x38 - m.b382 <= 0)
m.c377 = Constraint(expr= m.x39 - m.b383 <= 0)
m.c378 = Constraint(expr= m.x40 - m.b384 <= 0)
m.c379 = Constraint(expr= m.x41 - m.b385 <= 0)
m.c380 = Constraint(expr= m.x42 - m.b394 <= 0)
m.c381 = Constraint(expr= m.x43 - m.b395 <= 0)
m.c382 = Constraint(expr= m.x44 - m.b396 <= 0)
m.c383 = Constraint(expr= m.x45 - m.b397 <= 0)
m.c384 = Constraint(expr= m.x46 - m.b406 <= 0)
m.c385 = Constraint(expr= m.x47 - m.b407 <= 0)
m.c386 = Constraint(expr= m.x48 - m.b408 <= 0)
m.c387 = Constraint(expr= m.x49 - m.b409 <= 0)
m.c388 = Constraint(expr= m.x50 - m.b410 <= 0)
m.c389 = Constraint(expr= m.x51 - m.b411 <= 0)
m.c390 = Constraint(expr= m.x52 - m.b412 <= 0)
m.c391 = Constraint(expr= m.x53 - m.b413 <= 0)
m.c392 = Constraint(expr= m.x54 - m.b418 <= 0)
m.c393 = Constraint(expr= m.x55 - m.b419 <= 0)
m.c394 = Constraint(expr= m.x56 - m.b420 <= 0)
m.c395 = Constraint(expr= m.x57 - m.b421 <= 0)
m.c396 = Constraint(expr= m.x58 - m.b422 <= 0)
m.c397 = Constraint(expr= m.x59 - m.b423 <= 0)
m.c398 = Constraint(expr= m.x60 - m.b424 <= 0)
m.c399 = Constraint(expr= m.x61 - m.b425 <= 0)
m.c400 = Constraint(expr= m.x62 - m.b434 <= 0)
m.c401 = Constraint(expr= m.x63 - m.b435 <= 0)
m.c402 = Constraint(expr= m.x64 - m.b436 <= 0)
m.c403 = Constraint(expr= m.x65 - m.b437 <= 0)
m.c404 = Constraint(expr= m.x66 - m.b438 <= 0)
m.c405 = Constraint(expr= m.x67 - m.b439 <= 0)
m.c406 = Constraint(expr= m.x68 - m.b440 <= 0)
m.c407 = Constraint(expr= m.x69 - m.b441 <= 0)
m.c408 = Constraint(expr= m.x70 - m.b446 <= 0)
m.c409 = Constraint(expr= m.x71 - m.b447 <= 0)
m.c410 = Constraint(expr= m.x72 - m.b448 <= 0)
m.c411 = Constraint(expr= m.x73 - m.b449 <= 0)
m.c412 = Constraint(expr= m.x74 - m.b450 <= 0)
m.c413 = Constraint(expr= m.x75 - m.b451 <= 0)
m.c414 = Constraint(expr= m.x76 - m.b452 <= 0)
m.c415 = Constraint(expr= m.x77 - m.b453 <= 0)
m.c416 = Constraint(expr= m.x78 - m.b334 <= 0)
m.c417 = Constraint(expr= m.x79 - m.b335 <= 0)
m.c418 = Constraint(expr= m.x80 - m.b336 <= 0)
m.c419 = Constraint(expr= m.x81 - m.b337 <= 0)
m.c420 = Constraint(expr= m.x82 - m.b346 <= 0)
m.c421 = Constraint(expr= m.x83 - m.b347 <= 0)
m.c422 = Constraint(expr= m.x84 - m.b348 <= 0)
m.c423 = Constraint(expr= m.x85 - m.b349 <= 0)
m.c424 = Constraint(expr= m.x86 - m.b350 <= 0)
m.c425 = Constraint(expr= m.x87 - m.b351 <= 0)
m.c426 = Constraint(expr= m.x88 - m.b352 <= 0)
m.c427 = Constraint(expr= m.x89 - m.b353 <= 0)
m.c428 = Constraint(expr= m.x90 - m.b358 <= 0)
m.c429 = Constraint(expr= m.x91 - m.b359 <= 0)
m.c430 = Constraint(expr= m.x92 - m.b360 <= 0)
m.c431 = Constraint(expr= m.x93 - m.b361 <= 0)
m.c432 = Constraint(expr= m.x94 - m.b378 <= 0)
m.c433 = Constraint(expr= m.x95 - m.b379 <= 0)
m.c434 = Constraint(expr= m.x96 - m.b380 <= 0)
m.c435 = Constraint(expr= m.x97 - m.b381 <= 0)
m.c436 = Constraint(expr= m.x98 - m.b382 <= 0)
m.c437 = Constraint(expr= m.x99 - m.b383 <= 0)
m.c438 = Constraint(expr= m.x100 - m.b384 <= 0)
m.c439 = Constraint(expr= m.x101 - m.b385 <= 0)
m.c440 = Constraint(expr= m.x102 - m.b418 <= 0)
m.c441 = Constraint(expr= m.x103 - m.b419 <= 0)
m.c442 = Constraint(expr= m.x104 - m.b420 <= 0)
m.c443 = Constraint(expr= m.x105 - m.b421 <= 0)
m.c444 = Constraint(expr= m.x106 - m.b422 <= 0)
m.c445 = Constraint(expr= m.x107 - m.b423 <= 0)
m.c446 = Constraint(expr= m.x108 - m.b424 <= 0)
m.c447 = Constraint(expr= m.x109 - m.b425 <= 0)
m.c448 = Constraint(expr= m.x110 - m.b434 <= 0)
m.c449 = Constraint(expr= m.x111 - m.b435 <= 0)
m.c450 = Constraint(expr= m.x112 - m.b436 <= 0)
m.c451 = Constraint(expr= m.x113 - m.b437 <= 0)
m.c452 = Constraint(expr= m.x114 - m.b438 <= 0)
m.c453 = Constraint(expr= m.x115 - m.b439 <= 0)
m.c454 = Constraint(expr= m.x116 - m.b440 <= 0)
m.c455 = Constraint(expr= m.x117 - m.b441 <= 0)
m.c456 = Constraint(expr= m.x118 - m.b446 <= 0)
m.c457 = Constraint(expr= m.x119 - m.b447 <= 0)
m.c458 = Constraint(expr= m.x120 - m.b448 <= 0)
m.c459 = Constraint(expr= m.x121 - m.b449 <= 0)
m.c460 = Constraint(expr= m.x122 - m.b326 <= 0)
m.c461 = Constraint(expr= m.x123 - m.b327 <= 0)
m.c462 = Constraint(expr= m.x124 - m.b328 <= 0)
m.c463 = Constraint(expr= m.x125 - m.b329 <= 0)
m.c464 = Constraint(expr= m.x126 - m.b338 <= 0)
m.c465 = Constraint(expr= m.x127 - m.b339 <= 0)
m.c466 = Constraint(expr= m.x128 - m.b340 <= 0)
m.c467 = Constraint(expr= m.x129 - m.b341 <= 0)
m.c468 = Constraint(expr= m.x130 - m.b326 <= 0)
m.c469 = Constraint(expr= m.x131 - m.b327 <= 0)
m.c470 = Constraint(expr= m.x132 - m.b328 <= 0)
m.c471 = Constraint(expr= m.x133 - m.b329 <= 0)
m.c472 = Constraint(expr= m.x134 - m.b326 <= 0)
m.c473 = Constraint(expr= m.x135 - m.b327 <= 0)
m.c474 = Constraint(expr= m.x136 - m.b328 <= 0)
m.c475 = Constraint(expr= m.x137 - m.b329 <= 0)
m.c476 = Constraint(expr= m.x138 - m.b334 <= 0)
m.c477 = Constraint(expr= m.x139 - m.b335 <= 0)
m.c478 = Constraint(expr= m.x140 - m.b336 <= 0)
m.c479 = Constraint(expr= m.x141 - m.b337 <= 0)
m.c480 = Constraint(expr= m.x142 - m.b338 <= 0)
m.c481 = Constraint(expr= m.x143 - m.b339 <= 0)
m.c482 = Constraint(expr= m.x144 - m.b340 <= 0)
m.c483 = Constraint(expr= m.x145 - m.b341 <= 0)
m.c484 = Constraint(expr= m.x146 - m.b326 <= 0)
m.c485 = Constraint(expr= m.x147 - m.b327 <= 0)
m.c486 = Constraint(expr= m.x148 - m.b328 <= 0)
m.c487 = Constraint(expr= m.x149 - m.b329 <= 0)
m.c488 = Constraint(expr= m.x150 - m.b398 <= 0)
m.c489 = Constraint(expr= m.x151 - m.b399 <= 0)
m.c490 = Constraint(expr= m.x152 - m.b400 <= 0)
m.c491 = Constraint(expr= m.x153 - m.b401 <= 0)
m.c492 = Constraint(expr= m.x154 - m.b402 <= 0)
m.c493 = Constraint(expr= m.x155 - m.b403 <= 0)
m.c494 = Constraint(expr= m.x156 - m.b404 <= 0)
m.c495 = Constraint(expr= m.x157 - m.b405 <= 0)
m.c496 = Constraint(expr= m.x158 - m.b406 <= 0)
m.c497 = Constraint(expr= m.x159 - m.b407 <= 0)
m.c498 = Constraint(expr= m.x160 - m.b408 <= 0)
m.c499 = Constraint(expr= m.x161 - m.b409 <= 0)
m.c500 = Constraint(expr= m.x162 - m.b326 <= 0)
m.c501 = Constraint(expr= m.x163 - m.b327 <= 0)
m.c502 = Constraint(expr= m.x164 - m.b328 <= 0)
m.c503 = Constraint(expr= m.x165 - m.b329 <= 0)
m.c504 = Constraint(expr= m.x166 - m.b334 <= 0)
m.c505 = Constraint(expr= m.x167 - m.b335 <= 0)
m.c506 = Constraint(expr= m.x168 - m.b336 <= 0)
m.c507 = Constraint(expr= m.x169 - m.b337 <= 0)
m.c508 = Constraint(expr= m.x170 - m.b326 <= 0)
m.c509 = Constraint(expr= m.x171 - m.b327 <= 0)
m.c510 = Constraint(expr= m.x172 - m.b328 <= 0)
m.c511 = Constraint(expr= m.x173 - m.b329 <= 0)
m.c512 = Constraint(expr= m.x174 - m.b334 <= 0)
m.c513 = Constraint(expr= m.x175 - m.b335 <= 0)
m.c514 = Constraint(expr= m.x176 - m.b336 <= 0)
m.c515 = Constraint(expr= m.x177 - m.b337 <= 0)
m.c516 = Constraint(expr= m.x178 - m.b338 <= 0)
m.c517 = Constraint(expr= m.x179 - m.b339 <= 0)
m.c518 = Constraint(expr= m.x180 - m.b340 <= 0)
m.c519 = Constraint(expr= m.x181 - m.b341 <= 0)
m.c520 = Constraint(expr= m.x182 - m.b342 <= 0)
m.c521 = Constraint(expr= m.x183 - m.b343 <= 0)
m.c522 = Constraint(expr= m.x184 - m.b344 <= 0)
m.c523 = Constraint(expr= m.x185 - m.b345 <= 0)
m.c524 = Constraint(expr= m.x186 - m.b326 <= 0)
m.c525 = Constraint(expr= m.x187 - m.b327 <= 0)
m.c526 = Constraint(expr= m.x188 - m.b328 <= 0)
m.c527 = Constraint(expr= m.x189 - m.b329 <= 0)
m.c528 = Constraint(expr= m.x190 - m.b326 <= 0)
m.c529 = Constraint(expr= m.x191 - m.b327 <= 0)
m.c530 = Constraint(expr= m.x192 - m.b328 <= 0)
m.c531 = Constraint(expr= m.x193 - m.b329 <= 0)
m.c532 = Constraint(expr= m.x194 - m.b362 <= 0)
m.c533 = Constraint(expr= m.x195 - m.b363 <= 0)
m.c534 = Constraint(expr= m.x196 - m.b364 <= 0)
m.c535 = Constraint(expr= m.x197 - m.b365 <= 0)
m.c536 = Constraint(expr= m.x198 - m.b386 <= 0)
m.c537 = Constraint(expr= m.x199 - m.b387 <= 0)
m.c538 = Constraint(expr= m.x200 - m.b388 <= 0)
m.c539 = Constraint(expr= m.x201 - m.b389 <= 0)
m.c540 = Constraint(expr= m.x202 - m.b326 <= 0)
m.c541 = Constraint(expr= m.x203 - m.b327 <= 0)
m.c542 = Constraint(expr= m.x204 - m.b328 <= 0)
m.c543 = Constraint(expr= m.x205 - m.b329 <= 0)
m.c544 = Constraint(expr= m.x206 - m.b334 <= 0)
m.c545 = Constraint(expr= m.x207 - m.b335 <= 0)
m.c546 = Constraint(expr= m.x208 - m.b336 <= 0)
m.c547 = Constraint(expr= m.x209 - m.b337 <= 0)
m.c548 = Constraint(expr= m.x210 - m.b338 <= 0)
m.c549 = Constraint(expr= m.x211 - m.b339 <= 0)
m.c550 = Constraint(expr= m.x212 - m.b340 <= 0)
m.c551 = Constraint(expr= m.x213 - m.b341 <= 0)
m.c552 = Constraint(expr= m.x214 - m.b342 <= 0)
m.c553 = Constraint(expr= m.x215 - m.b343 <= 0)
m.c554 = Constraint(expr= m.x216 - m.b344 <= 0)
m.c555 = Constraint(expr= m.x217 - m.b345 <= 0)
m.c556 = Constraint(expr= m.x218 - m.b334 <= 0)
m.c557 = Constraint(expr= m.x219 - m.b335 <= 0)
m.c558 = Constraint(expr= m.x220 - m.b336 <= 0)
m.c559 = Constraint(expr= m.x221 - m.b337 <= 0)
m.c560 = Constraint(expr= m.x222 - m.b358 <= 0)
m.c561 = Constraint(expr= m.x223 - m.b359 <= 0)
m.c562 = Constraint(expr= m.x224 - m.b360 <= 0)
m.c563 = Constraint(expr= m.x225 - m.b361 <= 0)
m.c564 = Constraint(expr= m.x226 - m.b398 <= 0)
m.c565 = Constraint(expr= m.x227 - m.b399 <= 0)
m.c566 = Constraint(expr= m.x228 - m.b400 <= 0)
m.c567 = Constraint(expr= m.x229 - m.b401 <= 0)
m.c568 = Constraint(expr= m.x230 - m.b358 <= 0)
m.c569 = Constraint(expr= m.x231 - m.b359 <= 0)
m.c570 = Constraint(expr= m.x232 - m.b360 <= 0)
m.c571 = Constraint(expr= m.x233 - m.b361 <= 0)
m.c572 = Constraint(expr= m.x234 - m.b450 <= 0)
m.c573 = Constraint(expr= m.x235 - m.b451 <= 0)
m.c574 = Constraint(expr= m.x236 - m.b452 <= 0)
m.c575 = Constraint(expr= m.x237 - m.b453 <= 0)
m.c576 = Constraint(expr= m.x238 - m.b338 <= 0)
m.c577 = Constraint(expr= m.x239 - m.b339 <= 0)
m.c578 = Constraint(expr= m.x240 - m.b340 <= 0)
m.c579 = Constraint(expr= m.x241 - m.b341 <= 0)
m.c580 = Constraint(expr= m.x242 - m.b350 <= 0)
m.c581 = Constraint(expr= m.x243 - m.b351 <= 0)
m.c582 = Constraint(expr= m.x244 - m.b352 <= 0)
m.c583 = Constraint(expr= m.x245 - m.b353 <= 0)
m.c584 = Constraint(expr= m.x246 - m.b326 <= 0)
m.c585 = Constraint(expr= m.x247 - m.b327 <= 0)
m.c586 = Constraint(expr= m.x248 - m.b328 <= 0)
m.c587 = Constraint(expr= m.x249 - m.b329 <= 0)
m.c588 = Constraint(expr= m.x250 - m.b326 <= 0)
m.c589 = Constraint(expr= m.x251 - m.b327 <= 0)
m.c590 = Constraint(expr= m.x252 - m.b328 <= 0)
m.c591 = Constraint(expr= m.x253 - m.b329 <= 0)
m.c592 = Constraint(expr= m.x254 - m.b362 <= 0)
m.c593 = Constraint(expr= m.x255 - m.b363 <= 0)
m.c594 = Constraint(expr= m.x256 - m.b364 <= 0)
m.c595 = Constraint(expr= m.x257 - m.b365 <= 0)
m.c596 = Constraint(expr= m.x258 - m.b386 <= 0)
m.c597 = Constraint(expr= m.x259 - m.b387 <= 0)
m.c598 = Constraint(expr= m.x260 - m.b388 <= 0)
m.c599 = Constraint(expr= m.x261 - m.b389 <= 0)
m.c600 = Constraint(expr= m.x262 - m.b326 <= 0)
m.c601 = Constraint(expr= m.x263 - m.b327 <= 0)
m.c602 = Constraint(expr= m.x264 - m.b328 <= 0)
m.c603 = Constraint(expr= m.x265 - m.b329 <= 0)
m.c604 = Constraint(expr= m.x266 - m.b338 <= 0)
m.c605 = Constraint(expr= m.x267 - m.b339 <= 0)
m.c606 = Constraint(expr= m.x268 - m.b340 <= 0)
m.c607 = Constraint(expr= m.x269 - m.b341 <= 0)
m.c608 = Constraint(expr= m.x270 - m.b358 <= 0)
m.c609 = Constraint(expr= m.x271 - m.b359 <= 0)
m.c610 = Constraint(expr= m.x272 - m.b360 <= 0)
m.c611 = Constraint(expr= m.x273 - m.b361 <= 0)
m.c612 = Constraint(expr= m.x274 - m.b398 <= 0)
m.c613 = Constraint(expr= m.x275 - m.b399 <= 0)
m.c614 = Constraint(expr= m.x276 - m.b400 <= 0)
m.c615 = Constraint(expr= m.x277 - m.b401 <= 0)
m.c616 = Constraint(expr= m.x278 - m.b334 <= 0)
m.c617 = Constraint(expr= m.x279 - m.b335 <= 0)
m.c618 = Constraint(expr= m.x280 - m.b336 <= 0)
m.c619 = Constraint(expr= m.x281 - m.b337 <= 0)
m.c620 = Constraint(expr= m.x282 - m.b334 <= 0)
m.c621 = Constraint(expr= m.x283 - m.b335 <= 0)
m.c622 = Constraint(expr= m.x284 - m.b336 <= 0)
m.c623 = Constraint(expr= m.x285 - m.b337 <= 0)
m.c624 = Constraint(expr= m.x286 - m.b326 <= 0)
m.c625 = Constraint(expr= m.x287 - m.b327 <= 0)
m.c626 = Constraint(expr= m.x288 - m.b328 <= 0)
m.c627 = Constraint(expr= m.x289 - m.b329 <= 0)
m.c628 = Constraint(expr= m.x290 - m.b342 <= 0)
m.c629 = Constraint(expr= m.x291 - m.b343 <= 0)
m.c630 = Constraint(expr= m.x292 - m.b344 <= 0)
m.c631 = Constraint(expr= m.x293 - m.b345 <= 0)
m.c632 = Constraint(expr= m.x294 - m.b362 <= 0)
m.c633 = Constraint(expr= m.x295 - m.b363 <= 0)
m.c634 = Constraint(expr= m.x296 - m.b364 <= 0)
m.c635 = Constraint(expr= m.x297 - m.b365 <= 0)
m.c636 = Constraint(expr= m.x298 - m.b366 <= 0)
m.c637 = Constraint(expr= m.x299 - m.b367 <= 0)
m.c638 = Constraint(expr= m.x300 - m.b368 <= 0)
m.c639 = Constraint(expr= m.x301 - m.b369 <= 0)
m.c640 = Constraint(expr= m.x302 - m.b326 <= 0)
m.c641 = Constraint(expr= m.x303 - m.b327 <= 0)
m.c642 = Constraint(expr= m.x304 - m.b328 <= 0)
m.c643 = Constraint(expr= m.x305 - m.b329 <= 0)
m.c644 = Constraint(expr= m.x306 - m.b338 <= 0)
m.c645 = Constraint(expr= m.x307 - m.b339 <= 0)
m.c646 = Constraint(expr= m.x308 - m.b340 <= 0)
m.c647 = Constraint(expr= m.x309 - m.b341 <= 0)
m.c648 = Constraint(expr= m.x310 - m.b326 <= 0)
m.c649 = Constraint(expr= m.x311 - m.b327 <= 0)
m.c650 = Constraint(expr= m.x312 - m.b328 <= 0)
m.c651 = Constraint(expr= m.x313 - m.b329 <= 0)
m.c652 = Constraint(expr= m.x314 - m.b338 <= 0)
m.c653 = Constraint(expr= m.x315 - m.b339 <= 0)
m.c654 = Constraint(expr= m.x316 - m.b340 <= 0)
m.c655 = Constraint(expr= m.x317 - m.b341 <= 0)
m.c656 = Constraint(expr= m.x318 - m.b326 <= 0)
m.c657 = Constraint(expr= m.x319 - m.b327 <= 0)
m.c658 = Constraint(expr= m.x320 - m.b328 <= 0)
m.c659 = Constraint(expr= m.x321 - m.b329 <= 0)
m.c660 = Constraint(expr= m.x2 - m.x10 - m.x14 - m.x18 - m.x22 - m.x26 - m.x30 - m.x34 - m.x38 - m.x42 - m.x46 - m.x50
- m.x54 - m.x58 - m.x62 - m.x66 - m.x70 - m.x74 - m.x78 - m.x82 - m.x86 - m.x90 - m.x94
- m.x98 - m.x102 - m.x106 - m.x110 - m.x114 - m.x118 - m.x122 - m.x126 - m.x130 - m.x134
- m.x138 - m.x142 - m.x146 - m.x150 - m.x154 - m.x158 - m.x162 - m.x166 - m.x170 - m.x174
- m.x178 - m.x182 - m.x186 - m.x190 - m.x194 - m.x198 - m.x202 - m.x206 - m.x210 - m.x214
- m.x218 - m.x222 - m.x226 - m.x230 - m.x234 - m.x238 - m.x242 - m.x246 - m.x250 - m.x254
- m.x258 - m.x262 - m.x266 - m.x270 - m.x274 - m.x278 - m.x282 - m.x286 - m.x290 - m.x294
- m.x298 - m.x302 - m.x306 - m.x310 - m.x314 - m.x318 == 0)
m.c661 = Constraint(expr= m.x3 - m.x11 - m.x15 - m.x19 - m.x23 - m.x27 - m.x31 - m.x35 - m.x39 - m.x43 - m.x47 - m.x51
- m.x55 - m.x59 - m.x63 - m.x67 - m.x71 - m.x75 - m.x79 - m.x83 - m.x87 - m.x91 - m.x95
- m.x99 - m.x103 - m.x107 - m.x111 - m.x115 - m.x119 - m.x123 - m.x127 - m.x131 - m.x135
- m.x139 - m.x143 - m.x147 - m.x151 - m.x155 - m.x159 - m.x163 - m.x167 - m.x171 - m.x175
- m.x179 - m.x183 - m.x187 - m.x191 - m.x195 - m.x199 - m.x203 - m.x207 - m.x211 - m.x215
- m.x219 - m.x223 - m.x227 - m.x231 - m.x235 - m.x239 - m.x243 - m.x247 - m.x251 - m.x255
- m.x259 - m.x263 - m.x267 - m.x271 - m.x275 - m.x279 - m.x283 - m.x287 - m.x291 - m.x295
- m.x299 - m.x303 - m.x307 - m.x311 - m.x315 - m.x319 == 0)
m.c662 = Constraint(expr= m.x4 - m.x12 - m.x16 - m.x20 - m.x24 - m.x28 - m.x32 - m.x36 - m.x40 - m.x44 - m.x48 - m.x52
- m.x56 - m.x60 - m.x64 - m.x68 - m.x72 - m.x76 - m.x80 - m.x84 - m.x88 - m.x92 - m.x96
- m.x100 - m.x104 - m.x108 - m.x112 - m.x116 - m.x120 - m.x124 - m.x128 - m.x132 - m.x136
- m.x140 - m.x144 - m.x148 - m.x152 - m.x156 - m.x160 - m.x164 - m.x168 - m.x172 - m.x176
- m.x180 - m.x184 - m.x188 - m.x192 - m.x196 - m.x200 - m.x204 - m.x208 - m.x212 - m.x216
- m.x220 - m.x224 - m.x228 - m.x232 - m.x236 - m.x240 - m.x244 - m.x248 - m.x252 - m.x256
- m.x260 - m.x264 - m.x268 - m.x272 - m.x276 - m.x280 - m.x284 - m.x288 - m.x292 - m.x296
- m.x300 - m.x304 - m.x308 - m.x312 - m.x316 - m.x320 == 0)
m.c663 = Constraint(expr= m.x5 - m.x13 - m.x17 - m.x21 - m.x25 - m.x29 - m.x33 - m.x37 - m.x41 - m.x45 - m.x49 - m.x53
- m.x57 - m.x61 - m.x65 - m.x69 - m.x73 - m.x77 - m.x81 - m.x85 - m.x89 - m.x93 - m.x97
- m.x101 - m.x105 - m.x109 - m.x113 - m.x117 - m.x121 - m.x125 - m.x129 - m.x133 - m.x137
- m.x141 - m.x145 - m.x149 - m.x153 - m.x157 - m.x161 - m.x165 - m.x169 - m.x173 - m.x177
- m.x181 - m.x185 - m.x189 - m.x193 - m.x197 - m.x201 - m.x205 - m.x209 - m.x213 - m.x217
- m.x221 - m.x225 - m.x229 - m.x233 - m.x237 - m.x241 - m.x245 - m.x249 - m.x253 - m.x257
- m.x261 - m.x265 - m.x269 - m.x273 - m.x277 - m.x281 - m.x285 - m.x289 - m.x293 - m.x297
- m.x301 - m.x305 - m.x309 - m.x313 - m.x317 - m.x321 == 0)
m.c664 = Constraint(expr= m.x6 - 17*m.b322 - 16*m.b326 - 12*m.b330 - 10*m.b334 - 9*m.b338 - 6*m.b342 - 6*m.b346
- 5*m.b350 - 5*m.b354 - 5*m.b358 - 4*m.b362 - 4*m.b366 - 4*m.b370 - 4*m.b374 - 4*m.b378
- 4*m.b382 - 3*m.b386 - 3*m.b390 - 3*m.b394 - 3*m.b398 - 3*m.b402 - 3*m.b406 - 2*m.b410
- 2*m.b414 - 2*m.b418 - 2*m.b422 - 2*m.b426 - 2*m.b430 - 2*m.b434 - 2*m.b438 - 2*m.b442
- 2*m.b446 - 2*m.b450 - m.b454 == 0)
m.c665 = Constraint(expr= m.x7 - 17*m.b323 - 16*m.b327 - 12*m.b331 - 10*m.b335 - 9*m.b339 - 6*m.b343 - 6*m.b347
- 5*m.b351 - 5*m.b355 - 5*m.b359 - 4*m.b363 - 4*m.b367 - 4*m.b371 - 4*m.b375 - 4*m.b379
- 4*m.b383 - 3*m.b387 - 3*m.b391 - 3*m.b395 - 3*m.b399 - 3*m.b403 - 3*m.b407 - 2*m.b411
- 2*m.b415 - 2*m.b419 - 2*m.b423 - 2*m.b427 - 2*m.b431 - 2*m.b435 - 2*m.b439 - 2*m.b443
- 2*m.b447 - 2*m.b451 - m.b455 == 0)
m.c666 = Constraint(expr= m.x8 - 17*m.b324 - 16*m.b328 - 12*m.b332 - 10*m.b336 - 9*m.b340 - 6*m.b344 - 6*m.b348
- 5*m.b352 - 5*m.b356 - 5*m.b360 - 4*m.b364 - 4*m.b368 - 4*m.b372 - 4*m.b376 - 4*m.b380
- 4*m.b384 - 3*m.b388 - 3*m.b392 - 3*m.b396 - 3*m.b400 - 3*m.b404 - 3*m.b408 - 2*m.b412
- 2*m.b416 - 2*m.b420 - 2*m.b424 - 2*m.b428 - 2*m.b432 - 2*m.b436 - 2*m.b440 - 2*m.b444
- 2*m.b448 - 2*m.b452 - m.b456 == 0)
m.c667 = Constraint(expr= m.x9 - 17*m.b325 - 16*m.b329 - 12*m.b333 - 10*m.b337 - 9*m.b341 - 6*m.b345 - 6*m.b349
- 5*m.b353 - 5*m.b357 - 5*m.b361 - 4*m.b365 - 4*m.b369 - 4*m.b373 - 4*m.b377 - 4*m.b381
- 4*m.b385 - 3*m.b389 - 3*m.b393 - 3*m.b397 - 3*m.b401 - 3*m.b405 - 3*m.b409 - 2*m.b413
- 2*m.b417 - 2*m.b421 - 2*m.b425 - 2*m.b429 - 2*m.b433 - 2*m.b437 - 2*m.b441 - 2*m.b445
- 2*m.b449 - 2*m.b453 - m.b457 == 0)
| [
"feligongcity17@gmail.com"
] | feligongcity17@gmail.com |
4c60d476694d7b1b3c584e6ad662291bfd8aec0d | 520a65eac9248e200acfc8bf34a9165f5bf81b4d | /Node5040.py | a04456947ad479809a4c9f71580879252cc9a0d0 | [] | no_license | DeepakMehta000001/Gossip-Protocol | a425da7b62d9b7b01de8c975ee355d667b1231b6 | dffc0f27b6d43969ccca79bfd8090aa152c01b80 | refs/heads/master | 2021-01-10T18:00:49.854722 | 2016-04-25T08:03:08 | 2016-04-25T08:03:08 | 55,046,030 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 206 | py | # import the GossipNode class
from Gossip import GossipNode
# port for this node
port = 5040
# ports for the nodes connected to this node
connected_nodes = [5010]
node = GossipNode(port, connected_nodes)
| [
"arundeepak92@gmail.com"
] | arundeepak92@gmail.com |
b717d72194a54b62e91551fe2539bd1154cafe3f | 0f9eba12953fd01890d21dc9a0ded59db762e61c | /Evenimente/Domain/Exceptions.py | e97029937b79c77dac4a181245fdf9115ca2661e | [] | no_license | alexvasiu/FP-Labs | e347195299ec38357aac2bce3efd7957ac0fb988 | 57c96f9c9c83a63c57f6c2d003ac0409dd8f068f | refs/heads/master | 2021-01-19T15:32:47.589088 | 2017-01-24T22:05:10 | 2017-01-24T22:05:10 | 79,826,729 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 944 | py | """
Modul pentru exceptii
"""
class EventException(Exception):
"""
Exceptii pentru Events
"""
def __init__(self, errors):
"""
constructor
"""
self.errors = errors
def getErrors(self):
"""
getter pentru errori
"""
return self.errors
class PersonException(Exception):
"""
Exceptii pentru Persons
"""
def __init__(self, errors):
"""
constructor
"""
self.errors = errors
def getErrors(self):
"""
getter pentru errori
"""
return self.errors
class DatabaseException(Exception):
"""
Exceptii pentru baza de date
"""
def __init__(self, errors):
"""
constructor
"""
self.errors = errors
def getErrors(self):
"""
getter pentru errori
"""
return self.errors | [
"alexandru.vasiu@cnglsibiu.ro"
] | alexandru.vasiu@cnglsibiu.ro |
eea176a62a5d8b1c0b1802bdc97c05245818753d | 09621e26041cdb8c362d9de1dedf9a6476db7230 | /theme.py | b88d84193931ccb4578a15936f26ed362e2a921c | [
"MIT"
] | permissive | havocesp/Serial-Sequence-Creator | b664019cd689e56afbf73dee32fe182172290446 | cf468a3db777d6b4348fd53d1daa8432f6889f11 | refs/heads/master | 2020-04-03T13:07:36.577686 | 2016-12-29T10:04:58 | 2016-12-29T10:04:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,115 | py | #---------------------------------------------------------------------
# This file was generated by wxImage Embedding Tool
from wx.lib.embeddedimage import PyEmbeddedImage
image_index = {}
image_catalog = {}
#---------------------------------------------------------------------
image_catalog["multimeter_png"] = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAABHNCSVQIC"
"AgIfAhkiAAAAUpJREFUaIHtml0OgjAMgDumdzDchMv54OW4CfEOBvEJs5"
"Cutlv3U8P3SJewb2tXBJ0bPFhmaD2BXE6B1lyo4PZet1oToXCDd7EYKQA"
"A8Ho+dGcj5Hq7k/H/TqGQXyuhDXfn2QIAAPOyJk1GyjTye5NIgHOzUBKL"
"UeNTUKmBeVmjE8Fi1Hgp5ovYvIC4BsIc3tOAKjosdrw2jf5bH9LUEgtgN"
"6BEsBhHnIv5FOpCIDV9ABT6AEA8p7FY7FrqsZotQNUEFdNCJKBRdNqwBV"
"o/VscQ7UCtJ1LJYqn0AU2kadrFMZqDyjEqBevKqTTZAc00rC4wjT6562J"
"UTaHjxDUkqu2A5qqHFN+BvWBLHb9FBUqtekiSgKTZSMZWe5yu9X6Ig/lO"
"fAq05hRojXmBrDdzPSAS6PF3sfkUMi/gqL8aWPjMSgpYwHwKnQKt+QB52"
"IBgteZUVAAAAABJRU5ErkJggg=="
)
image_index[0] = "multimeter_png"
#---------------------------------------------------------------------
image_catalog["psu_png"] = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAABHNCSVQIC"
"AgIfAhkiAAAAYZJREFUaIHtmDFuwkAQRf+acIOI0gonoIiRUlHDEZCPkj"
"pHsXKEUFNFCik4gaMtUW6AsFPF2QR7vGObHTbaV5md0TKf/WMPVioawWc"
"i6QL6EgRIc0MFy+JUuiqEQkUj1RQjBQCA1vmw1TCJ4ykZ/98WMmn7JYbG"
"9uStBQDAYvHQqRgu2+2rdS5LQB2b4/7X5+V4Rsao/C4M0gPL8ayxkLoYl"
"c/F+yb2XkDvHgDOvd4W+7u2Oe6r/uBaK/SANFchoKt9gAv1gFmMTQ8A3Z"
"8HvQXUffH3GhUbCpYAziPeFdYCpMfqJlgnQE2kk8ktdru31qlV6xxJMsf"
"h8Enm2MLugabNk2QO4EcIlUPtwx3br+I22gfvBbAtZNrg0jk2KOrNXFmc"
"Sum7TxxPybcS3lsoCJAmCJAmCJAmCJBmkL+UNtRNmVrnKNb3Z+vR87v1v"
"s4EAECaptV1lmXV9ePdqrp++nhh7em9hbwX4NRCpm1MuLYxCeO0NK0n4L"
"CWRqgTIAX4gPcWCgKk+QLxZni7L2b6WQAAAABJRU5ErkJggg=="
)
image_index[1] = "psu_png"
#---------------------------------------------------------------------
image_catalog["ok_png"] = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAABHNCSVQIC"
"AgIfAhkiAAAAUNJREFUaIHtmkEOgjAQRV/RK7g18QLcxlN6Gy9gwpYzKK"
"5MCJHCdKadNuFvK3Hem0HaYAjdiZbTeRegzQHgnXNscfq8p1KFxBK6U1h"
"biwIADMPLthphrtdbdL35EToALNOPd/rxLrqmGoB54RKIKgCk1uepAmCZ"
"5+Wx+7PuABr7UAHAMhL74AygtQ+VdUBqHxwBLOxDRR1IsQ9OAFb2wQHgX"
"/Gp9qGCEdIUD4UBLEfnF9cOaO1DQYAc9sGxAxb2IQEg5dCRyz4IAVIOHd"
"Y/m8uoRijFrGXxIAT49+UxiJyj84u4A1KIrWu1SRqhPRAl7IPiHpB2Iod"
"9UN7EaxCl7IPBg2yP2Vz2wehJnLPArZhtJdYgcsOZ7oU8OmG+mZtDlADa"
"fMGRkpKdcD9SanMAeKd5gBD7q0ELr1mjAC2k+RE6ALzzBcHsWuTPimOXA"
"AAAAElFTkSuQmCC"
)
image_index[2] = "ok_png"
#---------------------------------------------------------------------
image_catalog["cancel_png"] = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAABHNCSVQIC"
"AgIfAhkiAAAAU5JREFUaIHtmcENgzAQBNeQIvJCShG0QbFpI0VE4kUT4H"
"x5wHH27WFQvF+LY0ec7cUOoWlxZzWlDVhVAUrrIQ3GZY5nGZEUmjbsjYk"
"AADCOX66bRHXdSxy/fQtVgNKqAKWVBDANPaahdzGSW1sNsC7OhrDUzm4h"
"FoS1jhrg+f7QX771/NZ7JCV9ASYEwzyQ0UIMCJZ5IHMOWCCY5gHDJM6BY"
"JsHjBtZCoSHeYCwE2sgvMwDpCghQXiaB4hZSNtOTPMAOcwdmWObBxzS6J"
"5JD/PAiXHaK8XSASSjHhBUAI1BNgQNYG/F8Uixa1EAjpZLTwgzgHat94I"
"wAaRuVB4Q1F9KzVrPhsgCsEYEJkQyACvfsCCSz4U0RrRiQNB/aM6ukX0q"
"wQxnlrpBuqWMyxyvcMEh3dD81+HuFVUBSutwEp/oZVfSJBYB7qDbt1AFK"
"K0fYD2cJd8ouioAAAAASUVORK5CYII="
)
image_index[3] = "cancel_png"
#---------------------------------------------------------------------
image_catalog["add"] = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAABHNCSVQIC"
"AgIfAhkiAAAAH9JREFUSIljZGRiZqAlYKKp6fSwgAXG+P/v739qGszIxM"
"yIYgEDAwPDo0f3qWK4nJwinD244sD+gxuD/Qc32llADhi1gCBgwSZIKCJ"
"xyR8U2IUhNjA+wOYSBgaEy3HJYwNDP5KHvgVYIxkXICVyYYDmPmCE1cm0"
"qnAYRyt9QgAAOtsYvy5X+iwAAAAASUVORK5CYII="
)
image_index[4] = "add"
#---------------------------------------------------------------------
def GetData(name):
return image_catalog[name].GetData()
def GetImage(name):
return image_catalog[name].GetImage()
def GetBitmap(name):
return image_catalog[name].GetBitmap()
def GetIcon(name):
return image_catalog[name].GetIcon()
| [
"swprojects@gmx.com"
] | swprojects@gmx.com |
130484c602be97cd2b6a18b574890ebccd326ccf | 1280b7f41ec000a89e54aa7c04f62356603699fd | /week1apple.py | 84474b2f2b821285e1e0c39b726d06e1e0b786aa | [] | no_license | 49paunilay/coursera-python-classes-and-instance | 857685a4a3252705b3f16fedf64b109d63dec5f8 | 954974e323116f29de62b72e40db5e071c45e3f4 | refs/heads/master | 2022-11-25T20:14:53.984593 | 2020-07-29T18:58:05 | 2020-07-29T18:58:05 | 283,578,478 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 439 | py | class AppleBasket():
def __init__(self,apple_color,apple_quantity):
self.apple_color=apple_color
self.apple_quantity=apple_quantity
def increase(self):
self.apple_quantity=self.apple_quantity+1
def __str__(self):
return 'A basket of {quantity} {color} apples.'.format(quantity=self.apple_quantity,color=self.apple_color)
#apple1=AppleBasket('red',1)
#apple1.increase()
#print(apple1) | [
"noreply@github.com"
] | noreply@github.com |
a76078e2a0d76cfe7b993a4ca91e75cee09e58ed | dd724bfa22bffdffbb39d660a47a23031afe732a | /test_concat_string.py | 36750fef5580e80ba445591d3632297897ba56e7 | [] | no_license | onika2332/python_blockchain | 6214744bc53808c2d2adf4393bfd67132bd5cd44 | 77cfbd6cc9207cc1d2f47c777f723e3c03369681 | refs/heads/main | 2023-04-12T03:53:01.879946 | 2021-05-15T10:25:46 | 2021-05-15T10:25:46 | 366,103,904 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 642 | py | # from neo4j import GraphDatabase as gr
# adds = str(0xd80956d9c0b6fa21c9eaa4d4cfd4742d9294356fc1b4f715e6048ef9dc760f14)
# print(adds, type(adds))
# add = "address:{}".format(adds)
# query4 = "create (n:{}".format("player100") + "{" + "{},".format(add) + "team: juventus})"
# print(query4)
# session = gr.driver(uri="bolt://localhost:7687",auth=("neo4j","3.14159265")).session()
# session.run(query4)
str = '''MATCH (w:Wallet), (token:NFT)
WHERE w.address = "{}" AND token.address = "{}"
CREATE (w)-[rel:OWN]->(token)'''.format(1234,5678)
print(str)
#relation ship : wallet transfer token to another wallet ( transfer have its properties) | [
"namnam@coder.com"
] | namnam@coder.com |
7d82bac171aa94528a9fcf2bed9c90d61a47539b | c1596db5f04fc3f1fc06620f513783bb467b2b11 | /resources/markov_port.py | a29ef1fe388c09c67a21635086037cccddd376c0 | [
"MIT"
] | permissive | amunger3/PySABr | c8079278e0b38bb6022c7709dbe2f2431ef2e269 | cddce670be6d4b6e2aa8abb61ae6d79ad0fed645 | refs/heads/master | 2023-02-01T11:14:52.409060 | 2020-12-22T19:34:41 | 2020-12-22T19:34:41 | 318,056,564 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,090 | py | from collections import Counter, namedtuple
from pprint import pprint
_bline_def = Counter(
{
'AB': 2074,
'H': 556,
'2B': 130,
'3B': 3,
'HR': 103,
'BB': 239,
'SO': 573
}
)
# Format: {event: {state: {outs: [0, 1, 2]}}
_xtrabase_def = {
'1B': {
'1B': [0.25, 0.25, 0.25],
'2B': [0.30, 0.50, 0.75]
},
'2B': {
'1B': [0.15, 0.25, 0.50]
},
'OUT': {
'1B': [0.05, 0.05, 0.00],
'2B': [0.25, 0.25, 0.00],
'3B': [0.50, 0.50, 0.00]
}
}
# Calculated Run Expectancies given a batting line
class RunExpCalc:
def __init__(self):
self.bl = _bline_def
self.xb = _xtrabase_def
self._outs_9 = 27.0
def calc_frqs(self, bline: Counter = _bline_def):
# Calc missing counting stats
bline['PA'] = bline['AB'] + bline['BB'];
bline['OBE'] = bline['H'] + bline['BB'];
bline['OUT'] = bline['PA'] - bline['OBE'];
bline['1B'] = bline['H'] - sum([bline['2B'], bline['3B'], bline['HR']]);
# Calc outcome frequencies
fq = {}
freq_keys = ['OBE', 'BB', '1B', '2B', '3B', 'HR', 'SO', 'OUT']
for fk in freq_keys:
fq[fk] = bline[fk] / bline['PA'] * 1.0;
fq['PA'] = (fq['OBE'] / fq['OUT']) * self._outs_9 + self._outs_9 # Clever
# Calc rate stats
rt = {
'BBO': 1 - fq['SO'] / fq['OUT'],
'SLG': sum(list(map(lambda x, y: x * y, [1, 2, 3, 4], [fq['1B'], fq['2B'], fq['3B'], fq['HR']]))) / (1 - fq['BB']),
'AVG': sum([fq['1B'], fq['2B'], fq['3B'], fq['HR']]) / (1 - fq['BB'])
}
return {'fqs': fq, 'rts': rt}
def re_engine(self):
# Chance of not scoring from each BaseOut state
fqrts = self.calc_frqs()
xb = self.xb
# Construct XScoreProb Matrix as a dictionary
ld_bsrn = ['3B', '2B', '1B']
n_outs = [2, 1, 0]
n_add_bsrn = {'3B': [2, 1, 0], '2B': [1, 0], '1B': [0]}
bsox = dict()
for lead in ld_bsrn:
bsox[lead] = dict()
for out in n_outs:
bsox[lead][out] = dict()
for add_bsrn in n_add_bsrn[lead]:
bsox[lead][out][add_bsrn] = None
# Markov Chaining -> bsox[lead][outs][add_bsrn]
# Short defs
fq1B = fqrts['fqs']['1B']
fq2B = fqrts['fqs']['2B']
fqBB = fqrts['fqs']['BB']
fqOUT = fqrts['fqs']['OUT']
rtBBO = fqrts['rts']['BBO']
# 3B - 2 outs
bsox['3B'][2][2] = fqOUT
bsox['3B'][2][1] = bsox['3B'][2][2] * fqBB + fqOUT
bsox['3B'][2][0] = bsox['3B'][2][1] * fqBB + fqOUT
# 3B - 1 out
bsox['3B'][1][2] = bsox['3B'][2][2] * fqOUT * (1 - rtBBO * xb['OUT']['3B'][1])
bsox['3B'][1][1] = bsox['3B'][1][2] * fqBB + bsox['3B'][2][1] * fqOUT * (1 - rtBBO * xb['OUT']['3B'][1])
bsox['3B'][1][0] = bsox['3B'][1][1] * fqBB + bsox['3B'][2][0] * fqOUT * (1 - rtBBO * xb['OUT']['3B'][1])
# 3B - 0 outs
bsox['3B'][0][2] = bsox['3B'][1][2] * fqOUT * (1 - rtBBO * xb['OUT']['3B'][0])
bsox['3B'][0][1] = bsox['3B'][0][2] * fqBB + bsox['3B'][1][1] * fqOUT * (1 - rtBBO * xb['OUT']['3B'][0])
bsox['3B'][0][0] = bsox['3B'][0][1] * fqBB + bsox['3B'][1][0] * fqOUT * (1 - rtBBO * xb['OUT']['3B'][0])
# 2B - 2 outs
bsox['2B'][2][1] = bsox['3B'][2][2] * (fqBB + fq1B * (1 - xb['1B']['2B'][2])) + fqOUT
bsox['2B'][2][0] = bsox['2B'][2][1] * fqBB + bsox['3B'][2][1] * fq1B * (1 - xb['1B']['2B'][2]) + fqOUT
# 2B - 1 out
bsox['2B'][1][1] = (
bsox['3B'][1][2] * (fqBB + fq1B * (1 - xb['1B']['2B'][1])) +
bsox['3B'][2][1] * fqOUT * rtBBO * xb['OUT']['2B'][1] +
bsox['2B'][2][1] * fqOUT * (1 - rtBBO * xb['OUT']['2B'][1])
)
bsox['2B'][1][0] = (
bsox['2B'][1][1] * fqBB +
bsox['3B'][1][1] * fq1B * (1 - xb['1B']['2B'][1]) +
bsox['3B'][2][0] * fqOUT * rtBBO * xb['OUT']['2B'][1] +
bsox['2B'][2][0] * fqOUT * (1 - rtBBO * xb['OUT']['2B'][1])
)
# 2B - 0 outs
bsox['2B'][0][1] = (
bsox['3B'][0][2] * (fqBB + fq1B * (1 - xb['1B']['2B'][0])) +
bsox['3B'][1][1] * fqOUT * rtBBO * xb['OUT']['2B'][0] +
bsox['2B'][1][1] * fqOUT * (1 - rtBBO * xb['OUT']['2B'][0])
)
bsox['2B'][0][0] = (
bsox['2B'][0][1] * fqBB +
bsox['3B'][0][1] * fq1B * (1 - xb['1B']['2B'][0]) +
bsox['3B'][1][0] * fqOUT * rtBBO * xb['OUT']['2B'][0] +
bsox['2B'][1][0] * fqOUT * (1 - rtBBO * xb['OUT']['2B'][0])
)
# 1B - 2 outs
bsox['1B'][2][0] = (
bsox['2B'][2][1] * (fqBB + fq1B * (1 - xb['1B']['1B'][2])) +
bsox['3B'][2][1] * (fq1B * (xb['1B']['1B'][2]) + fq2B * (1 - xb['2B']['1B'][2])) +
fqOUT
)
# 1B - 1 out
bsox['1B'][1][0] = (
bsox['2B'][1][1] * (fqBB + fq1B * (1 - xb['1B']['1B'][1])) +
bsox['3B'][1][1] * (fq1B * (xb['1B']['1B'][1]) + fq2B * (1 - xb['2B']['1B'][1])) +
bsox['2B'][2][0] * fqOUT * rtBBO * xb['OUT']['1B'][1] +
bsox['1B'][2][0] * fqOUT * (1 - rtBBO * xb['OUT']['1B'][1])
)
# 1B - 0 outs
bsox['1B'][0][0] = (
bsox['2B'][0][1] * (fqBB + fq1B * (1 - xb['1B']['1B'][0])) +
bsox['3B'][0][1] * (fq1B * (xb['1B']['1B'][0]) + fq2B * (1 - xb['2B']['1B'][0])) +
bsox['2B'][1][0] * fqOUT * rtBBO * xb['OUT']['1B'][0] +
bsox['1B'][1][0] * fqOUT * (1 - rtBBO * xb['OUT']['1B'][0])
)
return bsox
def chance_rho(self):
# Chance of scoring
bsox = self.re_engine()
chrun = {'3B': dict(), '2B': dict(), '1B': dict()}
# 0 outs (3 remain)
chrun['3B'][3] = 1 - (bsox['3B'][2][0] + bsox['3B'][1][0] + bsox['3B'][0][0]) / 3.0
chrun['2B'][3] = 1 - (bsox['2B'][2][0] + bsox['2B'][1][0] + bsox['2B'][0][0]) / 3.0
chrun['1B'][3] = 1 - (bsox['1B'][2][0] + bsox['1B'][1][0] + bsox['1B'][0][0]) / 3.0
# 1 out (2 remain)
chrun['3B'][2] = 1 - (bsox['3B'][2][0] + bsox['3B'][1][0]) / 3.0
chrun['2B'][2] = 1 - (bsox['2B'][2][0] + bsox['2B'][1][0]) / 3.0
chrun['1B'][2] = 1 - (bsox['1B'][2][0] + bsox['1B'][1][0]) / 3.0
# 2 outs (1 remains)
chrun['3B'][1] = 1 - (bsox['3B'][2][0]) / 3.0
chrun['2B'][1] = 1 - (bsox['2B'][2][0]) / 3.0
chrun['1B'][1] = 1 - (bsox['1B'][2][0]) / 3.0
return chrun
def mk_rexmat(self):
# Prereqs
fqrts = self.calc_frqs()
bsox = self.re_engine()
chrun = self.chance_rho()
# Defs
RexMat = []
# Times a runner scores from an outcome
rHR = fqrts['fqs']['HR']
r3B_3 = chrun['3B'][3] * fqrts['fqs']['3B']
r2B_3 = chrun['2B'][3] * fqrts['fqs']['2B']
r1B_3 = chrun['1B'][3] * (fqrts['fqs']['1B'] + fqrts['fqs']['BB'])
r3B_2 = chrun['3B'][2] * fqrts['fqs']['3B']
r2B_2 = chrun['2B'][2] * fqrts['fqs']['2B']
r1B_2 = chrun['1B'][2] * (fqrts['fqs']['1B'] + fqrts['fqs']['BB'])
r3B_1 = chrun['3B'][1] * fqrts['fqs']['3B']
r2B_1 = chrun['2B'][1] * fqrts['fqs']['2B']
r1B_1 = chrun['1B'][1] * (fqrts['fqs']['1B'] + fqrts['fqs']['BB'])
# Runs/Game
rALL = (rHR + r3B_3 + r2B_3 + r1B_3) * fqrts['fqs']['PA']
rpi_3 = (rHR + r3B_3 + r2B_3 + r1B_3) * fqrts['fqs']['PA'] / 9.0
rpi_2 = (rHR + r3B_2 + r2B_2 + r1B_2) * fqrts['fqs']['PA'] * 2.0 / 3.0 / 9.0
rpi_1 = (rHR + r3B_1 + r2B_1 + r1B_1) * fqrts['fqs']['PA'] * 1.0 / 3.0 / 9.0
RE_1xx_0 = (1 - bsox['1B'][0][0]) + rpi_3
RE_1xx_1 = (1 - bsox['1B'][1][0]) + rpi_2
RE_1xx_2 = (1 - bsox['1B'][2][0]) + rpi_1
return rALL
if __name__ == '__main__':
rec = RunExpCalc()
pprint(rec.calc_frqs())
pprint(rec.re_engine())
pprint(rec.chance_rho())
pprint(rec.mk_rexmat())
| [
"munger.alex@gmail.com"
] | munger.alex@gmail.com |
a245864e2d7964cfa9019d8f25c2383734f5d3b6 | 1e97c9db02a906b48c359b684d713ef93d78d5e0 | /src/pas/plugins/azure_ad/__init__.py | 9f5aeb36aed90aec59449058816bd6910142e5ed | [] | no_license | enfold/msgraph-pas | 8a483f5cabcb22a4cf413c830f72791ddce5c196 | 1dde602f95f0ab90daad5b6b4b186de8aa705f3a | refs/heads/master | 2020-06-04T00:32:16.425082 | 2016-01-14T19:48:23 | 2016-01-14T19:48:23 | 191,796,638 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 846 | py | # -*- coding: utf-8 -*-
from AccessControl.Permissions import add_user_folders
from Products.PluggableAuthService import registerMultiPlugin
from pas.plugins.azure_ad.plugin import AzureADPlugin
from pas.plugins.azure_ad.plugin import manage_addAzureADPlugin
from pas.plugins.azure_ad.plugin import manage_addAzureADPluginForm
from pas.plugins.azure_ad.plugin import zmidir
import os
PROJECTNAME = 'pas.plugins.azure_ad'
def initialize(context):
"""Initializer called when used as a Zope 2 product."""
registerMultiPlugin(AzureADPlugin.meta_type) # Add to PAS menu
context.registerClass(
AzureADPlugin,
permission=add_user_folders,
icon=os.path.join(zmidir, 'azure_ad.png'),
constructors=(manage_addAzureADPluginForm,
manage_addAzureADPlugin),
visibility=None
)
| [
"jpg@rosario.com"
] | jpg@rosario.com |
fda25defe3f21558072dc520a6d9cc3681593a30 | af19447f93a1657de8ca77aeb6c9597b23ddd5e7 | /python/005_wholikesit.py | 3eacd95833ff6d744a4eddcdae262c0457f9eb81 | [] | no_license | juanedflores/Code-Katas | 43ee4f8ed851c4a7ccd2edc737a2df4da166778b | a7c68ea37857f3489f6df86c158c315ccb68000e | refs/heads/master | 2022-12-23T12:00:27.372980 | 2020-10-02T15:04:08 | 2020-10-02T15:04:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,829 | py | # Code Kata #5 - 6 kyu
# description: You probably know the "like" system from Facebook and other pages. People can "like" blog posts, pictures or other items. We want to create the text that should be displayed next to such an item.
# Implement a function likes :: [String] -> String, which must take in input array, containing the names of people who like an item. It must return the display text as shown in the examples:
# likes([]) # must be "no one likes this"
# likes(["Peter"]) # must be "Peter likes this"
# likes(["Jacob", "Alex"]) # must be "Jacob and Alex like this"
# likes(["Max", "John", "Mark"]) # must be "Max, John and Mark like this"
# likes(["Alex", "Jacob", "Mark", "Max"]) # must be "Alex, Jacob and 2 others like this"
# For 4 or more names, the number in `and 2 others` simply increases.
# My Solution:
def likes(names):
listlength = len(names)
text = ""
if (listlength == 0):
text = "no one likes this"
elif (listlength == 1):
text = f"{names[0]} like this"
elif (listlength == 2):
text = f"{names[0]} and {names[1]} like this"
elif (listlength == 3):
text = f"{names[0]}, {names[1]}, and {names[2]} like this"
else:
text = f"{names[0]}, {names[1]}, and {listlength-2} others like this"
return(text)
# Top Solution:
def likes1(names):
n = len(names)
return {
0: 'no one likes this',
1: '{} likes this',
2: '{} and {} like this',
3: '{}, {} and {} like this',
4: '{}, {} and {others} others like this'
}[min(4, n)].format(*names[:3], others=n-2)
# print(likes(["juan", "tomas"]))
print(likes(["juan", "tomas", "diana", "jose"]))
# What I learned:
# * String literals Literal String Interpolation.
# * Using dictionary mapping as switch
# * The min() function
| [
"juanedflores@gmail.com"
] | juanedflores@gmail.com |
3f922e75f5f171c2885caab43d2367500edd7630 | 83de24182a7af33c43ee340b57755e73275149ae | /aliyun-python-sdk-ice/aliyunsdkice/request/v20201109/UpdateCustomTemplateRequest.py | 6bcadb5d64d94f2e897f29f23f3529c5c5868e10 | [
"Apache-2.0"
] | permissive | aliyun/aliyun-openapi-python-sdk | 4436ca6c57190ceadbc80f0b1c35b1ab13c00c7f | 83fd547946fd6772cf26f338d9653f4316c81d3c | refs/heads/master | 2023-08-04T12:32:57.028821 | 2023-08-04T06:00:29 | 2023-08-04T06:00:29 | 39,558,861 | 1,080 | 721 | NOASSERTION | 2023-09-14T08:51:06 | 2015-07-23T09:39:45 | Python | UTF-8 | Python | false | false | 1,843 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkice.endpoint import endpoint_data
class UpdateCustomTemplateRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'ICE', '2020-11-09', 'UpdateCustomTemplate','ice')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_TemplateId(self): # String
return self.get_query_params().get('TemplateId')
def set_TemplateId(self, TemplateId): # String
self.add_query_param('TemplateId', TemplateId)
def get_TemplateConfig(self): # String
return self.get_query_params().get('TemplateConfig')
def set_TemplateConfig(self, TemplateConfig): # String
self.add_query_param('TemplateConfig', TemplateConfig)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
| [
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
1e5fcdb51fb8f715285cf4c0db3154605b1038a2 | d71a87fe9fb19d8ef032d7130c5f927707eab5e8 | /restapi/serializer/migrations/0001_initial.py | 3ef3a2a2511d90d9e5f362148a6812a36fb89a24 | [] | no_license | awesomiaaa/Backup-Files | f047c8badd5298d0e869be7023704810bc23539e | f07164b20e78e51bd2795db63f50b071c3d703d1 | refs/heads/master | 2020-04-18T19:23:19.998047 | 2019-02-02T17:25:09 | 2019-02-02T17:25:09 | 167,710,621 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,534 | py | # Generated by Django 2.1.4 on 2018-12-31 04:36
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Plant_Info',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('plant_no', models.IntegerField()),
('condition', models.CharField(max_length=50)),
('disease', models.TextField()),
('diagnosis', models.TextField()),
('model_pic', models.ImageField(default='restapi/imagemodel', upload_to='restapi/imagemodel')),
],
options={
'ordering': ('scan_no',),
},
),
migrations.CreateModel(
name='Scan',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('status', models.BooleanField(default=True)),
('date', models.DateTimeField(auto_now_add=True)),
],
options={
'ordering': ('date',),
},
),
migrations.AddField(
model_name='plant_info',
name='scan_no',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='scan_details', to='serializer.Scan'),
),
]
| [
"mjanette0513@gmail.com"
] | mjanette0513@gmail.com |
e4a793af798746349b9f5ccd7d60cb495492f42c | 194cdb69db2a06ba911c5c7f3b8d960e184cf85c | /src/common/trainerutils.py | a23fac5adfb4d7158288e2eaa95e76769b22a13e | [
"Apache-2.0"
] | permissive | liuweiping2020/pyml | a45477c3006130cd017f348302f5bef809ff32bb | 0b9a7a307b93f9313d7e1bb92b33ae330d681c73 | refs/heads/main | 2023-03-22T05:50:31.239186 | 2021-03-13T16:01:37 | 2021-03-13T16:01:37 | 347,101,380 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,302 | py | # -*- coding: utf-8 -*-
import numpy as np
import tensorflow as tf
# 1.compute label weight after each epoch using validation data.
def compute_labels_weights(weights_label, logits, labels):
"""
compute weights for labels in current batch, and update weights_label(a dict)
:param weights_label:a dict
:param logit: [None,Vocabulary_size]
:param label: [None,]
:return:
"""
labels_predict = np.argmax(logits, axis=1) # logits:(256,108,754)
for i in range(len(labels)):
label = labels[i]
label_predict = labels_predict[i]
weight = weights_label.get(label, None)
if weight == None:
if label_predict == label:
weights_label[label] = (1, 1)
else:
weights_label[label] = (1, 0)
else:
number = weight[0]
correct = weight[1]
number = number + 1
if label_predict == label:
correct = correct + 1
weights_label[label] = (number, correct)
return weights_label
# 2.get weights for each batch during traininig process
def get_weights_for_current_batch(answer_list, weights_dict):
"""
get weights for current batch
:param answer_list: a numpy array contain labels for a batch
:param weights_dict: a dict that contain weights for all labels
:return: a list. length is label size.
"""
weights_list_batch = list(np.ones((len(answer_list))))
answer_list = list(answer_list)
for i, label in enumerate(answer_list):
acc = weights_dict[label]
weights_list_batch[i] = min(1.5, 1.0 / (acc + 0.001))
# if np.random.choice(200)==0: #print something from time to time
# print("weights_list_batch:",weights_list_batch)
return weights_list_batch
# 3.compute loss using cross entropy with weights
def loss(logits, labels, weights):
loss = tf.losses.sparse_softmax_cross_entropy(labels, logits, weights=weights)
return loss
#######################################################################
# util function
def get_weights_label_as_standard_dict(weights_label):
weights_dict = {}
for k, v in weights_label.items():
count, correct = v
weights_dict[k] = float(correct) / float(count)
return weights_dict
| [
"liuweiping2020@163.com"
] | liuweiping2020@163.com |
ac889e857ffdec46dee63487ce3d200b38c4f3d8 | 326cf143962010cc0e14c6239824e4dd1807b18e | /homework01/generate_animals.py | 0fb77db457b1eff880743debca54bb54830b785f | [] | no_license | jweaston/COE332 | 8af73d423d3dd0631d6e6a807f84f30a75b488cf | 6f12ed10e9049cbfc0736ea33ea3697c4756e0d6 | refs/heads/main | 2023-04-19T23:59:24.327473 | 2021-05-08T00:58:56 | 2021-05-08T00:58:56 | 333,574,629 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 499 | py | import petname
import json
import random
heads = ["snake", "bull", "raven", "bunny"]
animal = {}
animals = []
for i in range(20):
animal["head"] = heads[random.randint(0,3)]
animal["body"] = petname.name() + "-" + petname.name()
animal["arms"] = random.randint(1,5)*2
animal["legs"] = random.randint(1,4)*3
animal["tails"] = animal['arms'] + animal['legs']
animals.append(animal.copy())
with open('./animals.json', 'w') as out:
json.dump(animals, out, indent=4) | [
"jweaston@isp02.tacc.utexas.edu"
] | jweaston@isp02.tacc.utexas.edu |
6d4f999516d1bf9e5181244fe6b40e4b59277e3c | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/4/lkf.py | d19b23b9ac9f5e630d69a188c39ceef2922114b2 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'lkF':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"juliettaylorswift@gmail.com"
] | juliettaylorswift@gmail.com |
998fd4942208a0e62b3528f8d40105fda9168973 | 7c41edc0e9915c92fe9df7ed8e66c19083457bc6 | /36.3.2.py | b6de32eee2391e057faef1c0412b281e492da493 | [] | no_license | evalley20/module36_2 | 9f874f22802dfa4d9b5f68817b73847fe26266a2 | d07ad7d83f2abf86fe10b5b40ba954608218df4e | refs/heads/master | 2020-04-14T21:40:44.369520 | 2019-01-09T17:21:28 | 2019-01-09T17:21:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 156 | py | import RoboPiLib as RPL
import wall_evasion
RPL.pinMode(16,RPL.INPUT)
RPL.pinMode(17,RPL.INPUT)
while True:
approach_and_turn16()
approach_and_turn17()
| [
"noreply@github.com"
] | noreply@github.com |
db72a78f672999d75cfb78a8226a98eec51b1be8 | 5838fee3b684e30dca940edd6f9fe05806fd47f9 | /Quiz Application.py | a1a4754a2e441b442f8408eaab0529ce707f10f4 | [] | no_license | aakashverma7869/Small-projects | 11bae0bf1e1eed5c2039337cbda7857486148e53 | a33c24b2b4b6e78441645d1031ce8c202a29d0fb | refs/heads/master | 2020-06-17T21:51:49.626633 | 2019-07-09T19:15:08 | 2019-07-09T19:15:08 | 196,068,616 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,605 | py | from random import randrange
questions = []
def choose_y_n(string):
choice = input(string + " : ").lower()
while True:
if choice == 'n':
return 'n'
elif choice == 'y':
return 'y'
else:
choice = input('invalid choice .... select either (y/n) : ').lower()
def choose_options(string, options):
option_string = '/'.join(options)
string += '(' + option_string + ') : '
choice = input(string).upper()
while True:
if choice in options:
return choice
else:
choice = input('invalid choice.......please try again with ('+ option_string + ') : ').upper()
def add_a_question():
while True:
ques = {}
print('Enter a Question : ' , end="")
ques['question'] = input()
ques['option_A'] = input('enter option A : ')
ques['option_B'] = input('enter option B : ')
options = ['A', 'B']
for i in "CDEF":
if choose_y_n(" want to add more options (y/n)") == 'y' :
ques['option_'+i] = input("enter option " + i + " : ")
options.append(i)
else:
break
ques['options'] = options
ques['answer'] = choose_options('Enter the option which is the answer', options)
questions.append(ques)
if choose_y_n("would you like to add another question (y/n)") == 'n' :
break
def new_random_questions():
ques_visited = []
cnt = 0
stop_flag = 0
while len(ques_visited) < len(questions) and cnt < 15 and stop_flag < 100:
number = randrange(0, len(questions))
stop_flag += 1
if number in ques_visited:
continue
ques_visited.append(number)
cnt += 1
if stop_flag >= 100 and not ques_visited:
print('No More Questions')
return
return ques_visited
def play_quiz():
wrong_questions = []
questions_correct = 0
questions_wrong = 0
ques_asked = new_random_questions()
if not ques_asked:
print('NO questions available')
return
ques_cnt = 0
print('#########################################################')
print('\n:):):):):) Quiz Now Starting :):):):):):)\n')
for q_no in ques_asked:
for u, v in questions[q_no].items():
if u == 'answer' or u == 'options':
continue
elif u == 'question':
print(u.upper(), ques_cnt+1, ' : ', v, '\n')
else:
print(u.upper(), ' : ', v)
choice = choose_options('\nChoose among options which are', questions[q_no]['options'])
if choice == questions[q_no]['answer']:
print('\nGreat.....you are correct\n')
questions_correct += 1
else:
print('\nSorry.....you are wrong\n')
questions_wrong += 1
wrong_questions.append(questions[q_no])
ques_cnt += 1
print('\n$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$\n')
print('\n\n^^^^^^^^^^^^^^^^^^^^^Quiz Over^^^^^^^^^^^^^^^^^^^\n')
print('..........Result...........')
print('Total Questions : ', len(ques_asked))
print('Questions Answered Correctly = ', questions_correct)
print('Questions Answered Incorrectly = ', questions_wrong)
print()
if questions_wrong > 0 :
if choose_y_n('Would you like to see the questions you attempted incorrectly (y/n)') == 'y':
print()
for wrong_question in wrong_questions:
print('Question : ', wrong_question['question'])
print('Correct Option : ', wrong_question['answer'])
print('\n')
print('####################################################################')
def openWindow():
while True:
print("\n**************************************************************************\n")
print(':D:D:D:D:D:D:D:D Menu :D:D:D:D:D:D:D\n')
print("1. Add a Question")
print('2. Play the Quiz')
print('0. Exit')
print('\nEnter your Choice')
ch = int(input())
if ch == 1:
add_a_question()
elif ch == 2:
play_quiz()
elif ch==0:
return
print('************************************************************************\n')
if __name__ == '__main__':
openWindow()
| [
"noreply@github.com"
] | noreply@github.com |
fc8411ec07e75043aedf73e15f386af93d873cfc | 130b3396af4da46974153dbea27d62c0ae51d6aa | /middle_project/middle_project/urls.py | 64e273d6863390ae3b22000c371376d942f50913 | [] | no_license | mrdu123456/python | f259b9f28d8219338e05cf5190557f9e0ecad2ef | 2ee614d0ad453350d1db29ac465dea689d933292 | refs/heads/master | 2020-05-27T16:48:57.327011 | 2019-05-26T16:48:30 | 2019-05-26T16:48:30 | 188,709,102 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 906 | py | """middle_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
urlpatterns = [
    # Built-in Django admin site.
    path('admin/', admin.site.urls),
    # Each application delegates to its own urls module under a URL prefix.
    path('booksapp/',include('booksapp.urls')),
    path('userapp/',include('userapp.urls')),
    path('orderapp/',include('orderapp.urls')),
]
| [
"18000343457m@sina.cn"
] | 18000343457m@sina.cn |
a2a273620bbc564927e4d6d8cfa087e30985ab68 | 54587e1561459031baed0f201ab62494db896e82 | /setup.py | e124369d235b2a6789c1c7adcc0f65741ac1703d | [
"BSD-2-Clause"
] | permissive | achirko/vpc.sh | 2ca86c9f7327cb15f15ebfb532aedd266b1fae05 | a16ea45820942bbe9444f401efeea4727bc91e2a | refs/heads/master | 2021-01-17T20:55:43.747262 | 2018-04-04T12:09:40 | 2018-04-04T12:09:40 | 61,823,227 | 2 | 2 | null | 2016-07-09T14:45:16 | 2016-06-23T17:16:15 | Python | UTF-8 | Python | false | false | 1,099 | py | from setuptools import setup
# Build the long description for PyPI: prefer a reST conversion of the
# Markdown README via pypandoc, falling back to the raw Markdown text when
# pypandoc is missing or the conversion cannot read the file.
try:
    import pypandoc
    readme = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
    # FIX: the original left the file handle open (open(...).read());
    # a context manager closes it deterministically.
    with open('README.md') as readme_file:
        readme = readme_file.read()

setup(
    name='vpc.sh',
    version='0.1',
    description="CLI tool to run shell commands on ec2 instances.",
    long_description=readme,
    url="https://github.com/achirko/vpc.sh",
    author='Oleksandr Chyrko',
    author_email='aleksandr.chirko@gmail.com',
    py_modules=['vpc_sh'],
    install_requires=['Click', 'boto', 'fabric', 'tabulate'],
    entry_points='''
        [console_scripts]
        vpc.sh=vpc_sh:vpc_sh
    ''',
    classifiers=[
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: BSD License',
        'Operating System :: Unix',
        'Operating System :: POSIX',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Topic :: System :: Clustering',
        'Topic :: System :: Systems Administration',
    ],
)
| [
"oleksandrc@backbase.com"
] | oleksandrc@backbase.com |
33a14933d7ae682cf71a2741911a156808cdadeb | 9ee80444d82e91add510832ae91828acd84c809e | /TBennett_TTT.py | bc698e52217071ed32e900d97e83c1b41ae9adff | [] | no_license | IAlwaysBeCoding/UdemyPythonBC_TTT | dcfe2bd8f51962becf6bb2dcc46332519d8ee14c | 3b3ce7c4a32ed7f4bdc130ae0809f0224ff83060 | refs/heads/master | 2020-04-06T04:18:04.139614 | 2016-01-13T01:48:23 | 2016-01-13T01:48:23 | 49,545,804 | 0 | 0 | null | 2016-01-13T03:08:52 | 2016-01-13T03:08:52 | null | UTF-8 | Python | false | false | 4,746 | py | import sys
game = ['','','','','','','','','']  # nine board cells: '', 'X' or 'O'
p1 = 'Player 1'  # display name of the X player (set in start())
p2 = 'Player 2'  # display name of the O player (set in start())
again = ''  # buffer for the play-again answer
def board():
    """Print the reference grid showing which number selects which square."""
    print '''
 ___________
 (' | | ')
 (' 0 | 1 | 2 ')
 ('___|___|___')
 (' | | ')
 (' 3 | 4 | 5 ')
 ('___|___|___')
 (' | | ')
 (' 6 | 7 | 8 ')
 ('___|___|___')
 '''
def gameOver():
if game[0] == 'X' and game[1] == 'X' and game[2] =='X' or game[3] ==' X' and game[4] == 'X' and game[5] =='X' or game[6] == 'X' and game[7] == 'X' and game[8] == 'X' or game[6] == 'X' and game[4] == 'X' and game[2] =='X'or game[0] == 'X' and game[4] == 'X' and game[8] =='X'or game[0] == 'X' and game[3] == 'X' and game[6] =='X'or game[1] == 'X' and game[4] == 'X' and game[7] =='X' or game[2] == 'X' and game[5] == 'X' and game[8] =='X':
print p1, 'WINS!', '''
___ ____ ___ _ ___ ___ _ _____ ____
/ _ `/ _ `/ ' \/ -_) / _ \ |/ / -_) __/
\_, /\_,_/_/_/_/\__/ \___/___/\__/_/
/___/
'''
return True
if game[0] == 'O' and game[1] == 'O' and game[2] =='O' or game[3] == 'O' and game[4] == 'O' and game[5] =='O' or game[6] == 'O' and game[7] == 'O' and game[8] == 'O' or game[6] == 'O' and game[4] == 'O' and game[2] =='O'or game[0] == 'O' and game[4] == 'O' and game[8] =='O'or game[0] == 'O' and game[3] == 'O' and game[6] =='O'or game[1] == 'O' and game[4] == 'O' and game[7] =='O' or game[2] == 'O' and game[5] == 'O' and game[8] =='O':
print p2, 'WINS!', '''
___ ____ ___ _ ___ ___ _ _____ ____
/ _ `/ _ `/ ' \/ -_) / _ \ |/ / -_) __/
\_, /\_,_/_/_/_/\__/ \___/___/\__/_/
/___/
'''
return True
elif '' not in game:
print 'TIE', '''
__ .__
_/ |_|__| ____ _________ _____ ____
\ __\ |/ __ \ / ___\__ \ / \_/ __ \
| | | \ ___/ / /_/ > __ \| Y Y \ ___/
|__| |__|\___ > \___ (____ /__|_| /\___ >
\/ /_____/ \/ \/ \/
'''
playAgain()
def formatValue(val):
    """Render an empty cell as a single space so the printed grid keeps its shape."""
    if val == '':
        return ' '
    return val
def printBoard(state):
    """Print the current game state using the same grid layout as board()."""
    # Python 2: map() returns a list, so the indexing in format() below works.
    board = map(formatValue, state)
    #Formats the values of state
    print '''
 ___________
 (' | | ')
 (' {0} | {1} | {2} ')
 ('___|___|___')
 (' | | ')
 (' {3} | {4} | {5} ')
 ('___|___|___')
 (' | | ')
 (' {6} | {7} | {8} ')
 ('___|___|___')
 '''.format(board[0], board[1], board[2], board[3], board[4], board[5], board[6], board[7], board[8]);
def playAgain():
    """Reset the board and ask whether to start a new game; exit otherwise."""
    global game
    game = ['','','','','','','','','']
    global again
    # BUG FIX: the original used '==' (a discarded comparison) instead of
    # '=', so the answer was never stored; and the test
    # "again == 'yes' or 'y' or ..." was always truthy, so the game
    # restarted no matter what the user typed.
    again = raw_input("Would you like to play again? (Y/N)")
    if again.lower() in ('y', 'yes'):
        start()
    else:
        sys.exit()
def getMove():
    """Prompt the current player and return their raw square selection."""
    prompt = "Please select your corresponding square using 0,1,2,3,4,5,6,7,8\r\n You selected: "
    return raw_input(prompt)
def playA():
    # Player 1's turn: read a square, validate it, then mark it with 'X'
    # and hand control to Player 2 (or end the game).
    print "\r\nYour move, ", p1
    n = getMove()
    while True:
        if n not in ('0','1','2','3','4','5','6','7','8'):
            print "Please select a valid box number"
            n = getMove()
            # NOTE(review): control falls straight through to int(n)
            # below, so a second invalid entry here would raise
            # ValueError -- confirm and revalidate if this matters.
        if game[int(n)] != '':
            print "Sorry, that box is already taken"
            n = getMove()
        else:
            game[(int(n))] = 'X'
            board()
            printBoard(game)
            # gameOver() returns True on a win; on a tie it handles the
            # restart itself and returns None.
            if not gameOver():
                playB()
            else:
                playAgain()
def playB():
    # Player 2's turn: read a square, validate it, then mark it with 'O'
    # and hand control back to Player 1 (or end the game).
    print "\r\nYour move, ", p2
    n = getMove()
    while True:
        if n not in ('0','1','2','3','4','5','6','7','8'):
            print "Please select a valid box number"
            n = getMove()
            # NOTE(review): same validation gap as playA -- a second
            # invalid entry falls through to int(n) and would raise.
        if game[int(n)] != '':
            print "Sorry, that box is already taken"
            n = getMove()
        else:
            game[(int(n))] = 'O'
            board()
            printBoard(game)
            if not gameOver():
                playA()
            else:
                playAgain()
def start():
    """Greet the players, collect both names and hand off to Player 1."""
    print "Hello, and welcome to Travis' first python project - Tic Tac Toe!\r\n \r\nThis version requires two human players. Player One will move first as X and Player Two will be O."
    print '''\r\nMoves will be made by selecting the corresponding number with the box you want to mark
 0 | 1 | 2
 -----------
 3 | 4 | 5
 -----------
 6 | 7 | 8
 '''
    # Names are stored in module globals so every helper can print them.
    global p1
    p1 = raw_input('Player One Name: ')
    global p2
    p2 = raw_input('Player Two Name: ')
    board()
    playA()
start() | [
"soldierforus@yahoo.com"
] | soldierforus@yahoo.com |
76588fb6be5fce9ae0dd579229278744751922cc | 818f369e36539c57f3995e5acb1d1b78eb516cd9 | /client-side-partV2/Client-V2/frontal-face-detection-client-video-record.py | 0acaf29f6f2c00e96d85ade318fd68033e55dacd | [] | no_license | adhomse/FaceRecognitionForEmployees | 2f0361c8a05cddb3a212ac50a1af3b0bbdfdbfe5 | 7093a54bfba2c9a66bfcbb08fb1269f08501c9e3 | refs/heads/master | 2020-09-10T02:04:09.125352 | 2019-11-14T07:33:38 | 2019-11-14T07:33:38 | 221,621,893 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,232 | py | #import cv2
import time
import json
import socket
import base64
import numpy as np
from threading import Thread
import os
import sys
import numpy as np
sys.path.append('.')
import tensorflow as tf
#import detect_face
import time
import pickle
import cv2
import io
import socket
import struct
import time
import pickle
import zlib
IP_SERVER = "192.168.1.222"  # address of the remote recognition server
PORT_SERVER = 8485  # server TCP port
TIMEOUT_SOCKET = 10  # socket timeout in seconds (unused below)
SIZE_PACKAGE = 4096  # network chunk size (unused below)
DEVICE_NUMBER = 0  # index of the local capture device
IMAGE_HEIGHT = 480  # requested capture height, pixels
IMAGE_WIDTH = 640  # requested capture width, pixels
COLOR_PIXEL = 3  # channels per pixel (unused below)
#cascPath = sys.argv[1]
# Haar cascade for frontal-face detection; NOTE(review): absolute Windows
# path -- breaks on any other machine, consider a relative path.
faceCascade = cv2.CascadeClassifier('E:/clients/Face_Detection_&_RecognitionV2/client-side-partV2/Client-V2/haarcascade_frontalface_alt.xml')
#face_Cascade = 'E:/clients/client-side-part/haarcascade_frontalface_alt.xml'
# client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# client_socket.connect(('192.168.1.222', 8485))
# connection = client_socket.makefile('wb')
# while True:
# video_capture = cv2.VideoCapture(0)
# video_capture.set(3, IMAGE_WIDTH)
# video_capture.set(4, IMAGE_HEIGHT)
# Connect to the recognition server and stream JPEG-encoded frames to it.
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect(('192.168.1.222', 8485))
connection = client_socket.makefile('wb')
video_capture = cv2.VideoCapture(DEVICE_NUMBER)
video_capture.set(3, IMAGE_WIDTH)
video_capture.set(4, IMAGE_HEIGHT)
# Record everything that is captured to a local AVI as well.
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter('output.avi', fourcc, 20.0, (640, 480))
#fourcc = cv2.VideoWriter_fourcc(*'DIVX')
#out = cv2.VideoWriter('3F_0726.mp4', fourcc, fps=30, frameSize=(640,480))
img_counter = 0
encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]
font = cv2.FONT_HERSHEY_SIMPLEX
while True:
    # Capture frame-by-frame
    ret, frame = video_capture.read()
    out.write(frame)
    gray1 = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    faces = faceCascade.detectMultiScale(gray1,scaleFactor=1.1,minNeighbors=5,minSize=(30, 30),flags=cv2.CASCADE_SCALE_IMAGE)
    # Draw a rectangle around the faces
    cv2.rectangle(frame, (261,174),(457,380),(255,0,255),2)
    # Pickle the JPEG buffer and send it length-prefixed (big-endian u32).
    result, frame1 = cv2.imencode('.jpg', frame, encode_param)
    data1 = pickle.dumps(frame1, 0)
    size = len(data1)
    print("{}: {}".format(img_counter, size))
    client_socket.sendall(struct.pack(">L", size) + data1)
    img_counter += 1
    for (x, y, w, h) in faces:
        print(x,y)
        print(x+w,y+h)
        cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2)
        # Greet only when the face sits inside the fixed magenta box above.
        if(210<int(x)<350 and 150<int(y)<250 and 300<int(x+w)<460 and 310<int(y+h)<450):
            welcome=" Welcome to Infogen labs"
            cv2.putText(frame,welcome, (0, 50), font, 1, (0,0,255), 2, cv2.LINE_AA)
    # Display the resulting frame
    cv2.imshow('Video', frame)
    # result, frame1 = cv2.imencode('.jpg', frame, encode_param)
    # data1 = pickle.dumps(frame1, 0)
    # size = len(data1)
    # print("{}: {}".format(img_counter, size))
    # client_socket.sendall(struct.pack(">L", size) + data1)
    # img_counter += 1
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
# When everything is done, release the capture
video_capture.release()
out.release()
cv2.destroyAllWindows()
| [
"noreply@github.com"
] | noreply@github.com |
5e06499b451d23569125ce565ef17ecde79523be | 83e6f6bcd3dd97bc053ed21fd44daf9f4c32814b | /Crawler_ADS.py | df1979a724348da28efaf608efc16199916e54c1 | [] | no_license | fallingelf/Crawler_ADS | 2adfe72af35815d25642493f968f1142f1d3b050 | 83c0e0ada3699c800f2f1b0a4ffe141fb0a4cf97 | refs/heads/master | 2020-03-18T01:08:55.758041 | 2018-05-20T07:42:27 | 2018-05-20T07:42:27 | 134,129,243 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,992 | py | # 在ADS上检索并爬取文件
import requests
from urllib.request import urlopen, urlretrieve,Request
from bs4 import BeautifulSoup
import re, os
from urllib.error import *
import time, sys
from PyPDF2 import PdfFileReader
def check_pdf(pdf_file):
    """Return 1 if *pdf_file* parses as a valid PDF, else 0.

    Used as a cheap integrity check on previously downloaded papers.
    """
    try:
        PdfFileReader(pdf_file)
        return 1
    except Exception:
        # Narrowed from a bare 'except:', which also swallowed
        # KeyboardInterrupt and SystemExit.
        return 0
def double_check(pdf_file, lable_href, url_href, ads_paper):
    """Re-validate a downloaded PDF; if unreadable, re-fetch it via Sci-Hub."""
    try:
        PdfFileReader(pdf_file)
    except Exception:
        # Narrowed from a bare 'except:'; message typo 'form' fixed.
        print('double check fails, re-download from Sci-Hub!')
        down_DOI(lable_href, url_href, ads_paper)
def report(count, blockSize, totalSize):
    """Progress hook for urlretrieve: rewrite the current line with percent done."""
    done = int(count * blockSize * 100 / totalSize)
    sys.stdout.write("\r{0}% complete".format(done))
    sys.stdout.flush()
def url_decomposed(url):
    """Split the query string of *url* into a {parameter: value} dict.

    Replaces a fragile hand-rolled parser whose str.replace() calls
    removed EVERY occurrence of a substring, corrupting values that
    repeated elsewhere in the query string.  Blank values are kept, as
    the original did.
    """
    from urllib.parse import urlsplit, parse_qsl
    return dict(parse_qsl(urlsplit(url).query, keep_blank_values=True))
def mode_switch(url, filename):  # For A&A journals the GET may need to become a POST
    """Fetch *url* into *filename*; on URLError retry via a POST form submit."""
    try:
        urlretrieve(url, filename, reporthook=report)
    except URLError:
        # Re-submit the decomposed query parameters as form data, then
        # extract the real PDF link from the response page.
        param = url_decomposed(url)
        obj=BeautifulSoup(requests.post(url, data=param).text,'html5lib')
        raw_paper_url = obj.find_all('dd')[0].find_all('a')[0]['href']
        raw_paper_url=raw_paper_url.replace(re.findall(re.compile('.*url=https%3a/'), raw_paper_url)[0],'https://')
        url =raw_paper_url.replace(re.findall(re.compile('\.pdf.*'), raw_paper_url)[0], '.pdf')
        urlretrieve(url, filename, reporthook=report)
    return
def auto_down(url, filename, lable_href, url_href, ads_paper):
    """Download *url* to *filename*; retry on short reads, and fall back to
    a Sci-Hub lookup (via the paper's DOI) when the direct fetch fails."""
    try:
        try:
            sys.stdout.write('\rFetching ' + filename + ' from ' + url + ' ...\n')
            mode_switch(url, filename)
            sys.stdout.write("\rDownload complete, saved as %s" % (filename) + '\n\n')
            sys.stdout.flush()
        except ContentTooShortError:
            print('Network conditions is not good. Reloading...')
            # BUG FIX: the original retried with auto_down(url, filename),
            # dropping three required arguments and raising TypeError.
            auto_down(url, filename, lable_href, url_href, ads_paper)
    except Exception:  # narrowed from a bare except (was annotated '# HTTPError')
        print('Try to download from Sci-Hub')
        down_DOI(lable_href, url_href, ads_paper)
    return
def get_context(url):
    """Fetch *url* and return its parsed BeautifulSoup document."""
    page = urlopen(url)
    return BeautifulSoup(page, 'html5lib')
def down_DOI(lable_href,url_href, ads_paper):
    """Resolve the paper's DOI from its ADS page and fetch the PDF from Sci-Hub.

    Every outcome (downloaded / no PDF on Sci-Hub / no DOI) is logged to
    the open *ads_paper* file.
    """
    print(lable_href,url_href)
    # Scrape the DOI anchor text from the article page.
    DOI = re.findall(re.compile('>(.*)</a>'),
                     str(get_context(url_href).find_all('a', {'href': re.compile('https:\/\/doi\.org\/.*')})))
    if DOI:
        param_sci = {'request': DOI[0]}
        # Sci-Hub embeds the PDF in an <iframe>.
        url_list = BeautifulSoup(requests.post('http://sci-hub.tw/', data=param_sci).text, 'html5lib').find_all(
            'iframe')
        if url_list:
            url_tail = url_list[0]['src']
            # Protocol-relative '//...' sources need an explicit scheme.
            if str(url_tail)[0:5] != 'http:':
                url_paper = 'http:' + url_tail
            else:
                url_paper = url_tail
            print(url_paper)
            auto_down(url_paper, path_paper + lable_href + '.pdf',lable_href,url_href, ads_paper)
            print(lable_href, DOI[0], url_paper)
            ads_paper.write(str(lable_href + ' ' + DOI[0] + ' ' + url_paper + '\n'))
        else:
            print(lable_href, DOI[0], 'SCI does not provide pdf file')
            ads_paper.write(str(lable_href + ' ' + DOI[0] + ' ' + 'SCI does not provide pdf file' + '\n'))
    else:
        print(lable_href, 'This paper does not have DOI')
        ads_paper.write(str(lable_href + '\n'))
    print('--------------------------------------------------------------------------------------------------------------')
    return None
def down_FXG(lable_single_paper,lable_href,url_single_paper,order_paper,url_href,i,ads_paper):
    """Download paper *i* via its first available ADS source link.

    'F' = publisher full text, 'X' = arXiv e-print, 'G' = scanned GIF/PDF
    from the ADS article service; the chosen source and URL are logged to
    *ads_paper*.
    """
    if ['F'] in lable_single_paper:
        print(lable_href[order_paper[i]][0], 'F', url_single_paper[lable_single_paper.index(['F'])])
        auto_down(url_single_paper[lable_single_paper.index(['F'])],
                  path_paper + lable_href[order_paper[i]][0] + '.pdf', lable_href[order_paper[i]][0],
                  url_href[order_paper[i]], ads_paper)
        ads_paper.write(str(
            lable_href[order_paper[i]][0] + ' ' + 'F' + ' ' + url_single_paper[lable_single_paper.index(['F'])] + '\n'))
    elif ['X'] in lable_single_paper:
        print(lable_href[order_paper[i]][0], 'X', url_single_paper[lable_single_paper.index(['X'])])
        # Pull the arXiv identifier out of the citation_pdf_url meta tag.
        index = re.findall(re.compile('http://arxiv.org/pdf/(.*)" name="citation_pdf_url"/>'),
                           str(get_context(url_single_paper[lable_single_paper.index(['X'])])));
        print(index)
        try:
            auto_down('https://arxiv.org/pdf/' + index[0] + '.pdf', path_paper + lable_href[order_paper[i]][0] + '.pdf',
                      lable_href[order_paper[i]][0], url_href[order_paper[i]], ads_paper)
        except HTTPError:
            # Old-style identifiers live under the astro-ph/ prefix.
            auto_down('https://arxiv.org/pdf/astro-ph/' + index[0] + '.pdf',
                      path_paper + lable_href[order_paper[i]][0] + '.pdf', lable_href[order_paper[i]][0],
                      url_href[order_paper[i]], ads_paper)
        ads_paper.write(str(lable_href[order_paper[i]][0] + ' ' + 'X' + ' ' + 'https://arxiv.org/pdf/astro-ph/' + index[
            0] + '.pdf' + '\n'))
    elif ['G'] in lable_single_paper:
        print(lable_href[order_paper[i]][0], 'G', url_single_paper[lable_single_paper.index(['G'])])
        url_sample = 'http://adsbit.harvard.edu/cgi-bin/nph-iarticle_query?XXX&defaultprint=YES&filetype=.pdf'
        auto_down(url_sample.replace('XXX', lable_href[order_paper[i]][0]),
                  path_paper + lable_href[order_paper[i]][0] + '.pdf', lable_href[order_paper[i]][0],
                  url_href[order_paper[i]], ads_paper)
        ads_paper.write(str(lable_href[order_paper[i]][0] + ' ' + 'G' + ' ' + url_sample.replace('XXX', lable_href[
            order_paper[i]][0]) + '\n'))
    else:
        ads_paper.write(str(lable_href[order_paper[i]][0] + ' ' + '!(FXG)' + '\n'))
def crawler_single_web(bsobj, path_paper, ads_paper):
    """Walk one ADS result page, downloading every listed paper.

    Already-downloaded valid PDFs are skipped; corrupt ones are re-fetched
    via down_FXG / Sci-Hub, and every attempt is logged to *ads_paper*.
    """
    hrefs = bsobj.find_all('a', {'href': re.compile('http://adsabs.harvard.edu/cgi-bin/nph-data_query\?bibcode=.*')})
    order_paper = [];
    n_op = 0 # position within hrefs of each link that carries a paper title
    url_href = [];
    lable_href = []
    for href in hrefs:
        lable_href.append(re.findall(re.compile('>(.*)</a>'), str(href)))
        url_href.append(href['href'])
        # Title anchors are long; the short F/X/G source anchors are not.
        if len(str(lable_href[-1])) > 5:
            order_paper.append(n_op)
        n_op = n_op + 1
    order_paper.append(len(url_href)-1)
    for i in range(len(order_paper) - 1):
        # Slice out this paper's title plus its source links (F/X/G).
        lable_single_paper = lable_href[order_paper[i]:order_paper[i + 1]]
        url_single_paper = url_href[order_paper[i]:order_paper[i + 1]]
        pdf_file=path_paper + lable_href[order_paper[i]][0] + '.pdf'
        if lable_href[order_paper[i]][0] + '.pdf' not in os.listdir(path_paper):
            if ['F'] in lable_single_paper:
                print(lable_href[order_paper[i]][0], 'F', url_single_paper[lable_single_paper.index(['F'])])
                auto_down(url_single_paper[lable_single_paper.index(['F'])],path_paper + lable_href[order_paper[i]][0] + '.pdf',lable_href[order_paper[i]][0],url_href[order_paper[i]], ads_paper)
                double_check(pdf_file, lable_href[order_paper[i]][0], url_href[order_paper[i]], ads_paper)
                ads_paper.write(str(lable_href[order_paper[i]][0] + ' ' + 'F' + ' ' + url_single_paper[lable_single_paper.index(['F'])] + '\n'))
            elif ['X'] in lable_single_paper:
                print(lable_href[order_paper[i]][0], 'X', url_single_paper[lable_single_paper.index(['X'])])
                index =re.findall(re.compile('http://arxiv.org/pdf/(.*)" name="citation_pdf_url"/>'), str(get_context(url_single_paper[lable_single_paper.index(['X'])])));print(index)
                try:
                    auto_down('https://arxiv.org/pdf/'+index[0]+'.pdf',path_paper + lable_href[order_paper[i]][0] + '.pdf',lable_href[order_paper[i]][0],url_href[order_paper[i]], ads_paper)
                except HTTPError:
                    # Old-style arXiv identifiers live under astro-ph/.
                    auto_down('https://arxiv.org/pdf/astro-ph/' + index[0] + '.pdf',path_paper + lable_href[order_paper[i]][0] + '.pdf',lable_href[order_paper[i]][0],url_href[order_paper[i]], ads_paper)
                ads_paper.write(str(lable_href[order_paper[i]][0] + ' ' + 'X' + ' ' + 'https://arxiv.org/pdf/astro-ph/'+index[0]+'.pdf' + '\n'))
            elif ['G'] in lable_single_paper:
                print(lable_href[order_paper[i]][0], 'G', url_single_paper[lable_single_paper.index(['G'])])
                url_sample='http://adsbit.harvard.edu/cgi-bin/nph-iarticle_query?XXX&defaultprint=YES&filetype=.pdf'
                auto_down(url_sample.replace('XXX', lable_href[order_paper[i]][0]),path_paper + lable_href[order_paper[i]][0] + '.pdf',lable_href[order_paper[i]][0],url_href[order_paper[i]], ads_paper)
                ads_paper.write(str(lable_href[order_paper[i]][0] + ' ' + 'G' + ' ' + url_sample.replace('XXX', lable_href[order_paper[i]][0]) + '\n'))
            else:
                ads_paper.write(str(lable_href[order_paper[i]][0] + ' ' + '!(FXG)' + '\n'))
        elif check_pdf(path_paper+lable_href[order_paper[i]][0] + '.pdf'):
            print(lable_href[order_paper[i]][0], ' This paper had been downloaded')
        else:
            # File exists but is corrupt: retry the source links.
            down_FXG(lable_single_paper, lable_href, url_single_paper, order_paper, url_href, i, ads_paper)
            double_check(pdf_file, lable_href[order_paper[i]][0], url_href[order_paper[i]], ads_paper)
        print('-----------------------------------------------------------------------------------------')
    return None
# Default ADS abstract-service query form fields; the fields of interest
# (author, object, year range, db_key) are filled in below.
params = {'db_key': '', 'sim_query': 'YES', 'ned_query': 'YES', 'adsobj_query': 'YES', 'aut_logic': 'OR',
          'obj_logic': 'OR', 'author': '', 'object': '', 'start_mon': '', 'start_year': '', 'end_mon': '',
          'end_year': '', 'ttl_logic': 'OR', 'title': '', 'txt_logic': 'OR', 'text': '', 'nr_to_return': '200',
          'start_nr': '1', 'jou_pick': 'ALL', 'ref_stems': '', 'data_and': 'ALL', 'group_and': 'ALL',
          'start_entry_day': '', 'start_entry_mon': '', 'start_entry_year': '', 'end_entry_day': '',
          'end_entry_mon': '', 'end_entry_year': '', 'min_score': '', 'sort': 'SCORE', 'data_type': 'SHORT',
          'aut_syn': 'YES', 'ttl_syn': 'YES', 'txt_syn': 'YES', 'aut_wt': '1.0', 'obj_wt': '1.0', 'ttl_wt': '0.3',
          'txt_wt': '3.0', 'aut_wgt': 'YES', 'obj_wgt': 'YES', 'ttl_wgt': 'YES', 'txt_wgt': 'YES', 'ttl_sco': 'YES',
          'txt_sco': 'YES', 'version': '1'}
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
params['object'] = ''  # search target object
params['author'] = '^Qian, S.-B.'  # first-author search
params['start_year'] = ''  # search range: first year
params['end_year'] = ''  # search range: last year
path_paper = os.getcwd().replace('\\', '\\\\') + '\\' + params['author'][1:4] + '\\\\'  # download directory
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
db_keys = {'PRE', 'PHY', 'AST' }  # databases to search
# BUG FIX: the original tested 'if not path_paper:', which is always False
# for a non-empty string, so the output directory was never created.
if not os.path.exists(path_paper):
    os.makedirs(path_paper)
ads_paper = open(path_paper + 'ads_paper.txt', 'w+')
for key in db_keys:
    params['db_key'] = key
    print('#############################', key, '#############################')
    # Submit the search form and parse the result page.
    bsobj = BeautifulSoup(requests.post('http://adsabs.harvard.edu/cgi-bin/nph-abs_connect', data=params).text,
                          'html5lib')
    # Collect each paper's URL/DOI, download via Sci-Hub where needed, log results.
    crawler_single_web(bsobj, path_paper, ads_paper)
    if bsobj.find_all(text='next set of references'):  # is there a next result page?
        bsobj = get_context(
            re.findall(re.compile('href="(http://adsabs.harvard.edu/cgi-bin/nph-abs_connect?.*start_cnt.*)"'),
                       str(bsobj.find_all('h3')))[0])
        crawler_single_web(bsobj, path_paper, ads_paper)
print('-----------------------------------------------------------over-------------------------------------------------------------------------')
ads_paper.close()
| [
"noreply@github.com"
] | noreply@github.com |
870303a97b55bc23103e103feeb55cc29ad9c0e5 | 172357b8d3646387ccddad39ee1098e1d0d1dad3 | /CyclopeptideScoringProblem.py | a11f55aa681971df61ce6fa724fc90027e701d47 | [] | no_license | Necheva/Bioinformatic | 06754ad004daf8c2af80a1b2b93c6b86425ba015 | 3caa97fe0e7e56d274d96f84424eb96bd1814b02 | refs/heads/master | 2020-03-31T20:52:55.781290 | 2018-11-28T17:15:03 | 2018-11-28T17:15:03 | 152,559,142 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,953 | py | def DataOfMass():
table = {'G': 57,'A': 71,'S': 87,'P': 97,'V': 99,'T': 101,'C': 103,'I': 113,'L': 113,'N': 114,'D': 115,'K': 128,'Q': 128,'E': 129,'M': 131,'H': 137,'F': 147,'R': 156,'Y': 163,'W': 186 }
return table
if __name__ == "__main__":
    # Score a cyclic peptide against an experimental mass spectrum:
    # generate the theoretical cyclospectrum of Peptide, then count how
    # many of its masses appear in the (space-separated) Spectrum input.
    Peptide = input()
    Spectrum = input()
    TmpDataMass = DataOfMass()
    initialData = []       # theoretical cyclospectrum (subpeptide masses)
    experimentalData = []  # parsed experimental spectrum
    index_first = 0
    index_second = 1
    index_fourth = 0
    index_fifth = 0
    index_sixth = 0
    index_seventh=0
    numberOfContents = 0
    # For every subpeptide length (index_second) and start (index_first),
    # sum the residue masses of the cyclic window.
    while index_second < len(Peptide):
        index_count = 0
        while index_first < len(Peptide):
            ingex_third = 0
            tmp = 0
            while ingex_third < index_second:
                foo1=(index_first + ingex_third) % len(Peptide)
                foo=Peptide[(index_first + ingex_third) % len(Peptide)]
                tmp += int(TmpDataMass[Peptide[(index_first + ingex_third) % len(Peptide)]])
                ingex_third += 1
                index_count+=1
            initialData.append(tmp)
            index_first += 1
        if index_count >= 2:
            if index_second>=2:
                index_count += -1
        index_first = 0
        index_second += 1
    # Add the full-peptide mass and the empty subpeptide (mass 0).
    if len(Peptide) != 0:
        tmp = 0
        for index_sixth in Peptide:
            tmp += int(TmpDataMass[index_sixth])
        initialData.append(tmp)
    initialData.append(0)
    initialData.sort()
    for v in Spectrum.split():
        experimentalData.append(int(v))
    # Two-pointer scan counting matches between the sorted theoretical
    # spectrum and the experimental one.
    # NOTE(review): this assumes the experimental spectrum is given in
    # ascending order -- confirm with the expected input format.
    for i in initialData:
        while (index_seventh < len(experimentalData)):
            if (i == experimentalData[index_seventh]):
                numberOfContents += 1
                index_seventh += 1
                break
            elif (i > experimentalData[index_seventh]):
                index_seventh += 1
            elif (i < experimentalData[index_seventh]):
                break
    print(numberOfContents)
| [
"noreply@github.com"
] | noreply@github.com |
afc06ae4b405fbce9055d076027588304160a0e4 | 83b242997a1560214285fd38ab4d39a0b1210ddc | /SOL4Py/network/ZThreadedTCPServer.py | add7627d5756461363417a09cff04384cc3dbf66 | [] | no_license | ivartz/vid2fft | 0a25d853e178b43fd0a5f765934887963f5c37f9 | 1b6ec82de04f86819ab4c1056d4f9d9bde1ed9c8 | refs/heads/master | 2020-08-07T21:44:28.745553 | 2019-10-08T09:18:41 | 2019-10-08T09:18:41 | 213,594,969 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,963 | py | #/******************************************************************************
#
# Copyright (c) 2018 Antillia.com TOSHIYUKI ARAI. ALL RIGHTS RESERVED.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#******************************************************************************/
# 2018/09/20
# ZThreadedTCPServer.py
# encoding utf-8
# Simple TCPServer example to accept single TCPClien
# See https://docs.python.org/3/library/socketserver.html
# See also: https://gist.github.com/arthurafarias/7258a2b83433dfda013f1954aaecd50a#file-server-py
import os
import sys
import time
import socketserver
import threading
import traceback
from SOL4Py.ZSingleton import *
##
# Simple TCPServer thread class, which handles a stream request from a TCP client.
#
class ZThreadedTCPServer(threading.Thread, ZSingleton):
  """Singleton TCP server thread.

  Wraps a socketserver.TCPServer in a thread; incoming stream requests are
  read line-by-line and dispatched to request_handle_callback, which
  subclasses are expected to override.
  """
  #---------------------------------------------------------
  # Inner class starts.
  # Default subclass of StreamRequestHandler used when no handler class
  # is supplied to the constructor.
  class _TCPRequestHandler(socketserver.StreamRequestHandler):
    # Reads stripped lines until an empty line, forwarding each to the
    # singleton's request_handle_callback together with the reply stream.
    def handle(self):
      print(self.__class__.__name__ + self.handle.__name__ + " start")
      print("Curent thread name:{}".format(threading.current_thread().name))
      try:
        while True:
          print("Curent thread name:{}".format(threading.current_thread().name))
          bytes = self.rfile.readline().strip()
          if len(bytes) == 0:
            print("breaking handle loop")
            break
          ZSingleton.get_instance().request_handle_callback(bytes, self.wfile)
        self.request.close()
      except:
        traceback.print_exc()
  # Inner class ends.

  ##
  #
  # Constructor: registers this instance as the singleton and creates the
  # underlying TCPServer bound to (ipaddress, port).
  def __init__(self, ipaddress, port, request_handler_class = None):
    super(ZThreadedTCPServer, self).__init__()
    print(self.__class__.__name__ + "::" + self.run.__name__ + " start")
    ZSingleton.set_instance(self)
    print("IPAddress:{} Port:{}".format(ipaddress, port))
    self.server_address = (ipaddress, port)
    if request_handler_class == None:
      # Register the default request handler class: self._TCPRequestHandler.
      self.sock_server = socketserver.TCPServer(self.server_address, self._TCPRequestHandler)
    else:
      self.sock_server = socketserver.TCPServer(self.server_address, request_handler_class)
    self.sock_server.allow_reuse_address = True

  # Please redefine your own method 'request_handle_callback' in a subclass
  # derived from this class.  Default: echo the receive time and reply "OK".
  def request_handle_callback(self, bytes, writer):
    text = bytes.decode("utf-8")
    import datetime
    now = datetime.datetime.now()
    print("Recieved at {} data :{}".format(now, text))
    reply = "OK"
    breply = reply.encode("utf-8")
    writer.write(breply)

  # Thread main procedure: blocks in serve_forever() until close() is called.
  def run(self):
    print(self.__class__.__name__ + "::" + self.run.__name__ + " start")
    if self.sock_server != None:
      self.sock_server.serve_forever()
    print(self.__class__.__name__ + "::" + self.run.__name__ + " end")

  # Shut down and close the server socket.
  def close(self):
    if self.sock_server != None:
      self.sock_server.shutdown()
      print("sock_server shutdown")
      self.sock_server.server_close()
      print("sock_server close")
| [
"djloek@gmail.com"
] | djloek@gmail.com |
21320723172971f71a7492ea2e040575734041b8 | 916353445ee27e6f8105f609a6292ab1d44e8c53 | /core/utils.py | 39151007e673ad8bee3596f874e0f49784dd9e28 | [] | no_license | webclinic017/DjangoImageSystem | f7af4b32821b2a115cd28a31d4322310405d4a6c | 4a63d6b05d12db33e9f3e8da6d9a15815d12f298 | refs/heads/master | 2023-06-19T14:16:11.609085 | 2021-07-26T16:41:11 | 2021-07-26T16:41:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 907 | py | import os
from django.db import models
def _product_image_path(size_dir, instance, filename):
    """Build 'products/<size_dir>/<vendor name>/<filename>' for an upload."""
    return os.path.join('products', size_dir, str(instance.product.vendor.name), filename)

def get_small_image_path(instance, filename):
    """upload_to callback: small product images, grouped by vendor."""
    return _product_image_path('small', instance, filename)

def get_medium_image_path(instance, filename):
    """upload_to callback: medium product images, grouped by vendor."""
    return _product_image_path('medium', instance, filename)

def get_large_image_path(instance, filename):
    """upload_to callback: large product images, grouped by vendor."""
    return _product_image_path('large', instance, filename)
import random
import string
def code_gen(size=7, chars=string.ascii_lowercase + string.digits + string.ascii_uppercase):
    """Return a random alphanumeric code of *size* characters."""
    picked = [random.choice(chars) for _ in range(size)]
    return ''.join(picked)
def create_shortcode(instance, size=7):
    """Generate a short code of *size* chars unique within instance's model."""
    candidate = code_gen(size=size)
    model = instance.__class__
    # Collision: recurse until an unused code is found.
    if model.objects.filter(short_code=candidate).exists():
        return create_shortcode(instance, size=size)
    return candidate
"amirbahador.pv@gmail.com"
] | amirbahador.pv@gmail.com |
f18b0dee7599c2f3e829481c7fa3afaffb0f6157 | 8516e5d43d954d311d22b92151c3a67b1ac9ef8c | /mnist.py | 407a4268b7363a1b35590c799399ac1d682cf081 | [] | no_license | chaitralitalekar/Handwritten-Digits-Recognition | 7a96f964f97a5598b9ad0aff4654e10b66ffd290 | 493d11afcd1c59865736f1a954c8fa4eb2a6ca98 | refs/heads/master | 2020-07-03T03:30:29.953768 | 2019-08-11T15:57:35 | 2019-08-11T15:57:35 | 201,770,262 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 621 | py | import numpy as n
import pandas as p
import matplotlib.pyplot as plt
from sklearn.tree import DecisionTreeClassifier
# Train a decision-tree digit classifier on the Kaggle MNIST CSV and report
# hold-out accuracy.  BUG FIXES: undefined names (data -> dataSet,
# pt -> plt, clf -> classfy, pre -> pred), element-wise label comparison
# (the original compared each prediction against the WHOLE label array,
# which is always truthy), and DataFrame.as_matrix() (removed in
# pandas 1.0) -> .values.
dataSet = p.read_csv("dataset/train.csv").values
classfy = DecisionTreeClassifier()

# First 21000 rows are the training split; column 0 is the label,
# the remaining 784 columns are the 28x28 pixel values.
train = dataSet[0:21000, 1:]
train_label = dataSet[0:21000, 0]
classfy.fit(train, train_label)

# Remaining rows form the held-out test split.
test = dataSet[21000:, 1:]
actual_label = dataSet[21000:, 0]

# Show one sample (inverted so the digit renders dark-on-light).
d = test[5]
d.shape = (28, 28)
plt.imshow(255 - d, cmap='gray')
print(classfy.predict([test[5]]))
plt.show()

pred = classfy.predict(test)
count = 0
for i in range(0, len(pred)):
    count += 1 if pred[i] == actual_label[i] else 0
print("Accuracy=", (count / len(pred)) * 100)
"talekarchaitrali@gmail.com"
] | talekarchaitrali@gmail.com |
9b6ce6a37ed4bf2e73c6e15c5304e06143c79244 | d1991c1b97cf0913ffab05d9821e3778f2616b36 | /DMPHN-v2_1_2_4_8.py | af0b5bd00f3032deedd03811ee4cf80605fab249 | [
"MIT"
] | permissive | zhaozunjin/DMPHN-v2-Deblur | d70fd443ef2b1bc5787e1d9fe09807a759365c0b | 0e84aab9c07a880b16dec8ee182868db93cd1d12 | refs/heads/master | 2020-11-25T08:34:27.026364 | 2020-09-17T09:09:05 | 2020-09-17T09:09:05 | 228,576,078 | 16 | 1 | MIT | 2020-06-21T05:59:54 | 2019-12-17T09:04:57 | Python | UTF-8 | Python | false | false | 19,327 | py | import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from torch.optim.lr_scheduler import StepLR
import numpy as np
import os
import math
import argparse
import random
import modelstitus as models
import torchvision
from torch.utils.data import Dataset, DataLoader
from torchvision import transforms, datasets
from datasets import GoProDataset
import time
from utils.radam import RAdam
# Command-line options for DMPHN training.
parser = argparse.ArgumentParser(description="Deep Multi-Patch Hierarchical Network")
parser.add_argument("-e","--epochs",type = int, default = 4000)
parser.add_argument("-se","--start_epoch",type = int, default = 0)
parser.add_argument("-b","--batchsize",type = int, default = 2)
parser.add_argument("-s","--imagesize",type = int, default = 256)
parser.add_argument("-l","--learning_rate", type = float, default = 0.0001)
parser.add_argument("-g","--gpu",type=int, default=0)
args = parser.parse_args()

#Hyper Parameters
METHOD = "DMPHN_1_2_4_8"  # also names the checkpoint directory
LEARNING_RATE = args.learning_rate
EPOCHS = args.epochs
GPU = args.gpu
BATCH_SIZE = args.batchsize
IMAGE_SIZE = args.imagesize
def save_deblur_images(images, iteration, epoch):
    """Write a batch of deblurred outputs under the per-epoch checkpoint dir."""
    out_path = "./checkpoints/{0}/epoch{1}/Iter_{2}_deblur.png".format(METHOD, epoch, iteration)
    torchvision.utils.save_image(images, out_path)
def save_feature_images(images, iteration, epoch, feature_lv):
    """Write intermediate feature maps under the per-epoch 'feature' dir."""
    out_path = "./checkpoints/{0}/epoch{1}/feature/Iter_{2}{3}.png".format(METHOD, epoch, iteration, feature_lv)
    torchvision.utils.save_image(images, out_path)
def weight_init(m):
    """Custom parameter initialisation, applied via Module.apply().

    Conv layers get fan-out He-style normal weights (damped by 0.5) and
    zero bias; BatchNorm gets unit weight / zero bias; Linear layers get
    small normal weights with an all-ones bias.
    """
    classname = m.__class__.__name__
    if classname.find('Conv') != -1:
        # Fan-out count for He initialisation.
        n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
        m.weight.data.normal_(0.0, 0.5*math.sqrt(2. / n))
        if m.bias is not None:
            m.bias.data.zero_()
    elif classname.find('BatchNorm') != -1:
        m.weight.data.fill_(1)
        m.bias.data.zero_()
    elif classname.find('Linear') != -1:
        # (The original computed m.weight.size(1) here but never used it.)
        m.weight.data.normal_(0, 0.01)
        m.bias.data = torch.ones(m.bias.data.size())
def main():
print("init data folders")
encoder_lv1 = models.Encoder()
encoder_lv2 = models.Encoder()
encoder_lv3 = models.Encoder()
encoder_lv4 = models.Encoder()
decoder_lv1 = models.Decoder()
decoder_lv2 = models.Decoder()
decoder_lv3 = models.Decoder()
decoder_lv4 = models.Decoder()
encoder_lv1.apply(weight_init).cuda(GPU)
encoder_lv2.apply(weight_init).cuda(GPU)
encoder_lv3.apply(weight_init).cuda(GPU)
encoder_lv4.apply(weight_init).cuda(GPU)
decoder_lv1.apply(weight_init).cuda(GPU)
decoder_lv2.apply(weight_init).cuda(GPU)
decoder_lv3.apply(weight_init).cuda(GPU)
decoder_lv4.apply(weight_init).cuda(GPU)
encoder_lv1_optim = RAdam(encoder_lv1.parameters(),lr=LEARNING_RATE)
encoder_lv1_scheduler = StepLR(encoder_lv1_optim,step_size=1000,gamma=0.1)
encoder_lv2_optim = RAdam(encoder_lv2.parameters(),lr=LEARNING_RATE)
encoder_lv2_scheduler = StepLR(encoder_lv2_optim,step_size=1000,gamma=0.1)
encoder_lv3_optim = RAdam(encoder_lv3.parameters(),lr=LEARNING_RATE)
encoder_lv3_scheduler = StepLR(encoder_lv3_optim,step_size=1000,gamma=0.1)
encoder_lv4_optim = RAdam(encoder_lv4.parameters(),lr=LEARNING_RATE)
encoder_lv4_scheduler = StepLR(encoder_lv4_optim,step_size=1000,gamma=0.1)
decoder_lv1_optim = RAdam(decoder_lv1.parameters(),lr=LEARNING_RATE)
decoder_lv1_scheduler = StepLR(decoder_lv1_optim,step_size=1000,gamma=0.1)
decoder_lv2_optim = RAdam(decoder_lv2.parameters(),lr=LEARNING_RATE)
decoder_lv2_scheduler = StepLR(decoder_lv2_optim,step_size=1000,gamma=0.1)
decoder_lv3_optim = RAdam(decoder_lv3.parameters(),lr=LEARNING_RATE)
decoder_lv3_scheduler = StepLR(decoder_lv3_optim,step_size=1000,gamma=0.1)
decoder_lv4_optim = RAdam(decoder_lv4.parameters(),lr=LEARNING_RATE)
decoder_lv4_scheduler = StepLR(decoder_lv4_optim,step_size=1000,gamma=0.1)
if os.path.exists(str('./checkpoints/' + METHOD + "/encoder_lv1.pkl")):
encoder_lv1.load_state_dict(torch.load(str('./checkpoints/' + METHOD + "/encoder_lv1.pkl")))
print("load encoder_lv1 success")
if os.path.exists(str('./checkpoints/' + METHOD + "/encoder_lv2.pkl")):
encoder_lv2.load_state_dict(torch.load(str('./checkpoints/' + METHOD + "/encoder_lv2.pkl")))
print("load encoder_lv2 success")
if os.path.exists(str('./checkpoints/' + METHOD + "/encoder_lv3.pkl")):
encoder_lv3.load_state_dict(torch.load(str('./checkpoints/' + METHOD + "/encoder_lv3.pkl")))
print("load encoder_lv3 success")
if os.path.exists(str('./checkpoints/' + METHOD + "/encoder_lv4.pkl")):
encoder_lv4.load_state_dict(torch.load(str('./checkpoints/' + METHOD + "/encoder_lv4.pkl")))
print("load encoder_lv4 success")
# for param in decoder_lv4.layer24.parameters():
# param.requires_grad = False
# for param in encoder_lv3.parameters():
# param.requires_grad = False
# # print("检查部分参数是否固定......")
# print(encoder_lv3.layer1.bias.requires_grad)
# for param in decoder_lv3.parameters():
# param.requires_grad = False
# for param in encoder_lv2.parameters():
# param.requires_grad = False
# # print("检查部分参数是否固定......")
# print(encoder_lv2.layer1.bias.requires_grad)
# for param in decoder_lv2.parameters():
# param.requires_grad = False
if os.path.exists(str('./checkpoints/' + METHOD + "/decoder_lv1.pkl")):
decoder_lv1.load_state_dict(torch.load(str('./checkpoints/' + METHOD + "/decoder_lv1.pkl")))
print("load encoder_lv1 success")
if os.path.exists(str('./checkpoints/' + METHOD + "/decoder_lv2.pkl")):
decoder_lv2.load_state_dict(torch.load(str('./checkpoints/' + METHOD + "/decoder_lv2.pkl")))
print("load decoder_lv2 success")
if os.path.exists(str('./checkpoints/' + METHOD + "/decoder_lv3.pkl")):
decoder_lv3.load_state_dict(torch.load(str('./checkpoints/' + METHOD + "/decoder_lv3.pkl")))
print("load decoder_lv3 success")
if os.path.exists(str('./checkpoints/' + METHOD + "/decoder_lv4.pkl")):
decoder_lv4.load_state_dict(torch.load(str('./checkpoints/' + METHOD + "/decoder_lv4.pkl")))
print("load decoder_lv4 success")
if os.path.exists('./checkpoints/' + METHOD) == False:
os.system('mkdir ./checkpoints/' + METHOD)
for epoch in range(args.start_epoch, EPOCHS):
epoch += 1
encoder_lv1_scheduler.step(epoch)
encoder_lv2_scheduler.step(epoch)
encoder_lv3_scheduler.step(epoch)
encoder_lv4_scheduler.step(epoch)
decoder_lv1_scheduler.step(epoch)
decoder_lv2_scheduler.step(epoch)
decoder_lv3_scheduler.step(epoch)
decoder_lv4_scheduler.step(epoch)
print("Training...")
print('lr:',encoder_lv1_scheduler.get_lr())
train_dataset = GoProDataset(
blur_image_files = './datas/GoPro/train_blur_file.txt',
sharp_image_files = './datas/GoPro/train_sharp_file.txt',
root_dir = './datas/GoPro',
crop = True,
crop_size = IMAGE_SIZE,
transform = transforms.Compose([
transforms.ToTensor()
]))
train_dataloader = DataLoader(train_dataset, batch_size = BATCH_SIZE, shuffle=True,num_workers=8,pin_memory=True)
start = 0
for iteration, images in enumerate(train_dataloader):
mse = nn.MSELoss().cuda(GPU)
gt = Variable(images['sharp_image'] - 0.5).cuda(GPU)
H = gt.size(2)
W = gt.size(3)
images_lv1 = Variable(images['blur_image'] - 0.5).cuda(GPU)
images_lv2_1 = images_lv1[:,:,0:int(H/2),:]
images_lv2_2 = images_lv1[:,:,int(H/2):H,:]
images_lv3_1 = images_lv2_1[:,:,:,0:int(W/2)]
images_lv3_2 = images_lv2_1[:,:,:,int(W/2):W]
images_lv3_3 = images_lv2_2[:,:,:,0:int(W/2)]
images_lv3_4 = images_lv2_2[:,:,:,int(W/2):W]
images_lv4_1 = images_lv3_1[:,:,0:int(H/4),:]
images_lv4_2 = images_lv3_1[:,:,int(H/4):int(H/2),:]
images_lv4_3 = images_lv3_2[:,:,0:int(H/4),:]
images_lv4_4 = images_lv3_2[:,:,int(H/4):int(H/2),:]
images_lv4_5 = images_lv3_3[:,:,0:int(H/4),:]
images_lv4_6 = images_lv3_3[:,:,int(H/4):int(H/2),:]
images_lv4_7 = images_lv3_4[:,:,0:int(H/4),:]
images_lv4_8 = images_lv3_4[:,:,int(H/4):int(H/2),:]
feature_lv4_1 = encoder_lv4(images_lv4_1)
feature_lv4_2 = encoder_lv4(images_lv4_2)
feature_lv4_3 = encoder_lv4(images_lv4_3)
feature_lv4_4 = encoder_lv4(images_lv4_4)
feature_lv4_5 = encoder_lv4(images_lv4_5)
feature_lv4_6 = encoder_lv4(images_lv4_6)
feature_lv4_7 = encoder_lv4(images_lv4_7)
feature_lv4_8 = encoder_lv4(images_lv4_8)
feature_lv4_top_left = torch.cat((feature_lv4_1, feature_lv4_2), 2)
feature_lv4_top_right = torch.cat((feature_lv4_3, feature_lv4_4), 2)
feature_lv4_bot_left = torch.cat((feature_lv4_5, feature_lv4_6), 2)
feature_lv4_bot_right = torch.cat((feature_lv4_7, feature_lv4_8), 2)
feature_lv4_top = torch.cat((feature_lv4_top_left, feature_lv4_top_right), 3)
feature_lv4_bot = torch.cat((feature_lv4_bot_left, feature_lv4_bot_right), 3)
feature_lv4 = torch.cat((feature_lv4_top, feature_lv4_bot), 2)
residual_lv4_top_left = decoder_lv4(feature_lv4_top_left)
residual_lv4_top_right = decoder_lv4(feature_lv4_top_right)
residual_lv4_bot_left = decoder_lv4(feature_lv4_bot_left)
residual_lv4_bot_right = decoder_lv4(feature_lv4_bot_right)
feature_lv3_1 = encoder_lv3(images_lv3_1 + residual_lv4_top_left)
feature_lv3_2 = encoder_lv3(images_lv3_2 + residual_lv4_top_right)
feature_lv3_3 = encoder_lv3(images_lv3_3 + residual_lv4_bot_left)
feature_lv3_4 = encoder_lv3(images_lv3_4 + residual_lv4_bot_right)
feature_lv3_top = torch.cat((feature_lv3_1, feature_lv3_2), 3) + feature_lv4_top
feature_lv3_bot = torch.cat((feature_lv3_3, feature_lv3_4), 3) + feature_lv4_bot
feature_lv3 = torch.cat((feature_lv3_top, feature_lv3_bot), 2)
residual_lv3_top = decoder_lv3(feature_lv3_top)
residual_lv3_bot = decoder_lv3(feature_lv3_bot)
feature_lv2_1 = encoder_lv2(images_lv2_1 + residual_lv3_top)
feature_lv2_2 = encoder_lv2(images_lv2_2 + residual_lv3_bot)
feature_lv2 = torch.cat((feature_lv2_1, feature_lv2_2), 2) + feature_lv3
residual_lv2 = decoder_lv2(feature_lv2)
feature_lv1 = encoder_lv1(images_lv1 + residual_lv2) + feature_lv2
deblur_image = decoder_lv1(feature_lv1)
loss = mse(deblur_image, gt)
encoder_lv1.zero_grad()
encoder_lv2.zero_grad()
encoder_lv3.zero_grad()
encoder_lv4.zero_grad()
decoder_lv1.zero_grad()
decoder_lv2.zero_grad()
decoder_lv3.zero_grad()
decoder_lv4.zero_grad()
loss.backward()
encoder_lv1_optim.step()
encoder_lv2_optim.step()
encoder_lv3_optim.step()
encoder_lv4_optim.step()
decoder_lv1_optim.step()
decoder_lv2_optim.step()
decoder_lv3_optim.step()
decoder_lv4_optim.step()
if (iteration+1)%10 == 0:
stop = time.time()
print("epoch:", epoch, "iteration:", iteration+1, "loss:%.4f"%loss.item(), 'time:%.4f'%(stop-start))
start = time.time()
if (epoch)%100==0:
if os.path.exists('./checkpoints/' + METHOD + '/epoch' + str(epoch)) == False:
os.system('mkdir ./checkpoints/' + METHOD + '/epoch' + str(epoch))
print("Testing...")
test_dataset = GoProDataset(
blur_image_files = './datas/GoPro/test_blur_file.txt',
sharp_image_files = './datas/GoPro/test_sharp_file.txt',
root_dir = './datas/GoPro',
transform = transforms.Compose([
transforms.ToTensor()
]))
test_dataloader = DataLoader(test_dataset, batch_size = 1, shuffle=False,num_workers=8,pin_memory=True)
test_time = 0
for iteration, images in enumerate(test_dataloader):
with torch.no_grad():
start = time.time()
images_lv1 = Variable(images['blur_image'] - 0.5).cuda(GPU)
H = images_lv1.size(2)
W = images_lv1.size(3)
images_lv2_1 = images_lv1[:,:,0:int(H/2),:]
images_lv2_2 = images_lv1[:,:,int(H/2):H,:]
images_lv3_1 = images_lv2_1[:,:,:,0:int(W/2)]
images_lv3_2 = images_lv2_1[:,:,:,int(W/2):W]
images_lv3_3 = images_lv2_2[:,:,:,0:int(W/2)]
images_lv3_4 = images_lv2_2[:,:,:,int(W/2):W]
images_lv4_1 = images_lv3_1[:,:,0:int(H/4),:]
images_lv4_2 = images_lv3_1[:,:,int(H/4):int(H/2),:]
images_lv4_3 = images_lv3_2[:,:,0:int(H/4),:]
images_lv4_4 = images_lv3_2[:,:,int(H/4):int(H/2),:]
images_lv4_5 = images_lv3_3[:,:,0:int(H/4),:]
images_lv4_6 = images_lv3_3[:,:,int(H/4):int(H/2),:]
images_lv4_7 = images_lv3_4[:,:,0:int(H/4),:]
images_lv4_8 = images_lv3_4[:,:,int(H/4):int(H/2),:]
feature_lv4_1 = encoder_lv4(images_lv4_1)
feature_lv4_2 = encoder_lv4(images_lv4_2)
feature_lv4_3 = encoder_lv4(images_lv4_3)
feature_lv4_4 = encoder_lv4(images_lv4_4)
feature_lv4_5 = encoder_lv4(images_lv4_5)
feature_lv4_6 = encoder_lv4(images_lv4_6)
feature_lv4_7 = encoder_lv4(images_lv4_7)
feature_lv4_8 = encoder_lv4(images_lv4_8)
feature_lv4_top_left = torch.cat((feature_lv4_1, feature_lv4_2), 2)
feature_lv4_top_right = torch.cat((feature_lv4_3, feature_lv4_4), 2)
feature_lv4_bot_left = torch.cat((feature_lv4_5, feature_lv4_6), 2)
feature_lv4_bot_right = torch.cat((feature_lv4_7, feature_lv4_8), 2)
feature_lv4_top = torch.cat((feature_lv4_top_left, feature_lv4_top_right), 3)
feature_lv4_bot = torch.cat((feature_lv4_bot_left, feature_lv4_bot_right), 3)
residual_lv4_top_left = decoder_lv4(feature_lv4_top_left)
residual_lv4_top_right = decoder_lv4(feature_lv4_top_right)
residual_lv4_bot_left = decoder_lv4(feature_lv4_bot_left)
residual_lv4_bot_right = decoder_lv4(feature_lv4_bot_right)
feature_lv3_1 = encoder_lv3(images_lv3_1 + residual_lv4_top_left)
feature_lv3_2 = encoder_lv3(images_lv3_2 + residual_lv4_top_right)
feature_lv3_3 = encoder_lv3(images_lv3_3 + residual_lv4_bot_left)
feature_lv3_4 = encoder_lv3(images_lv3_4 + residual_lv4_bot_right)
feature_lv3_top = torch.cat((feature_lv3_1, feature_lv3_2), 3) + feature_lv4_top
feature_lv3_bot = torch.cat((feature_lv3_3, feature_lv3_4), 3) + feature_lv4_bot
residual_lv3_top = decoder_lv3(feature_lv3_top)
residual_lv3_bot = decoder_lv3(feature_lv3_bot)
feature_lv2_1 = encoder_lv2(images_lv2_1 + residual_lv3_top)
feature_lv2_2 = encoder_lv2(images_lv2_2 + residual_lv3_bot)
feature_lv2 = torch.cat((feature_lv2_1, feature_lv2_2), 2) + torch.cat((feature_lv3_top, feature_lv3_bot), 2)
residual_lv2 = decoder_lv2(feature_lv2)
feature_lv1 = encoder_lv1(images_lv1 + residual_lv2) + feature_lv2
deblur_image = decoder_lv1(feature_lv1)
stop = time.time()
test_time += stop - start
print('RunTime:%.4f'%(stop-start), ' Average Runtime:%.4f'%(test_time/(iteration+1)))
save_deblur_images(deblur_image.data + 0.5, iteration, epoch)
#
torch.save(encoder_lv1.state_dict(),
str('./checkpoints/' + METHOD + '/epoch' + str(epoch) + "/encoder_lv1.pkl"))
torch.save(encoder_lv2.state_dict(),
str('./checkpoints/' + METHOD + '/epoch' + str(epoch) + "/encoder_lv2.pkl"))
torch.save(encoder_lv3.state_dict(),
str('./checkpoints/' + METHOD + '/epoch' + str(epoch) + "/encoder_lv3.pkl"))
torch.save(encoder_lv4.state_dict(),
str('./checkpoints/' + METHOD + '/epoch' + str(epoch) + "/encoder_lv4.pkl"))
torch.save(decoder_lv1.state_dict(),
str('./checkpoints/' + METHOD + '/epoch' + str(epoch) + "/decoder_lv1.pkl"))
torch.save(decoder_lv2.state_dict(),
str('./checkpoints/' + METHOD + '/epoch' + str(epoch) + "/decoder_lv2.pkl"))
torch.save(decoder_lv3.state_dict(),
str('./checkpoints/' + METHOD + '/epoch' + str(epoch) + "/decoder_lv3.pkl"))
torch.save(decoder_lv4.state_dict(),
str('./checkpoints/' + METHOD + '/epoch' + str(epoch) + "/decoder_lv4.pkl"))
torch.save(encoder_lv1.state_dict(),str('./checkpoints/' + METHOD + "/encoder_lv1.pkl"))
torch.save(encoder_lv2.state_dict(),str('./checkpoints/' + METHOD + "/encoder_lv2.pkl"))
torch.save(encoder_lv3.state_dict(),str('./checkpoints/' + METHOD + "/encoder_lv3.pkl"))
torch.save(encoder_lv4.state_dict(),str('./checkpoints/' + METHOD + "/encoder_lv4.pkl"))
torch.save(decoder_lv1.state_dict(),str('./checkpoints/' + METHOD + "/decoder_lv1.pkl"))
torch.save(decoder_lv2.state_dict(),str('./checkpoints/' + METHOD + "/decoder_lv2.pkl"))
torch.save(decoder_lv3.state_dict(),str('./checkpoints/' + METHOD + "/decoder_lv3.pkl"))
torch.save(decoder_lv4.state_dict(),str('./checkpoints/' + METHOD + "/decoder_lv4.pkl"))
if __name__ == '__main__':
main()
| [
"1763020129@qq.com"
] | 1763020129@qq.com |
84c806a6c6711ceb7dc060bcec0926b8246fdadb | e0980f704a573894350e285f66f4cf390837238e | /.history/rocketman/settings/production_20210104181634.py | 89446f098f49d16a5372b9f81e7bc516ac235f9c | [] | no_license | rucpata/WagtailWebsite | 28008474ec779d12ef43bceb61827168274a8b61 | 5aa44f51592f49c9a708fc5515ad877c6a29dfd9 | refs/heads/main | 2023-02-09T15:30:02.133415 | 2021-01-05T14:55:45 | 2021-01-05T14:55:45 | 303,961,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | import os
from .base import *
DEBUG = False
# SECURITY NOTE(review): a real SECRET_KEY is committed here - it should be
# loaded from the environment and this key rotated.
SECRET_KEY = '$^8&x#8a5!7@r!#6ov9bfl(j8k^6+$v-1x+*#!uqf(=^n+*$w3'
ALLOWED_HOSTS = ['localhost', 'rocketman.naukawagtail.com', '*']
cwd=os.getcwd()
# NOTE(review): 'CASHES' looks like a typo for Django's CACHES setting, and
# the value is an unfinished set literal - this file appears to be an
# incomplete editor-history snapshot.
CASHES = {
    'default': {
        'BA'
    }
}
# optional machine-local overrides
try:
    from .local import *
except ImportError:
    pass
| [
"rucinska.patrycja@gmail.com"
] | rucinska.patrycja@gmail.com |
fc7d9c649edf3139b463fd3faa9d651e068aae3d | 28e47009982e6475aaade74989be41ef08220344 | /RSA/LSB/local2.py | 01e86f7c8cbec3fdc527d707a63b5a6071171db6 | [] | no_license | v3ct0r719/Crypto-Attacks | 58984b92829597b6630ea5885a3247b74d1e5a6f | ec47615643df4376f50d592501b113a92056b03f | refs/heads/master | 2020-12-21T04:23:28.806814 | 2020-01-26T17:30:48 | 2020-01-26T17:30:48 | 236,305,551 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,458 | py |
from Crypto.Util.number import *
from Crypto.PublicKey import RSA
from gmpy2 import *
from pwn import *
def encrypt(m):
    # Textbook RSA encryption with the module-level public key (e, n)
    # defined in the __main__ block.
    return pow(m,e,n)
def decrypt(c):
    # Ask the remote service (menu option 2) to decrypt c and return the
    # hex plaintext string it prints back.
    # NOTE(review): .encode('hex') only exists on Python 2 byte strings.
    io.sendline('2')
    io.sendline(long_to_bytes(c).encode('hex'))
    io.recvuntil('Here take your plaintext (in hex): ')
    x = io.recv().split('\n')[0]
    return x
def oracle(c):
    # Parity oracle: return 0/1 according to the decrypted byte reported by
    # the service; any unexpected answer aborts the whole attack.
    x = decrypt(c)
    if x == '00':
        return 0
    elif x=='01':
        return 1
    else:
        exit()
def loop(num):
    # Spawn a fresh oracle process, parse its (ciphertext, modulus) banner
    # and multiply the ciphertext num times by the module-level 'inv'
    # (inv = pow(2, e, n), i.e. the encryption of 2), which doubles the
    # underlying plaintext num times.  Returns (ciphertext, io handle).
    #io = remote("13.233.196.46", 8671)
    io = process('./bin.sh')
    io.recv()
    x = io.recv().split()
    c = x[0]
    c = int(c,16)
    n = int(x[4])
    for i in range(num):
        c = (inv * c) % n
    return c,io
if __name__ == '__main__':
    # RSA LSB-oracle attack: binary-search the plaintext by repeatedly
    # doubling it (multiplying the ciphertext by Enc(2)) and querying the
    # oracle for the parity of the result.
    # NOTE(review): integer '/' division and the print statement below mean
    # this script targets Python 2.
    #io = remote("13.233.196.46", 8671)
    io = process('./bin.sh')
    context.log_level = "debug"
    io.recv()
    x = io.recv().split()
    c = x[0]
    c = int(c,16)
    n = int(x[4])
    e = 65537
    inv = pow(2,e,n)
    low = 0
    high = n
    # First 2048-120 doublings: only shrink the upper bound, no oracle
    # queries; the final 120 bits are recovered one query each below.
    for i in range(2048-120):
        c = (inv * c)%n
        high = (low+high)/2
    io.close()
    for j in range(120):
        # fresh oracle session per recovered bit
        io = process('./bin.sh')
        io.recv()
        x = io.recv().split()
        c = x[0]
        c = int(c,16)
        n = int(x[4])
        inv = pow(2,e,n)
        for i in range(2048-120):
            c = (c * inv) % n
        for k in range(j+1):
            c = (c * inv) % n
        if oracle(c) == 0:
            high = (low + high)/2
        else:
            low = (low + high)/2
    print long_to_bytes(high)
| [
"viveknj719@gmail.com"
] | viveknj719@gmail.com |
04733626ed2d1c1e7f3d6268284da5c21b8a90a6 | bb9944477cbbf4abdc2a2dbdc61ec450c79c6c90 | /test/ml_algs/test_mix_gaussian.py | 5c67799550b8b68546f643dd4fbe0540b653451b | [] | no_license | feigeZzzz/myalgorithms | 8de07347daf3d35fc8ef444974f8a41699b163e7 | 1d7092a28f9c67c9dac49bb8873c5bda7f1d28a8 | refs/heads/master | 2023-01-20T18:10:19.081982 | 2020-11-16T08:25:43 | 2020-11-16T08:25:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 213 | py | from sklearn.datasets import load_iris
from algs.ml_algorithms.mixgaussian import Gmm
# Smoke test for the project's Gaussian mixture model: cluster the iris
# data and print predicted assignments next to the true labels for manual
# comparison (cluster ids are arbitrary, so no automatic accuracy check).
x, y = load_iris(return_X_y=1)
gmm = Gmm(max_iter=50, n_components=3)
gmm.fit(x)
y_pre = gmm.predict(x)
print(y_pre)
print(y)
| [
"wooxy610@icloud.com"
] | wooxy610@icloud.com |
3e29d4d7c333026e5344ef9516e21f5e220cfd24 | f98de2db6b24d30d64f1145c7d8da4a40385a87f | /packages/grid_control_cms/lumi_tools.py | 50eb266c862e2c20ae303976fae5474ea14c2247 | [] | no_license | greyxray/grid-control | f9f453491fe7bc506d4cfc240afaa364ba9db84b | ed10fdb6ff604006a5d52dcd43c2e55c9e962c0a | refs/heads/master | 2020-04-15T13:15:21.103357 | 2019-01-08T18:23:07 | 2019-01-08T18:23:07 | 164,709,043 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,665 | py | # | Copyright 2010-2016 Karlsruhe Institute of Technology
# |
# | Licensed under the Apache License, Version 2.0 (the "License");
# | you may not use this file except in compliance with the License.
# | You may obtain a copy of the License at
# |
# | http://www.apache.org/licenses/LICENSE-2.0
# |
# | Unless required by applicable law or agreed to in writing, software
# | distributed under the License is distributed on an "AS IS" BASIS,
# | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# | See the License for the specific language governing permissions and
# | limitations under the License.
import os
from python_compat import imap, json, lmap, sort_inplace
def makeint(x):
    """Convert *x* to int; '', 'MAX' and 'MIN' placeholders yield None."""
    token = x.strip().upper()
    if token in ('', 'MAX', 'MIN'):
        return None
    return int(x)
def parseLumiFromJSON(data, select = ''):
    """Yield ([run, lumi_start], [run, lumi_end]) pairs from a CMS JSON lumi mask.

    *select* optionally restricts the run range, e.g. '123-456', '123-' or '-456'.
    """
    def _bound(token):
        # '', 'MIN' and 'MAX' placeholders denote "no bound"
        stripped = token.strip().upper()
        return None if stripped in ('', 'MAX', 'MIN') else int(token)
    run_dict = json.loads(data)
    bounds = [_bound(part) for part in select.split('-') + ['']][:2]
    (run_min, run_max) = (bounds[0], bounds[1])
    for run in [int(key) for key in run_dict.keys()]:
        if (run_min and run < run_min) or (run_max and run > run_max):
            continue
        for lumi_range in run_dict[str(run)]:
            yield ([run, lumi_range[0]], [run, lumi_range[1]])
def keyLumi(a):
    """Sort key for a run/lumi range *a*: its (run, lumi) start point as a tuple."""
    (start_point, _end_point) = a
    return tuple(start_point)
def mergeLumi(rlrange):
    """ Merge consecutive lumi sections
    >>> mergeLumi([([1, 11], [1, 20]), ([1, 1], [1, 10]), ([1, 22], [1, 30])])
    [([1, 1], [1, 20]), ([1, 22], [1, 30])]
    >>> mergeLumi([([1, 1], [2, 2]), ([2, 3], [2, 10]), ([2, 11], [4, 30])])
    [([1, 1], [4, 30])]
    """
    # Sort in-place by the (run, lumi) start point, then fuse neighbours
    # whose boundary sections are directly adjacent.
    rlrange.sort(key=lambda entry: tuple(entry[0]))
    pos = 0
    while pos < len(rlrange) - 1:
        (end_run, end_lumi) = rlrange[pos][1]
        (next_run, next_lumi) = rlrange[pos + 1][0]
        if (end_run == next_run) and (end_lumi == next_lumi - 1):
            # current range ends right before the next one starts - merge them
            rlrange[pos] = (rlrange[pos][0], rlrange[pos + 1][1])
            del rlrange[pos + 1]
        else:
            pos += 1
    return rlrange
def parseLumiFromString(rlrange):
    """ Parse user supplied lumi info into easier to handle format
    >>> lmap(parseLumiFromString, ['1', '1-', '-1', '1-2'])
    [([1, None], [1, None]), ([1, None], [None, None]), ([None, None], [1, None]), ([1, None], [2, None])]
    >>> lmap(parseLumiFromString, ['1:5', '1:5-', '-1:5', '1:5-2:6'])
    [([1, 5], [1, 5]), ([1, 5], [None, None]), ([None, None], [1, 5]), ([1, 5], [2, 6])]
    >>> lmap(parseLumiFromString, ['1-:5', ':5-1', ':5-:6'])
    [([1, None], [None, 5]), ([None, 5], [1, None]), ([None, 5], [None, 6])]
    >>> lmap(parseLumiFromString, ['1:5-2', '1-2:5'])
    [([1, 5], [2, None]), ([1, None], [2, 5])]
    """
    def _to_int(token):
        # '', 'MIN' and 'MAX' placeholders denote "no bound"
        stripped = token.strip().upper()
        return None if stripped in ('', 'MAX', 'MIN') else int(token)
    def _parse_point(point):
        # 'run:lumi' -> [run, lumi]; a bare 'run' -> [run, None]
        if ':' in point:
            return [_to_int(part) for part in point.split(':')]
        return [_to_int(point), None]
    if '-' in rlrange:
        return tuple(_parse_point(part) for part in rlrange.split('-'))
    point = _parse_point(rlrange)
    return (point, point)
def parseLumiFilter(lumiexpr):
    """Parse a comma separated lumi filter expression into merged run/lumi ranges.

    Each token is either a run/lumi range expression (e.g. '1:5-2:6') or a
    JSON lumi mask file name, optionally followed by '|<run range>'.
    Returns None for an empty expression, otherwise a merged list of ranges.
    Raises ConfigError if a token cannot be processed.
    """
    if lumiexpr == '':
        return None
    lumis = []
    from grid_control.config import ConfigError
    for token in imap(str.strip, lumiexpr.split(',')):
        token = lmap(str.strip, token.split('|'))
        # tokens containing letters (besides the MIN/MAX placeholders) are file names
        if True in imap(str.isalpha, token[0].lower().replace('min', '').replace('max', '')):
            if len(token) == 1:
                token.append('')
            try:
                json_fn = os.path.normpath(os.path.expandvars(os.path.expanduser(token[0].strip())))
                # (fix) 'with' closes the file even when reading/parsing
                # raises - the original leaked the handle on error
                with open(json_fn) as json_fp:
                    lumis.extend(parseLumiFromJSON(json_fp.read(), token[1]))
            except Exception:
                raise ConfigError('Could not process lumi filter file: %r (filter: %r)' % tuple(token))
        else:
            try:
                lumis.append(parseLumiFromString(token[0]))
            except Exception:
                raise ConfigError('Could not process lumi filter expression:\n\t%s' % token[0])
    return mergeLumi(lumis)
def filterLumiFilter(runs, lumifilter):
    """ Filter lumifilter for entries that contain the given runs
    >>> formatLumi(filterLumiFilter([2,3,6], [([1, None], [2, None]), ([4, 1], [4, None]), ([5, 1], [None,3])]))
    ['1:MIN-2:MAX', '5:1-9999999:3']
    >>> formatLumi(filterLumiFilter([2,3,6], [([1, 1], [2, 2]), ([3, 1], [5, 2]), ([5, 2], [7,3])]))
    ['1:1-2:2', '3:1-5:2', '5:2-7:3']
    """
    for entry in lumifilter:
        (range_begin, range_end) = (entry[0][0], entry[1][0])
        for run_number in runs:
            lower_ok = (range_begin is None) or (run_number >= range_begin)
            upper_ok = (range_end is None) or (run_number <= range_end)
            if lower_ok and upper_ok:
                # one matching run is enough - emit the entry and move on
                yield entry
                break
def selectRun(run, lumifilter):
    """ Check if lumifilter selects the given run/lumi
    >>> selectRun(1, [([1, None], [2, None])])
    True
    >>> selectRun(2, [([1, 3], [5, 12])])
    True
    >>> selectRun(6, [([1, 3], [5, 12])])
    False
    >>> selectRun(9, [([3, 23], [None, None])])
    True
    """
    for (range_begin, range_end) in lumifilter:
        (begin_run, end_run) = (range_begin[0], range_end[0])
        lower_ok = (begin_run is None) or (run >= begin_run)
        upper_ok = (end_run is None) or (run <= end_run)
        if lower_ok and upper_ok:
            return True
    return False
def selectLumi(run_lumi, lumifilter):
    """ Check if lumifilter selects the given run/lumi
    >>> selectLumi((1,2), [([1, None], [2, None])])
    True
    >>> selectLumi((1,2), [([1, 3], [5, 12])])
    False
    >>> selectLumi((2,1), [([1, 3], [5, 12])])
    True
    >>> selectLumi((9,2), [([3, 23], [None, None])])
    True
    """
    (run, lumi) = run_lumi
    for (range_begin, range_end) in lumifilter:
        (begin_run, begin_lumi) = range_begin
        (end_run, end_lumi) = range_end
        # run must lie within the selected run range
        if (begin_run is not None) and (run < begin_run):
            continue
        if (end_run is not None) and (run > end_run):
            continue
        # the lumi bounds only constrain the boundary runs themselves
        if (begin_run is not None) and (run > begin_run):
            begin_lumi = None
        if (begin_lumi is not None) and (lumi < begin_lumi):
            continue
        if (end_run is not None) and (run < end_run):
            end_lumi = None
        if (end_lumi is not None) and (lumi > end_lumi):
            continue
        return True
    return False
def formatLumi(lumifilter):
    """ Check if lumifilter selects the given run/lumi
    >>> formatLumi(imap(parseLumiFromString, ['1', '1-', '-1', '1-2']))
    ['1:MIN-1:MAX', '1:MIN-9999999:MAX', '1:MIN-1:MAX', '1:MIN-2:MAX']
    >>> formatLumi(imap(parseLumiFromString, ['1:5', '1:5-', '-1:5', '1:5-2:6']))
    ['1:5-1:5', '1:5-9999999:MAX', '1:MIN-1:5', '1:5-2:6']
    >>> formatLumi(imap(parseLumiFromString, ['1-:5', ':5-1', ':5-:6']))
    ['1:MIN-9999999:5', '1:5-1:MAX', '1:5-9999999:6']
    >>> formatLumi(imap(parseLumiFromString, ['1:5-2', '1-2:5']))
    ['1:5-2:MAX', '1:MIN-2:5']
    """
    def _format_range(rlrange):
        ((begin_run, begin_lumi), (end_run, end_lumi)) = rlrange
        # missing bounds render as the widest possible selection
        begin_str = '%s:%s' % (begin_run if begin_run is not None else '1',
                               begin_lumi if begin_lumi is not None else 'MIN')
        end_str = '%s:%s' % (end_run if end_run is not None else '9999999',
                             end_lumi if end_lumi is not None else 'MAX')
        return '%s-%s' % (begin_str, end_str)
    if lumifilter:
        return [_format_range(entry) for entry in lumifilter]
    return ''
def strLumi(lumifilter):
    """Render a lumi filter as a single comma separated string."""
    return ','.join(formatLumi(lumifilter))
if __name__ == '__main__':
    # Run the doctests embedded in the docstrings above
    import doctest
    doctest.testmod()
| [
"stober@cern.ch"
] | stober@cern.ch |
562bd02172de569d1f59d18029b48daf417ddfa5 | 9d351d7fb64ddcca324c4f923e7ac3df263cbf0b | /blog/migrations/0008_post_published.py | 14c2a72daf2c9d49d7d46a8699e50165848b0e81 | [] | no_license | coderek/my_django | f83f70f5021f55b91cfb42fe05237d11f3eb28d0 | a497b17f453e18949a507fe07c1d8f7683e16e69 | refs/heads/master | 2021-01-10T13:09:07.412941 | 2016-09-04T02:56:03 | 2016-09-04T02:56:03 | 50,731,339 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 443 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.3 on 2016-08-28 01:46
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds a boolean 'published' flag
    # (default False) to the blog 'Post' model.
    dependencies = [
        ('blog', '0007_post_preview'),
    ]
    operations = [
        migrations.AddField(
            model_name='post',
            name='published',
            field=models.BooleanField(default=False),
        ),
    ]
| [
"dzeng@rubiconproject.com"
] | dzeng@rubiconproject.com |
0c1cfa94dc68106fc0503d151bf619120f9d7fda | f840fa133382ad79adc49ab12878df6132426b90 | /TDF-Vision-GCP.py | 272b115d3079186222ceada5ef5249a826494fac | [] | no_license | vivivek/TDF-Vision | fc194c34ed69147d1df02a20144d3d355269dfcb | 5f4fc6968997be7b3e5e8610d01e365f97def9ed | refs/heads/main | 2023-04-04T01:32:33.406568 | 2021-04-07T22:36:20 | 2021-04-07T22:36:20 | 355,411,703 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,082 | py | import picamera #camera library
import pygame as pg #audio library
import os #communicate with os/command line
from google.cloud import vision #gcp vision library
from time import sleep
from adafruit_crickit import crickit
import time
import signal
import sys
import re #regular expression lib for string searches!
import subprocess
#set up your GCP credentials - replace the " " in the following line with your .json file and path
# point the GCP client library at the service-account key file
os.environ["GOOGLE_APPLICATION_CREDENTIALS"]="TDF-VoiceVision.json"
# this line connects to Google Cloud Vision!
client = vision.ImageAnnotatorClient()
# global variable for our image file - to be captured soon!
image = 'image.jpg'
def espeak(text: str, pitch: int=50) -> int:
    """ Use espeak to convert text to speech. """
    # NOTE(review): '-p {pitch}' is passed as a single argv token ("-p 50");
    # confirm espeak parses the attached value, otherwise split it into
    # '-p', str(pitch).
    return subprocess.run(['espeak', f'-p {pitch}', text]).returncode
def takephoto(camera):
    """Capture a still from the Pi camera to 'image.jpg', with a brief preview."""
    # this triggers an on-screen preview, so you know what you're photographing!
    camera.start_preview()
    sleep(.5) #give it a pause so you can adjust if needed
    camera.capture('image.jpg') #save the image
    camera.stop_preview() #stop the preview
def ocr_handwriting(image):
    """Detect text in *image* via GCP Vision text_detection and speak it.

    All detected words are joined into a single space-separated string; if
    anything was found it is printed and passed to espeak(), otherwise a
    'not detected' message is printed.
    """
    #these two lines connect to google cloud vision in the text_detection mode
    response = client.text_detection(image=image)
    text = response.full_text_annotation
    word_text = ""
    #walk the page/block/paragraph/word hierarchy of the response and join
    #each word's symbols (note: every word gets a leading space)
    for page in text.pages:
        for block in page.blocks:
            for paragraph in block.paragraphs:
                for word in paragraph.words:
                    word_text += " "
                    word_text += ''.join([
                        symbol.text for symbol in word.symbols
                    ])
    #report and speak the detected text, if any
    if word_text:
        print('ocr_handwriting(): {}'.format(word_text))
        espeak(word_text)
    else:
        print('ocr_handwriting(): No Handwriting Text Detected!')
def image_labeling(image):
    """Label *image* via GCP Vision label_detection and react with sounds.

    All label descriptions are collected into one string; speaker_out() then
    plays a bark if 'dog' appears in it, or a meow if 'cat' appears.
    """
    string1 = "dog"
    string2 = "cat"
    sound1 = "dog2.wav"
    sound2 = "cat.wav"
    response = client.label_detection(image=image)
    labels = response.label_annotations
    label_text = ""
    #join every label description (the AI's guesses) into a single string
    for label in labels:
        label_text += ''.join([label.description, " "])
    #if anything was recognised, hand the text plus sound files to speaker_out()
    if label_text:
        print('image_labeling(): {}'.format(label_text))
        speaker_out(sound1, sound2, label_text, string1, string2)
    else:
        print('image_labeling(): No Label Descriptions')
def web_search(image):
    """Print GCP Vision's 'best guess' web labels for *image* (no actuation)."""
    response = client.web_detection(image=image)
    web_guess = response.web_detection
    for label in web_guess.best_guess_labels:
        print('Best Web Guess Label: {}'.format(label.label))
def speaker_out(sound1, sound2, text, string1, string2):
    """Play sound1 if string1 matches *text*, else sound2 if string2 matches.

    string1/string2 are applied as case-insensitive regular expressions and
    only the first match wins.  Sound files should be 16-bit .wav to avoid
    pygame underrun errors.
    """
    # print(text)
    if re.search(string1, text, re.IGNORECASE):
        pg.mixer.music.load(sound1) #pygame - load the sound file
        pg.mixer.music.play() #pygame - play the sound file
    elif re.search(string2, text, re.IGNORECASE):
        pg.mixer.music.load(sound2)
        pg.mixer.music.play()
def main():
    """Main loop: photograph, send to GCP Vision, speak/react, repeat."""
    #generate a camera object for the takephoto function to
    #work with
    camera = picamera.PiCamera()
    #setup our pygame mixer to play audio in subsequent stages
    pg.init()
    pg.mixer.init()
    #this while loop lets the script run until you ctrl+c (command line)
    #or press 'stop' (Thonny IDE)
    while True:
        takephoto(camera) # First take a picture
        """Run a label request on a single image"""
        with open('image.jpg', 'rb') as image_file:
            #read the image file
            content = image_file.read()
            #convert the image file to a GCP Vision-friendly type
            image = vision.Image(content=content)
        ocr_handwriting(image)
        image_labeling(image)
        #web_search(image)
        time.sleep(0.1)
if __name__ == '__main__':
    main()
| [
"noreply@github.com"
] | noreply@github.com |
7acb167d39a0b91b67c889f4cc7255bc999b54a5 | 3c5956cdd28d524a74578edf382007a5b2ea5bdd | /component/goldair_heater/__init__.py | a52c230c945e68654a0c1210f9e596ce97c0c0e2 | [
"MIT"
] | permissive | SmbKiwi/homeassistant-goldair-heater | 096757ff28d844b772ac5b7ee1a517f850305fe7 | fb129aad5be92e10aa24ae2e80a5b86303a174b2 | refs/heads/master | 2021-08-06T11:28:30.652901 | 2020-05-21T03:00:37 | 2020-05-21T03:00:37 | 178,757,774 | 1 | 1 | MIT | 2020-05-21T03:00:39 | 2019-04-01T00:28:52 | Python | UTF-8 | Python | false | false | 16,468 | py |
"""
Platform for Goldair WiFi-connected heaters and panels.
Based on sean6541/tuya-homeassistant for service call logic, and TarxBoy's
investigation into Goldair's tuyapi statuses
https://github.com/codetheweb/tuyapi/issues/31.
Version 2.0 Author: SmbKiwi
20 September 2019
Updated for HA Climate 1.0 (HA 0.96+)
Based on https://github.com/nikrolls/homeassistant-goldair-climate/tree/0.0.1
"""
from time import time
from threading import Timer, Lock
import logging
import json
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.const import (CONF_NAME, CONF_HOST, ATTR_TEMPERATURE, TEMP_CELSIUS)
from homeassistant.components.climate import ClimateDevice
from homeassistant.components.climate.const import (ATTR_PRESET_MODE, HVAC_MODE_OFF, HVAC_MODE_HEAT, CURRENT_HVAC_HEAT, CURRENT_HVAC_IDLE, CURRENT_HVAC_OFF, CURRENT_HVAC_DRY)
from homeassistant.helpers.discovery import load_platform
# Third-party requirement installed by Home Assistant for this component.
REQUIREMENTS = ['pytuya==7.0']
_LOGGER = logging.getLogger(__name__)

DOMAIN = 'goldair_heater'
DATA_GOLDAIR_HEATER = 'data_goldair_heater'

# configuration.yaml option keys (validated by PLATFORM_SCHEMA below)
CONF_DEVICE_ID = 'device_id'
CONF_LOCAL_KEY = 'local_key'
CONF_CLIMATE = 'climate'
CONF_SENSOR = 'sensor'
CONF_CHILD_LOCK = 'child_lock'
CONF_DISPLAY_LIGHT = 'display_light'

# Friendly names for the heater's tuya data points (dps); the numeric dps ids
# they map to are listed in GOLDAIR_PROPERTY_TO_DPS_ID below.
ATTR_ON = 'on'
ATTR_TARGET_TEMPERATURE = 'target_temperature'
ATTR_CHILD_LOCK = 'child_lock'
ATTR_FAULT = 'fault'
ATTR_POWER_LEVEL = 'power_level'
ATTR_TIMER_MINUTES = 'timer_minutes'
ATTR_TIMER_ON = 'timer_on'
ATTR_DISPLAY_ON = 'display_on'
ATTR_POWER_MODE = 'power_mode'
ATTR_ECO_TARGET_TEMPERATURE = 'eco_' + ATTR_TARGET_TEMPERATURE

# Preset mode names exposed to Home Assistant.
STATE_COMFORT = 'Comfort'
STATE_ECO = 'Eco'
STATE_ANTI_FREEZE = 'Anti-freeze'

# Friendly attribute name -> tuya dps id (string keys, per the tuyapi notes
# referenced in the module docstring).
GOLDAIR_PROPERTY_TO_DPS_ID = {
    ATTR_ON: '1',
    ATTR_TARGET_TEMPERATURE: '2',
    ATTR_TEMPERATURE: '3',
    ATTR_PRESET_MODE: '4',
    ATTR_CHILD_LOCK: '6',
    ATTR_FAULT: '12',
    ATTR_POWER_LEVEL: '101',
    ATTR_TIMER_MINUTES: '102',
    ATTR_TIMER_ON: '103',
    ATTR_DISPLAY_ON: '104',
    ATTR_POWER_MODE: '105',
    ATTR_ECO_TARGET_TEMPERATURE: '106'
}
# HA preset name -> raw dps value sent to/received from the device.
GOLDAIR_MODE_TO_DPS_MODE = {
    STATE_COMFORT: 'C',
    STATE_ECO: 'ECO',
    STATE_ANTI_FREEZE: 'AF'
}
# Friendly power-level name -> raw dps value.
GOLDAIR_POWER_LEVEL_TO_DPS_LEVEL = {
    'Stop': 'stop',
    '1': '1',
    '2': '2',
    '3': '3',
    '4': '4',
    '5': '5',
    'Auto': 'auto'
}
# Raw dps values for the power-mode data point (dps 105).
GOLDAIR_POWER_MODES = ['auto', 'user']

# Schema for a single configured heater entry.
PLATFORM_SCHEMA = vol.Schema({
    vol.Required(CONF_NAME): cv.string,
    vol.Required(CONF_HOST): cv.string,
    vol.Required(CONF_DEVICE_ID): cv.string,
    vol.Required(CONF_LOCAL_KEY): cv.string,
    vol.Optional(CONF_CLIMATE, default=True): cv.boolean,
    vol.Optional(CONF_SENSOR, default=False): cv.boolean,
    vol.Optional(CONF_DISPLAY_LIGHT, default=False): cv.boolean,
    vol.Optional(CONF_CHILD_LOCK, default=False): cv.boolean
})
# The component accepts a list of heater entries under its domain key.
CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.All(cv.ensure_list, [PLATFORM_SCHEMA])
}, extra=vol.ALLOW_EXTRA)
def setup(hass, config):
    """Set up one GoldairHeaterDevice per configured entry and load the
    requested entity platforms (climate/sensor/light/lock) for each.

    Devices are stored in ``hass.data[DOMAIN]`` keyed by host address so the
    platform setup code can find them. Returns True so Home Assistant marks
    the component as loaded.
    """
    hass.data[DOMAIN] = {}
    for device_config in config.get(DOMAIN, []):
        host = device_config.get(CONF_HOST)
        device = GoldairHeaterDevice(
            device_config.get(CONF_NAME),
            device_config.get(CONF_DEVICE_ID),
            host,
            device_config.get(CONF_LOCAL_KEY)
        )
        hass.data[DOMAIN][host] = device
        # CONSISTENCY FIX: look options up via the CONF_* constants declared
        # above (and used by PLATFORM_SCHEMA) instead of repeating raw strings.
        if device_config.get(CONF_CLIMATE):
            load_platform(hass, 'climate', DOMAIN, {'host': host}, config)
        if device_config.get(CONF_SENSOR):
            load_platform(hass, 'sensor', DOMAIN, {'host': host}, config)
        if device_config.get(CONF_DISPLAY_LIGHT):
            load_platform(hass, 'light', DOMAIN, {'host': host}, config)
        if device_config.get(CONF_CHILD_LOCK):
            load_platform(hass, 'lock', DOMAIN, {'host': host}, config)
    return True
class GoldairHeaterDevice(object):
    """Represents one Goldair WiFi-connected heater.

    All tuya API traffic goes through this object.  The heater's API is
    asynchronous and non-blocking: a status request issued immediately after
    a change returns the *old* state, so recently-sent values are kept in
    ``_pending_updates`` and overlaid onto the cached device state until the
    board catches up (see ``_get_cached_state``).
    """

    def __init__(self, name, dev_id, address, local_key):
        """
        Args:
            name (str): The friendly device name.
            dev_id (str): The device id.
            address (str): The network address.
            local_key (str): The encryption key.
        """
        import pytuya
        self._name = name
        self._api = pytuya.Device(dev_id, address, local_key, 'device')
        self._fixed_properties = {}
        self._reset_cached_state()
        self._TEMPERATURE_UNIT = TEMP_CELSIUS
        self._TEMPERATURE_STEP = 1
        # Valid target-temperature ranges per preset; Anti-freeze has no
        # user-settable target temperature.
        self._TEMPERATURE_LIMITS = {
            STATE_COMFORT: {
                'min': 5,
                'max': 35
            },
            STATE_ECO: {
                'min': 5,
                'max': 21
            }
        }
        # API calls to update Goldair heaters are asynchronous and non-blocking. This means
        # you can send a change and immediately request an updated state (like HA does),
        # but because it has not yet finished processing you will be returned the old state.
        # The solution is to keep a temporary list of changed properties that we can overlay
        # onto the state while we wait for the board to update its switches.
        self._FAKE_IT_TIL_YOU_MAKE_IT_TIMEOUT = 10
        self._CACHE_TIMEOUT = 20
        self._CONNECTION_ATTEMPTS = 2
        self._lock = Lock()
        self._operation_list = [HVAC_MODE_OFF, HVAC_MODE_HEAT]

    @property
    def name(self):
        return self._name

    @property
    def hvac_mode(self):
        """HVAC_MODE_HEAT/HVAC_MODE_OFF, or None while the state is unknown."""
        is_on = self._get_cached_state()[ATTR_ON]
        if is_on is True:
            return HVAC_MODE_HEAT
        elif is_on is False:
            return HVAC_MODE_OFF

    @property
    def hvac_modes(self):
        return self._operation_list

    def set_hvac_mode(self, hvac_mode):
        if hvac_mode == HVAC_MODE_HEAT:
            self._set_properties({ATTR_ON: True})
        elif hvac_mode == HVAC_MODE_OFF:
            self._set_properties({ATTR_ON: False})

    def turn_on(self):
        # BUG FIX: this was decorated @property, so `device.turn_on()` fired
        # the update as a side effect of the attribute access and then raised
        # TypeError by calling the returned None.
        self._set_properties({ATTR_ON: True})

    def turn_off(self):
        self._set_properties({ATTR_ON: False})

    @property
    def temperature_unit(self):
        return self._TEMPERATURE_UNIT

    @property
    def target_temperature(self):
        """Target temperature for the active preset (None for Anti-freeze)."""
        state = self._get_cached_state()
        if self.preset_mode == STATE_COMFORT:
            return state[ATTR_TARGET_TEMPERATURE]
        elif self.preset_mode == STATE_ECO:
            return state[ATTR_ECO_TARGET_TEMPERATURE]
        else:
            return None

    @property
    def target_temperature_step(self):
        return self._TEMPERATURE_STEP

    @property
    def min_target_teperature(self):
        # NOTE: misspelt name kept for backward compatibility with existing
        # callers; prefer min_target_temperature below.
        if self.preset_mode and self.preset_mode != STATE_ANTI_FREEZE:
            return self._TEMPERATURE_LIMITS[self.preset_mode]['min']
        else:
            return None

    @property
    def min_target_temperature(self):
        """Correctly-spelt alias for min_target_teperature."""
        return self.min_target_teperature

    @property
    def max_target_temperature(self):
        if self.preset_mode and self.preset_mode != STATE_ANTI_FREEZE:
            return self._TEMPERATURE_LIMITS[self.preset_mode]['max']
        else:
            return None

    def set_target_temperature(self, target_temperature):
        """Set the target for the active preset.

        Raises:
            ValueError: in Anti-freeze mode, or when the rounded value is
                outside the preset's limits.
        """
        target_temperature = int(round(target_temperature))
        preset_mode = self.preset_mode
        if preset_mode == STATE_ANTI_FREEZE:
            raise ValueError('You cannot set the temperature in Anti-freeze mode.')
        limits = self._TEMPERATURE_LIMITS[preset_mode]
        if not limits['min'] <= target_temperature <= limits['max']:
            raise ValueError(
                f'Target temperature ({target_temperature}) must be between '
                f'{limits["min"]} and {limits["max"]}'
            )
        if preset_mode == STATE_COMFORT:
            self._set_properties({ATTR_TARGET_TEMPERATURE: target_temperature})
        elif preset_mode == STATE_ECO:
            self._set_properties({ATTR_ECO_TARGET_TEMPERATURE: target_temperature})

    @property
    def current_temperature(self):
        return self._get_cached_state()[ATTR_TEMPERATURE]

    @property
    def preset_mode(self):
        return self._get_cached_state()[ATTR_PRESET_MODE]

    @property
    def preset_modes(self):
        return list(GOLDAIR_MODE_TO_DPS_MODE.keys())

    def set_preset_mode(self, new_mode):
        if new_mode not in GOLDAIR_MODE_TO_DPS_MODE:
            raise ValueError(f'Invalid mode: {new_mode}')
        self._set_properties({ATTR_PRESET_MODE: new_mode})

    @property
    def is_child_locked(self):
        return self._get_cached_state()[ATTR_CHILD_LOCK]

    def enable_child_lock(self):
        self._set_properties({ATTR_CHILD_LOCK: True})

    def disable_child_lock(self):
        self._set_properties({ATTR_CHILD_LOCK: False})

    @property
    def is_faulted(self):
        return self._get_cached_state()[ATTR_FAULT]

    @property
    def power_level(self):
        """'Auto', the user-mode level ('Stop'/'1'..'5'), or None if unknown."""
        power_mode = self._get_cached_state()[ATTR_POWER_MODE]
        if power_mode == 'user':
            return self._get_cached_state()[ATTR_POWER_LEVEL]
        elif power_mode == 'auto':
            return 'Auto'
        else:
            return None

    @property
    def power_level_list(self):
        return list(GOLDAIR_POWER_LEVEL_TO_DPS_LEVEL.keys())

    def set_power_level(self, new_level):
        if new_level not in GOLDAIR_POWER_LEVEL_TO_DPS_LEVEL.keys():
            raise ValueError(f'Invalid power level: {new_level}')
        self._set_properties({ATTR_POWER_LEVEL: new_level})

    @property
    def timer_timeout_in_minutes(self):
        return self._get_cached_state()[ATTR_TIMER_MINUTES]

    @property
    def is_timer_on(self):
        return self._get_cached_state()[ATTR_TIMER_ON]

    def start_timer(self, minutes):
        self._set_properties({
            ATTR_TIMER_ON: True,
            ATTR_TIMER_MINUTES: minutes
        })

    def stop_timer(self):
        self._set_properties({ATTR_TIMER_ON: False})

    @property
    def is_display_on(self):
        return self._get_cached_state()[ATTR_DISPLAY_ON]

    def turn_display_on(self):
        self._set_properties({ATTR_DISPLAY_ON: True})

    def turn_display_off(self):
        self._set_properties({ATTR_DISPLAY_ON: False})

    @property
    def power_mode(self):
        return self._get_cached_state()[ATTR_POWER_MODE]

    def set_power_mode(self, new_mode):
        if new_mode not in GOLDAIR_POWER_MODES:
            raise ValueError(f'Invalid user mode: {new_mode}')
        self._set_properties({ATTR_POWER_MODE: new_mode})

    @property
    def eco_target_temperature(self):
        return self._get_cached_state()[ATTR_ECO_TARGET_TEMPERATURE]

    def set_eco_target_temperature(self, eco_target_temperature):
        self._set_properties({ATTR_ECO_TARGET_TEMPERATURE: eco_target_temperature})

    def set_fixed_properties(self, fixed_properties):
        """Properties re-asserted with every update; also pushed to the device
        once, 10 seconds from now (gives it time to come online at startup)."""
        self._fixed_properties = fixed_properties
        set_fixed_properties = Timer(10, lambda: self._set_properties(self._fixed_properties))
        set_fixed_properties.start()

    def refresh(self):
        """Re-query the device unless the cached state is still fresh."""
        now = time()
        cached_state = self._get_cached_state()
        if now - cached_state['updated_at'] >= self._CACHE_TIMEOUT:
            self._retry_on_failed_connection(self._refresh_cached_state,
                                             'Failed to refresh device state.')

    def _reset_cached_state(self):
        # 'updated_at' of 0 forces the next refresh() to hit the device.
        self._cached_state = {
            ATTR_ON: None,
            ATTR_TARGET_TEMPERATURE: None,
            ATTR_TEMPERATURE: None,
            ATTR_PRESET_MODE: None,
            ATTR_CHILD_LOCK: None,
            ATTR_FAULT: None,
            ATTR_POWER_LEVEL: None,
            ATTR_TIMER_MINUTES: None,
            ATTR_TIMER_ON: None,
            ATTR_DISPLAY_ON: None,
            ATTR_POWER_MODE: None,
            ATTR_ECO_TARGET_TEMPERATURE: None,
            'updated_at': 0
        }
        self._pending_updates = {}

    def _refresh_cached_state(self):
        new_state = self._api.status()
        self._update_cached_state_from_dps(new_state['dps'])
        _LOGGER.info(f'refreshed device state: {json.dumps(new_state)}')
        _LOGGER.debug(f'new cache state: {json.dumps(self._cached_state)}')
        _LOGGER.debug(f'new cache state (including pending properties): {json.dumps(self._get_cached_state())}')

    def _set_properties(self, properties):
        if len(properties) == 0:
            return
        self._add_properties_to_pending_updates(properties)
        self._debounce_sending_updates()

    def _add_properties_to_pending_updates(self, properties):
        now = time()
        # Fixed properties are re-asserted alongside every change.
        properties = {**properties, **self._fixed_properties}
        pending_updates = self._get_pending_updates()
        for key, value in properties.items():
            pending_updates[key] = {
                'value': value,
                'updated_at': now
            }
        _LOGGER.debug(f'new pending updates: {json.dumps(self._pending_updates)}')

    def _debounce_sending_updates(self):
        # Restart the 1s timer on every change so rapid successive changes
        # are batched into a single dps payload.
        try:
            self._debounce.cancel()
        except AttributeError:
            pass  # first call: no timer exists yet
        self._debounce = Timer(1, self._send_pending_updates)
        self._debounce.start()

    def _send_pending_updates(self):
        pending_properties = self._get_pending_properties()
        new_state = GoldairHeaterDevice._generate_dps_payload_for_properties(pending_properties)
        payload = self._api.generate_payload('set', new_state)
        _LOGGER.debug(f'sending updated properties: {json.dumps(pending_properties)}')
        _LOGGER.info(f'sending dps update: {json.dumps(new_state)}')
        self._retry_on_failed_connection(lambda: self._send_payload(payload),
                                         'Failed to update device state.')

    def _send_payload(self, payload):
        # `with` guarantees release and, unlike the previous
        # acquire-inside-try form, never releases a lock it failed to acquire.
        with self._lock:
            self._api._send_receive(payload)
            self._cached_state['updated_at'] = 0  # force a refresh next poll
            now = time()
            for pending_update in self._get_pending_updates().values():
                # Restart the overlay window now that the values are in flight.
                pending_update['updated_at'] = now

    def _retry_on_failed_connection(self, func, error_message):
        """Call func(), retrying up to _CONNECTION_ATTEMPTS times; after the
        final failed attempt, reset the cache and log error_message."""
        for attempt in range(self._CONNECTION_ATTEMPTS):
            try:
                func()
                # BUG FIX: return on success; previously the loop carried on
                # and ran func() once per remaining attempt.
                return
            except Exception:  # pytuya raises assorted exception types
                if attempt + 1 == self._CONNECTION_ATTEMPTS:
                    self._reset_cached_state()
                    _LOGGER.error(error_message)

    def _get_cached_state(self):
        """Device state with any still-pending local changes overlaid."""
        cached_state = self._cached_state.copy()
        _LOGGER.debug(f'pending updates: {json.dumps(self._get_pending_updates())}')
        return {**cached_state, **self._get_pending_properties()}

    def _get_pending_properties(self):
        return {key: info['value'] for key, info in self._get_pending_updates().items()}

    def _get_pending_updates(self):
        # Drop overlay entries older than the fake-it timeout; by then the
        # board should be reporting the new values itself.
        now = time()
        self._pending_updates = {key: value for key, value in self._pending_updates.items()
                                 if now - value['updated_at'] < self._FAKE_IT_TIL_YOU_MAKE_IT_TIMEOUT}
        return self._pending_updates

    def _update_cached_state_from_dps(self, dps):
        """Translate a raw dps status dict into friendly cached values."""
        now = time()
        for key, dps_id in GOLDAIR_PROPERTY_TO_DPS_ID.items():
            if dps_id in dps:
                value = dps[dps_id]
                if dps_id == GOLDAIR_PROPERTY_TO_DPS_ID[ATTR_PRESET_MODE]:
                    self._cached_state[key] = GoldairHeaterDevice._get_key_for_value(GOLDAIR_MODE_TO_DPS_MODE, value)
                elif dps_id == GOLDAIR_PROPERTY_TO_DPS_ID[ATTR_POWER_LEVEL]:
                    self._cached_state[key] = GoldairHeaterDevice._get_key_for_value(GOLDAIR_POWER_LEVEL_TO_DPS_LEVEL, value)
                else:
                    self._cached_state[key] = value
        self._cached_state['updated_at'] = now

    @staticmethod
    def _generate_dps_payload_for_properties(properties):
        """Translate friendly property names/values into a tuya dps dict."""
        dps = {}
        for key, dps_id in GOLDAIR_PROPERTY_TO_DPS_ID.items():
            if key in properties:
                value = properties[key]
                if dps_id == GOLDAIR_PROPERTY_TO_DPS_ID[ATTR_PRESET_MODE]:
                    dps[dps_id] = GOLDAIR_MODE_TO_DPS_MODE[value]
                elif dps_id == GOLDAIR_PROPERTY_TO_DPS_ID[ATTR_POWER_LEVEL]:
                    dps[dps_id] = GOLDAIR_POWER_LEVEL_TO_DPS_LEVEL[value]
                else:
                    dps[dps_id] = value
        return dps

    @staticmethod
    def _get_key_for_value(obj, value):
        """Reverse lookup: return the first key in obj mapping to value."""
        keys = list(obj.keys())
        values = list(obj.values())
        return keys[values.index(value)]
| [
"noreply@github.com"
] | noreply@github.com |
409a0b3c9fedf9479c0648210469d39a2099d9e3 | e3d33c30fa89a039532119bf6b87b54ae5bbf596 | /ckx_tools/verbs/ckx_create/cli.py | d389595f2a7ab01be0aa8d617d8e113f714a0ffc | [
"Apache-2.0"
] | permissive | stonier/ckx_tools | 1b238e9878c367fd4b5cd1b09f545b0ea2b3bb64 | 5ee04dc508c5b6256e68fb03f320bf29927f08a7 | refs/heads/devel | 2021-04-28T23:15:45.648638 | 2018-02-20T16:07:22 | 2018-02-20T16:07:22 | 77,739,402 | 3 | 0 | null | 2017-01-19T22:55:59 | 2016-12-31T12:29:21 | Python | UTF-8 | Python | false | false | 6,417 | py | # Copyright 2014 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
from catkin_pkg.package_templates import create_package_files, PackageTemplate
# Exempt build directories
# See https://github.com/catkin/catkin_tools/issues/82
def prepare_arguments(parser):
    """Attach this verb's sub-commands and options to *parser* and return it.

    Only the 'pkg' sub-command is defined: it creates one or more new catkin
    packages, with metadata (version/license/maintainer/author), dependency
    and C++-specific option groups.
    """
    # Workspace / profile args
    # add_context_args(parser)
    subparsers = parser.add_subparsers(dest='subcommand', help='sub-command help')
    parser_pkg = subparsers.add_parser('pkg', help='Create a new catkin package.')
    parser_pkg.description = (
        "Create a new Catkin package. Note that while the "
        "default options used by this command are sufficient for prototyping and "
        "local usage, it is important that any publically-available packages have "
        "a valid license and a valid maintainer e-mail address.")
    add = parser_pkg.add_argument
    add('name', metavar='PKG_NAME', nargs='+',
        help='The name of one or more packages to create. This name should be '
             'completely lower-case with individual words separated by underscores.')  # FIX: 'undercores' typo
    add('-p', '--path', action='store', default=os.getcwd(),
        help='The path into which the package should be generated.')
    # TODO: Make this possible
    # add('--manifest-only', action='store_true', default=False,
    #     help='Only create a package.xml manifest file and do not generate a CMakeLists.txt')
    # TODO: Make this possible
    # add('--build-type', type=str, choices=['catkin', 'cmake'],
    #     nargs=1,
    #     default='catkin',
    #     help='The buildtool to use to build the package. (default: catkin)')
    # --rosdistro is only required when the environment doesn't already say.
    rosdistro_name = os.environ.get('ROS_DISTRO')
    add('--rosdistro', required=rosdistro_name is None, default=rosdistro_name,
        help='The ROS distro (default: environment variable ROS_DISTRO if defined)')
    basic_group = parser_pkg.add_argument_group('Package Metadata')
    add = basic_group.add_argument
    add('-v', '--version',
        metavar='MAJOR.MINOR.PATCH',
        action='store',
        help='Initial package version. (default 0.0.0)')
    add('-l', '--license',
        action='append',
        help='The software license under which the code is distributed, such as '
             'BSD, MIT, GPLv3, or others. (default: "TODO")')
    add('-m', '--maintainer',
        metavar=('NAME', 'EMAIL'),
        dest='maintainers',
        action='append',
        nargs=2,
        help='A maintainer who is responsible for the package. (default: '
             '[username, username@todo.todo]) (multiple allowed)')
    add('-a', '--author',
        metavar=('NAME', 'EMAIL'),
        dest='authors',
        action='append',
        nargs=2,
        help='An author who contributed to the package. (default: no additional '
             'authors) (multiple allowed)')
    add('-d', '--description',
        action='store',
        help='Description of the package. (default: empty)')
    deps_group = parser_pkg.add_argument_group('Package Dependencies')
    add = deps_group.add_argument
    add('--catkin-deps', '-c', metavar='DEP', nargs="*",
        help='The names of one or more Catkin dependencies. These are '
             'Catkin-based packages which are either built as source or installed '
             'by your system\'s package manager.')
    add('--system-deps', '-s', metavar='DEP', nargs="*",
        help='The names of one or more system dependencies. These are other '
             'packages installed by your operating system\'s package manager.')
    cpp_group = parser_pkg.add_argument_group('C++ Options')
    add = cpp_group.add_argument
    add('--boost-components',
        metavar='COMP',
        nargs='*',
        help='One or more boost components used by the package.')
    # Placeholder group for the commented-out --python-setup option below.
    # py_group = parser_pkg.add_argument_group('Python Options')
    # add('--python-setup', action='store_true', default=False,
    #     help='Add a default python setup file.')
    return parser
def main(opts):
    """Create each requested package under opts.path.

    Returns 0 on success, 1 when catkin_pkg rejects the supplied metadata
    (reported via ValueError).
    """
    try:
        dest_root = os.path.abspath(opts.path)
        maintainers = opts.maintainers or []
        authors = opts.authors or []
        for pkg_name in opts.name:
            print('Creating package "%s" in "%s"...' % (pkg_name, dest_root))
            pkg_dir = os.path.join(dest_root, pkg_name)
            template = PackageTemplate._create_package_template(
                package_name=pkg_name,
                description=opts.description,
                licenses=opts.license or [],
                maintainer_names=[person for person, _ in maintainers],
                author_names=[person for person, _ in authors],
                version=opts.version,
                catkin_deps=opts.catkin_deps,
                system_deps=opts.system_deps,
                boost_comps=opts.boost_components)
            # The template only carries names; fill in the e-mail half of
            # each (name, email) pair supplied on the command line.
            for target, (_, email) in zip(template.maintainers, maintainers):
                target.email = email
            for target, (_, email) in zip(template.authors, authors):
                target.email = email
            # Add build type export
            # if opts.build_type and opts.build_type != 'catkin':
            #     build_type = Export('build_type', content=opts.build_type)
            #     template.exports.append(build_type)
            create_package_files(target_path=pkg_dir,
                                 package_template=template,
                                 rosdistro=opts.rosdistro,
                                 newfiles={})
            print('Successfully created package files in %s.' % pkg_dir)
    except ValueError as vae:
        print(str(vae))
        return 1
    return 0
| [
"jbo@jhu.edu"
] | jbo@jhu.edu |
b07ce06995424905dfdfd4cbd11ef8dfa7396fde | 82bff6559535b9d7b6f068939406b114c7d50791 | /src/example_service.py | 031e6ec6414699a1dacecd8ceb97ad82a0ef654a | [] | no_license | ubtvisbot/get_parameters | fa8389e74eecc9f141f6bd6226cb43c73ec323ef | 9d2202b807cae081c61f6216f008243981043a5e | refs/heads/master | 2020-09-15T04:27:46.349540 | 2019-11-26T08:18:04 | 2019-11-26T08:18:04 | 223,348,244 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 527 | py | #!/usr/bin/env python
from get_parameters.srv import *
import rospy
def handle_example_service(req):
print "A client said: " + req.messageToPrint
print "A client sent me this float32 value: " + str(req.someFloatValue)
return ExampleServiceDefinitionResponse("I am the server and this is my response.")
if __name__ == "__main__":
    # Start the ROS node, advertise the service, then block until shutdown.
    rospy.init_node('example_service_node')
    s = rospy.Service('example_service', ExampleServiceDefinition, handle_example_service)
    print "Ready to serve..."
    rospy.spin()
| [
"lifu.qin@ubtrobot.com"
] | lifu.qin@ubtrobot.com |
3abf6bd14ccbdae18927dd13663caf2bd87bb9f2 | 57d58d435e0645d93254f22277f063254879fc04 | /SplitTest.py | 945dda070e88bc1a7217ec0d1c4532dd86826922 | [] | no_license | chenhx1992/ECG_adv | 8a6f10254dafcfc32bd8e727b8bde6eab8fd49c5 | 020e9cc749085b59caa8c8be9248145402b0ccb1 | refs/heads/master | 2021-06-18T12:38:55.529820 | 2019-09-16T07:51:51 | 2019-09-16T07:51:51 | 148,480,888 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,074 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import scipy.io
import glob
import csv
import numpy as np
import matplotlib.pyplot as plt
import keras.backend as K
from keras.models import load_model
from keras.utils.np_utils import to_categorical
from keras import metrics
import tensorflow as tf
# Parameters
dataDir = './training_raw/'  # folder holding the .mat recordings and REFERENCE-v3.csv
FS = 300  # samples per second (record length below is computed as len/300.0)
WINDOW_SIZE = 30*FS # padding window for CNN
classes = ['A', 'N', 'O','~']  # presumably AF / normal / other / noisy (PhysioNet 2017 labels) — confirm
## functions
def preprocess(x, maxlen):
    """Z-score-normalise the first row of *x* and zero-pad it to *maxlen*.

    NaNs are replaced first; signals longer than *maxlen* are truncated.
    Returns an array of shape (1, maxlen, 1), as required by Keras.
    """
    signal = np.nan_to_num(x)[0, 0:maxlen]
    signal = (signal - np.mean(signal)) / np.std(signal)
    padded = np.zeros((1, maxlen))
    padded[0, :len(signal)] = signal.T  # zero-pad the tail
    return np.expand_dims(padded, axis=2)
def split_data(x):
    """Split the single-row signal into its first and second halves.

    Returns two single-element lists, each holding one half (the second
    half gets the extra sample when the length is odd).
    """
    total = len(x[0, :])
    print("Data length:", total)
    mid = total // 2
    return [x[0, 0:mid]], [x[0, mid:]]
def predict_data(model, x):
    """Run *model* on *x*; return (probability array, argmax class index)."""
    scores = model.predict(x)
    return scores, np.argmax(scores)
## Loading time-series signals
files = sorted(glob.glob(dataDir+"*.mat"))
# Load and apply model
print("Loading model")
model = load_model('ResNet_30s_34lay_16conv.hdf5')
# load groundTruth
print("Loading ground truth file")
csvfile = list(csv.reader(open(dataDir+'REFERENCE-v3.csv')))
# Main loop.  Per-record columns: [ground truth, whole-record class, prob,
# first-half class, prob, second-half class, prob, record length in seconds]
prediction = np.zeros((len(files),8))
count = 0
correct_consist = 0   # whole record correct AND both halves agree with it
correct_whole = 0     # whole-record prediction matches ground truth
consist = 0           # both halves agree with the whole-record prediction
for f in files:
    record = f[:-4]
    record = record[-6:]  # record id, e.g. 'A00001', taken from the file name
    # Loading
    mat_data = scipy.io.loadmat(f[:-4] + ".mat")
    print('Loading record {}'.format(record))
    # data = mat_data['val'].squeeze()
    data = mat_data['val']
    # Classify the whole record and each half independently.
    x = preprocess(data, WINDOW_SIZE)
    x1, x2 = split_data(data)
    x1 = preprocess(x1, WINDOW_SIZE)
    x2 = preprocess(x2, WINDOW_SIZE)
    print("Applying model ..")
    # NOTE(review): assumes REFERENCE-v3.csv rows are in the same sorted
    # order as the .mat files — confirm.
    ground_truth_label = csvfile[count][1]
    ground_truth = classes.index(ground_truth_label)
    prob_x, ann_x = predict_data(model, x)
    prob_x1, ann_x1 = predict_data(model, x1)
    prob_x2, ann_x2 = predict_data(model, x2)
    print("Record {} ground truth: {}".format(record, ground_truth_label))
    print("Record {} classified as {} with {:3.1f}% certainty".format(record, classes[ann_x], 100*prob_x[0,ann_x]))
    print("Record {} first half classified as {} with {:3.1f}% certainty".format(record, classes[ann_x1], 100*prob_x1[0,ann_x1]))
    print("Record {} second half classified as {} with {:3.1f}% certainty".format(record, classes[ann_x2], 100*prob_x2[0,ann_x2]))
    prediction[count,0] = ground_truth
    prediction[count,1] = ann_x
    prediction[count,2] = prob_x[0,ann_x]
    prediction[count,3] = ann_x1
    prediction[count,4] = prob_x1[0,ann_x1]
    prediction[count,5] = ann_x2
    prediction[count,6] = prob_x2[0,ann_x2]
    prediction[count,7] = len(data[0,:])/300.0  # duration in seconds (FS = 300)
    if (ground_truth == ann_x):
        correct_whole += 1
    if (ann_x == ann_x1) and (ann_x == ann_x2):
        consist += 1
    if (ground_truth == ann_x) and (ann_x == ann_x1) and (ann_x == ann_x2):
        correct_consist += 1
    count += 1
    # if count == 10:
    #     break
print("Correct:{}, total:{}, percent:{}".format(correct_whole, count, correct_whole/(count)))
print("Consist:{}, total:{}, percent:{}".format(consist, count, consist/(count)))
print("Correct_consist:{}, total:{}, percent:{}".format(correct_consist, count, correct_consist/(count)))
# append 1-based file index as a ninth column
# NOTE(review): 8528 is hard-coded (the full 2017 training-set size);
# presumably this should be len(files) — confirm.
new_prediction = np.zeros((len(files),9))
new_prediction[:, 0:8] = prediction
new_prediction[:, 8] = np.arange(8528)+1
#Select correct and correct_consist prediction
cond_x_x1 = np.equal(new_prediction[:,1], new_prediction[:,3])  # whole == first half
cond_x_x2 = np.equal(new_prediction[:,1], new_prediction[:,5])  # whole == second half
cond_x_gt = np.equal(new_prediction[:,1], new_prediction[:,0])  # whole == ground truth
cond_consist = np.logical_and(cond_x_x1, cond_x_x2)
cond_correct_consist = np.logical_and(cond_consist, cond_x_gt)
correct_consist_prediction = new_prediction[cond_correct_consist]
correct_prediction = new_prediction[cond_x_gt]
# save prediction to csv files
format = '%i,%i,%.5f,%i,%.5f,%i,%.5f,%.2f,%i'
np.savetxt("./DataAnalysis/prediction_all.csv", new_prediction, fmt= format, delimiter=",")
np.savetxt("./DataAnalysis/prediction_correct.csv", correct_prediction, fmt= format, delimiter=",")
np.savetxt("./DataAnalysis/prediction_correct_consist.csv", correct_consist_prediction, fmt= format, delimiter=",")
# check each type percentage in correct_consist prediction
type_a = correct_consist_prediction[correct_consist_prediction[:,0] == 0]
type_n = correct_consist_prediction[correct_consist_prediction[:,0] == 1]
type_o = correct_consist_prediction[correct_consist_prediction[:,0] == 2]
type_s = correct_consist_prediction[correct_consist_prediction[:,0] == 3]
type_a_all = new_prediction[new_prediction[:,0] == 0]
type_n_all = new_prediction[new_prediction[:,0] == 1]
type_o_all = new_prediction[new_prediction[:,0] == 2]
type_s_all = new_prediction[new_prediction[:,0] == 3]
| [
"chenhx1992@gmail.com"
] | chenhx1992@gmail.com |
da1cdafb15919fc04e543d77c8dd8b96ea986d1e | 6c92b89d61e1cbfb26d2fb126392680209776124 | /TTEv1.py | 71df50340ba44fa2f7587c8d1bff6806b80ebd66 | [
"Apache-2.0"
] | permissive | Tavnos/TTE | 44f2a9181ce5eb64ec5b3e8994e8b4ad2efa1874 | 0f88d94f41f94baff7b24a1bc1a6790f477add1b | refs/heads/master | 2020-03-07T00:02:41.794576 | 2018-03-29T01:59:30 | 2018-03-29T01:59:30 | 127,148,396 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 50,357 | py | from random import choices
from tkinter import (Canvas, Frame, Text, Entry, Label, Button, Radiobutton, Checkbutton, StringVar, Tk)
class Nucleotide_Data:
    """Nucleotide records (DNA bases plus uracil) and the DNA base list."""
    # Kept as an empty class-level default; each instance builds its own
    # list in __init__.  BUG FIX: the original appended to this shared
    # class-level list on every instantiation, so a second instance saw
    # eight entries instead of four.
    dna_base = []
    adenine = {'char':'a','dna':'a','match':'t','rna':'a'}
    guanine = {'char':'g','dna':'g','match':'c','rna':'g'}
    cytosine = {'char':'c','dna':'c','match':'g','rna':'c'}
    thymine = {'char':'t','dna':'t','match':'a','rna':'u'}
    uracil = {'char':'u','dna':'t','match':'a','rna':'u'}
    nucleotide = (adenine,guanine,cytosine,thymine,uracil)

    def __init__(self):
        # Slice off the trailing uracil entry: it is not a DNA base.
        self.dna_base = [base['char'] for base in self.nucleotide[:-1]]
class Amino_Data:
    """Genetic-code tables: per-amino-acid records (codons, codes, chemical
    properties) and the BLOSUM62 substitution matrix.

    Instantiating builds ``self.amino_acid``, the 20 standard one-letter
    codes (the three stop-codon records are excluded).
    """
    # Kept as an empty class-level default; each instance builds its own
    # list in __init__.  BUG FIX: the original appended to this shared
    # class-level list on every instantiation, duplicating the codes.
    amino_acid = []
    methionine = {'config':('atg'),'name':'Methionine','letter_code':'M','tri_char':'Met','amino_class':'sulfuric','amino_polarity':'non-polar','amino_charge':'neutral','amino_hydropathy':1.9,'amino_weight':149.208}
    leucine = {'config':('tta','ttg','ctt','ctc','cta','ctg'),'name':'leucine','letter_code':'L','tri_char':'Leu','amino_class':'aliphatic','amino_polarity':'non-polar','amino_charge':'neutral','amino_hydropathy':3.8,'amino_weight':131.175}
    phenylalanine = {'config':('ttt','ttc'),'name':'phenylalanine','letter_code':'F','tri_char':'Phe','amino_class':'aromatic','amino_polarity':'non-polar','amino_charge':'neutral','amino_hydropathy':2.8,'amino_weight':165.192}
    serine = {'config':('tct','tcc','tca','tcg','agc','agt'),'name':'serine','letter_code':'S','tri_char':'Ser','amino_class':'hydroxylic','amino_polarity':'polar','amino_charge':'neutral','amino_hydropathy':1.9,'amino_weight':149.208}
    cysteine = {'config':('tgt','tgc'),'name':'cysteine','letter_code':'C','tri_char':'Cys','amino_class':'sulfuric','amino_polarity':'non-polar','amino_charge':'neutral','amino_hydropathy':2.5,'amino_weight':121.154}
    tryptophan = {'config':('tgg'),'name':'tryptophan','letter_code':'W','tri_char':'Trp','amino_class':'aromatic','amino_polarity':'non-polar','amino_charge':'neutral','amino_hydropathy':-0.9,'amino_weight':204.228}
    proline = {'config':('ccc','cca','cct','ccg'),'name':'proline','letter_code':'P','tri_char':'Pro','amino_class':'cyclic','amino_polarity':'non-polar','amino_charge':'neutral','amino_hydropathy':-1.6,'amino_weight':115.132}
    histidine = {'config':('cat','cac'),'name':'histidine','letter_code':'H','tri_char':'His','amino_class':'basic-aromatic','amino_polarity':'basic-polar','amino_charge':'neutral-positive','amino_hydropathy':-3.2,'amino_weight':155.156}
    glutamine = {'config':('caa','cag'),'name':'glutamine','letter_code':'Q','tri_char':'Gln','amino_class':'amide','amino_polarity':'polar','amino_charge':'neutral','amino_hydropathy':-3.5,'amino_weight':146.146}
    arginine = {'config':('cgg','cgt','cgc','cga','agg','aga'),'name':'arginine','letter_code':'R','tri_char':'Arg','amino_class':'basic','amino_polarity':'basic-polar','amino_charge':'positive','amino_hydropathy':-4.5,'amino_weight':174.203}
    isoleucine = {'config':('att','atc','ata'),'name':'isoleucine','letter_code':'I','tri_char':'Ile','amino_class':'aliphatic','amino_polarity':'non-polar','amino_charge':'neutral','amino_hydropathy':4.5,'amino_weight':131.175}
    valine = {'config':('gtg','gtt','gtc','gta'),'name':'valine','letter_code':'V','tri_char':'Val','amino_class':'aliphatic','amino_polarity':'non-polar','amino_charge':'neutral','amino_hydropathy':4.2,'amino_weight':117.148}
    tyrosine = {'config':('tat','tac'),'name':'tyrosine','letter_code':'Y','tri_char':'Tyr','amino_class':'aromatic','amino_polarity':'polar','amino_charge':'neutral','amino_hydropathy':-1.3,'amino_weight':181.191}
    threonine = {'config':('aca','acc','act','acg'),'name':'threonine','letter_code':'T','tri_char':'Thr','amino_class':'hydroxylic','amino_polarity':'polar','amino_charge':'neutral','amino_hydropathy':-0.7,'amino_weight':119.119}
    alanine = {'config':('gcg','gcc','gct','gca'),'name':'alanine','letter_code':'A','tri_char':'Ala','amino_class':'aliphatic','amino_polarity':'non-polar','amino_charge':'neutral','amino_hydropathy':1.8,'amino_weight':89.094}
    # BUG FIX: the second asparagine codon is 'aac'; 'aag' codes lysine.
    asparagine = {'config':('aat','aac'),'name':'asparagine','letter_code':'N','tri_char':'Asn','amino_class':'amide','amino_polarity':'polar','amino_charge':'neutral','amino_hydropathy':-3.5,'amino_weight':132.119}
    lysine = {'config':('aaa','aag'),'name':'lysine','letter_code':'K','tri_char':'Lys','amino_class':'basic','amino_polarity':'basic-polar','amino_charge':'positive','amino_hydropathy':-3.9,'amino_weight':146.189}
    glutamic = {'config':('gaa','gag'),'name':'glutamic','letter_code':'E','tri_char':'Glu','amino_class':'acidic','amino_polarity':'acidic-polar','amino_charge':'negative','amino_hydropathy':-3.5,'amino_weight':147.131}
    aspartic = {'config':('gat','gac'),'name':'aspartic','letter_code':'D','tri_char':'Asp','amino_class':'acidic','amino_polarity':'acidic-polar','amino_charge':'negative','amino_hydropathy':-3.5,'amino_weight':133.104}
    # NOTE(review): several amino_weight values look suspect (e.g. glycine
    # repeats aspartic's 133.104; serine repeats 149.208) — verify against a
    # reference table before relying on them.
    glycine = {'config':('ggg','gga','ggc','ggt'),'name':'glycine','letter_code':'G','tri_char':'Gly','amino_class':'aliphatic','amino_polarity':'non-polar','amino_charge':'neutral','amino_hydropathy':-0.4,'amino_weight':133.104}
    # Stop codons (no amino acid produced).
    ochre = {'config':('taa'),'name':'Ochre','letter_code':'X','tri_char':'_Ochre','amino_class':'Stop','amino_polarity':None,'amino_charge':None,'amino_hydropathy':None,'amino_weight':0}
    amber = {'config':('tag'),'name':'Amber','letter_code':'B','tri_char':'_Amber','amino_class':'Stop','amino_polarity':None,'amino_charge':None,'amino_hydropathy':None,'amino_weight':0}
    opal = {'config':('tga'),'name':'Opal','letter_code':'Z','tri_char':'_Opal','amino_class':'Stop','amino_polarity':None,'amino_charge':None,'amino_hydropathy':None,'amino_weight':0}
    amino_acids = (methionine,leucine,phenylalanine,serine,cysteine,tryptophan,proline,histidine,glutamine,arginine,isoleucine,valine,tyrosine,threonine,alanine,asparagine,lysine,glutamic,aspartic,glycine,ochre,amber,opal)
    # BLOSUM62 substitution scores keyed by one-letter code; the X/B/Z rows
    # and columns (stop codons here) score zero against everything.
    blosum_62 = {'A':{'A':4, 'R':-1,'N':-2,'D':-2,'C':0, 'Q':-1,'E':-1,'G':0, 'H':-2,'I':-1,'L':-1,'K':-1,'M':-1,'F':-2,'P':-1,'S':1, 'T':0, 'W':-3,'Y':-2,'V':0, 'X':0,'B':0,'Z':0},
                 'R':{'A':-1,'R':5, 'N':0, 'D':-2,'C':-3,'Q':1, 'E':0, 'G':-2,'H':0, 'I':-3,'L':-2,'K':2, 'M':-1,'F':-3,'P':-2,'S':-1,'T':-1,'W':-3,'Y':-2,'V':-3,'X':0,'B':0,'Z':0},
                 'N':{'A':-2,'R':0, 'N':6, 'D':1, 'C':-3,'Q':0, 'E':0, 'G':0, 'H':1, 'I':-3,'L':-3,'K':0, 'M':-2,'F':-3,'P':-2,'S':1, 'T':0, 'W':-4,'Y':-2,'V':-3,'X':0,'B':0,'Z':0},
                 'D':{'A':-2,'R':-2,'N':1, 'D':6, 'C':-3,'Q':0, 'E':2, 'G':-1,'H':-1,'I':-3,'L':-4,'K':-1,'M':-3,'F':-3,'P':-1,'S':-1,'T':-1,'W':-2,'Y':-2,'V':-1,'X':0,'B':0,'Z':0},
                 'C':{'A':0, 'R':-3,'N':-3,'D':-3,'C':9, 'Q':-3,'E':-4,'G':-3,'H':-3,'I':-1,'L':-1,'K':-3,'M':-1,'F':-2,'P':-3,'S':-1,'T':-1,'W':-2,'Y':-2,'V':-1,'X':0,'B':0,'Z':0},
                 'Q':{'A':-1,'R':1, 'N':0, 'D':0, 'C':-3,'Q':5, 'E':2, 'G':-2,'H':0, 'I':-3,'L':-2,'K':1, 'M':0, 'F':-3,'P':-1,'S':0, 'T':-1,'W':-2,'Y':-1,'V':-2,'X':0,'B':0,'Z':0},
                 'E':{'A':-1,'R':0, 'N':0, 'D':2, 'C':-4,'Q':2, 'E':5, 'G':-2,'H':0, 'I':-3,'L':-3,'K':1, 'M':-2,'F':-3,'P':-1,'S':0, 'T':-1,'W':-3,'Y':-2,'V':-2,'X':0,'B':0,'Z':0},
                 'G':{'A':0, 'R':-2,'N':0, 'D':-1,'C':-3,'Q':-2,'E':-2,'G':6, 'H':-2,'I':-4,'L':-4,'K':-2,'M':-3,'F':-3,'P':-2,'S':0, 'T':-2,'W':-2,'Y':-3,'V':-3,'X':0,'B':0,'Z':0},
                 'H':{'A':-2,'R':0, 'N':1, 'D':-1,'C':-3,'Q':0, 'E':0, 'G':-2,'H':8, 'I':-3,'L':-3,'K':-1,'M':-2,'F':-1,'P':-2,'S':-1,'T':-2,'W':-2,'Y':2, 'V':-3,'X':0,'B':0,'Z':0},
                 'I':{'A':-1,'R':-3,'N':-3,'D':-3,'C':-1,'Q':-3,'E':-3,'G':-4,'H':-3,'I':4, 'L':2, 'K':-3,'M':1, 'F':0, 'P':-3,'S':-2,'T':-1,'W':-3,'Y':-1,'V':3, 'X':0,'B':0,'Z':0},
                 'L':{'A':-1,'R':-2,'N':-3,'D':-4,'C':-1,'Q':-2,'E':-3,'G':-4,'H':-3,'I':2, 'L':4, 'K':-2,'M':2, 'F':0, 'P':-3,'S':-2,'T':-1,'W':-2,'Y':-1,'V':1, 'X':0,'B':0,'Z':0},
                 'K':{'A':-1,'R':2, 'N':0, 'D':-1,'C':-3,'Q':1, 'E':1, 'G':-2,'H':-1,'I':-3,'L':-2,'K':5, 'M':-1,'F':-3,'P':-1,'S':0, 'T':-1,'W':-3,'Y':-2,'V':-2,'X':0,'B':0,'Z':0},
                 'M':{'A':-1,'R':-1,'N':-2,'D':-3,'C':-1,'Q':0, 'E':-2,'G':-3,'H':-2,'I':1, 'L':2, 'K':-1,'M':5, 'F':0, 'P':-2,'S':-1,'T':-1,'W':-1,'Y':-1,'V':1, 'X':0,'B':0,'Z':0},
                 'F':{'A':-2,'R':-3,'N':-3,'D':-3,'C':-2,'Q':-3,'E':-3,'G':-3,'H':-1,'I':0, 'L':0, 'K':-3,'M':0, 'F':6, 'P':-4,'S':-2,'T':-2,'W':1, 'Y':3, 'V':-1,'X':0,'B':0,'Z':0},
                 'P':{'A':-1,'R':-2,'N':-2,'D':-1,'C':-3,'Q':-1,'E':-1,'G':-2,'H':-2,'I':-3,'L':-3,'K':-1,'M':-2,'F':-4,'P':7, 'S':-1,'T':-1,'W':-4,'Y':-2,'V':-2,'X':0,'B':0,'Z':0},
                 'S':{'A':1, 'R':-1,'N':1, 'D':0, 'C':-1,'Q':0, 'E':0, 'G':0, 'H':-1,'I':-2,'L':-2,'K':0, 'M':-1,'F':-2,'P':-1,'S':4, 'T':1, 'W':-3,'Y':-2,'V':-2,'X':0,'B':0,'Z':0},
                 'T':{'A':0, 'R':-1,'N':0, 'D':-1,'C':-1,'Q':-1,'E':-1,'G':-2,'H':-2,'I':-1,'L':-1,'K':-1,'M':-1,'F':-2,'P':-1,'S':1, 'T':5, 'W':-2,'Y':-2,'V':0, 'X':0,'B':0,'Z':0},
                 'W':{'A':-3,'R':-3,'N':-4,'D':-4,'C':-2,'Q':-2,'E':-3,'G':-2,'H':-2,'I':-3,'L':-2,'K':-3,'M':-1,'F':1, 'P':-4,'S':-3,'T':-2,'W':11,'Y':-2,'V':-3,'X':0,'B':0,'Z':0},
                 'Y':{'A':-2,'R':-2,'N':-2,'D':-3,'C':-2,'Q':-1,'E':-2,'G':-3,'H':2, 'I':-1,'L':-1,'K':-2,'M':-1,'F':3, 'P':-3,'S':-2,'T':-2,'W':-2,'Y':7, 'V':-1,'X':0,'B':0,'Z':0},
                 'V':{'A':0, 'R':-3,'N':-3,'D':-3,'C':-1,'Q':-2,'E':-2,'G':-3,'H':-3,'I':3, 'L':1, 'K':-2,'M':1, 'F':-1,'P':-2,'S':-2,'T':0, 'W':-3,'Y':-1,'V':4, 'X':0,'B':0,'Z':0},
                 'X':{'A':0,'R':0,'N':0,'D':0,'C':0,'Q':0,'E':0,'G':0,'H':0,'I':0,'L':0,'K':0,'M':0,'F':0,'P':0,'S':0,'T':0,'W':0,'Y':0,'V':0,'X':0,'B':0,'Z':0},
                 'B':{'A':0,'R':0,'N':0,'D':0,'C':0,'Q':0,'E':0,'G':0,'H':0,'I':0,'L':0,'K':0,'M':0,'F':0,'P':0,'S':0,'T':0,'W':0,'Y':0,'V':0,'X':0,'B':0,'Z':0},
                 'Z':{'A':0,'R':0,'N':0,'D':0,'C':0,'Q':0,'E':0,'G':0,'H':0,'I':0,'L':0,'K':0,'M':0,'F':0,'P':0,'S':0,'T':0,'W':0,'Y':0,'V':0,'X':0,'B':0,'Z':0}}

    def __init__(self):
        # Per-instance list of the 20 standard one-letter codes; slicing off
        # the last three entries excludes the stop codons (Ochre/Amber/Opal).
        self.amino_acid = [acid['letter_code'] for acid in self.amino_acids[:-3]]
class Prediction:
    """Back-translate a one-letter peptide sequence into candidate DNA strings.

    ``possible_config`` holds ``possible_range`` parallel codon lists.  For
    each residue the first ``k`` configurations (k = number of synonymous
    codons) receive the codons in a fixed "linear" order, and every further
    configuration receives a synonymous codon chosen at random.
    """

    # residue -> (Amino_Data attribute providing the preferred linear codon
    #             order, or None to use the codon list itself; synonymous codons)
    CODON_TABLE = {
        'M': (None, ['atg']),
        'L': ('leucine', ['tta', 'ttg', 'ctt', 'ctc', 'cta', 'ctg']),
        'F': (None, ['ttt', 'ttc']),
        'S': ('serine', ['tct', 'tcc', 'tca', 'tcg', 'agc', 'agt']),
        'Y': (None, ['tat', 'tac']),
        'C': (None, ['tgt', 'tgc']),
        'W': (None, ['tgg']),
        'P': ('proline', ['ccc', 'cca', 'cct', 'ccg']),
        'H': (None, ['cat', 'cac']),
        'Q': (None, ['caa', 'cag']),
        'R': ('arginine', ['cgg', 'cgt', 'cgc', 'cga', 'agg', 'aga']),
        'I': (None, ['att', 'atc', 'ata']),
        'V': ('valine', ['gtg', 'gtt', 'gtc', 'gta']),
        'T': ('threonine', ['aca', 'acc', 'act', 'acg']),
        'A': ('alanine', ['gcg', 'gcc', 'gct', 'gca']),
        'N': (None, ['aat', 'aac']),
        'K': (None, ['aaa', 'aag']),
        'E': (None, ['gaa', 'gag']),
        'D': (None, ['gat', 'gac']),
        'G': ('glycine', ['ggg', 'gga', 'ggc', 'ggt']),
        # X/B/Z are used by this tool as stop-codon placeholders.
        'X': (None, ['taa']),
        'B': (None, ['tag']),
        'Z': (None, ['tga']),
    }

    def __init__(self, possible_range):
        """possible_range: total number of configurations to build (>= 6)."""
        self.possible_range = possible_range
        self.possible_config = [[] for _ in range(possible_range)]
        self.amino_data = Amino_Data()

    def aa_predict(self, input_seq):
        """Append one codon per configuration for every residue of *input_seq*.

        Letters absent from ``CODON_TABLE`` are silently ignored, matching
        the behaviour of the original elif chain.
        """
        for residue in input_seq:
            entry = self.CODON_TABLE.get(residue)
            if entry is None:
                continue
            attr_name, codons = entry
            # Deterministic ("linear") order comes either from the Amino_Data
            # record for 4-/6-fold degenerate residues, or from the table itself.
            linear = (codons if attr_name is None
                      else getattr(self.amino_data, attr_name)['config'])
            for i in range(self.possible_range):
                if i < len(codons):
                    self.possible_config[i].append(linear[i])
                else:
                    # Extra configurations pick a synonymous codon at random.
                    self.possible_config[i].append(choices(codons, k=1)[0])
class Translation:
    """Transcribe a typed nucleotide sequence and translate every open
    reading frame (direct and character-reversed) into peptide statistics.

    All results are exposed as instance attributes after :meth:`translate`
    (``dna_typed``, ``rna_typed``, ``stopped_chains_list``, the various
    ``translated_chains_*`` lists, ...), mirroring the original API.
    """

    dna_data = Nucleotide_Data()
    amino_data = Amino_Data()

    def translate(self, input_seq):
        """Populate every per-chain statistic for *input_seq*.

        input_seq: raw user-typed string; characters are matched against the
        ``char`` aliases of each nucleotide record.
        """
        # --- 1. normalise typed characters into DNA / mRNA / complement codes ---
        self.dna_typed = []
        self.rna_typed = []
        self.dna_match = []
        for ch in input_seq:
            for record in self.dna_data.nucleotide:
                if ch in record['char']:
                    self.dna_typed.append(record['dna'])
                    self.rna_typed.append(record['rna'])
                    self.dna_match.append(record['match'])
        # --- 2. result containers (same attribute names as before) ---
        self.direct_start_index = []
        self.reversed_start_index = []
        self.direct_sequences = []
        self.possible_chain = {}
        self.stopped_chains = {}
        self.stopped_chains_list = []
        self.translated_chains_ammount = []
        self.translated_chains_letter = []
        self.translated_chains_chars = []
        self.translated_chains_weight = []
        self.translated_chains_total_weight = []
        self.translated_chains_blosum_score = []
        self.translated_chains_blosum_total_score = []
        self.translated_chains_hydropathy = []
        self.translated_chains_class = []
        self.translated_chains_polarity = []
        self.translated_chains_charge = []
        self.translated_chains_name = []
        self.chain_sequence = ''.join(self.dna_typed)
        # NOTE: this reverses the character order, not the biological
        # reverse-complement — preserved from the original implementation.
        self.reversed_sequence = ''.join(reversed(self.dna_typed))
        # --- 3. locate start codons in both orientations ---
        start_codon = self.amino_data.methionine['config']
        for i in range(len(self.chain_sequence)):
            if self.chain_sequence[i:i + 3] == start_codon:
                self.direct_start_index.append(i)
        for i in range(len(self.reversed_sequence)):
            if self.reversed_sequence[i:i + 3] == start_codon:
                self.reversed_start_index.append(i)
        for start in self.direct_start_index:
            self.direct_sequences.append(self.chain_sequence[start:])
        for start in self.reversed_start_index:
            self.direct_sequences.append(self.reversed_sequence[start:])
        # --- 4. chop each reading frame into codons and cut at the stop codon ---
        stop_codons = (self.amino_data.opal['config'],
                       self.amino_data.amber['config'],
                       self.amino_data.ochre['config'])
        for i, seq in enumerate(self.direct_sequences):
            chain_name = "chain{}".format(i)
            # PERF FIX: the original recomputed this split len(seq) times
            # inside a redundant outer loop (accidental O(n^2)).
            codons = [seq[f:f + 3] for f in range(0, len(seq), 3)]
            self.possible_chain[chain_name] = codons
            for f, codon in enumerate(codons):
                if codon in stop_codons:
                    # BUG FIX: truncate at the FIRST stop codon and stop
                    # scanning — the old list.index() version without a
                    # break could keep a longer chain when several stop
                    # codons were present.
                    self.stopped_chains[chain_name] = codons[:f + 1]
                    break
        for chain in self.stopped_chains.values():
            self.stopped_chains_list.append(chain)
        # --- 5. per-chain amino-acid statistics ---
        for i, chain in enumerate(self.stopped_chains_list):
            self.translated_chains_ammount.append(len(chain))
            self.translated_chains_letter.append([])
            self.translated_chains_chars.append([])
            self.translated_chains_weight.append([])
            self.translated_chains_blosum_score.append([])
            self.translated_chains_hydropathy.append([])
            self.translated_chains_class.append([])
            self.translated_chains_polarity.append([])
            self.translated_chains_charge.append([])
            self.translated_chains_name.append([])
            for codon in chain:
                for acid in self.amino_data.amino_acids:
                    if codon in acid['config']:
                        self.translated_chains_letter[i].append(acid['letter_code'])
                        self.translated_chains_chars[i].append(acid['tri_char'])
                        self.translated_chains_weight[i].append(acid['amino_weight'])
                        self.translated_chains_hydropathy[i].append(acid['amino_hydropathy'])
                        self.translated_chains_class[i].append(acid['amino_class'])
                        self.translated_chains_polarity[i].append(acid['amino_polarity'])
                        self.translated_chains_charge[i].append(acid['amino_charge'])
                        self.translated_chains_name[i].append(acid['name'])
            # Single sums replace the original redundant re-summing loops.
            self.translated_chains_total_weight.append(sum(self.translated_chains_weight[i]))
            letters = self.translated_chains_letter[i]
            for f in range(len(letters) - 1):
                self.translated_chains_blosum_score[i].append(
                    self.amino_data.blosum_62[letters[f]][letters[f + 1]])
            self.translated_chains_blosum_total_score.append(
                sum(self.translated_chains_blosum_score[i]))
class Tk_Main(Tk):
def __init__(self):
super().__init__()
self.title("Biochem tool: DNA Prediction, transcription, translation, and Amino acid evaluation.")
self.canvas = Canvas(self)
self.frame = Frame(self.canvas)
self.draw_index_frame()
def draw_index_frame(self):
self.input_entry = StringVar()
self.entry_var = Entry(self, width=50, text=self.input_entry)
self.r_dot = StringVar()
self.entry_var.grid(column=0, row=0)
self.predict = Radiobutton(self, text='DNA configuration prediction from peptide sequence', command=self.select_predict,variable=self.r_dot, value='predict')
self.predict.grid(column=0, row=1)
self.translate = Radiobutton(self, text='DNA/mRNA, translation, and peptide stat', command=self.select_eval,variable=self.r_dot, value='translate')
self.translate.grid(column=0, row=2)
self.entry_var.focus()
self.eval('tk::PlaceWindow %s center' % self.winfo_pathname(self.winfo_id()))
self.eval_selected = 0
self.predict_selected = 0
self.select_eval()
self.r_dot.set('translate')
def remove_first_frame(self):
Entry.destroy(self.entry_var)
Radiobutton.destroy(self.predict)
Radiobutton.destroy(self.translate)
if self.eval_selected == 1:
self.remove_eval_frame()
self.eval_selected = 0
if self.predict_selected == 1:
self.remove_predict_frame()
self.predict_selected = 0
def run_analysis(self, *args):
if self.r_dot.get() == 'predict':
input_seq = self.input_entry.get()
predict_range = int(self.p_dot.get()) + 6
self.amino_predict = Prediction(predict_range)
self.amino_predict.aa_predict(input_seq)
self.remove_first_frame()
self.draw_prediction_frame()
self.predict_selected = 0
elif self.r_dot.get() == 'translate':
input_seq = self.input_entry.get()
self.amino_translate = Translation()
self.amino_translate.translate(input_seq)
self.remove_first_frame()
self.draw_evaluation_frame()
self.eval_selected = 0
self.eval('tk::PlaceWindow %s center' % self.winfo_pathname(self.winfo_id()))
def select_eval(self):
self.eval_selected += 1
if self.predict_selected >= 1:
self.remove_predict_frame()
self.predict_selected = 0
if self.eval_selected == 1:
self.check_dna_var = StringVar()
self.check_dna = Checkbutton(self, text='show DNA', variable=self.check_dna_var, onvalue='true', offvalue='false')
self.check_dna.grid()
self.check_match_var = StringVar()
self.check_match = Checkbutton(self, text='show DNA Matching codon', variable=self.check_match_var, onvalue='true', offvalue='false')
self.check_match.grid()
self.check_rna_var = StringVar()
self.check_rna = Checkbutton(self, text='show mRNA', variable=self.check_rna_var, onvalue='true', offvalue='false')
self.check_rna.grid()
self.check_chars_var = StringVar()
self.check_chars = Checkbutton(self, text='show 3 letter code', variable=self.check_chars_var, onvalue='true', offvalue='false')
self.check_chars.grid()
self.check_weight_distr_var = StringVar()
self.check_weight_distr = Checkbutton(self, text='show weight distribution', variable=self.check_weight_distr_var, onvalue='true', offvalue='false')
self.check_weight_distr.grid()
self.check_blosum_distr_var = StringVar()
self.check_blosum_distr = Checkbutton(self, text='show individual BLOSUM62 score', variable=self.check_blosum_distr_var, onvalue='true', offvalue='false')
self.check_blosum_distr.grid()
self.check_polarity_distr_var = StringVar()
self.check_polarity_distr = Checkbutton(self, text='show polarity distribution', variable=self.check_polarity_distr_var, onvalue='true', offvalue='false')
self.check_polarity_distr.grid()
self.check_hydropathy_distr_var = StringVar()
self.check_hydropathy_distr = Checkbutton(self, text='show hydropathy distribution', variable=self.check_hydropathy_distr_var, onvalue='true', offvalue='false')
self.check_hydropathy_distr.grid()
self.check_class_distr_var = StringVar()
self.check_class_distr = Checkbutton(self, text='show class distribution', variable=self.check_class_distr_var, onvalue='true', offvalue='false')
self.check_class_distr.grid()
self.check_charge_distr_var = StringVar()
self.check_charge_distr = Checkbutton(self, text='show charge distribution', variable=self.check_charge_distr_var, onvalue='true', offvalue='false')
self.check_charge_distr.grid()
self.eval_btn = Button(self, text='Run translation', command=self.run_analysis)
self.eval_btn.grid()
self.bind('<Return>', self.run_analysis)
def draw_evaluation_frame(self):
self.transform_chain = str.maketrans(",'", " ")
self.transform_data = str.maketrans("'", " ")
self.chain_ammount_config_lbl = Label(self, text="Chain ammount:")
self.chain_ammount_config_lbl.grid()
self.chain_ammount_config_var_txt = len(self.amino_translate.stopped_chains_list)
self.chain_ammount_config_var_lbl = Label(self, text=self.chain_ammount_config_var_txt)
self.chain_ammount_config_var_lbl.grid()
self.amino_ammount_config_lbl = Label(self, text="Amino ammount:")
self.amino_ammount_config_lbl.grid()
self.amino_ammount_config_var_txt = self.amino_translate.translated_chains_ammount
self.amino_ammount_config_var_lbl = Label(self, text=self.amino_ammount_config_var_txt)
self.amino_ammount_config_var_lbl.grid()
self.total_mass_config_lbl = Label(self, text="Total mass:")
self.total_mass_config_lbl.grid()
self.total_mass_config_var_txt = self.amino_translate.translated_chains_total_weight
self.total_mass_config_var_lbl = Label(self, text=self.total_mass_config_var_txt)
self.total_mass_config_var_lbl.grid()
self.total_blosum_config_lbl = Label(self, text="Total BLOSUM62 score:")
self.total_blosum_config_lbl.grid()
self.total_blosum_config_var_txt = self.amino_translate.translated_chains_blosum_total_score
self.total_blosum_config_var_lbl = Label(self, text=self.total_blosum_config_var_txt)
self.total_blosum_config_var_lbl.grid()
self.amino_letter_config_lbl = Label(self, text="Amino letter:")
self.amino_letter_config_lbl.grid()
self.amino_letter_config_var_txt_r = str(self.amino_translate.translated_chains_letter)
self.amino_letter_config_var_tr = str.translate(self.amino_letter_config_var_txt_r, self.transform_chain)
self.amino_letter_config_var_txt = self.amino_letter_config_var_tr.replace(" ", "")
self.amino_letter_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.amino_letter_config_var_lbl.insert(float(), self.amino_letter_config_var_txt)
self.amino_letter_config_var_lbl.grid()
if self.check_dna_var.get() == 'true':
self.dna_typed_lbl = Label(self, text="DNA:")
self.dna_typed_lbl.grid()
self.dna_typed_var_txt_r = str(self.amino_translate.dna_typed)
self.dna_typed_var_tr = str.translate(self.dna_typed_var_txt_r, self.transform_chain)
self.dna_typed_var_txt = self.dna_typed_var_tr.replace(" ", "")
self.dna_typed_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.dna_typed_var_lbl.insert(float(), self.dna_typed_var_txt)
self.dna_typed_var_lbl.grid()
if self.check_match_var.get() == 'true':
self.match_config_lbl = Label(self, text="DNA Match:")
self.match_config_lbl.grid()
self.match_config_var_txt_r = str(self.amino_translate.dna_match)
self.match_config_var_tr = str.translate(self.match_config_var_txt_r, self.transform_chain)
self.match_config_var_txt = self.match_config_var_tr.replace(" ", "")
self.match_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.match_config_var_lbl.insert(float(), self.match_config_var_txt)
self.match_config_var_lbl.grid()
if self.check_rna_var.get() == 'true':
self.rna_config_lbl = Label(self, text="mRNA:")
self.rna_config_lbl.grid()
self.rna_config_var_txt_r = str(self.amino_translate.rna_typed)
self.rna_config_var_tr = str.translate(self.rna_config_var_txt_r, self.transform_chain)
self.rna_config_var_txt = self.rna_config_var_tr.replace(" ", "")
self.rna_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.rna_config_var_lbl.insert(float(), self.rna_config_var_txt)
self.rna_config_var_lbl.grid()
if self.check_chars_var.get() == 'true':
self.amino_chars_config_lbl = Label(self, text="Amino chars:")
self.amino_chars_config_lbl.grid()
self.amino_chars_config_var_txt_r = str(self.amino_translate.translated_chains_chars)
self.amino_chars_config_var_tr = str.translate(self.amino_chars_config_var_txt_r, self.transform_chain)
self.amino_chars_config_var_txt = self.amino_chars_config_var_tr.replace(" ", "")
self.amino_chars_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.amino_chars_config_var_lbl.insert(float(), self.amino_chars_config_var_txt)
self.amino_chars_config_var_lbl.grid()
if self.check_weight_distr_var.get() == 'true':
self.mass_distr_config_lbl = Label(self, text="Weight distribution:")
self.mass_distr_config_lbl.grid()
self.mass_distr_config_var_txt_r = str(self.amino_translate.translated_chains_weight)
self.mass_distr_config_var_tr = str.translate(self.mass_distr_config_var_txt_r, self.transform_data)
self.mass_distr_config_var_txt = self.mass_distr_config_var_tr.replace(" ", "")
self.mass_distr_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.mass_distr_config_var_lbl.insert(float(), self.mass_distr_config_var_txt)
self.mass_distr_config_var_lbl.grid()
if self.check_blosum_distr_var.get() == 'true':
self.blosum_score_config_lbl = Label(self, text="Individual BLOSUM62 score:")
self.blosum_score_config_lbl.grid()
self.blosum_score_config_var_txt_r = str(self.amino_translate.translated_chains_blosum_score)
self.blosum_score_config_var_tr = str.translate(self.blosum_score_config_var_txt_r, self.transform_data)
self.blosum_score_config_var_txt = self.blosum_score_config_var_tr.replace(" ", "")
self.blosum_score_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.blosum_score_config_var_lbl.insert(float(), self.blosum_score_config_var_txt)
self.blosum_score_config_var_lbl.grid()
if self.check_polarity_distr_var.get() == 'true':
self.distr_polarity_config_lbl = Label(self, text="Distributed polarity:")
self.distr_polarity_config_lbl.grid()
self.distr_polarity_config_var_txt_r = str(self.amino_translate.translated_chains_polarity)
self.distr_polarity_config_var_tr = str.translate(self.distr_polarity_config_var_txt_r, self.transform_data)
self.distr_polarity_config_var_txt = self.distr_polarity_config_var_tr.replace(" ", "")
self.distr_polarity_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.distr_polarity_config_var_lbl.insert(float(), self.distr_polarity_config_var_txt)
self.distr_polarity_config_var_lbl.grid()
if self.check_hydropathy_distr_var.get() == 'true':
self.distr_hydropathy_config_lbl = Label(self, text="Distributed hydropathy:")
self.distr_hydropathy_config_lbl.grid()
self.distr_hydropathy_config_var_txt_r = str(self.amino_translate.translated_chains_hydropathy)
self.distr_hydropathy_config_var_tr = str.translate(self.distr_hydropathy_config_var_txt_r, self.transform_data)
self.distr_hydropathy_config_var_txt = self.distr_hydropathy_config_var_tr.replace(" ", "")
self.distr_hydropathy_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.distr_hydropathy_config_var_lbl.insert(float(), self.distr_hydropathy_config_var_txt)
self.distr_hydropathy_config_var_lbl.grid()
if self.check_class_distr_var.get() == 'true':
self.distr_class_config_lbl = Label(self, text="Distributed class:")
self.distr_class_config_lbl.grid()
self.distr_class_config_var_txt_r = str(self.amino_translate.translated_chains_class)
self.distr_class_config_var_tr = str.translate(self.distr_class_config_var_txt_r, self.transform_data)
self.distr_class_config_var_txt = self.distr_class_config_var_tr.replace(" ", "")
self.distr_class_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.distr_class_config_var_lbl.insert(float(), self.distr_class_config_var_txt)
self.distr_class_config_var_lbl.grid()
if self.check_charge_distr_var.get() == 'true':
self.distr_charge_config_lbl = Label(self, text="Distributed charge:")
self.distr_charge_config_lbl.grid()
self.distr_charge_config_var_txt_r = str(self.amino_translate.translated_chains_charge)
self.distr_charge_config_var_tr = str.translate(self.distr_charge_config_var_txt_r, self.transform_data)
self.distr_charge_config_var_txt = self.distr_charge_config_var_tr.replace(" ", "")
self.distr_charge_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.distr_charge_config_var_lbl.insert(float(), self.distr_charge_config_var_txt)
self.distr_charge_config_var_lbl.grid()
self.return_btn = Button(self, text='Go Back', command=self.remove_translation_frame)
self.return_btn.grid()
self.bind('<Return>', self.remove_translation_frame)
def remove_eval_frame(self):
Checkbutton.destroy(self.check_dna)
Checkbutton.destroy(self.check_match)
Checkbutton.destroy(self.check_rna)
Checkbutton.destroy(self.check_chars)
Checkbutton.destroy(self.check_weight_distr)
Checkbutton.destroy(self.check_blosum_distr)
Checkbutton.destroy(self.check_polarity_distr)
Checkbutton.destroy(self.check_hydropathy_distr)
Checkbutton.destroy(self.check_class_distr)
Checkbutton.destroy(self.check_charge_distr)
Button.destroy(self.eval_btn)
def remove_translation_frame(self, *args):
Label.destroy(self.chain_ammount_config_lbl)
Text.destroy(self.chain_ammount_config_var_lbl)
Label.destroy(self.amino_ammount_config_lbl)
Text.destroy(self.amino_ammount_config_var_lbl)
Label.destroy(self.total_mass_config_lbl)
Text.destroy(self.total_mass_config_var_lbl)
Label.destroy(self.total_blosum_config_lbl)
Text.destroy(self.total_blosum_config_var_lbl)
Label.destroy(self.amino_letter_config_lbl)
Text.destroy(self.amino_letter_config_var_lbl)
if self.check_dna_var.get() == 'true':
Label.destroy(self.dna_typed_lbl)
Text.destroy(self.dna_typed_var_lbl)
if self.check_match_var.get() == 'true':
Label.destroy(self.match_config_lbl)
Text.destroy(self.match_config_var_lbl)
if self.check_rna_var.get() == 'true':
Label.destroy(self.rna_config_lbl)
Text.destroy(self.rna_config_var_lbl)
if self.check_chars_var.get() == 'true':
Label.destroy(self.amino_chars_config_lbl)
Text.destroy(self.amino_chars_config_var_lbl)
if self.check_weight_distr_var.get() == 'true':
Label.destroy(self.mass_distr_config_lbl)
Text.destroy(self.mass_distr_config_var_lbl)
if self.check_blosum_distr_var.get() == 'true':
Label.destroy(self.blosum_score_config_lbl)
Text.destroy(self.blosum_score_config_var_lbl)
if self.check_polarity_distr_var.get() == 'true':
Label.destroy(self.distr_polarity_config_lbl)
Text.destroy(self.distr_polarity_config_var_lbl)
if self.check_hydropathy_distr_var.get() == 'true':
Label.destroy(self.distr_hydropathy_config_lbl)
Text.destroy(self.distr_hydropathy_config_var_lbl)
if self.check_class_distr_var.get() == 'true':
Label.destroy(self.distr_class_config_lbl)
Text.destroy(self.distr_class_config_var_lbl)
if self.check_charge_distr_var.get() == 'true':
Label.destroy(self.distr_charge_config_lbl)
Text.destroy(self.distr_charge_config_var_lbl)
Button.destroy(self.return_btn)
self.draw_index_frame()
def select_predict(self):
self.predict_selected += 1
if self.eval_selected >= 1:
self.remove_eval_frame()
self.eval_selected = 0
if self.predict_selected == 1:
self.p_dot = StringVar()
self.six_predict = Radiobutton(self, text='6 possible linear settings', variable=self.p_dot, value='0')
self.six_predict.grid()
self.seven_predict = Radiobutton(self, text='6 linear settings + 1 random', variable=self.p_dot, value='1')
self.seven_predict.grid()
self.eight_predict = Radiobutton(self, text='6 linear settings + 2 random', variable=self.p_dot, value='2')
self.eight_predict.grid()
self.nine_predict = Radiobutton(self, text='6 linear settings + 3 random', variable=self.p_dot, value='3')
self.nine_predict.grid()
self.ten_predict = Radiobutton(self, text='6 linear settings + 4 random', variable=self.p_dot, value='4')
self.ten_predict.grid()
self.predict_btn = Button(self, text='Run prediction', command=self.run_analysis)
self.p_dot.set('0')
self.predict_btn.grid()
self.bind('<Return>', self.run_analysis)
def remove_predict_frame(self):
Radiobutton.destroy(self.six_predict)
Radiobutton.destroy(self.seven_predict)
Radiobutton.destroy(self.eight_predict)
Radiobutton.destroy(self.nine_predict)
Radiobutton.destroy(self.ten_predict)
Button.destroy(self.predict_btn)
def draw_prediction_frame(self):
self.transform_chain = str.maketrans(",'", " ")
self.first_config_lbl = Label(self, text="Linear config 1:")
self.first_config_lbl.grid()
self.first_config_txt_r = str(self.amino_predict.possible_config[0])
self.first_config_tr = str.translate(self.first_config_txt_r, self.transform_chain)
self.first_config_txt = self.first_config_tr.replace(" ", "")
self.first_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.first_config_var_lbl.insert(float(), self.first_config_txt)
self.first_config_var_lbl.grid()
self.second_config_lbl = Label(self, text="Linear config 2:")
self.second_config_lbl.grid()
self.second_config_txt_r = str(self.amino_predict.possible_config[1])
self.second_config_tr = str.translate(self.second_config_txt_r, self.transform_chain)
self.second_config_txt = self.second_config_tr.replace(" ", "")
self.second_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.second_config_var_lbl.insert(float(), self.second_config_txt)
self.second_config_var_lbl.grid()
self.third_config_lbl = Label(self, text="Linear config 3:")
self.third_config_lbl.grid()
self.third_config_txt_r = str(self.amino_predict.possible_config[2])
self.third_config_tr = str.translate(self.third_config_txt_r, self.transform_chain)
self.third_config_txt = self.third_config_tr.replace(" ", "")
self.third_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.third_config_var_lbl.insert(float(), self.third_config_txt)
self.third_config_var_lbl.grid()
self.fourth_config_lbl = Label(self, text="Linear config 4:")
self.fourth_config_lbl.grid()
self.fourth_config_txt_r = str(self.amino_predict.possible_config[3])
self.fourth_config_tr = str.translate(self.fourth_config_txt_r, self.transform_chain)
self.fourth_config_txt = self.fourth_config_tr.replace(" ", "")
self.fourth_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.fourth_config_var_lbl.insert(float(), self.fourth_config_txt)
self.fourth_config_var_lbl.grid()
self.fifth_config_lbl = Label(self, text="Linear config 5:")
self.fifth_config_lbl.grid()
self.fifth_config_txt_r = str(self.amino_predict.possible_config[4])
self.fifth_config_tr = str.translate(self.fifth_config_txt_r, self.transform_chain)
self.fifth_config_txt = self.fifth_config_tr.replace(" ", "")
self.fifth_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.fifth_config_var_lbl.insert(float(), self.fifth_config_txt)
self.fifth_config_var_lbl.grid()
self.sixth_config_lbl = Label(self, text="Linear config 6:")
self.sixth_config_lbl.grid()
self.sixth_config_txt_r = str(self.amino_predict.possible_config[5])
self.sixth_config_tr = str.translate(self.sixth_config_txt_r, self.transform_chain)
self.sixth_config_txt = self.sixth_config_tr.replace(" ", "")
self.sixth_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.sixth_config_var_lbl.insert(float(), self.sixth_config_txt)
self.sixth_config_var_lbl.grid()
if int(self.p_dot.get()) == 1:
self.declare_rand_conf_1()
if int(self.p_dot.get()) == 2:
self.declare_rand_conf_1()
self.declare_rand_conf_2()
if int(self.p_dot.get()) == 3:
self.declare_rand_conf_1()
self.declare_rand_conf_2()
self.declare_rand_conf_3()
if int(self.p_dot.get()) == 4:
self.declare_rand_conf_1()
self.declare_rand_conf_2()
self.declare_rand_conf_3()
self.declare_rand_conf_4()
self.return_btn = Button(self, text='Go back', command=self.remove_configuration_frame)
self.return_btn.grid()
self.bind('<Return>', self.remove_configuration_frame)
def declare_rand_conf_1(self):
self.eight_config_lbl = Label(self, text="Randomised config 7:")
self.eight_config_lbl.grid()
self.eight_config_txt_r = str(self.amino_predict.possible_config[6])
self.eight_config_tr = str.translate(self.eight_config_txt_r, self.transform_chain)
self.eight_config_txt = self.eight_config_tr.replace(" ", "")
self.eight_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.eight_config_var_lbl.insert(float(), self.eight_config_txt)
self.eight_config_var_lbl.grid()
def declare_rand_conf_2(self):
self.nine_config_lbl = Label(self, text="Randomised config 8:")
self.nine_config_lbl.grid()
self.nine_config_txt_r = str(self.amino_predict.possible_config[7])
self.nine_config_tr = str.translate(self.nine_config_txt_r, self.transform_chain)
self.nine_config_txt = self.nine_config_tr.replace(" ", "")
self.nine_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.nine_config_var_lbl.insert(float(), self.nine_config_txt)
self.nine_config_var_lbl.grid()
def declare_rand_conf_3(self):
self.ten_config_lbl = Label(self, text="Randomised config 9")
self.ten_config_lbl.grid()
self.ten_config_txt_r = str(self.amino_predict.possible_config[8])
self.ten_config_tr = str.translate(self.ten_config_txt_r, self.transform_chain)
self.ten_config_txt = self.ten_config_tr.replace(" ", "")
self.ten_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.ten_config_var_lbl.insert(float(), self.ten_config_txt)
self.ten_config_var_lbl.grid()
def declare_rand_conf_4(self):
self.eleven_config_lbl = Label(self, text="Randomised config 10:")
self.eleven_config_lbl.grid()
self.eleven_config_txt_r = str(self.amino_predict.possible_config[9])
self.eleven_config_tr = str.translate(self.eleven_config_txt_r, self.transform_chain)
self.eleven_config_txt = self.eleven_config_tr.replace(" ", "")
self.eleven_config_var_lbl = Text(self, height=1, wrap="none", font=("Helvetica", 14))
self.eleven_config_var_lbl.insert(float(), self.eleven_config_txt)
self.eleven_config_var_lbl.grid()
def remove_configuration_frame(self, *args):
    """Tear down the configuration screen and return to the index frame.

    Always destroys the six base config Label/Text pairs; then destroys the
    extra randomised pairs (8th..11th) cumulatively, one pair per unit of
    ``self.p_dot`` (an int-valued widget variable in 1..4 — any other value
    destroys no extra pair, matching the original if/elif behavior); finally
    removes the return button and redraws the index frame.
    """
    # The six configuration widget pairs that are always present.
    for name in ('first', 'second', 'third', 'fourth', 'fifth', 'sixth'):
        getattr(self, name + '_config_lbl').destroy()
        getattr(self, name + '_config_var_lbl').destroy()
    # Extra pairs exist only when p_dot >= their ordinal: p_dot == n means
    # the 8th..(7+n)th pairs were created and must be destroyed.  Attributes
    # are looked up lazily so we never touch a pair that was never created.
    extra_count = int(self.p_dot.get())
    if 1 <= extra_count <= 4:
        for name in ('eight', 'nine', 'ten', 'eleven')[:extra_count]:
            getattr(self, name + '_config_lbl').destroy()
            getattr(self, name + '_config_var_lbl').destroy()
    self.return_btn.destroy()
    self.draw_index_frame()
# Instantiate the application window and enter the Tk event loop
# (blocks until the window is closed).
tk_init = Tk_Main()
tk_init.mainloop()
| [
"noreply@github.com"
] | noreply@github.com |
2c197d376b5580c493f3dddf7bdbd0b7cfbe9d98 | 7b12eb45c1ea76ad9c186b858b5dfebf2c5b862a | /.history/DEBER_20210905000450.py | c9d0c0d9993b0f24c7ecbb3cf98c786e2d4f0c05 | [
"MIT"
] | permissive | Alopezm5/PROYECTO-PARTE-1 | a1dce04009b24852c1c60e69bdf602ad3af0574b | bd7a8594edf08d41c6ca544cf6bac01ea4fcb684 | refs/heads/main | 2023-07-25T11:22:17.994770 | 2021-09-07T03:27:34 | 2021-09-07T03:27:34 | 403,670,226 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,825 | py | import os
class Empresa():
    """Company (empresa) record: name, tax id (RUC), address, phone, city
    and company type, populated interactively from stdin."""
    def __init__(self,nom="",ruc=0,dire="",tele=0,ciud="",tipEmpr=""):
        self.nombre=nom
        self.ruc=ruc
        self.direccion=dire
        self.telefono=tele
        self.ciudad=ciud
        self.tipoEmpresa=tipEmpr
    def datosEmpresa(self):#3
        """Prompt on stdin (Spanish prompts) for every company field."""
        self.nombre=input("Ingresar nombre de la empresa: ")
        self.ruc=int(input("Ingresar ruc de la empresa: "))
        self.direccion=input("Ingresar la direccion de la empresa: ")
        self.telefono=int(input("Ingresar el numero de telefono de la empresa: "))
        self.ciudad=input("Ingresar ciudad donde esta la empresa: ")
        self.tipoEmpresa=input("Ingresar tipo de empresa publica o privada: ")
    def mostrarEmpresa(self):
        """Print a formatted company summary to stdout."""
        print("")
        print("Empresa")
        print("La empresa de nombre {}\n De RUC #{} \n Está ubicada en {}\n Se puede comunicar al #{}\n Está empresa esta en la ciudad de {}\n Es una entidad {}".format(self.nombre,self.ruc,self.direccion, self.telefono,self.ciudad, self.tipoEmpresa))
class Empleado(Empresa):
    """Employee record: personal and contact data.

    Inherits from Empresa, although no company state is reused here.
    """
    def __init__(self,nom="",cedu=0,dire="",tele=0,email="",estado="",profe=""):
        self.nombre=nom
        self.cedula=cedu
        self.direccion=dire
        self.telefono=tele
        self.correo=email
        self.estadocivil=estado
        self.profesion=profe
    def empleado(self):
        """Prompt on stdin for the common employee fields."""
        self.nombre=input("Ingresar nombre del empleado: ")
        self.cedula=int(input("Ingresar numero de cedula del empleado: "))
        self.direccion=input("Ingresar la direccion del empleado: ")
        self.telefono=int(input("Ingresar numero de contacto del empleado: "))
        self.correo=input("Ingresar correo personal del empleado: ")
    def empleadoObrero(self):
        """Prompt for the blue-collar-specific field (marital status)."""
        self.estadocivil=input("Ingresar estado civil del empleado: ")
    def empleadoOficina(self):
        """Prompt for the office-specific field (profession)."""
        self.profesion=input("Ingresar profesion del empleado: ")
    def mostrarempleado(self):
        """Print a formatted employee summary to stdout."""
        print("El empleado: {} con # de C.I. {} \n Con direccion {}, y numero de contacto{}\n Y correo {}".format(self.nombre,self.cedula,self.direccion,self.telefono,self.correo))
class Departamento(Empleado):
    """Department assignment for an employee."""
    def __init__(self,dep=""):
        # NOTE(review): does not call Empleado.__init__, so inherited
        # attributes are never initialised on Departamento instances.
        self.departamento=dep
    def departa(self):
        """Prompt on stdin for the employee's department."""
        self.departamento=input("Ingresar el departamento al que pertenece el empleado: ")
    def mostrarDeparta(self):
        """Print the department name to stdout."""
        print("El empleado pertenece al departamento de: {}".format(self.departamento))
class Pagos(Empleado):
    """Payroll (pagos) calculator for an employee.

    Collects salary, loan and overtime data from stdin (``pagoNormal`` /
    ``pagoExtra``), computes income, deductions and net pay
    (``calculoSueldo``), and prints a report (``mostrarSueldo``).
    """
    def __init__(self, desper=0,valhora=0,hotraba=0,extra=0,suel=0,hrecar=0,hextra=0,pres=0,mcou=0,valho=0,sobtiem=0,comofi=0,antobre=0,iemple=0,cuopres=0,tot=0,liquid=0,cuota=0,anti=0,comi=0,fNomina="",fIngreso="",iess=0):
        self.permisos=desper
        self.valorhora=valhora
        self.horastrabajadas=hotraba
        self.valextra=extra
        self.sueldo= suel
        self.horasRecargo= hrecar
        self.horasExtraordinarias=hextra
        self.prestamo= pres
        # NOTE(review): mesCuota is assigned twice (from ``mcou`` here and
        # from ``cuota`` further down); the second assignment wins.
        self.mesCuota= mcou
        self.valor_hora= valho
        self.sobretiempo=sobtiem
        self.comEmpOficina = comofi
        self.antiEmpObrero = antobre
        self.iessEmpleado = iemple
        self.cuotaPrestamo=cuopres
        self.totdes = tot
        self.liquidoRecibir = liquid
        self.mesCuota=cuota
        self.antiguedad=anti
        self.comision=comi
        self.fechaNomina=fNomina
        self.fechaIngreso=fIngreso
        self.iess=iess
    def pagoNormal(self):
        """Prompt for base salary, loan amount/term, commission rate,
        seniority and IESS rate (a fraction, e.g. 0.20 for 20%)."""
        self.sueldo=float(input("Ingresar sueldo del trabajador: $ "))
        self.prestamo=float(input("Ingresar monto del prestamo que ha generado el empleado: $ "))
        self.mesCuota=int(input("Ingresar meses a diferir el prestamo: "))
        self.comision=float(input("Ingresar valor de la comsion: "))
        self.antiguedad=int(input("Ingresar antiguedad: "))
        self.iess=float(input("Ingresar valor del iees recordar que debe ser porcentuado Ejemplo si quiere decir 20% debe ingresar 0.20"))
    def pagoExtra(self):
        """Prompt for overtime hours and the nomina / hire dates.

        NOTE(review): the dates are read with ``float(input(...))`` although
        the prompt asks for ``año-mes-dia``; a dash-formatted date raises
        ValueError here — confirm the intended input format.
        """
        self.horasRecargo=int(input("Ingresar horas de recargo: "))
        self.horasExtraordinarias=int(input("Ingresar horas extraordinarias: "))
        self.fechaNomina=float(input("Ingresar fecha de nomida (formato año-mes-dia): "))
        self.fechaIngreso=float(input("Ingresar fecha de ingreso (formato año-mes-dia): "))
    def calculoSueldo(self):
        """Compute hourly rate (sueldo/240), overtime (50% recargo, 200%
        extraordinarias), commission, seniority bonus, IESS deduction, loan
        installment, gross (toting), deductions (totdes) and net pay."""
        self.valor_hora=self.sueldo/240
        self.sobretiempo= self.valor_hora * (self.horasRecargo*0.50+self.horasExtraordinarias*2)
        self.comEmpOficina = self.comision*self.sueldo
        self.antiEmpObrero = self.antiguedad*(self.fechaNomina - self.fechaIngreso)/365*self.sueldo
        self.iessEmpleado = self.iess*(self.sueldo+self.sobretiempo)
        self.cuotaPrestamo=self.prestamo/self.mesCuota
        self.toting = self.sueldo+self.sobretiempo+ self.comEmpOficina + self.antiEmpObrero
        # NOTE(review): the deduction uses the full loan (prestamo), not the
        # monthly installment (cuotaPrestamo) — confirm which was intended.
        self.totdes = self.iessEmpleado + self.prestamo
        self.liquidoRecibir = self.toting - self.totdes
    def mostrarSueldo(self):
        """Print the payroll report (base salary, overtime, loan)."""
        print("SUELDO BASE")
        print("El empleado tiene un sueldo de {}".format(self.sueldo))
        print("")
        print("SOBRETIEMPO")
        print("El valor de sobretiempo es de {}, con {} horas extras trabajadas".format(self.sobretiempo,self.horasExtraordinarias))
        print("")
        print("PRESTAMO")
        # BUGFIX: the original passed a single nonexistent attribute
        # ``self.p`` to this 3-placeholder format string, which raised
        # AttributeError at runtime.
        print("El valor de prestamo es de {}, a ser pagado en {} meses, con cuotas de {}".format(self.prestamo, self.mesCuota, self.cuotaPrestamo))
# --- Interactive driver: collect data, compute payroll, print reports ---
# NOTE(review): os.system("cls") clears the console on Windows only.
emp=Empresa()
emp.datosEmpresa()
os.system ("cls")
emple=Empleado()
emple.empleado()
os.system ("cls")
emple.empleadoObrero()
emple.empleadoOficina()
os.system ("cls")
depa=Departamento()
depa.departa()
pag=Pagos()
pag.pagoNormal()
pag.pagoExtra()
pag.calculoSueldo()
os.system ("cls")
# Final report: company, employee and payroll summaries.
emp.mostrarEmpresa()
print("")
emple.mostrarempleado()
print("")
pag.mostrarSueldo()
"85761855+Alopezm5@users.noreply.github.com"
] | 85761855+Alopezm5@users.noreply.github.com |
24485d72dda7d32f2fe019682cf33b084b6bc0b9 | f415ea7cfb982029787587d19184ea2faf7b0651 | /Train.py | 070df0c34e536429a6261414cbe1f46a6af649d9 | [] | no_license | MhYao2014/MNIST | 8fd4710158512f5763fa621c53ba52d6eaf3f2de | 2397b35b77c499c590ded9665ab21d7106ca00f7 | refs/heads/master | 2020-03-25T21:13:29.913162 | 2018-08-09T14:46:19 | 2018-08-09T14:46:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,703 | py | import datetime
import numpy as np
from data_process import load_train_labels
from data_process import load_train_images
from FNN import FNN
class Train_hyperparam():
    """Container for SGD training hyper-parameters.

    The dict is a class attribute so subclasses (e.g. ``Train``) can read
    it via ``self.train_hyperparam``.
    """
    def __init__(self):
        # BUGFIX: was misspelled ``__int__``, so it never ran as a constructor.
        pass
    # step_size: fixed SGD learning rate; stop_criterion: target error (%)
    # on the held-out split; max_iteration: hard cap on SGD steps.
    train_hyperparam = {
        'step_size': 0.3,
        'stop_criterion': 7,
        'max_iteration': 100000000,
    }
class Train(Train_hyperparam):
    """SGD training loop and backpropagation for the 4-layer FNN."""

    def __init__(self):
        # BUGFIX: was misspelled ``__int__``, so it never ran as a constructor.
        pass

    def get_grade(self, middle_result, model_param, labels_batch, images_batch):
        """Backpropagate through the 4-layer FNN and return all gradients.

        :param middle_result: dict of forward-pass activations, keys
            'layer1'..'layer4' (each a numpy array).
        :param model_param: list of numpy arrays
            [W1, b1, W2, b2, W3, b3, W4, b4].
        :param labels_batch: numpy array of shape (batch_size,) with class ids.
        :param images_batch: numpy array of shape (batch_size, 784).
        :return: list of gradients, same order as ``model_param``.
        """
        # One-hot encode the labels.
        # BUGFIX/compat: ``np.int`` was removed in numpy >= 1.24; the builtin
        # ``int`` is the documented, behavior-identical replacement.
        y_truth = np.zeros(middle_result['layer4'].shape)
        for row, position in enumerate(labels_batch.astype(int)):
            y_truth[row, position] = 1
        # Softmax + cross-entropy gradient w.r.t. the pre-activation: y_hat - y.
        layer4_before_grad = middle_result['layer4'] - y_truth
        W_layer32layer4_grad = np.matmul(middle_result['layer3'].T, layer4_before_grad) / labels_batch.shape[0]
        b_layer32layer4_grad = np.sum(layer4_before_grad, axis=0) / labels_batch.shape[0]
        # Sigmoid derivative written as y - y**2 for each hidden layer.
        layer3_grad = np.matmul(layer4_before_grad, model_param[6].T)
        layer3_before_grad = (layer3_grad * (middle_result['layer3'] - middle_result['layer3']**2))
        W_layer22layer3_grad = np.matmul(middle_result['layer2'].T, layer3_before_grad) / labels_batch.shape[0]
        b_layer22layer3_grad = np.sum(layer3_before_grad, axis=0) / labels_batch.shape[0]
        layer2_grad = np.matmul(layer3_before_grad, model_param[4].T)
        layer2_before_grad = (layer2_grad * (middle_result['layer2'] - middle_result['layer2'] ** 2))
        W_layer12layer2_grad = np.matmul(middle_result['layer1'].T, layer2_before_grad) / labels_batch.shape[0]
        b_layer12layer2_grad = np.sum(layer2_before_grad, axis=0) / labels_batch.shape[0]
        layer1_grad = np.matmul(layer2_before_grad, model_param[2].T)
        layer1_before_grad = (layer1_grad * (middle_result['layer1'] - middle_result['layer1'] ** 2))
        W_input2layer1_grad = np.matmul(images_batch.T, layer1_before_grad) / labels_batch.shape[0]
        b_input2layer1_grad = np.sum(layer1_before_grad, axis=0) / labels_batch.shape[0]
        grade_param = [W_input2layer1_grad, b_input2layer1_grad, W_layer12layer2_grad, b_layer12layer2_grad, W_layer22layer3_grad, b_layer22layer3_grad, W_layer32layer4_grad, b_layer32layer4_grad]
        return grade_param

    def update_grade(self, model_param, grade_param):
        """Perform one fixed-step-size gradient-descent update.

        :param model_param: list of numpy arrays, all fnn parameters.
        :param grade_param: list of numpy arrays, matching gradients.
        :return: the updated parameter list.
        """
        for index, param in enumerate(model_param):
            model_param[index] = model_param[index] - self.train_hyperparam['step_size'] * grade_param[index]
        return model_param

    def sgd_training(self, fnn, images_vectors, labels_numpy):
        """Run minibatch SGD until the held-out error drops below
        ``stop_criterion`` (or ``max_iteration`` is reached).

        :param fnn: an FNN instance.
        :param images_vectors: numpy array with shape (60000, 784).
        :param labels_numpy: numpy array with shape (60000,).
        :return: None
        """
        loss_emperical = 100
        iteration = 0
        starttime = datetime.datetime.now()
        while loss_emperical > self.train_hyperparam['stop_criterion']:
            images_batch, labels_batch = fnn.batcher(images_vectors, labels_numpy)
            pred_category, loss_batch_average = fnn.forward(images_batch, labels_batch, if_train=True)
            grade_param = self.get_grade(fnn.middle_results, fnn.model_param, labels_batch, images_batch)
            new_model_param = self.update_grade(fnn.model_param, grade_param)
            fnn.update_param(new_model_param)
            # Exponential moving average of the batch loss drives the stop test.
            loss_emperical = 0.9 * loss_emperical + 0.1 * loss_batch_average
            iteration += 1
            if iteration > self.train_hyperparam['max_iteration']:
                print("The training process may failed, we have trained for %d iterations.\n" % self.train_hyperparam['max_iteration'])
                break
            # Once per epoch over the 50k training images: evaluate on the
            # 10k held-out images (indices 50000..59999).
            if iteration % (50000 // fnn.model_hyperparam['batch_size']) == 0:
                account = 0
                batch_size = fnn.model_hyperparam['batch_size']
                for i in range(10000 // batch_size):
                    lo = 50000 + i * batch_size
                    hi = 50000 + (i + 1) * batch_size
                    _, accuracy = fnn.forward(images_vectors[lo:hi], labels_numpy[lo:hi], if_train=True)
                    # Count misclassified samples in this evaluation batch.
                    account += np.nonzero(_ - labels_numpy[lo:hi])[0].shape[0]
                print('This is the %dth iterations, and the accuracy on the test data set is: %f%%' % (iteration // (50000 // batch_size), (100 - 100 * (account / 10000))))
                # Re-anchor the stopping metric on the held-out error rate.
                loss_emperical = 100 * (account / 10000)
                # Crude learning-rate schedule keyed on held-out error.
                # NOTE(review): this writes fnn.model_hyperparam['step_size'],
                # while update_grade reads self.train_hyperparam['step_size'];
                # confirm which dict the schedule was meant to adjust.
                if loss_emperical < 8:
                    fnn.model_hyperparam['step_size'] = 0.015
                else:
                    fnn.model_hyperparam['step_size'] = 0.2
        endtime = datetime.datetime.now()
        print("\nThe iterations take about %d seconds\n" % (endtime - starttime).seconds)
        print('\nThe training process finished !\n')
if __name__ == '__main__':
    ###################
    # path to the training data's images and labels
    ###################
    train_images_idx3_ubyte_file = './train-images.idx3-ubyte'
    train_labels_idx1_ubyte_file = './train-labels.idx1-ubyte'
    ##################
    # Here we go
    ##################
    # Load the raw MNIST idx files into numpy arrays.
    images_numpy = load_train_images(idx3_ubyte_file=train_images_idx3_ubyte_file)
    labels_numpy = load_train_labels(idx1_ubyte_file=train_labels_idx1_ubyte_file)
    print('\nThe shape of all data images are:', images_numpy.shape)
    print('\nThe shape of all data labels are:', labels_numpy.shape)
    # Flatten each 28x28 image to a 784-d vector.
    images_vectors = images_numpy.reshape((60000, -1))
    fnn = FNN()
    train = Train()
    train.sgd_training(fnn=fnn ,images_vectors=images_vectors[0:60000] ,labels_numpy=labels_numpy[0:60000])
    # Final evaluation on images 50000..59999 in 20 batches of 500;
    # ``account`` accumulates the number of misclassified samples.
    Accuracy = 0
    account = 0
    for i in range(20):
        _, accuracy = fnn.forward(images_vectors[50000 + i * 500:50000 + (i + 1) * 500],
                                  labels_numpy[50000 + i * 500:50000 + (i + 1) * 500], if_train=True)
        account += np.nonzero(_ - labels_numpy[50000 + i * 500:50000 + (i + 1) * 500])[0].shape[0]
    print('The accuracy on the whole data set is %f %%:\n' % (100 - 100 * (account / 10000)))
| [
"noreply@github.com"
] | noreply@github.com |
da382c191e793b219c9f1a7272162a4628f8dd18 | c718007c1b27e16ae5b43cf41423449357568fb9 | /lib/networks/mobilenet/mobilenet_v2.py | d86ac21147253eb38f09b28fe9a7f22e93615e32 | [] | no_license | iamrishab/DB-tf | 2f528c59bb733120a67a8aa7c93a2d0ec069a76e | 5f62e068ca23a4afb026359b25e914de5d0c852e | refs/heads/master | 2020-12-01T13:51:37.469358 | 2019-12-27T10:12:18 | 2019-12-27T10:12:18 | 230,649,108 | 22 | 11 | null | 2019-12-28T18:39:11 | 2019-12-28T18:39:10 | null | UTF-8 | Python | false | false | 9,215 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implementation of Mobilenet V2.
Architecture: https://arxiv.org/abs/1801.04381
The base model gives 72.2% accuracy on ImageNet, with 300MMadds,
3.4 M parameters.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import functools
import tensorflow as tf
from tensorflow.contrib import layers as contrib_layers
from tensorflow.contrib import slim as contrib_slim
from lib.networks.mobilenet import conv_blocks as ops
from lib.networks.mobilenet import mobilenet as lib
slim = contrib_slim
op = lib.op
expand_input = ops.expand_input_by_factor
# pyformat: disable
# Architecture: https://arxiv.org/abs/1801.04381
V2_DEF = dict(
defaults={
# Note: these parameters of batch norm affect the architecture
# that's why they are here and not in training_scope.
(slim.batch_norm,): {'center': True, 'scale': True},
(slim.conv2d, slim.fully_connected, slim.separable_conv2d): {
'normalizer_fn': slim.batch_norm, 'activation_fn': tf.nn.relu6
},
(ops.expanded_conv,): {
'expansion_size': expand_input(6),
'split_expansion': 1,
'normalizer_fn': slim.batch_norm,
'residual': True
},
(slim.conv2d, slim.separable_conv2d): {'padding': 'SAME'}
},
spec=[
op(slim.conv2d, stride=2, num_outputs=32, kernel_size=[3, 3]),
op(ops.expanded_conv,
expansion_size=expand_input(1, divisible_by=1),
num_outputs=16),
op(ops.expanded_conv, stride=2, num_outputs=24),
op(ops.expanded_conv, stride=1, num_outputs=24),
op(ops.expanded_conv, stride=2, num_outputs=32),
op(ops.expanded_conv, stride=1, num_outputs=32),
op(ops.expanded_conv, stride=1, num_outputs=32),
op(ops.expanded_conv, stride=2, num_outputs=64),
op(ops.expanded_conv, stride=1, num_outputs=64),
op(ops.expanded_conv, stride=1, num_outputs=64),
op(ops.expanded_conv, stride=1, num_outputs=64),
op(ops.expanded_conv, stride=1, num_outputs=96),
op(ops.expanded_conv, stride=1, num_outputs=96),
op(ops.expanded_conv, stride=1, num_outputs=96),
op(ops.expanded_conv, stride=2, num_outputs=160),
op(ops.expanded_conv, stride=1, num_outputs=160),
op(ops.expanded_conv, stride=1, num_outputs=160),
op(ops.expanded_conv, stride=1, num_outputs=320),
op(slim.conv2d, stride=1, kernel_size=[1, 1], num_outputs=1280)
],
)
# pyformat: enable
# Mobilenet v2 Definition with group normalization.
V2_DEF_GROUP_NORM = copy.deepcopy(V2_DEF)
V2_DEF_GROUP_NORM['defaults'] = {
(contrib_slim.conv2d, contrib_slim.fully_connected,
contrib_slim.separable_conv2d): {
'normalizer_fn': contrib_layers.group_norm, # pylint: disable=C0330
'activation_fn': tf.nn.relu6, # pylint: disable=C0330
}, # pylint: disable=C0330
(ops.expanded_conv,): {
'expansion_size': ops.expand_input_by_factor(6),
'split_expansion': 1,
'normalizer_fn': contrib_layers.group_norm,
'residual': True
},
(contrib_slim.conv2d, contrib_slim.separable_conv2d): {
'padding': 'SAME'
}
}
@slim.add_arg_scope
def mobilenet(input_tensor,
              num_classes=1001,
              depth_multiplier=1.0,
              scope='MobilenetV2',
              conv_defs=None,
              finegrain_classification_mode=False,
              min_depth=None,
              divisible_by=None,
              activation_fn=None,
              **kwargs):
  """Creates mobilenet V2 network.

  Inference mode is created by default. To create training use training_scope
  below.

  with tf.contrib.slim.arg_scope(mobilenet_v2.training_scope()):
    logits, endpoints = mobilenet_v2.mobilenet(input_tensor)

  Args:
    input_tensor: The input tensor
    num_classes: number of classes
    depth_multiplier: The multiplier applied to scale number of
      channels in each layer.
    scope: Scope of the operator
    conv_defs: Allows to override default conv def.
    finegrain_classification_mode: When set to True, the model
      will keep the last layer large even for small multipliers. Following
      https://arxiv.org/abs/1801.04381
      suggests that it improves performance for ImageNet-type of problems.
      *Note* ignored if final_endpoint makes the builder exit earlier.
    min_depth: If provided, will ensure that all layers will have that
      many channels after application of depth multiplier.
    divisible_by: If provided will ensure that all layers # channels
      will be divisible by this number.
    activation_fn: Activation function to use, defaults to tf.nn.relu6 if not
      specified.
    **kwargs: passed directly to mobilenet.mobilenet:
      prediction_fn- what prediction function to use.
      reuse-: whether to reuse variables (if reuse set to true, scope
      must be given).
  Returns:
    logits/endpoints pair

  Raises:
    ValueError: On invalid arguments
  """
  if conv_defs is None:
    conv_defs = V2_DEF
  if 'multiplier' in kwargs:
    raise ValueError('mobilenetv2 doesn\'t support generic '
                     'multiplier parameter use "depth_multiplier" instead.')
  if finegrain_classification_mode:
    # Pre-divide the last layer's width by the multiplier so that after the
    # multiplier is applied downstream, the embedding keeps its full size.
    conv_defs = copy.deepcopy(conv_defs)
    if depth_multiplier < 1:
      conv_defs['spec'][-1].params['num_outputs'] /= depth_multiplier
  if activation_fn:
    # Deep-copy before mutating so the shared module-level V2_DEF is untouched.
    conv_defs = copy.deepcopy(conv_defs)
    defaults = conv_defs['defaults']
    conv_defaults = (
        defaults[(slim.conv2d, slim.fully_connected, slim.separable_conv2d)])
    conv_defaults['activation_fn'] = activation_fn
  depth_args = {}
  # NB: do not set depth_args unless they are provided to avoid overriding
  # whatever default depth_multiplier might have thanks to arg_scope.
  if min_depth is not None:
    depth_args['min_depth'] = min_depth
  if divisible_by is not None:
    depth_args['divisible_by'] = divisible_by
  with slim.arg_scope((lib.depth_multiplier,), **depth_args):
    return lib.mobilenet(
        input_tensor,
        num_classes=num_classes,
        conv_defs=conv_defs,
        scope=scope,
        multiplier=depth_multiplier,
        **kwargs)
mobilenet.default_image_size = 224
def wrapped_partial(func, *args, **kwargs):
  """Like functools.partial, but carries over *func*'s metadata
  (__name__, __doc__, ...) onto the returned callable."""
  bound = functools.partial(func, *args, **kwargs)
  # update_wrapper returns the wrapper it just decorated.
  return functools.update_wrapper(bound, func)
# Wrappers for mobilenet v2 with fixed depth-multipliers. For the
# multipliers below 1.0, 'finegrain_classification_mode' is set to True,
# which means the final embedding layer will not be shrunk when given a
# depth-multiplier < 1.0 (the 1.4 variant does not need it).
mobilenet_v2_140 = wrapped_partial(mobilenet, depth_multiplier=1.4)
mobilenet_v2_050 = wrapped_partial(mobilenet, depth_multiplier=0.50,
                                   finegrain_classification_mode=True)
mobilenet_v2_035 = wrapped_partial(mobilenet, depth_multiplier=0.35,
                                   finegrain_classification_mode=True)
@slim.add_arg_scope
def mobilenet_base(input_tensor, depth_multiplier=1.0, **kwargs):
  """Creates base of the mobilenet (no pooling and no logits) ."""
  # Delegates to mobilenet() with base_only=True so only the feature
  # extractor is built.
  return mobilenet(input_tensor,
                   depth_multiplier=depth_multiplier,
                   base_only=True, **kwargs)
@slim.add_arg_scope
def mobilenet_base_group_norm(input_tensor, depth_multiplier=1.0, **kwargs):
  """Creates base of the mobilenet (no pooling and no logits) ."""
  # Swap in the group-norm conv defs; the number of groups defaults to 8
  # and can be overridden via a 'groups' keyword argument.
  kwargs['conv_defs'] = V2_DEF_GROUP_NORM
  kwargs['conv_defs']['defaults'].update({
      (contrib_layers.group_norm,): {
          'groups': kwargs.pop('groups', 8)
      }
  })
  return mobilenet(
      input_tensor, depth_multiplier=depth_multiplier, base_only=True, **kwargs)
def training_scope(**kwargs):
  """Defines MobilenetV2 training scope.

  Usage:
     with tf.contrib.slim.arg_scope(mobilenet_v2.training_scope()):
       logits, endpoints = mobilenet_v2.mobilenet(input_tensor)

     with slim.

  Args:
    **kwargs: Passed to mobilenet.training_scope. The following parameters
    are supported:
      weight_decay- The weight decay to use for regularizing the model.
      stddev-  Standard deviation for initialization, if negative uses xavier.
      dropout_keep_prob- dropout keep probability
      bn_decay- decay for the batch norm moving averages.

  Returns:
    An `arg_scope` to use for the mobilenet v2 model.
  """
  # Thin wrapper: all defaults live in the shared mobilenet lib.
  return lib.training_scope(**kwargs)
__all__ = ['training_scope', 'mobilenet_base', 'mobilenet', 'V2_DEF'] | [
"chizhanyuefeng@gmail,com"
] | chizhanyuefeng@gmail,com |
d96282cc046370e374c1960e9e22f7b07555821f | b4ea639c569003e56994b3ca942bd2caf24524a5 | /Data Acquisition/TestFiles/csv_test.py | 700ff91a42c4cf321f33e012a191d5e1fc74b2a3 | [] | no_license | ryhan12/CryptoIndex | cb99027972f390af72e8f452f78cdc9248287b42 | 718d75e4be6a4cba7ce7de6e3dffc7546bf174a5 | refs/heads/master | 2022-11-27T01:05:13.184714 | 2020-07-29T14:05:30 | 2020-07-29T14:05:30 | 283,514,516 | 0 | 0 | null | 2020-07-29T14:01:38 | 2020-07-29T14:01:38 | null | UTF-8 | Python | false | false | 277 | py | import csv
def writeToFile(data, file_writer):
    """Write *data* as a single-column CSV row using *file_writer*."""
    file_writer.writerow([data])


def main():
    """Create test_file.csv and write the rows 'x' and 'z' to it."""
    with open('test_file.csv', 'w') as csvfile:
        csv_writer = csv.writer(csvfile, delimiter=',')
        for value in ('x', 'z'):
            writeToFile(value, csv_writer)
# Script entry point.
if __name__ == '__main__':
    main()
| [
"ryhan.moghe@gmail.com"
] | ryhan.moghe@gmail.com |
2725d677e8e3d4f72ca010b14799c12c80f21d88 | 033b43e55e85a9f78f2ad6578045b9ed42fcb83f | /android-runner-configuration/scripts/interaction/python3/io_github_hidroh_materialistic.py | ce485eadba52929dcc2ff3aca3f62bb57c7274c0 | [] | no_license | VU-Thesis-2019-2020-Wesley-Shann/experiments | 58a5a80b20c3c87f57d90414c7a9b86db16dea36 | 90f3777f86e836e41c82a345536c62faa103ba50 | refs/heads/master | 2022-12-06T21:09:28.343811 | 2020-08-27T15:32:34 | 2020-08-27T15:32:34 | 278,862,724 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,953 | py | import sys
import time
sys.path.insert(0, '/home/sshann/Documents/thesis/experiments/android-runner-configuration/')
from scripts.interaction.python3.common import tap
from scripts.interaction.python3.common import tap_phone_back
from scripts.interaction.python3.common import write_text
def visit_catching_up(device):
    """Drive the 'Catching up' feed: open stories and their comments,
    backing out after each, then return to the front page.

    Coordinates are absolute screen taps recorded for a fixed device
    layout; the tap/back order IS the scenario, so do not reorder.
    """
    print('\tvisit_catching_up')
    # open side menu
    tap(device, 90, 192, 0)
    # go to catching up
    tap(device, 432, 858)
    # click on first card
    tap(device, 724, 400, 6)
    # click on comments
    tap(device, 355, 693)
    # return to feed list
    tap_phone_back(device)
    # click on second card
    tap(device, 648, 789)
    # click on comments
    tap(device, 355, 693)
    # return to feed list
    tap_phone_back(device)
    # click on second card
    tap(device, 648, 789)
    # click on comments
    tap(device, 355, 693)
    # return to feed list
    tap_phone_back(device)
    # click on first card
    tap(device, 724, 400)
    # click on comments
    tap(device, 355, 693)
    # return to feed list
    tap_phone_back(device)
    # return to front page
    tap_phone_back(device)
    # open side menu
    tap(device, 90, 192, 0)
    # go to catching up
    tap(device, 432, 858)
    # click on first card
    tap(device, 724, 400)
    # return to feed list
    tap_phone_back(device)
    # return to front page
    tap_phone_back(device)
def visit_best_stories(device):
    """Drive the 'Best stories' section (under 'More sections' in the side
    menu): open stories and comments, then return to the front page.

    Absolute tap coordinates for a fixed device layout; order matters.
    """
    print('\tvisit_best_stories')
    # open side menu
    tap(device, 90, 192, 0)
    # open more sections
    tap(device, 427, 1322, 1)
    # go to best stories
    tap(device, 567, 1562)
    # click on second card
    tap(device, 648, 789)
    # click on comments
    tap(device, 355, 693)
    # return to feed list
    tap_phone_back(device)
    # click on first card
    tap(device, 724, 400, 8)
    # click on comments
    tap(device, 355, 693)
    # return to feed list
    tap_phone_back(device)
    # return to front page
    tap_phone_back(device)
    # open side menu
    tap(device, 90, 192, 0)
    # this is kept open
    # # open more sections
    # tap(device, 427, 1322, 1)
    # go to best stories
    tap(device, 567, 1562)
    # click on first card
    tap(device, 724, 400)
    # return to feed list
    tap_phone_back(device)
    # return to front page
    tap_phone_back(device)
def visit_new_stories_from_saved(device):
    """Visit 'Saved', then 'New stories', filter by the text 'google',
    open filtered stories/comments, and back out to the front page.

    Absolute tap coordinates for a fixed device layout; order matters.
    """
    print('\tvisit_new_stories_from_saved')
    # open side menu
    tap(device, 90, 192, 0)
    # go to saved
    tap(device, 432, 1541, 2)
    # open side menu
    tap(device, 90, 192, 0)
    # go to new stories
    tap(device, 396, 1082, 1)
    # filter search
    tap(device, 1219, 165, 0)
    write_text(device, 'google')
    tap(device, 1314, 2245)
    # # click on the last visible card
    # tap(device, 589, 2138)
    # click on second card
    tap(device, 648, 789)
    # click on comments
    tap(device, 355, 693)
    # return to filter feed list
    tap_phone_back(device)
    # click on second card
    tap(device, 648, 789)
    # click on comments
    tap(device, 355, 693)
    # return to filter feed list
    tap_phone_back(device)
    # close keyboard
    tap_phone_back(device)
    # return to feed list
    tap_phone_back(device)
    # return to saved paged
    tap_phone_back(device)
    # return to front page
    tap_phone_back(device)
# noinspection PyUnusedLocal
def main(device, *args, **kwargs):
    """Android Runner entry point: run the Materialistic interaction when
    the app is in the foreground, otherwise print a skip notice."""
    if 'io.github.hidroh.materialistic' not in device.current_activity():
        print('\tSkip file')
        return
    # Give the app a moment to settle before interacting.
    time.sleep(4)
    run_materialistic_interaction(device)
def run_materialistic_interaction(device):
    """Execute the full Materialistic interaction scenario in order.

    visit_new_stories_from_saved runs once on purpose: a second run was
    disabled because it doubled the app's total runtime (see comment).
    """
    print('\tRunning interaction for Materialistic')
    visit_catching_up(device)
    # This interaction works, but it duplicated the runtime for this app =/
    # visit_new_stories_from_saved(device)
    visit_new_stories_from_saved(device)
    visit_best_stories(device)
| [
"sshann95@outlook.com"
] | sshann95@outlook.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.