hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
638b0a00a63883a2bb04ae6a08d45cf3e222d715 | 6,636 | py | Python | modoboa_dmarc/lib.py | modoboa/modoboa-dmarc | 47572ed820f16203670d0f8c2881e10ecc1d53a7 | [
"MIT"
] | 14 | 2016-06-07T08:11:20.000Z | 2021-10-12T15:05:26.000Z | modoboa_dmarc/lib.py | modoboa/modoboa-dmarc | 47572ed820f16203670d0f8c2881e10ecc1d53a7 | [
"MIT"
] | 54 | 2016-02-26T18:53:07.000Z | 2022-03-01T22:17:51.000Z | modoboa_dmarc/lib.py | modoboa/modoboa-dmarc | 47572ed820f16203670d0f8c2881e10ecc1d53a7 | [
"MIT"
] | 12 | 2016-08-11T07:42:15.000Z | 2021-10-15T19:45:41.000Z | """Internal library."""
import datetime
import email
import fileinput
import getpass
import imaplib
import zipfile
import gzip
import sys
from defusedxml.ElementTree import fromstring
import pytz.exceptions
import six
import magic
from django.db import transaction
from django.utils.encoding import smart_text
from django.utils import timezone
from modoboa.admin import models as admin_models
from . import constants
from . import models
# MIME types under which reporters are known to attach aggregated
# reports (zip/gzip archives or raw XML); used to filter email parts.
ZIP_CONTENT_TYPES = [
    "application/x-zip-compressed",
    "application/x-zip",
    "application/zip",
    "application/gzip",
    "application/octet-stream",
    "text/xml",
]
# MIME types accepted after sniffing the (possibly uncompressed)
# attachment content with libmagic.
FILE_TYPES = [
    "text/plain",
    "text/xml",
]
def import_record(xml_node, report):
    """Import a single ``<record>`` node of an aggregated report.

    Creates a :class:`models.Record` attached to ``report`` plus one
    :class:`models.Result` per authentication mechanism (spf/dkim).
    Returns ``None`` without saving anything when the header_from
    domain is not hosted locally.
    """
    record = models.Record(report=report)
    row = xml_node.find("row")
    record.source_ip = row.find("source_ip").text
    record.count = int(row.find("count").text)
    policy_evaluated = row.find("policy_evaluated")
    record.disposition = policy_evaluated.find("disposition").text
    record.dkim_result = policy_evaluated.find("dkim").text
    record.spf_result = policy_evaluated.find("spf").text
    reason = policy_evaluated.find("reason")
    # Compare against None explicitly: an Element's truth value depends
    # on whether it has children (and is deprecated), so ``if reason:``
    # would skip an existing-but-empty <reason> node.
    if reason is not None:
        record.reason_type = smart_text(reason.find("type").text)[:14]
        if record.reason_type not in constants.ALLOWED_REASON_TYPES:
            record.reason_type = "other"
        comment = reason.find("comment")
        # <comment> is optional; an absent node or empty text maps to "".
        if comment is not None:
            record.reason_comment = comment.text or ""
    identifiers = xml_node.find("identifiers")
    header_from = identifiers.find("header_from").text.split(".")
    domain = None
    # Strip leading labels until a locally-hosted domain matches
    # (e.g. mail.example.com -> example.com).
    while len(header_from) >= 2:
        domain = admin_models.Domain.objects.filter(
            name=".".join(header_from)).first()
        if domain is not None:
            record.header_from = domain
            break
        header_from = header_from[1:]
    if domain is None:
        print("Invalid record found (domain not local)")
        return None
    record.save()
    auth_results = xml_node.find("auth_results")
    for rtype in ["spf", "dkim"]:
        rnode = auth_results.find(rtype)
        # ``is None`` rather than truthiness (same Element pitfall as above).
        if rnode is None:
            continue
        models.Result.objects.create(
            record=record, type=rtype, domain=rnode.find("domain").text,
            result=rnode.find("result").text)
@transaction.atomic
def import_report(content):
    """Import an aggregated report.

    Skips reports that were already imported, reports with malformed
    ``policy_published`` data, and reports whose dates are ambiguous
    in the local timezone.
    """
    root = fromstring(content, forbid_dtd=True)
    metadata = root.find("report_metadata")
    print(
        "Importing report {} received from {}".format(
            metadata.find("report_id").text,
            metadata.find("org_name").text)
    )
    reporter, _ = models.Reporter.objects.get_or_create(
        email=metadata.find("email").text,
        defaults={"org_name": metadata.find("org_name").text}
    )
    qs = models.Report.objects.filter(
        reporter=reporter, report_id=metadata.find("report_id").text)
    if qs.exists():
        print("Report already imported.")
        return
    report = models.Report(reporter=reporter)
    report.report_id = metadata.find("report_id").text
    date_range = metadata.find("date_range")
    report.start_date = timezone.make_aware(
        datetime.datetime.fromtimestamp(int(date_range.find("begin").text))
    )
    report.end_date = timezone.make_aware(
        datetime.datetime.fromtimestamp(int(date_range.find("end").text))
    )
    policy_published = root.find("policy_published")
    for attr in ["domain", "adkim", "aspf", "p", "sp", "pct"]:
        # Read each policy value from the XML and copy it onto the report.
        # The previous code called getattr() with four arguments (a
        # TypeError: getattr takes at most 3) and never populated the
        # report's policy_* fields at all.
        node = policy_published.find(attr)
        value = node.text if node is not None else None
        if not value:
            print(f"Report skipped because of malformed data (empty {attr})")
            return
        setattr(report, "policy_{}".format(attr), value)
    try:
        report.save()
    except pytz.exceptions.AmbiguousTimeError:
        print("Report skipped because of invalid date.")
        return
    for record in root.findall("record"):
        import_record(record, report)
def import_archive(archive, content_type=None):
    """Import reports contained inside (file pointer)

    - a zip archive,
    - a gzip file,
    - a xml file.
    """
    # Plain XML attachments need no decompression at all.
    if content_type == "text/xml":
        import_report(archive.read())
        return
    # Gzip payloads are frequently mislabeled as octet-stream.
    if content_type in ("application/gzip", "application/octet-stream"):
        with gzip.GzipFile(mode="r", fileobj=archive) as compressed:
            import_report(compressed.read())
        return
    # Fall back to zip: an archive may bundle several report files.
    with zipfile.ZipFile(archive, "r") as bundle:
        for member in bundle.namelist():
            import_report(bundle.read(member))
def import_report_from_email(content):
    """Import a report from an email (raw string or file pointer).

    Walks every MIME part whose declared content type looks like a
    report attachment, sniffs its real type with libmagic, and imports
    it. Exits with EX_DATAERR (65) if any attachment was invalid.
    """
    if isinstance(content, six.string_types):
        msg = email.message_from_string(content)
    else:
        msg = email.message_from_file(content)
    err = False
    for part in msg.walk():
        if part.get_content_type() not in ZIP_CONTENT_TYPES:
            continue
        payload = part.get_payload(decode=True)
        if payload is None:
            # Multipart container parts have no decodable payload.
            continue
        fpo = six.BytesIO(payload)
        try:
            # Try to get the actual file type of the buffer
            # required to make sure we are dealing with an XML file
            file_type = magic.Magic(uncompress=True, mime=True).from_buffer(
                fpo.read(2048))
            fpo.seek(0)
            if file_type in FILE_TYPES:
                import_archive(fpo, content_type=part.get_content_type())
        except (OSError, IOError):
            print('Error: the attachment does not match the mimetype')
            err = True
        finally:
            # Always release the buffer; the previous version leaked it
            # on the error path (close() only ran in the else branch).
            fpo.close()
    if err:
        # Return EX_DATAERR code <data format error> available
        # at sysexits.h file
        # (see http://www.postfix.org/pipe.8.html)
        sys.exit(65)
def import_report_from_stdin():
    """Parse a report from stdin."""
    content = six.StringIO()
    for line in fileinput.input([]):
        content.write(line)
    # A StringIO instance is always truthy, so the previous
    # ``if not content`` check could never detect an empty input;
    # test the accumulated text instead.
    if not content.getvalue():
        return
    content.seek(0)
    import_report_from_email(content)
def import_from_imap(options):
    """Import reports from an IMAP mailbox."""
    # Pick the transport class up-front; credentials are prompted
    # interactively, never read from options.
    imap_class = imaplib.IMAP4_SSL if options["ssl"] else imaplib.IMAP4
    connection = imap_class(options["host"])
    username = input("Username: ")
    password = getpass.getpass(prompt="Password: ")
    connection.login(username, password)
    connection.select(options["mailbox"])
    _, msg_ids = connection.search(None, "ALL")
    for msg_id in msg_ids[0].split():
        _, message_data = connection.fetch(msg_id, "(RFC822)")
        # fetch() interleaves tuples (header, body) with bare separators;
        # only the tuples carry the raw RFC822 payload.
        for response_part in message_data:
            if isinstance(response_part, tuple):
                import_report_from_email(response_part[1])
    connection.close()
| 31.903846 | 91 | 0.643761 |
2757d4191a7a4d62f61773f96836976a0d7914ca | 754 | py | Python | tests/test_inspections.py | Hidden-black/jishaku | d3f50749b5a977b544e5fd14894585f656247486 | [
"MIT"
] | 434 | 2018-01-04T05:57:46.000Z | 2022-03-29T12:52:54.000Z | tests/test_inspections.py | Hidden-black/jishaku | d3f50749b5a977b544e5fd14894585f656247486 | [
"MIT"
] | 134 | 2018-02-16T05:12:13.000Z | 2022-03-31T15:05:23.000Z | tests/test_inspections.py | Hidden-black/jishaku | d3f50749b5a977b544e5fd14894585f656247486 | [
"MIT"
] | 228 | 2017-12-18T18:02:17.000Z | 2022-03-29T23:25:41.000Z | # -*- coding: utf-8 -*-
"""
jishaku.inspections test
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2021 Devon (Gorialis) R
:license: MIT, see LICENSE for more details.
"""
import collections # for __iadd__ test
import discord
import pytest
from utils import run_async
from jishaku.repl.inspections import all_inspections
@pytest.mark.parametrize(
    "target",
    [
        4,
        discord.Client,  # cover type subclasses
        tuple,  # cover many-subclass truncation
        [False, 1, "2", 3.0],  # cover content types
        collections.Counter,  # cover inplace operators
        run_async  # cover current-working-directory inspections
    ]
)
def test_object_inspection(target):
    """Smoke test: every inspection must complete without raising for each target."""
    # We only care that the generator can be fully consumed; the
    # yielded (name, value) pairs themselves are not checked.
    for _, _ in all_inspections(target):
        pass
| 21.542857 | 64 | 0.649867 |
0f38ba8b8c6db701aaf679a0675d359ffbf9b382 | 5,842 | py | Python | sdk/textanalytics/azure-ai-textanalytics/azure/ai/textanalytics/_response_handlers.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
] | 1 | 2020-12-10T03:17:51.000Z | 2020-12-10T03:17:51.000Z | sdk/textanalytics/azure-ai-textanalytics/azure/ai/textanalytics/_response_handlers.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
] | 226 | 2019-07-24T07:57:21.000Z | 2019-10-15T01:07:24.000Z | sdk/textanalytics/azure-ai-textanalytics/azure/ai/textanalytics/_response_handlers.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
] | 1 | 2020-07-05T21:13:37.000Z | 2020-07-05T21:13:37.000Z | # coding=utf-8
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import json
from azure.core.exceptions import (
HttpResponseError,
ClientAuthenticationError,
ODataV4Format
)
from ._models import (
RecognizeEntitiesResult,
CategorizedEntity,
TextDocumentStatistics,
RecognizeLinkedEntitiesResult,
LinkedEntity,
ExtractKeyPhrasesResult,
AnalyzeSentimentResult,
SentenceSentiment,
DetectLanguageResult,
DetectedLanguage,
DocumentError,
SentimentConfidenceScores,
TextAnalyticsError,
TextAnalyticsWarning
)
class CSODataV4Format(ODataV4Format):
    """OData v4 error format that prefers the service's nested "innererror"
    payload when present, falling back to the top-level error body.
    """

    def __init__(self, odata_error):
        try:
            inner_error = odata_error["error"]["innererror"]
        except KeyError:
            inner_error = None
        # Previously, an existing-but-falsy "innererror" value skipped
        # *both* branches and left the base class uninitialized; now we
        # always initialize, falling back to the full error payload.
        if inner_error:
            super(CSODataV4Format, self).__init__(inner_error)
        else:
            super(CSODataV4Format, self).__init__(odata_error)
def process_batch_error(error):
    """Raise detailed error message.

    401 responses are surfaced as ClientAuthenticationError; anything
    else becomes a generic HttpResponseError. Both carry the service's
    OData error details via CSODataV4Format.
    """
    if error.status_code == 401:
        raise ClientAuthenticationError(
            response=error.response, error_format=CSODataV4Format)
    raise HttpResponseError(
        response=error.response, error_format=CSODataV4Format)
def order_results(response, combined):
    """Order results in the order the user passed them in.

    :param response: Used to get the original documents in the request
    :param combined: A combined list of the results | errors
    :return: In order list of results | errors (if any)
    """
    documents = json.loads(response.http_response.request.body)["documents"]
    by_id = {entry.id: entry for entry in combined}
    return [by_id[doc["id"]] for doc in documents]
def prepare_result(func):
    """Decorator turning a raw batch response into ordered result objects.

    Wraps a per-document converter ``func``: per-document errors become
    DocumentError entries, and the combined list is re-ordered to match
    the original request order.
    """
    def _get_error_code_and_message(error):
        # Prefer the service's nested innererror details when present.
        if hasattr(error.error, 'innererror') and error.error.innererror:
            return error.error.innererror.code, error.error.innererror.message
        return error.error.code, error.error.message

    def _deal_with_too_many_documents(response, obj):
        # special case for now if there are too many documents in the request
        # (the service reports it as a single document error with an empty id)
        too_many_documents_errors = [
            error for error in obj.errors if error.id == ""
        ]
        if too_many_documents_errors:
            too_many_documents_error = too_many_documents_errors[0]
            # Rewrite the raw response as a 400 so the raised
            # HttpResponseError carries a meaningful status.
            response.status_code = 400
            response.reason = "Bad Request"
            code, message = _get_error_code_and_message(too_many_documents_error)
            raise HttpResponseError(
                message="({}) {}".format(code, message),
                response=response
            )

    def wrapper(response, obj, response_headers):  # pylint: disable=unused-argument
        if obj.errors:
            _deal_with_too_many_documents(response.http_response, obj)
            combined = obj.documents + obj.errors
            results = order_results(response, combined)
        else:
            results = obj.documents

        # Convert in place: error items -> DocumentError, others -> func(item).
        for idx, item in enumerate(results):
            if hasattr(item, "error"):
                results[idx] = DocumentError(id=item.id, error=TextAnalyticsError._from_generated(item.error))  # pylint: disable=protected-access
            else:
                results[idx] = func(item)
        return results

    return wrapper
@prepare_result
def language_result(language):
    """Convert a generated language document into a DetectLanguageResult."""
    # pylint: disable=protected-access
    detected = DetectedLanguage._from_generated(language.detected_language)
    doc_warnings = [
        TextAnalyticsWarning._from_generated(item) for item in language.warnings
    ]
    doc_stats = TextDocumentStatistics._from_generated(language.statistics)
    return DetectLanguageResult(
        id=language.id,
        primary_language=detected,
        warnings=doc_warnings,
        statistics=doc_stats,
    )
@prepare_result
def entities_result(entity):
    """Convert a generated entities document into a RecognizeEntitiesResult."""
    # pylint: disable=protected-access
    recognized = [
        CategorizedEntity._from_generated(item) for item in entity.entities
    ]
    doc_warnings = [
        TextAnalyticsWarning._from_generated(item) for item in entity.warnings
    ]
    doc_stats = TextDocumentStatistics._from_generated(entity.statistics)
    return RecognizeEntitiesResult(
        id=entity.id,
        entities=recognized,
        warnings=doc_warnings,
        statistics=doc_stats,
    )
@prepare_result
def linked_entities_result(entity):
    """Convert a generated linked-entities document into a RecognizeLinkedEntitiesResult."""
    # pylint: disable=protected-access
    linked = [LinkedEntity._from_generated(item) for item in entity.entities]
    doc_warnings = [
        TextAnalyticsWarning._from_generated(item) for item in entity.warnings
    ]
    doc_stats = TextDocumentStatistics._from_generated(entity.statistics)
    return RecognizeLinkedEntitiesResult(
        id=entity.id,
        entities=linked,
        warnings=doc_warnings,
        statistics=doc_stats,
    )
@prepare_result
def key_phrases_result(phrases):
    """Convert a generated key-phrases document into an ExtractKeyPhrasesResult."""
    # pylint: disable=protected-access
    doc_warnings = [
        TextAnalyticsWarning._from_generated(item) for item in phrases.warnings
    ]
    doc_stats = TextDocumentStatistics._from_generated(phrases.statistics)
    return ExtractKeyPhrasesResult(
        id=phrases.id,
        key_phrases=phrases.key_phrases,
        warnings=doc_warnings,
        statistics=doc_stats,
    )
@prepare_result
def sentiment_result(sentiment):
    """Convert a generated sentiment document into an AnalyzeSentimentResult."""
    # pylint: disable=protected-access
    doc_warnings = [
        TextAnalyticsWarning._from_generated(item) for item in sentiment.warnings
    ]
    doc_stats = TextDocumentStatistics._from_generated(sentiment.statistics)
    scores = SentimentConfidenceScores._from_generated(sentiment.confidence_scores)
    per_sentence = [
        SentenceSentiment._from_generated(item) for item in sentiment.sentences
    ]
    return AnalyzeSentimentResult(
        id=sentiment.id,
        sentiment=sentiment.sentiment,
        warnings=doc_warnings,
        statistics=doc_stats,
        confidence_scores=scores,
        sentences=per_sentence,
    )
| 38.946667 | 146 | 0.704896 |
f5a1e80e6b834b31ef70ead31790ed21892c6154 | 949 | py | Python | doctrans/tests/test_ast_equality.py | danielebra/doctrans | fe15f4883ff37d7a05546ad337c0f6f5431285b7 | [
"Apache-2.0",
"MIT"
] | null | null | null | doctrans/tests/test_ast_equality.py | danielebra/doctrans | fe15f4883ff37d7a05546ad337c0f6f5431285b7 | [
"Apache-2.0",
"MIT"
] | null | null | null | doctrans/tests/test_ast_equality.py | danielebra/doctrans | fe15f4883ff37d7a05546ad337c0f6f5431285b7 | [
"Apache-2.0",
"MIT"
] | null | null | null | """
Tests for AST equality
"""
import ast
from unittest import TestCase
from doctrans.tests.mocks.argparse import argparse_func_ast, argparse_func_str
from doctrans.tests.mocks.classes import class_ast, class_str
from doctrans.tests.utils_for_tests import (
reindent_docstring,
run_ast_test,
unittest_main,
)
class TestAstEquality(TestCase):
    """
    Tests whether the AST generated matches the mocked one expected
    """

    def test_argparse_func(self) -> None:
        """ Tests whether the `argparse_func_str` correctly produces `argparse_func_ast` """
        # Normalize docstring indentation on both sides before comparing.
        parsed = ast.parse(argparse_func_str).body[0]
        run_ast_test(
            self,
            reindent_docstring(parsed),
            reindent_docstring(argparse_func_ast),
        )

    def test_class(self) -> None:
        """ Tests whether the `class_str` correctly produces `class_ast` """
        run_ast_test(self, class_str, class_ast)
unittest_main()
| 25.648649 | 92 | 0.671233 |
27ae36cbc421f591be8bc5cd1445e01d2aa2419c | 53,999 | py | Python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_06_01/operations/_express_route_circuits_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 3 | 2020-06-23T02:25:27.000Z | 2021-09-07T18:48:11.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_06_01/operations/_express_route_circuits_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 510 | 2019-07-17T16:11:19.000Z | 2021-08-02T08:38:32.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_06_01/operations/_express_route_circuits_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 5 | 2019-09-04T12:51:37.000Z | 2020-09-16T07:28:40.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteCircuitsOperations(object):
    """ExpressRouteCircuitsOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2020_06_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        # Auto-generated plumbing: keep references to the shared pipeline
        # client, configuration and (de)serializers used by every operation.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def _delete_initial(
        self,
        resource_group_name,  # type: str
        circuit_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Send the initial DELETE request of the long-running operation
        (no polling; ``begin_delete`` drives the poller)."""
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        accept = "application/json"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 202 (accepted, async) and 204 (already gone) are valid outcomes.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'}  # type: ignore
    def begin_delete(
        self,
        resource_group_name,  # type: str
        circuit_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes the specified express route circuit.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param circuit_name: The name of the express route circuit.
        :type circuit_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the ARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only fire the initial DELETE when not resuming from a saved token.
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                circuit_name=circuit_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Terminal callback: a delete has no response body to deserialize.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # final-state-via 'location': poll the Location header until done.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'}  # type: ignore
    def get(
        self,
        resource_group_name,  # type: str
        circuit_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.ExpressRouteCircuit"
        """Gets information about the specified express route circuit.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param circuit_name: The name of express route circuit.
        :type circuit_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ExpressRouteCircuit, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2020_06_01.models.ExpressRouteCircuit
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuit"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Synchronous GET: only 200 is a success.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'}  # type: ignore
    def _create_or_update_initial(
        self,
        resource_group_name,  # type: str
        circuit_name,  # type: str
        parameters,  # type: "_models.ExpressRouteCircuit"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.ExpressRouteCircuit"
        """Send the initial PUT request of the create-or-update long-running
        operation (no polling; ``begin_create_or_update`` drives the poller)."""
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuit"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the circuit model into the request body.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'ExpressRouteCircuit')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 = updated existing circuit, 201 = created a new one.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'}  # type: ignore
    def begin_create_or_update(
        self,
        resource_group_name,  # type: str
        circuit_name,  # type: str
        parameters,  # type: "_models.ExpressRouteCircuit"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.ExpressRouteCircuit"]
        """Creates or updates an express route circuit.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param circuit_name: The name of the circuit.
        :type circuit_name: str
        :param parameters: Parameters supplied to the create or update express route circuit operation.
        :type parameters: ~azure.mgmt.network.v2020_06_01.models.ExpressRouteCircuit
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the ARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either ExpressRouteCircuit or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_06_01.models.ExpressRouteCircuit]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuit"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only fire the initial PUT when not resuming from a saved token.
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                circuit_name=circuit_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Terminal callback: deserialize the final circuit payload.
            deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # final-state-via 'azure-async-operation': poll the async-operation URL.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'}  # type: ignore
def update_tags(
    self,
    resource_group_name,  # type: str
    circuit_name,  # type: str
    parameters,  # type: "_models.TagsObject"
    **kwargs  # type: Any
):
    # type: (...) -> "_models.ExpressRouteCircuit"
    """Updates an express route circuit tags.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param circuit_name: The name of the circuit.
    :type circuit_name: str
    :param parameters: Parameters supplied to update express route circuit tags.
    :type parameters: ~azure.mgmt.network.v2020_06_01.models.TagsObject
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ExpressRouteCircuit, or the result of cls(response)
    :rtype: ~azure.mgmt.network.v2020_06_01.models.ExpressRouteCircuit
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuit"]
    # Map well-known failure codes to azure-core exception types; callers may
    # extend or override the mapping through the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL from the operation's metadata template.
    url = self.update_tags.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # Serialize the TagsObject into the PATCH body and send the request.
    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'TagsObject')
    body_content_kwargs['content'] = body_content
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)

    # Honour a caller-supplied response hook, otherwise return the model.
    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'}  # type: ignore
def _list_arp_table_initial(
    self,
    resource_group_name,  # type: str
    circuit_name,  # type: str
    peering_name,  # type: str
    device_path,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Optional["_models.ExpressRouteCircuitsArpTableListResult"]
    # Initial POST of the list-ARP-table long-running operation.  Returns the
    # deserialized payload when the service answers 200 immediately, or None
    # when it answers 202 (the result is then retrieved through polling).
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.ExpressRouteCircuitsArpTableListResult"]]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-01"
    accept = "application/json"

    # Construct URL
    url = self._list_arp_table_initial.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
        'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
        'devicePath': self._serialize.url("device_path", device_path, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.post(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 200 = finished synchronously, 202 = accepted for asynchronous processing.
    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
_list_arp_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/arpTables/{devicePath}'}  # type: ignore
def begin_list_arp_table(
    self,
    resource_group_name,  # type: str
    circuit_name,  # type: str
    peering_name,  # type: str
    device_path,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller["_models.ExpressRouteCircuitsArpTableListResult"]
    """Gets the currently advertised ARP table associated with the express route circuit in a resource
    group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param circuit_name: The name of the express route circuit.
    :type circuit_name: str
    :param peering_name: The name of the peering.
    :type peering_name: str
    :param device_path: The path of the device.
    :type device_path: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: Pass in True if you'd like the ARMPolling polling method,
     False for no polling, or your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either ExpressRouteCircuitsArpTableListResult or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_06_01.models.ExpressRouteCircuitsArpTableListResult]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitsArpTableListResult"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Not resuming from a saved state: issue the initial POST.  'cls' is
        # overridden so the raw PipelineResponse is returned for the poller.
        raw_result = self._list_arp_table_initial(
            resource_group_name=resource_group_name,
            circuit_name=circuit_name,
            peering_name=peering_name,
            device_path=device_path,
            cls=lambda x,y,z: x,
            **kwargs
        )

    # These request-scoped options were consumed by the initial call and must
    # not be forwarded to the polling method below.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the terminal response, honouring a caller-supplied 'cls'.
        deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
        'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
        'devicePath': self._serialize.url("device_path", device_path, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }

    # polling may be True (default ARM polling), False (no polling), or a
    # user-supplied PollingMethod instance.
    if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_arp_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/arpTables/{devicePath}'}  # type: ignore
def _list_routes_table_initial(
    self,
    resource_group_name,  # type: str
    circuit_name,  # type: str
    peering_name,  # type: str
    device_path,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Optional["_models.ExpressRouteCircuitsRoutesTableListResult"]
    # Initial POST of the list-routes-table long-running operation.  Returns
    # the deserialized payload on an immediate 200, or None on 202 (result is
    # then retrieved through polling).
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.ExpressRouteCircuitsRoutesTableListResult"]]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-01"
    accept = "application/json"

    # Construct URL
    url = self._list_routes_table_initial.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
        'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
        'devicePath': self._serialize.url("device_path", device_path, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.post(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 200 = finished synchronously, 202 = accepted for asynchronous processing.
    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
_list_routes_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTables/{devicePath}'}  # type: ignore
def begin_list_routes_table(
    self,
    resource_group_name,  # type: str
    circuit_name,  # type: str
    peering_name,  # type: str
    device_path,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller["_models.ExpressRouteCircuitsRoutesTableListResult"]
    """Gets the currently advertised routes table associated with the express route circuit in a
    resource group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param circuit_name: The name of the express route circuit.
    :type circuit_name: str
    :param peering_name: The name of the peering.
    :type peering_name: str
    :param device_path: The path of the device.
    :type device_path: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: Pass in True if you'd like the ARMPolling polling method,
     False for no polling, or your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either ExpressRouteCircuitsRoutesTableListResult or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_06_01.models.ExpressRouteCircuitsRoutesTableListResult]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitsRoutesTableListResult"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Not resuming from a saved state: issue the initial POST.  'cls' is
        # overridden so the raw PipelineResponse is returned for the poller.
        raw_result = self._list_routes_table_initial(
            resource_group_name=resource_group_name,
            circuit_name=circuit_name,
            peering_name=peering_name,
            device_path=device_path,
            cls=lambda x,y,z: x,
            **kwargs
        )

    # These request-scoped options were consumed by the initial call and must
    # not be forwarded to the polling method below.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the terminal response, honouring a caller-supplied 'cls'.
        deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
        'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
        'devicePath': self._serialize.url("device_path", device_path, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }

    # polling may be True (default ARM polling), False (no polling), or a
    # user-supplied PollingMethod instance.
    if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_routes_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTables/{devicePath}'}  # type: ignore
def _list_routes_table_summary_initial(
    self,
    resource_group_name,  # type: str
    circuit_name,  # type: str
    peering_name,  # type: str
    device_path,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Optional["_models.ExpressRouteCircuitsRoutesTableSummaryListResult"]
    # Initial POST of the list-routes-table-summary long-running operation.
    # Returns the deserialized payload on an immediate 200, or None on 202
    # (result is then retrieved through polling).
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.ExpressRouteCircuitsRoutesTableSummaryListResult"]]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-01"
    accept = "application/json"

    # Construct URL
    url = self._list_routes_table_summary_initial.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
        'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
        'devicePath': self._serialize.url("device_path", device_path, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.post(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 200 = finished synchronously, 202 = accepted for asynchronous processing.
    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableSummaryListResult', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
_list_routes_table_summary_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTablesSummary/{devicePath}'}  # type: ignore
def begin_list_routes_table_summary(
    self,
    resource_group_name,  # type: str
    circuit_name,  # type: str
    peering_name,  # type: str
    device_path,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller["_models.ExpressRouteCircuitsRoutesTableSummaryListResult"]
    """Gets the currently advertised routes table summary associated with the express route circuit in
    a resource group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param circuit_name: The name of the express route circuit.
    :type circuit_name: str
    :param peering_name: The name of the peering.
    :type peering_name: str
    :param device_path: The path of the device.
    :type device_path: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: Pass in True if you'd like the ARMPolling polling method,
     False for no polling, or your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either ExpressRouteCircuitsRoutesTableSummaryListResult or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_06_01.models.ExpressRouteCircuitsRoutesTableSummaryListResult]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitsRoutesTableSummaryListResult"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Not resuming from a saved state: issue the initial POST.  'cls' is
        # overridden so the raw PipelineResponse is returned for the poller.
        raw_result = self._list_routes_table_summary_initial(
            resource_group_name=resource_group_name,
            circuit_name=circuit_name,
            peering_name=peering_name,
            device_path=device_path,
            cls=lambda x,y,z: x,
            **kwargs
        )

    # These request-scoped options were consumed by the initial call and must
    # not be forwarded to the polling method below.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the terminal response, honouring a caller-supplied 'cls'.
        deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableSummaryListResult', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
        'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
        'devicePath': self._serialize.url("device_path", device_path, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }

    # polling may be True (default ARM polling), False (no polling), or a
    # user-supplied PollingMethod instance.
    if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_routes_table_summary.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTablesSummary/{devicePath}'}  # type: ignore
def get_stats(
    self,
    resource_group_name,  # type: str
    circuit_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> "_models.ExpressRouteCircuitStats"
    """Gets all the stats from an express route circuit in a resource group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param circuit_name: The name of the express route circuit.
    :type circuit_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ExpressRouteCircuitStats, or the result of cls(response)
    :rtype: ~azure.mgmt.network.v2020_06_01.models.ExpressRouteCircuitStats
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitStats"]
    # Translate well-known HTTP failures into azure-core exceptions; callers
    # may override the mapping via the 'error_map' keyword.
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-01"
    accept = "application/json"

    # Expand the URL template from the operation metadata.
    url_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    request_url = self._client.format_url(self.get_stats.metadata['url'], **url_arguments)  # type: ignore

    # Query string and headers for the GET.
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(request_url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('ExpressRouteCircuitStats', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_stats.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/stats'}  # type: ignore
def get_peering_stats(
    self,
    resource_group_name,  # type: str
    circuit_name,  # type: str
    peering_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> "_models.ExpressRouteCircuitStats"
    """Gets all stats from an express route circuit in a resource group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param circuit_name: The name of the express route circuit.
    :type circuit_name: str
    :param peering_name: The name of the peering.
    :type peering_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ExpressRouteCircuitStats, or the result of cls(response)
    :rtype: ~azure.mgmt.network.v2020_06_01.models.ExpressRouteCircuitStats
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitStats"]
    # Translate well-known HTTP failures into azure-core exceptions; callers
    # may override the mapping via the 'error_map' keyword.
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-01"
    accept = "application/json"

    # Expand the URL template from the operation metadata.
    url_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
        'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    request_url = self._client.format_url(self.get_peering_stats.metadata['url'], **url_arguments)  # type: ignore

    # Query string and headers for the GET.
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(request_url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('ExpressRouteCircuitStats', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_peering_stats.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/stats'}  # type: ignore
def list(
    self,
    resource_group_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["_models.ExpressRouteCircuitListResult"]
    """Gets all the express route circuits in a resource group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either ExpressRouteCircuitListResult or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_06_01.models.ExpressRouteCircuitListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the GET request: the templated URL for the first page, or the
        # service-provided next_link (already fully qualified) for later pages.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.list.metadata['url']  # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page and hand back (next_link, items) to ItemPaged.
        deserialized = self._deserialize('ExpressRouteCircuitListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch one page, raising a mapped exception on non-200 responses.
        request = prepare_request(next_link)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    # Lazily pages through results as the caller iterates.
    return ItemPaged(
        get_next, extract_data
    )
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits'}  # type: ignore
def list_all(
    self,
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["_models.ExpressRouteCircuitListResult"]
    """Gets all the express route circuits in a subscription.

    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either ExpressRouteCircuitListResult or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_06_01.models.ExpressRouteCircuitListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the GET request: the templated URL for the first page, or the
        # service-provided next_link (already fully qualified) for later pages.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.list_all.metadata['url']  # type: ignore
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page and hand back (next_link, items) to ItemPaged.
        deserialized = self._deserialize('ExpressRouteCircuitListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch one page, raising a mapped exception on non-200 responses.
        request = prepare_request(next_link)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    # Lazily pages through results as the caller iterates.
    return ItemPaged(
        get_next, extract_data
    )
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteCircuits'}  # type: ignore
| 50.278399 | 261 | 0.667327 |
7c414250036c9aff481005e97ac393b03b27412b | 2,192 | py | Python | sample.3.py | box-community/samples-docs-authenticate-with-jwt-api | 6bd82bba3fea6701ff54207bf7ff3645aefcfaa4 | [
"Apache-2.0"
] | 6 | 2019-08-14T19:30:30.000Z | 2021-02-22T23:03:39.000Z | sample.3.py | box-community/samples-docs-authenticate-with-jwt-api | 6bd82bba3fea6701ff54207bf7ff3645aefcfaa4 | [
"Apache-2.0"
] | 1 | 2020-04-07T00:55:14.000Z | 2020-04-07T10:29:53.000Z | sample.3.py | box-community/samples-docs-authenticate-with-jwt-api | 6bd82bba3fea6701ff54207bf7ff3645aefcfaa4 | [
"Apache-2.0"
] | 10 | 2018-11-21T22:58:35.000Z | 2021-04-24T17:17:59.000Z | import json
import os
import time
import secrets
import json
import requests
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import load_pem_private_key
import jwt
config = json.load(open('config.json'))
appAuth = config["boxAppSettings"]["appAuth"]
privateKey = appAuth["privateKey"]
passphrase = appAuth["passphrase"]
# To decrypt the private key we use the cryptography library
# (https://cryptography.io/en/latest/)
key = load_pem_private_key(
data=privateKey.encode('utf8'),
password=passphrase.encode('utf8'),
backend=default_backend(),
)
# We will need the authentication_url again later,
# so it is handy to define here
authentication_url = 'https://api.box.com/oauth2/token'
claims = {
'iss': config['boxAppSettings']['clientID'],
'sub': config['enterpriseID'],
'box_sub_type': 'enterprise',
'aud': authentication_url,
# This is an identifier that helps protect against
# replay attacks
'jti': secrets.token_hex(64),
# We give the assertion a lifetime of 45 seconds
# before it expires
'exp': round(time.time()) + 45
}
keyId = config['boxAppSettings']['appAuth']['publicKeyID']
# Rather than constructing the JWT assertion manually, we are
# using the pyjwt library.
assertion = jwt.encode(
claims,
key,
# The API support "RS256", "RS384", and "RS512" encryption
algorithm='RS512',
headers={
'kid': keyId
}
)
params = {
# This specifies that we are using a JWT assertion
# to authenticate
'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
# Our JWT assertion
'assertion': assertion,
# The OAuth 2 client ID and secret
'client_id': config['boxAppSettings']['clientID'],
'client_secret': config['boxAppSettings']['clientSecret']
}
# Make the request, parse the JSON,
# and extract the access token
response = requests.post(authentication_url, params)
access_token = response.json()['access_token']
# # Folder 0 is the root folder for this account
# # and should be empty by default
headers = { 'Authorization': "Bearer %s" % access_token }
response = requests.get('https://api.box.com/2.0/folders/0', headers=headers)
print(response.json()) | 28.102564 | 77 | 0.729927 |
c5b17d607b5b5720dde5fa42e5a12bd34836ee80 | 17,027 | py | Python | test/v4.1/controller.py | gavinIRL/RHBot | 1e22ae5ca7b67ebd6a72c23d9f46d5a8eb6e99cf | [
"MIT"
] | null | null | null | test/v4.1/controller.py | gavinIRL/RHBot | 1e22ae5ca7b67ebd6a72c23d9f46d5a8eb6e99cf | [
"MIT"
] | 60 | 2021-03-29T14:29:49.000Z | 2021-05-03T06:06:19.000Z | test/v4.1/controller.py | gavinIRL/RHBot | 1e22ae5ca7b67ebd6a72c23d9f46d5a8eb6e99cf | [
"MIT"
] | null | null | null | from pynput.keyboard import Key, Listener, KeyCode
from pynput import mouse, keyboard
import time
import os
import pydirectinput as pyautogui
import json
from windowcapture import WindowCapture
from combat_standalone import StandaloneCombat
from moveloot_standalone import StandaloneMoveLoot
from freemove_standalone import StandaloneFreeMove
pyautogui.FAILSAFE = True
class Controller2():
    """Top-level orchestrator for the RHBot input loop.

    Owns the pynput keyboard/mouse listeners, the movement/combat/freemove
    sub-bots, and the Recorder/Playback helpers.  Hotkeys (F4-F12) toggle
    the various modes; see on_press/on_release for the full mapping.
    """

    def __init__(self, loot=True, combat=True, freemove=False, rec_pb_only=False) -> None:
        # Run relative to this file so recordings/ and gamename.txt resolve.
        os.chdir(os.path.dirname(os.path.abspath(__file__)))
        self.mode = "movement"
        self.listener = None
        self.mouse_listener = None
        self.bot_running = False
        self.loot_enabled = loot
        self.combat_enabled = combat
        self.freemove_enabled = freemove
        # Recording/playback state flags driven by the hotkey handlers.
        self.playback_input_flag = False
        self.playback_ongoing = False
        self.playback_string = ""
        self.recording_ready = False
        self.recording_ongoing = False
        self.recorder = Recorder(self)
        self.playback = Playback(self)
        # This variable is for ignoring any move/combat bot loops
        self.rec_pb_only = rec_pb_only
        self.movebot = StandaloneMoveLoot(self)
        self.combatbat = StandaloneCombat(self)
        self.freemovebot = StandaloneFreeMove(self)
        self.combat_cooldown = 0
        with open("gamename.txt") as f:
            gamename = f.readline()
        self.game_wincap = WindowCapture(gamename)

    def start_controller(self):
        """Start the listeners, then run the main mode loop until F4 exits."""
        self.start_countdown()
        self.start_mouse_listener()
        self.start_keypress_listener()
        while self.bot_running:
            # First check if any playback or record flags are on
            if self.perform_record_playback_checks():
                pass
            # Otherwise if this is rec/pb only then skip the usual logic
            elif self.rec_pb_only:
                time.sleep(0.5)
            # Then continue with the usual loop
            # Shouldn't need to stop everything
            # As shouldn't be in a dungeon when doing this
            else:
                if self.mode == "movement":
                    if not self.freemove_enabled:
                        self.movebot.move_mainloop()
                    else:
                        self.freemovebot.freemove_mainloop()
                elif self.mode == "combat":
                    # combat_cooldown holds a future timestamp; only fight
                    # once it has elapsed.
                    if (time.time() - self.combat_cooldown) > 0:
                        self.combatbat.combat_mainloop()
                else:
                    print("Error, no mode selected")
                    time.sleep(2)

    def perform_record_playback_checks(self):
        """Service recording/playback flags; return True when one was handled.

        Returns None (falsy) when no flag is active, which the caller treats
        as "run the normal bot loop".
        """
        if self.playback_ongoing:
            # This is when the playback gets called
            self.playback.playActions(self.playback_string + ".json")
            time.sleep(0.5)
            self.playback_ongoing = False
            return True
        elif self.playback_input_flag:
            # This is for when inputting the details
            time.sleep(0.5)
            return True
        elif self.recording_ready:
            # Start recording
            time.sleep(3)
            print("Now recording!")
            self.recording_ongoing = True
            self.recording_ready = False
            self.recorder.start_time = time.time()
            return True
        elif self.recording_ongoing:
            # This is to allow recording to go on
            time.sleep(0.5)
            return True

    def start_mouse_listener(self):
        """Spawn the pynput mouse listener that feeds on_click."""
        self.mouse_listener = mouse.Listener(
            on_click=self.on_click)
        self.mouse_listener.start()
        self.mouse_listener.wait()  # Need to test if this is required

    def start_keypress_listener(self):
        """Spawn the pynput keyboard listener (idempotent)."""
        if self.listener == None:
            self.listener = Listener(on_press=self.on_press,
                                     on_release=self.on_release)
            self.listener.start()

    def on_press(self, key):
        """Keyboard press handler: records keys or dispatches hotkeys.

        F12 stops a recording, F8 enters/starts playback-number entry,
        F10 arms recording, F5/F6/F7 toggle loot/combat/freemove.
        """
        if self.recording_ongoing:
            # Log the event if not the end key
            if key == keyboard.Key.f12:
                self.recording_ongoing = False
                self.recorder.write_recording_to_file()
                print("Finished recording #{}".format(
                    self.recorder.dest_file_count))
            else:
                if key not in self.recorder.unreleased_keys:
                    self.recorder.unreleased_keys.append(key)
                # key.char raises AttributeError for special keys, in which
                # case the Key object itself is recorded.
                try:
                    self.recorder.record_event(
                        EventType.KEYDOWN, self.recorder.elapsed_time(), key.char)
                except AttributeError:
                    self.recorder.record_event(
                        EventType.KEYDOWN, self.recorder.elapsed_time(), key)
        elif self.playback_ongoing:
            # Do nothing
            pass
        elif self.playback_input_flag:
            # add the key to existing string
            # This is causing a bug if press escape
            if key == keyboard.Key.f8:
                self.playback_input_flag = not self.playback_input_flag
                print("Starting playback of #" + self.playback_string)
                self.playback_ongoing = True
            else:
                try:
                    self.playback_string += str(key.char)
                except:
                    pass
        elif key == keyboard.Key.f8:
            # This can only be reached if not entering playback number
            self.playback_input_flag = not self.playback_input_flag
            self.playback_string = ""
            print("Select a recording number")
        elif key == keyboard.Key.f10:
            print("Starting recording in 3 seconds")
            self.recording_ready = True
        elif key == keyboard.Key.f5:
            self.loot_enabled = not self.loot_enabled
            if self.loot_enabled:
                print("LOOT ON")
            else:
                print("LOOT OFF")
        elif key == keyboard.Key.f6:
            self.combat_enabled = not self.combat_enabled
            if self.combat_enabled:
                print("COMBAT ON")
            else:
                print("COMBAT OFF")
        elif key == keyboard.Key.f7:
            self.freemove_enabled = not self.freemove_enabled
            if self.freemove_enabled:
                print("FREEMOVE ON")
            else:
                print("FREEMOVE OFF")

    def on_release(self, key):
        """Keyboard release handler: records key-ups, F11 aborts, F4 exits."""
        # Need to have an exit recording or playback only button (=?)
        if self.recording_ongoing:
            if key == keyboard.Key.f11:
                # Wipe all the collected data
                self.recording_ongoing = False
                self.recorder.unreleased_keys = []
                self.recorder.input_events = []
            else:
                try:
                    self.recorder.unreleased_keys.remove(key)
                except ValueError:
                    print('ERROR: {} not in unreleased_keys'.format(key))
                try:
                    self.recorder.record_event(
                        EventType.KEYUP, self.recorder.elapsed_time(), key.char)
                except AttributeError:
                    self.recorder.record_event(
                        EventType.KEYUP, self.recorder.elapsed_time(), key)
        if self.playback_ongoing:
            if key == keyboard.Key.f11:
                # find a way to stop the action playback
                self.playback_ongoing = False
                # Now need to release all keys while waiting for the
                # playback to catch up
                self.remove_all_keypresses()
        if key == keyboard.Key.f4:
            self.bot_running = False
            # self.combatbat.running = False
            # Need to pause for 1 second and then clear all keypresses
            time.sleep(0.5)
            # self.combatbat.remove_all_keypresses()
            print("Exiting bot")
            os._exit(1)

    def on_click(self, x, y, button, pressed):
        """Mouse handler: records click *releases* as window-relative ratios."""
        # when pressed is False, that means it's a release event.
        # let's listen only to mouse click releases
        if self.recording_ongoing:
            if not pressed:
                # Need to get the ratio compared to window top left
                # This will allow common usage on other size monitors
                xratio, yratio = self.convert_click_to_ratio(x, y)
                self.recorder.record_event(
                    EventType.CLICK, self.recorder.elapsed_time(), button, (xratio, yratio))

    def start_countdown(self):
        """Print a 3-second countdown, then flag the bot as running."""
        print("Bot starting in 3 seconds")
        time.sleep(1)
        print("Bot starting in 2 seconds")
        time.sleep(1)
        print("Bot starting in 1 seconds")
        time.sleep(1)
        self.bot_running = True

    def remove_all_keypresses(self):
        """Release every movement/skill key the bot may be holding down."""
        for key in ["up", "down", "left", "right"]:
            pyautogui.keyUp(key)
        for key in ["a", "s", "d", "f", "g", "h"]:
            pyautogui.keyUp(key)

    def convert_click_to_ratio(self, truex, truey):
        """Convert absolute screen coords into window-relative ratios.

        NOTE(review): the 1.5 factors appear to compensate for a fixed 150%
        display scaling — confirm on other DPI settings.
        """
        # This will grab the current rectangle coords of game window
        # and then turn the click values into a ratio of positions
        # versus the game window
        self.game_wincap.update_window_position(border=False)
        # Turn the screen pos into window pos
        relx = truex - self.game_wincap.window_rect[0] * 1.5
        rely = truey - self.game_wincap.window_rect[1] * 1.5
        # Then convert to a ratio
        ratx = relx / (self.game_wincap.w * 1.5)
        raty = rely / (self.game_wincap.h * 1.5)
        return ratx, raty

    def convert_ratio_to_click(self, ratx, raty):
        """Inverse of convert_click_to_ratio: ratios back to screen coords."""
        # This will grab the current rectangle coords of game window
        # and then turn the ratio of positions versus the game window
        # into true x,y coords
        self.game_wincap.update_window_position(border=False)
        # Turn the ratios into relative
        relx = int(ratx * self.game_wincap.w)
        rely = int(raty * self.game_wincap.h)
        # Turn the relative into true
        truex = int((relx + self.game_wincap.window_rect[0]) * 1.5)
        truey = int((rely + self.game_wincap.window_rect[1]) * 1.5)
        return truex, truey

    def convert_pynput_to_pag(self, button):
        """Map a pynput key name to the equivalent pydirectinput/pyautogui name."""
        PYNPUT_SPECIAL_CASE_MAP = {
            'alt_l': 'altleft',
            'alt_r': 'altright',
            'alt_gr': 'altright',
            'caps_lock': 'capslock',
            'ctrl_l': 'ctrlleft',
            'ctrl_r': 'ctrlright',
            'page_down': 'pagedown',
            'page_up': 'pageup',
            'shift_l': 'shiftleft',
            'shift_r': 'shiftright',
            'num_lock': 'numlock',
            'print_screen': 'printscreen',
            'scroll_lock': 'scrolllock',
        }
        # example: 'Key.F9' should return 'F9', 'w' should return as 'w'
        cleaned_key = button.replace('Key.', '')
        if cleaned_key in PYNPUT_SPECIAL_CASE_MAP:
            return PYNPUT_SPECIAL_CASE_MAP[cleaned_key]
        return cleaned_key
class EventType():
    """String tags used to label recorded input events in the JSON log."""
    KEYDOWN = 'keyDown'
    KEYUP = 'keyUp'
    CLICK = 'click'
class Recorder():
    """Accumulates timestamped input events and dumps them to recordings/N.json."""

    def __init__(self, controller: Controller2) -> None:
        self.controller = controller
        # declare mouse_listener globally so that keyboard on_release can stop it
        self.mouse_listener = controller.listener
        # declare our start_time globally so that the callback functions can reference it
        self.start_time = None
        # keep track of unreleased keys to prevent over-reporting press events
        self.unreleased_keys = []
        # storing all input events
        self.input_events = []
        self.dest_file_count = 0

    def elapsed_time(self):
        """Seconds since the recording started."""
        return time.time() - self.start_time

    def write_recording_to_file(self):
        """Write the captured events to the next numbered JSON file."""
        # Here will write to the json file
        # write the output to a file
        script_dir = os.path.dirname(__file__)
        dest_dir = os.path.join(
            script_dir,
            'recordings')
        # Now get the number of files in recordings folder already
        _, _, files = next(os.walk(dest_dir))
        self.dest_file_count = len(files) + 1
        dest_file_name = str(self.dest_file_count)
        filepath = os.path.join(
            dest_dir,
            '{}.json'.format(dest_file_name)
        )
        with open(filepath, 'w') as outfile:
            json.dump(self.input_events, outfile, indent=4)

    def record_event(self, event_type, event_time, button, pos=None):
        """Append one event dict; `pos` is only set for click events."""
        self.input_events.append({
            'time': event_time,
            'type': event_type,
            'button': str(button),
            'pos': pos
        })
        # NOTE: debug prints below are disabled; both branches are no-ops.
        if event_type == EventType.CLICK:
            pass
            # print('{} on {} pos {} at {}'.format(
            #     event_type, button, pos, event_time))
        else:
            pass
            # print('{} on {} at {}'.format(event_type, button, event_time))
class Playback():
    """Replays a recorded JSON event file through pydirectinput."""

    def __init__(self, controller: Controller2) -> None:
        self.controller = controller

    def move_mouse_centre(self):
        """Park the cursor mid-screen before playback starts."""
        pyautogui.moveTo(900, 500, 0.05)

    def playActions(self, filename):
        """Replay every event in recordings/<filename>, honouring timings.

        Aborts early if the controller clears playback_ongoing (F11).
        """
        # The usual logic here
        # Only difference is that first thing need to do is sleep for 3 seconds
        print("Starting playback in 2 seconds")
        time.sleep(2)
        print("Starting playback")
        # and then will move the mouse to stop any flow/mwb problems
        self.move_mouse_centre()
        # And then continue
        script_dir = os.path.dirname(__file__)
        filepath = os.path.join(
            script_dir,
            'recordings',
            filename
        )
        with open(filepath, 'r') as jsonfile:
            # parse the json
            data = json.load(jsonfile)
            # loop over each action
            # Because we are not waiting any time before executing the first action, any delay before the initial
            # action is recorded will not be reflected in the playback.
            for index, action in enumerate(data):
                action_start_time = time.time()
                # Need to exit if the terminate key is pressed
                if not self.controller.playback_ongoing:
                    break
                # perform the action
                if action['type'] == 'keyDown':
                    key = self.controller.convert_pynput_to_pag(
                        action['button'])
                    pyautogui.keyDown(key)
                elif action['type'] == 'keyUp':
                    key = self.controller.convert_pynput_to_pag(
                        action['button'])
                    pyautogui.keyUp(key)
                elif action['type'] == 'click':
                    # Convert the recorded window-relative ratio back into
                    # actual screen positions.
                    x, y = self.controller.convert_ratio_to_click(action['pos'][0],
                                                                  action['pos'][1])
                    # NOTE(review): dividing by 1.5 here presumably undoes a
                    # 150% display-scaling factor — confirm on other DPIs.
                    x = int(x / 1.5)
                    y = int(y / 1.5)
                    pyautogui.click(x, y, duration=0.15)
                # then sleep until next action should occur
                try:
                    next_action = data[index + 1]
                except IndexError:
                    # this was the last action in the list
                    break
                elapsed_time = next_action['time'] - action['time']
                # if elapsed_time is negative, that means our actions are not ordered correctly. throw an error
                if elapsed_time < 0:
                    raise Exception('Unexpected action ordering.')
                # adjust elapsed_time to account for our code taking time to run
                elapsed_time -= (time.time() - action_start_time)
                if elapsed_time < 0:
                    elapsed_time = 0
                time.sleep(elapsed_time)
        print("Finished playback")
if __name__ == "__main__":
    # Run the controller with its default loot/combat settings.
    cont = Controller2()
    cont.start_controller()
| 38.697727 | 113 | 0.55782 |
91c3e6284e4cd61539435deb60531a2199bdec3a | 1,603 | py | Python | onnxmltools/convert/sparkml/shape_calculators/LogisticRegression.py | stevenlix/onnxmltools | e0953d90f99d076658ed76a6fd6ed51610786e79 | [
"MIT"
] | null | null | null | onnxmltools/convert/sparkml/shape_calculators/LogisticRegression.py | stevenlix/onnxmltools | e0953d90f99d076658ed76a6fd6ed51610786e79 | [
"MIT"
] | null | null | null | onnxmltools/convert/sparkml/shape_calculators/LogisticRegression.py | stevenlix/onnxmltools | e0953d90f99d076658ed76a6fd6ed51610786e79 | [
"MIT"
] | null | null | null | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from ...common.data_types import Int64TensorType, DictionaryType, SequenceType, FloatTensorType
from ...common._registration import register_shape_calculator
from ...common.utils import check_input_and_output_numbers, check_input_and_output_types
def calculate_logistic_regression_output_shapes(operator):
    '''
    This operator maps an input feature vector into a scalar label if the number of outputs is one. If two outputs
    appear in this operator's output list, we should further generate a map storing all classes' probabilities.

    Allowed input/output patterns are
        1. [N, C] ---> [N, 1], A sequence of map
    '''
    num_classes = operator.raw_operator.numClasses
    check_input_and_output_numbers(
        operator, input_count_range=1, output_count_range=[1, num_classes])
    check_input_and_output_types(
        operator, good_input_types=[FloatTensorType, Int64TensorType])

    input_shape = operator.inputs[0].type.shape
    if len(input_shape) != 2:
        raise RuntimeError('Input must be a [N, C]-tensor')

    batch_size = input_shape[0]
    # First output: one predicted label per example; second output: the
    # per-class score matrix.
    operator.outputs[0].type = Int64TensorType(shape=[batch_size])
    operator.outputs[1].type = FloatTensorType([batch_size, num_classes])


register_shape_calculator('pyspark.ml.classification.LogisticRegressionModel',
                          calculate_logistic_regression_output_shapes)
| 50.09375 | 123 | 0.699314 |
2d151ecbb03d635d10bfb1b85d891ec411f491ba | 2,321 | py | Python | find_social_media_links.py | ezzieyguywuf/gsscrapp | d77fa53b1b0f790549fb6abab143600045d4ce1e | [
"MIT"
] | null | null | null | find_social_media_links.py | ezzieyguywuf/gsscrapp | d77fa53b1b0f790549fb6abab143600045d4ce1e | [
"MIT"
] | null | null | null | find_social_media_links.py | ezzieyguywuf/gsscrapp | d77fa53b1b0f790549fb6abab143600045d4ce1e | [
"MIT"
] | null | null | null | import sys
import requests
from bs4 import BeautifulSoup
import re
def updateListOfLinks(links, link):
    """
    Append ``link``'s href to ``links`` in place.

    Only absolute http(s) URLs are kept, and duplicates are skipped.
    A missing href (``None``) is ignored instead of raising, which the
    original ``href[:4]`` slice would do via ``TypeError``.
    """
    href = link.get('href')
    if href is not None and href.startswith("http") and href not in links:
        links.append(href)
def getLinks(url):
    """
    Will scrape the given url, and return any social media links within.

    Returns a dict with "facebook" and "twitter" keys; the values are either
    the found URL(s) (comma-joined when several), or a status string such as
    "timed out", "SSL error", "captcha" or "could not find".
    """
    print("Fetching content from " + url)
    timeout = 5
    try:
        response = requests.get(url, timeout=timeout)
    except requests.exceptions.ReadTimeout:
        print(" timed out after {} secs".format(timeout))
        return {"facebook": "timed out", "twitter": "timed out"}
    except requests.exceptions.SSLError:
        return {"facebook": "SSL error", "twitter": "SSL error"}
    soup = BeautifulSoup(response.content, 'html.parser')
    # Collect every distinct absolute link mentioning each network.
    facebookLinks = []
    for link in soup.find_all(href=re.compile("facebook")):
        updateListOfLinks(facebookLinks, link)
    twitterLinks = []
    for link in soup.find_all(href=re.compile("twitter")):
        updateListOfLinks(twitterLinks, link)
    if len(facebookLinks) == 1 and len(twitterLinks) == 1:
        return {"facebook": facebookLinks[0], "twitter": twitterLinks[0]}
    # print("for url = " + url)
    if len(facebookLinks) > 1 and len(twitterLinks) == 1:
        # print("  WARNING! more than one facebook link found")
        return {"facebook": ",".join(facebookLinks), "twitter": twitterLinks[0]}
    elif len(facebookLinks) == 1 and len(twitterLinks) > 1:
        # print("  WARNING! more than one twitter link found")
        return {"facebook": facebookLinks[0], "twitter": ",".join(twitterLinks)}
    elif len(facebookLinks) > 1 and len(twitterLinks) > 1:
        # print("  WARNING! more than one facebook and twitter link found")
        return {"facebook": ",".join(facebookLinks), "twitter": ",".join(twitterLinks)}
    elif len(soup.find_all(id = re.compile("captcha"))) > 0:
        # A captcha element suggests the site blocked the scrape.
        return {"facebook": "captcha", "twitter": "captcha"}
    else:
        return {"facebook": "could not find", "twitter": "could not find"}
if __name__ == "__main__":
    # Expect exactly one CLI argument: the URL to scrape.
    if len(sys.argv) < 2:
        print("Please provide a URL of a website where you'd like me to find social media links")
        sys.exit(1)
    url = sys.argv[1]
    print(getLinks(url))
| 37.435484 | 97 | 0.63464 |
2821d5433bcf2d45ba872a3ab011432bd7386cf6 | 315 | py | Python | rasa_learn/action.py | wangjianhua1/rasa_demo | 3f81c746cec1f522a6454f42d613eafb1a564457 | [
"Apache-2.0"
] | null | null | null | rasa_learn/action.py | wangjianhua1/rasa_demo | 3f81c746cec1f522a6454f42d613eafb1a564457 | [
"Apache-2.0"
] | null | null | null | rasa_learn/action.py | wangjianhua1/rasa_demo | 3f81c746cec1f522a6454f42d613eafb1a564457 | [
"Apache-2.0"
] | null | null | null | from rasa_core_sdk import Action
from rasa_core_sdk.events import SlotSet
# Inherits from the rasa_core_sdk Action base class
class ActionAskWeather(Action):
    """Custom action that asks the user which city's weather they want."""

    def name(self):
        # Identifier this action is referenced by in the domain file.
        return 'action_ask_weather'

    def run(self, dispatcher, tracker, domain):
        dispatcher.utter_message(f'您访问的天气地点是哪里呢')
        # SlotSet takes the slot key and value as two separate arguments;
        # the original passed a single tuple, which would create a slot
        # literally named "('city', '武汉')" instead of setting "city".
        return [SlotSet('city', '武汉')]
| 24.230769 | 49 | 0.707937 |
7f60efce09e831149204bf00c6dc29f00fab7131 | 490 | py | Python | corehq/apps/cachehq/invalidate.py | dslowikowski/commcare-hq | ad8885cf8dab69dc85cb64f37aeaf06106124797 | [
"BSD-3-Clause"
] | 1 | 2015-02-10T23:26:39.000Z | 2015-02-10T23:26:39.000Z | corehq/apps/cachehq/invalidate.py | SEL-Columbia/commcare-hq | 992ee34a679c37f063f86200e6df5a197d5e3ff6 | [
"BSD-3-Clause"
] | null | null | null | corehq/apps/cachehq/invalidate.py | SEL-Columbia/commcare-hq | 992ee34a679c37f063f86200e6df5a197d5e3ff6 | [
"BSD-3-Clause"
] | null | null | null | from corehq.pillows import cacheinvalidate
cache_pillow = cacheinvalidate.CacheInvalidatePillow()
def invalidate_document(document, deleted=False):
"""
Invalidates a document in the cached_core caching framework.
"""
# this is a hack that use the caching pillow invalidation that was intended to be
# rolled out to track this globally.
cache_pillow.change_trigger({
'doc': document.to_json(),
'id': document._id,
'deleted': deleted,
})
| 30.625 | 85 | 0.702041 |
cbced80c1ab29ed07b1d19bc232c0a1b14f77eaf | 5,136 | py | Python | qa/rpc-tests/importprunedfunds.py | platincoin-project/platincoin | e3c4cfc90c37293c1dce8a829b5eb64c587fd33d | [
"MIT"
] | 17 | 2018-06-11T08:21:39.000Z | 2022-01-10T21:08:21.000Z | qa/rpc-tests/importprunedfunds.py | cryptoraver/platincoin-DPOS-Core_wallet | 07d5bff1eb1b4ec7e3d44ecd2a84e69346fbb8f9 | [
"MIT"
] | 2 | 2021-05-08T11:13:07.000Z | 2021-05-09T12:39:45.000Z | qa/rpc-tests/importprunedfunds.py | coinnoob/platincoin_scam | 2584ff0d47e557c976613ee6271521ae30a53fc6 | [
"MIT"
] | 9 | 2018-06-11T08:21:45.000Z | 2021-05-07T20:09:55.000Z | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class ImportPrunedFundsTest(BitcoinTestFramework):
    """Functional test for importprunedfunds/removeprunedfunds RPCs.

    Node 0 mines and sends funds; node 1 imports the raw transactions plus
    merkle proofs (as a pruned node would) and verifies resulting balances.
    """

    def __init__(self):
        super().__init__()
        self.setup_clean_chain = True
        self.num_nodes = 2

    def setup_network(self, split=False):
        # Two connected nodes, no network split.
        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
        connect_nodes_bi(self.nodes,0,1)
        self.is_network_split=False
        self.sync_all()

    def run_test(self):
        print("Mining blocks...")
        # 10 + 110 blocks so the first coinbases mature.
        self.nodes[0].generate(10)
        self.nodes[0].generate(110)
        self.sync_all()
        # address
        address1 = self.nodes[0].getnewaddress()
        # pubkey
        address2 = self.nodes[0].getnewaddress()
        address2_pubkey = self.nodes[0].validateaddress(address2)['pubkey'] # Using pubkey
        # privkey
        address3 = self.nodes[0].getnewaddress()
        address3_privkey = self.nodes[0].dumpprivkey(address3) # Using privkey
        #Check only one address
        address_info = self.nodes[0].validateaddress(address1)
        assert_equal(address_info['ismine'], True)
        self.sync_all()
        #Node 1 sync test
        assert_equal(self.nodes[1].getblockcount(),120)
        #Address Test - before import
        address_info = self.nodes[1].validateaddress(address1)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)
        address_info = self.nodes[1].validateaddress(address2)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)
        address_info = self.nodes[1].validateaddress(address3)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)
        #Send funds to self
        txnid1 = self.nodes[0].sendtoaddress(address1, 100)
        self.nodes[0].generate(1)
        rawtxn1 = self.nodes[0].gettransaction(txnid1)['hex']
        proof1 = self.nodes[0].gettxoutproof([txnid1])
        txnid2 = self.nodes[0].sendtoaddress(address2, 50)
        self.nodes[0].generate(1)
        rawtxn2 = self.nodes[0].gettransaction(txnid2)['hex']
        proof2 = self.nodes[0].gettxoutproof([txnid2])
        txnid3 = self.nodes[0].sendtoaddress(address3, 25)
        self.nodes[0].generate(1)
        rawtxn3 = self.nodes[0].gettransaction(txnid3)['hex']
        proof3 = self.nodes[0].gettxoutproof([txnid3])
        self.sync_all()
        # Bury the transactions under 10 confirmations.
        self.nodes[0].generate(10)
        self.sync_all()
        #Import with no affiliated address
        try:
            self.nodes[1].importprunedfunds(rawtxn1, proof1)
        except JSONRPCException as e:
            assert('No addresses' in e.error['message'])
        else:
            assert(False)
        balance1 = self.nodes[1].getbalance("", 0, True)
        assert_equal(balance1, Decimal(0))
        #Import with affiliated address with no rescan
        self.nodes[1].importaddress(address2, "add2", False)
        result2 = self.nodes[1].importprunedfunds(rawtxn2, proof2)
        balance2 = self.nodes[1].getbalance("add2", 0, True)
        assert_equal(balance2, Decimal('50'))
        #Import with private key with no rescan
        self.nodes[1].importprivkey(address3_privkey, "add3", False)
        result3 = self.nodes[1].importprunedfunds(rawtxn3, proof3)
        balance3 = self.nodes[1].getbalance("add3", 0, False)
        assert_equal(balance3, Decimal('25'))
        balance3 = self.nodes[1].getbalance("*", 0, True)
        assert_equal(balance3, Decimal('75'))
        #Addresses Test - after import
        address_info = self.nodes[1].validateaddress(address1)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)
        address_info = self.nodes[1].validateaddress(address2)
        assert_equal(address_info['iswatchonly'], True)
        assert_equal(address_info['ismine'], False)
        address_info = self.nodes[1].validateaddress(address3)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], True)
        #Remove transactions
        try:
            self.nodes[1].removeprunedfunds(txnid1)
        except JSONRPCException as e:
            assert('does not exist' in e.error['message'])
        else:
            assert(False)
        balance1 = self.nodes[1].getbalance("*", 0, True)
        assert_equal(balance1, Decimal('75'))
        self.nodes[1].removeprunedfunds(txnid2)
        balance2 = self.nodes[1].getbalance("*", 0, True)
        assert_equal(balance2, Decimal('25'))
        self.nodes[1].removeprunedfunds(txnid3)
        balance3 = self.nodes[1].getbalance("*", 0, True)
        assert_equal(balance3, Decimal('0.0'))
if __name__ == '__main__':
    # Run the functional test under the framework's standard harness.
    ImportPrunedFundsTest().main()
| 37.217391 | 107 | 0.640576 |
862f643b179be846a09601b77464ffe53c7f9844 | 1,518 | py | Python | tests/unit/test_parser.py | viktorkertesz/netutils | cf6541a25dd918d377ad34562826dc95ec4bfaea | [
"Apache-2.0"
] | null | null | null | tests/unit/test_parser.py | viktorkertesz/netutils | cf6541a25dd918d377ad34562826dc95ec4bfaea | [
"Apache-2.0"
] | null | null | null | tests/unit/test_parser.py | viktorkertesz/netutils | cf6541a25dd918d377ad34562826dc95ec4bfaea | [
"Apache-2.0"
] | null | null | null | """Test for the network os parser functions."""
import glob
import os
import pytest
from netutils.config import compliance
# Directory holding the mock device configs used to exercise each parser.
MOCK_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "mock", "config", "parser")
TXT_FILE = "_sent.txt"

# Build one [file, network_os] pair per mock "*_sent.txt" config, for every
# network OS netutils ships a parser for; consumed by the parametrized test.
parameters = []
for network_os in list(compliance.parser_map.keys()):
    for _file in glob.glob(f"{MOCK_DIR}/{network_os}/*{TXT_FILE}"):
        parameters.append([_file, network_os])
@pytest.mark.parametrize("_file, network_os", parameters)
def test_parser(_file, network_os, get_text_data, get_python_data):  # pylint: disable=redefined-outer-name
    """Each parser's output must match the pre-computed "_received.py" data."""
    truncate_file = os.path.join(MOCK_DIR, _file[: -len(TXT_FILE)])
    device_cfg = get_text_data(os.path.join(MOCK_DIR, _file))
    # Expected result lives next to the input, with a "_received.py" suffix.
    received_data = get_python_data(truncate_file + "_received.py", "data")
    os_parser = compliance.parser_map[network_os]
    assert os_parser(device_cfg).config_lines == received_data
def test_incorrect_banner_ios():
    """A malformed IOS banner (delimiter fused to the last line) must raise."""
    banner_cfg = (
        "aaa new-model\n"
        "!\n"
        "banner exec c\n"
        "**************************************************************************\n"
        "* IOSv is strictly limited to use for evaluation, demonstration and IOS. *\n"
        "**************************************************************************c\n"
        "!\n"
        "ip route 0.0.0.0 0.0.0.0 192.168.1.1\n"
    )
    with pytest.raises(ValueError):
        compliance.parser_map["cisco_ios"](banner_cfg).config_lines  # pylint: disable=expression-not-assigned
| 37.95 | 110 | 0.62253 |
bd859b3c8708c0d7ab9c8a6c8fb87400c3156f81 | 1,607 | py | Python | examples/flip_mesh_normals.py | ScanMountGoat/ssbh_data_py | f4bb5c0fb0b27f3f791842518a0ae6554fcb318a | [
"MIT"
] | 3 | 2021-04-04T04:16:58.000Z | 2022-02-01T18:42:14.000Z | examples/flip_mesh_normals.py | ScanMountGoat/ssbh_data_py | f4bb5c0fb0b27f3f791842518a0ae6554fcb318a | [
"MIT"
] | 34 | 2021-05-01T15:12:38.000Z | 2022-03-24T01:59:50.000Z | examples/flip_mesh_normals.py | ScanMountGoat/ssbh_data_py | f4bb5c0fb0b27f3f791842518a0ae6554fcb318a | [
"MIT"
] | null | null | null | # This script demonstrates how to flip normals and reverse winding order.
# Winding order primarily affects face culling.
# If normals on a model appear "flipped" or inside out, this script may help.
# Ideally, these should both be fixed in a 3D modeling program before exporting.
import ssbh_data_py
# It may not be necessary to reverse both normals and winding order in all cases.
reverse_winding_order = True
flip_normals = False

mesh = ssbh_data_py.mesh_data.read_mesh("model.numshb")
# Renamed the loop variable from `object`, which shadowed the builtin.
for mesh_object in mesh.objects:
    # Reverse the winding order of each triangle face by swapping the
    # second and third index of every triangle.
    if reverse_winding_order:
        for i in range(0, len(mesh_object.vertex_indices), 3):
            [v0, v1, v2] = mesh_object.vertex_indices[i:i+3]
            mesh_object.vertex_indices[i:i+3] = [v0, v2, v1]

    # Flip the normals and related vectors by negating their XYZ components.
    if flip_normals:
        for normal in mesh_object.normals:
            normal.data = [[-x, -y, -z, 0.0] for [x, y, z, _] in normal.data]
            # normal.data = [[-x, -y, -z] for [x, y, z] in normal.data]  # 3-component normals

        # Some models may also have tangents and binormals (bitangents) to flip as well.
        for binormal in mesh_object.binormals:
            binormal.data = [[-x, -y, -z] for [x, y, z] in binormal.data]
            # binormal.data = [[-x, -y, -z, 0.0] for [x, y, z, _] in binormal.data]  # 4-component binormals
        for tangent in mesh_object.tangents:
            tangent.data = [[-x, -y, -z, w] for [x, y, z, w] in tangent.data]
            # tangent.data = [[-x, -y, -z] for [x, y, z] in tangent.data]  # 3-component tangents

mesh.save("model.numshb")
| 45.914286 | 107 | 0.643435 |
12de1361ca429aeedc0d48f047e3f9d2b2410871 | 333 | py | Python | inheritance_exercise/players_and_monsters/project/main.py | Veselin-Stoilov/software-university-OOP | 452a77cabf2e7d93f30f629c67c6b22682eb255d | [
"MIT"
] | null | null | null | inheritance_exercise/players_and_monsters/project/main.py | Veselin-Stoilov/software-university-OOP | 452a77cabf2e7d93f30f629c67c6b22682eb255d | [
"MIT"
] | null | null | null | inheritance_exercise/players_and_monsters/project/main.py | Veselin-Stoilov/software-university-OOP | 452a77cabf2e7d93f30f629c67c6b22682eb255d | [
"MIT"
] | null | null | null | from inheritance_exercise.players_and_monsters.project.elf import Elf
from inheritance_exercise.players_and_monsters.project.hero import Hero
hero = Hero("H", 4)
print(hero.username)
print(hero.level)
print(str(hero))
elf = Elf("E", 4)
print(str(elf))
print(elf.__class__.__bases__[0].__name__)
print(elf.username)
print(elf.level)
| 25.615385 | 71 | 0.792793 |
c23f3aa27532502b0b12dadd3d2569f61457166f | 20,154 | py | Python | examples/nlp/question_answering/question_answering_squad.py | aasseman/NeMo | 78b6befb05838f2e8917aee7654cb2c1bc560ecc | [
"Apache-2.0"
] | 1 | 2020-08-04T08:29:41.000Z | 2020-08-04T08:29:41.000Z | examples/nlp/question_answering/question_answering_squad.py | aasseman/NeMo | 78b6befb05838f2e8917aee7654cb2c1bc560ecc | [
"Apache-2.0"
] | null | null | null | examples/nlp/question_answering/question_answering_squad.py | aasseman/NeMo | 78b6befb05838f2e8917aee7654cb2c1bc560ecc | [
"Apache-2.0"
] | null | null | null | # =============================================================================
# Copyright 2020 NVIDIA. All Rights Reserved.
# Copyright 2018 The Google AI Language Team Authors and
# The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""
Some transformer of this code were adapted from the HuggingFace library at
https://github.com/huggingface/transformers
Download the SQuAD data by running the script:
examples/nlp/question_answering/get_squad.py
To finetune SQuADv1.1 on pretrained BERT Base uncased on 1 GPU:
python question_answering_squad.py
--train_file /path_to_data_dir/squad/v1.1/train-v1.1.json
--eval_file /path_to_data_dir/squad/v1.1/dev-v1.1.json
--work_dir /path_to_output_folder
--bert_config /path_to/bert-config.json
--pretrained_model_name bert-base-uncased
--bert_checkpoint /path_to_bert_checkpoint
--amp_opt_level "O2"
--batch_size 24
--num_epochs 2
--lr_policy WarmupAnnealing
--optimizer fused_adam
--lr 3e-5
--do_lower_case
--mode train_eval
--no_data_cache
If --bert_checkpoint and --bert_config are not specified, training starts from
Huggingface pretrained checkpoints.
To finetune SQuADv1.1 on pretrained BERT large uncased on 8 GPU change to:
python -m torch.distributed.launch --nproc_per_node=8 question_answering_squad.py
--batch_size 3
--num_gpus 8
...
This takes about 18 minutes.
To finetune on SQuADv2.0 which allows non-answerable questions, add the flag --version_2_with_negative.
To run only evaluation on pretrained question answering checkpoints on 1 GPU with ground-truth data:
python question_answering_squad.py
--eval_file /path_to_data_dir/test.json
--checkpoint_dir /path_to_checkpoints
--mode eval
To run only inference on pretrained question answering checkpoints on 1 GPU without ground-truth data:
python question_answering_squad.py
--test_file /path_to_data_dir/test.json
--checkpoint_dir /path_to_checkpoints
--mode test
Finetuned SQuAD models and model configuration files can be found at
https://ngc.nvidia.com/catalog/models/nvidia:bertlargeuncasedsquadv1
https://ngc.nvidia.com/catalog/models/nvidia:bertlargeuncasedsquadv2
https://ngc.nvidia.com/catalog/models/nvidia:bertbaseuncasedsquadv1
https://ngc.nvidia.com/catalog/models/nvidia:bertbaseuncasedsquadv2
On BERT base uncased pretrained model
the final Exact Match (EM) and F1 scores are as follows:
Data EM F1
SQuADv1.1 82.74 89.79
SQuADv2.0 71.24 74.32
On BERT large uncased pretrained model
the final Exact Match (EM) and F1 scores are as follows:
Data EM F1
SQuADv1.1 85.79 92.28
SQuADv2.0 80.17 83.32
"""
import argparse
import json
import os
import numpy as np
import nemo.collections.nlp as nemo_nlp
import nemo.collections.nlp.data.tokenizers.tokenizer_utils
import nemo.core as nemo_core
from nemo import logging
from nemo.collections.nlp.callbacks.qa_squad_callback import eval_epochs_done_callback, eval_iter_callback
from nemo.utils.lr_policies import get_lr_policy
def parse_args():
    """Build and parse the command-line arguments for SQuAD training/evaluation.

    Returns:
        argparse.Namespace with all options documented in the module docstring.
    """
    parser = argparse.ArgumentParser(description="Squad_with_pretrained_BERT")
    parser.add_argument(
        "--train_file", type=str, help="The training data file. Should be *.json",
    )
    parser.add_argument(
        "--eval_file", type=str, help="The evaluation data file. Should be *.json",
    )
    parser.add_argument(
        "--test_file", type=str, help="The test data file. Should be *.json. Does not need to contain ground truth",
    )
    parser.add_argument(
        '--pretrained_model_name',
        default='roberta-base',
        type=str,
        help='Name of the pre-trained model',
        choices=nemo_nlp.nm.trainables.get_pretrained_lm_models_list(),
    )
    parser.add_argument("--checkpoint_dir", default=None, type=str, help="Checkpoint directory for inference.")
    parser.add_argument(
        "--bert_checkpoint", default=None, type=str, help="Path to BERT encoder checkpoint for finetuning."
    )
    parser.add_argument(
        "--head_checkpoint", default=None, type=str, help="Path to BERT QA head checkpoint for finetuning."
    )
    parser.add_argument("--bert_config", default=None, type=str, help="Path to bert config file in json format")
    parser.add_argument(
        "--tokenizer_model",
        default=None,
        type=str,
        help="Path to pretrained tokenizer model, only used if --tokenizer is sentencepiece",
    )
    parser.add_argument(
        "--tokenizer",
        default="nemobert",
        type=str,
        choices=["nemobert", "sentencepiece"],
        help="tokenizer to use, only relevant when using custom pretrained checkpoint.",
    )
    parser.add_argument("--optimizer", default="adam_w", type=str, help="Optimizer kind")
    parser.add_argument("--vocab_file", default=None, help="Path to the vocab file.")
    parser.add_argument("--lr_policy", default="WarmupAnnealing", type=str)
    parser.add_argument("--lr", default=3e-5, type=float, help="The initial learning rate.")
    parser.add_argument("--lr_warmup_proportion", default=0.0, type=float)
    # Fixed help-text typo: "deay" -> "decay".
    parser.add_argument("--weight_decay", default=0.0, type=float, help="Weight decay if we apply some.")
    parser.add_argument("--num_epochs", default=2, type=int, help="Total number of training epochs to perform.")
    parser.add_argument("--max_steps", default=-1, type=int, help="If specified overrides --num_epochs.")
    parser.add_argument("--batch_size", default=8, type=int, help="Batch size per GPU/CPU for training/evaluation.")
    parser.add_argument("--grad_norm_clip", type=float, default=-1, help="gradient clipping")
    parser.add_argument(
        "--do_lower_case",
        action='store_true',
        help="Whether to lower case the input text. True for uncased models, False for cased models.",
    )
    parser.add_argument(
        "--mode", default="train_eval", choices=["train", "train_eval", "eval", "test"], help="Mode of model usage."
    )
    parser.add_argument(
        "--no_data_cache", action='store_true', help="When specified do not load and store cache preprocessed data.",
    )
    parser.add_argument(
        "--doc_stride",
        default=128,
        type=int,
        help="When splitting up a long document into chunks, how much stride to take between chunks.",
    )
    parser.add_argument(
        "--max_query_length",
        default=64,
        type=int,
        help="The maximum number of tokens for the question. "
        "Questions longer than this will be truncated to "
        "this length.",
    )
    parser.add_argument(
        "--max_seq_length",
        default=384,
        type=int,
        help="The maximum total input sequence length after "
        "WordPiece tokenization. Sequences longer than this "
        "will be truncated, and sequences shorter than this "
        " will be padded.",
    )
    parser.add_argument("--num_gpus", default=1, type=int, help="Number of GPUs")
    # Fixed help-text typo: the AMP levels are "O1"/"O2" (letter O), not "01"/"02".
    parser.add_argument(
        "--amp_opt_level", default="O0", type=str, choices=["O0", "O1", "O2"], help="O1/O2 to enable mixed precision"
    )
    parser.add_argument("--local_rank", type=int, default=None, help="For distributed training: local_rank")
    parser.add_argument(
        "--work_dir",
        default='output_squad',
        type=str,
        help="The output directory where the model predictions and checkpoints will be written.",
    )
    parser.add_argument(
        "--save_epoch_freq",
        default=1,
        type=int,
        help="Frequency of saving checkpoint '-1' - epoch checkpoint won't be saved",
    )
    # Fixed copy-pasted help text: this option controls the *step* checkpoint.
    parser.add_argument(
        "--save_step_freq",
        default=-1,
        type=int,
        help="Frequency of saving checkpoint '-1' - step checkpoint won't be saved",
    )
    parser.add_argument("--train_step_freq", default=100, type=int, help="Frequency of printing training loss")
    parser.add_argument(
        "--eval_step_freq", default=500, type=int, help="Frequency of evaluation during training on evaluation data"
    )
    parser.add_argument(
        "--version_2_with_negative",
        action="store_true",
        help="If true, the examples contain some that do not have an answer.",
    )
    parser.add_argument(
        '--null_score_diff_threshold',
        type=float,
        default=0.0,
        help="If null_score - best_non_null is greater than the threshold predict null.",
    )
    parser.add_argument(
        "--n_best_size", default=20, type=int, help="The total number of n-best predictions to generate at testing.",
    )
    parser.add_argument("--batches_per_step", default=1, type=int, help="Number of iterations per step.")
    parser.add_argument(
        "--max_answer_length",
        default=30,
        type=int,
        help="The maximum length of an answer that can be "
        "generated. This is needed because the start "
        "and end predictions are not conditioned "
        "on one another.",
    )
    parser.add_argument(
        "--output_prediction_file",
        type=str,
        required=False,
        default="predictions.json",
        help="File to write predictions to. Only in evaluation or test mode.",
    )
    parser.add_argument(
        "--output_nbest_file",
        type=str,
        required=False,
        default="nbest.json",
        help="File to write nbest predictions to. Only in evaluation or test mode.",
    )
    args = parser.parse_args()
    return args
def create_pipeline(
    data_file,
    model,
    head,
    max_query_length,
    max_seq_length,
    doc_stride,
    batch_size,
    version_2_with_negative,
    mode,
    num_gpus=1,
    batches_per_step=1,
    loss_fn=None,
    use_data_cache=True,
):
    """Wire the data layer, BERT encoder and QA head into one NeMo graph.

    Builds a ``BertQuestionAnsweringDataLayer`` for ``data_file``, runs it
    through ``model`` (the BERT encoder) and ``head`` (the token classifier),
    and — except in "test" mode — attaches ``loss_fn``.

    Returns:
        In "test" mode: ``(steps_per_epoch, [unique_ids, qa_output], data_layer)``.
        Otherwise: ``(loss, steps_per_epoch,
        [unique_ids, start_logits, end_logits], data_layer)``.
    """
    # NOTE(review): `tokenizer` here is the module-level variable created in
    # the __main__ block, not a parameter — confirm before reusing this
    # function outside this script.
    data_layer = nemo_nlp.nm.data_layers.BertQuestionAnsweringDataLayer(
        mode=mode,
        version_2_with_negative=version_2_with_negative,
        batch_size=batch_size,
        tokenizer=tokenizer,
        data_file=data_file,
        max_query_length=max_query_length,
        max_seq_length=max_seq_length,
        doc_stride=doc_stride,
        shuffle="train" in mode,  # shuffle only for training splits
        use_cache=use_data_cache,
    )
    input_data = data_layer()
    hidden_states = model(
        input_ids=input_data.input_ids, token_type_ids=input_data.input_type_ids, attention_mask=input_data.input_mask
    )
    qa_output = head(hidden_states=hidden_states)
    # Effective steps per epoch after sharding across GPUs and step batching.
    steps_per_epoch = len(data_layer) // (batch_size * num_gpus * batches_per_step)
    if mode == "test":
        # No ground truth in test mode, so no loss is attached.
        return (
            steps_per_epoch,
            [input_data.unique_ids, qa_output],
            data_layer,
        )
    else:
        loss_output = loss_fn(
            logits=qa_output, start_positions=input_data.start_positions, end_positions=input_data.end_positions
        )
        return (
            loss_output.loss,
            steps_per_epoch,
            [input_data.unique_ids, loss_output.start_logits, loss_output.end_logits],
            data_layer,
        )
if __name__ == "__main__":
    args = parse_args()
    # Fail fast if any dataset file required by the selected mode is missing.
    if "train" in args.mode:
        if not os.path.exists(args.train_file):
            raise FileNotFoundError(
                "train data not found. Datasets can be obtained using examples/nlp/question_answering/get_squad.py"
            )
    if "eval" in args.mode:
        if not os.path.exists(args.eval_file):
            raise FileNotFoundError(
                "eval data not found. Datasets can be obtained using examples/nlp/question_answering/get_squad.py"
            )
    if "test" in args.mode:
        if not os.path.exists(args.test_file):
            raise FileNotFoundError(
                "test data not found. Datasets can be obtained using examples/nlp/question_answering/get_squad.py"
            )
    # Instantiate neural factory with supported backend
    nf = nemo_core.NeuralModuleFactory(
        backend=nemo_core.Backend.PyTorch,
        local_rank=args.local_rank,
        optimization_level=args.amp_opt_level,
        log_dir=args.work_dir,
        create_tb_writer=True,
        files_to_copy=[__file__],
        add_time_to_log_dir=False,
    )
    # Pretrained BERT encoder; config/vocab/checkpoint are optional overrides.
    model = nemo_nlp.nm.trainables.get_pretrained_lm_model(
        pretrained_model_name=args.pretrained_model_name,
        config=args.bert_config,
        vocab=args.vocab_file,
        checkpoint=args.bert_checkpoint,
    )
    # Module-level `tokenizer` is read by create_pipeline() above.
    tokenizer = nemo.collections.nlp.data.tokenizers.get_tokenizer(
        tokenizer_name=args.tokenizer,
        pretrained_model_name=args.pretrained_model_name,
        tokenizer_model=args.tokenizer_model,
        vocab_file=args.vocab_file,
        do_lower_case=args.do_lower_case,
    )
    hidden_size = model.hidden_size
    # QA head: two classes = answer-span start / end logits per token.
    qa_head = nemo_nlp.nm.trainables.TokenClassifier(
        hidden_size=hidden_size, num_classes=2, num_layers=1, log_softmax=False, name="TokenClassifier"
    )
    squad_loss = nemo_nlp.nm.losses.SpanningLoss()
    if args.head_checkpoint is not None:
        qa_head.restore_from(args.head_checkpoint)
    # Build one pipeline per requested mode; they share the encoder and head.
    if "train" in args.mode:
        train_loss, train_steps_per_epoch, _, _ = create_pipeline(
            data_file=args.train_file,
            model=model,
            head=qa_head,
            loss_fn=squad_loss,
            max_query_length=args.max_query_length,
            max_seq_length=args.max_seq_length,
            doc_stride=args.doc_stride,
            batch_size=args.batch_size,
            version_2_with_negative=args.version_2_with_negative,
            num_gpus=args.num_gpus,
            batches_per_step=args.batches_per_step,
            mode="train",
            use_data_cache=not args.no_data_cache,
        )
    if "eval" in args.mode:
        _, _, eval_output, eval_data_layer = create_pipeline(
            data_file=args.eval_file,
            model=model,
            head=qa_head,
            loss_fn=squad_loss,
            max_query_length=args.max_query_length,
            max_seq_length=args.max_seq_length,
            doc_stride=args.doc_stride,
            batch_size=args.batch_size,
            version_2_with_negative=args.version_2_with_negative,
            num_gpus=args.num_gpus,
            batches_per_step=args.batches_per_step,
            mode="eval",
            use_data_cache=not args.no_data_cache,
        )
    if "test" in args.mode:
        # Test mode reuses the `eval_output` name: the inference branch
        # below reads it for both eval and test.
        _, eval_output, test_data_layer = create_pipeline(
            data_file=args.test_file,
            model=model,
            head=qa_head,
            max_query_length=args.max_query_length,
            max_seq_length=args.max_seq_length,
            doc_stride=args.doc_stride,
            batch_size=args.batch_size,
            version_2_with_negative=args.version_2_with_negative,
            num_gpus=args.num_gpus,
            batches_per_step=args.batches_per_step,
            mode="test",
            use_data_cache=not args.no_data_cache,
        )
    if "train" in args.mode:
        logging.info(f"steps_per_epoch = {train_steps_per_epoch}")
        # Loss logging + checkpointing callbacks; optionally in-training eval.
        train_callback = nemo_core.SimpleLossLoggerCallback(
            tensors=[train_loss],
            print_func=lambda x: logging.info("Loss: {:.3f}".format(x[0].item())),
            get_tb_values=lambda x: [["loss", x[0]]],
            step_freq=args.train_step_freq,
            tb_writer=nf.tb_writer,
        )
        ckpt_callback = nemo_core.CheckpointCallback(
            folder=nf.checkpoint_dir, epoch_freq=args.save_epoch_freq, step_freq=args.save_step_freq
        )
        callbacks = [train_callback, ckpt_callback]
        if "eval" in args.mode:
            eval_callback = nemo_core.EvaluatorCallback(
                eval_tensors=eval_output,
                user_iter_callback=lambda x, y: eval_iter_callback(x, y),
                user_epochs_done_callback=lambda x: eval_epochs_done_callback(
                    x,
                    eval_data_layer=eval_data_layer,
                    do_lower_case=args.do_lower_case,
                    n_best_size=args.n_best_size,
                    max_answer_length=args.max_answer_length,
                    version_2_with_negative=args.version_2_with_negative,
                    null_score_diff_threshold=args.null_score_diff_threshold,
                ),
                tb_writer=nf.tb_writer,
                eval_step=args.eval_step_freq,
            )
            callbacks.append(eval_callback)
        optimization_params = {
            "lr": args.lr,
            "weight_decay": args.weight_decay,
        }
        # --max_steps (if >= 0) overrides the epoch count.
        if args.max_steps < 0:
            total_steps = args.num_epochs * train_steps_per_epoch
            optimization_params['num_epochs'] = args.num_epochs
        else:
            total_steps = args.max_steps
            optimization_params['max_steps'] = args.max_steps
        lr_policy_fn = get_lr_policy(args.lr_policy, total_steps=total_steps, warmup_ratio=args.lr_warmup_proportion)
        if args.grad_norm_clip >= 0:
            optimization_params['grad_norm_clip'] = args.grad_norm_clip
        nf.train(
            tensors_to_optimize=[train_loss],
            callbacks=callbacks,
            lr_policy=lr_policy_fn,
            optimizer=args.optimizer,
            batches_per_step=args.batches_per_step,
            optimization_params=optimization_params,
        )
    else:
        # Inference-only path (mode "eval" or "test" without training).
        load_from_folder = None
        if args.checkpoint_dir is not None:
            load_from_folder = args.checkpoint_dir
        evaluated_tensors = nf.infer(
            tensors=eval_output, checkpoint_dir=load_from_folder, cache=True, offload_to_cpu=False
        )
        unique_ids = []
        for t in evaluated_tensors[0]:
            unique_ids.extend(t.tolist())
        if "eval" in args.mode:
            # Ground truth available: compute EM/F1 scores.
            start_logits = []
            end_logits = []
            for t in evaluated_tensors[1]:
                start_logits.extend(t.tolist())
            for t in evaluated_tensors[2]:
                end_logits.extend(t.tolist())
            exact_match, f1, all_predictions, all_nbest = eval_data_layer.dataset.evaluate(
                unique_ids=unique_ids,
                start_logits=start_logits,
                end_logits=end_logits,
                n_best_size=args.n_best_size,
                max_answer_length=args.max_answer_length,
                version_2_with_negative=args.version_2_with_negative,
                null_score_diff_threshold=args.null_score_diff_threshold,
                do_lower_case=args.do_lower_case,
            )
            logging.info(f"exact_match: {exact_match}, f1: {f1}")
        elif "test" in args.mode:
            # No ground truth: only produce predictions. The test pipeline
            # emitted a single combined logits tensor; split it into
            # start/end halves along the last axis.
            logits = []
            for t in evaluated_tensors[1]:
                logits.extend(t.tolist())
            start_logits, end_logits = np.split(np.asarray(logits), 2, axis=-1)
            (all_predictions, all_nbest, scores_diff) = test_data_layer.dataset.get_predictions(
                unique_ids=unique_ids,
                start_logits=start_logits,
                end_logits=end_logits,
                n_best_size=args.n_best_size,
                max_answer_length=args.max_answer_length,
                version_2_with_negative=args.version_2_with_negative,
                null_score_diff_threshold=args.null_score_diff_threshold,
                do_lower_case=args.do_lower_case,
            )
        # Persist n-best lists and final predictions when paths are given.
        if args.output_nbest_file is not None:
            with open(args.output_nbest_file, "w") as writer:
                writer.write(json.dumps(all_nbest, indent=4) + "\n")
        if args.output_prediction_file is not None:
            with open(args.output_prediction_file, "w") as writer:
                writer.write(json.dumps(all_predictions, indent=4) + "\n")
068164412d08e8f4ce91ffb663cdb342a437e9bf | 880 | py | Python | rhoci/server/config.py | dsariel/rhoci | 76734fefc495fe7d2e5b5b38d8eeb3bd6b156aa7 | [
"Apache-2.0"
] | null | null | null | rhoci/server/config.py | dsariel/rhoci | 76734fefc495fe7d2e5b5b38d8eeb3bd6b156aa7 | [
"Apache-2.0"
] | null | null | null | rhoci/server/config.py | dsariel/rhoci | 76734fefc495fe7d2e5b5b38d8eeb3bd6b156aa7 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Arie Bregman
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class Config(object):
    """RHOCI server configuration."""

    # TCP port the server listens on.
    RHOCI_SERVER_PORT = 5000
    # Enables debug mode when True.
    RHOCI_DEBUG = False
    # Path of the server configuration file read at startup.
    RHOCI_CONFIG_FILE = '/etc/rhoci/server.conf'
    # File name used for server logging.
    RHOCI_SERVER_LOG = 'rhoci.log'
    # Comma-separated release numbers (presumably RHOS releases — confirm).
    RHOCI_RELEASES = '6,7,8,9,10,11,12,13'
    # Update interval in seconds (one hour).
    RHOCI_UPDATE_INTERVAL = 3600
| 35.2 | 78 | 0.713636 |
7ab631b6a439351bc324694138b5bf03a9f29d7a | 1,108 | py | Python | python/xviz_avs/io/json.py | EliotCao/xviz | 57429ba8f76a8bb7ac0889d021a12e22a52f8e7e | [
"Apache-2.0"
] | 851 | 2019-02-19T18:06:42.000Z | 2022-01-17T03:04:23.000Z | python/xviz_avs/io/json.py | Smart-Ag/xviz | 71c4470fdcb5c497793eb53666da6a5feb6832f0 | [
"Apache-2.0"
] | 291 | 2019-02-19T18:48:01.000Z | 2022-01-17T03:03:59.000Z | python/xviz_avs/io/json.py | assalmeftahi/xviz | b9f05ade77b1fe4f0fd478e6075e4f62456ec6a0 | [
"Apache-2.0"
] | 221 | 2019-02-19T20:57:37.000Z | 2022-01-20T08:43:01.000Z | import json
from .base import XVIZBaseWriter
from xviz_avs.message import XVIZEnvelope, XVIZMessage, Metadata
class XVIZJsonWriter(XVIZBaseWriter):
    """Serializes XVIZ messages to ``<name>.json`` files via the configured sink.

    Float tokens in the encoded output are rounded to ``float_precision``
    decimal places to keep files small.
    """

    def __init__(self, sink, wrap_envelope=True, float_precision=10, as_array_buffer=False):
        """
        :param sink: target that receives the encoded bytes.
        :param wrap_envelope: wrap each message in an XVIZEnvelope when True.
        :param float_precision: decimal places kept for float tokens.
        :param as_array_buffer: accepted for API symmetry with other writers;
            currently unused by the JSON writer.
        """
        super().__init__(sink)
        self._wrap_envelop = wrap_envelope  # (sic) attribute name kept for compatibility
        self._json_precision = float_precision

    def write_message(self, message: XVIZMessage, index: int = None):
        """Encode ``message`` (optionally enveloped) and write it to the sink.

        :param message: the XVIZ message to serialize.
        :param index: optional sequence number used to build the file name.
        """
        self._check_valid()
        if self._wrap_envelop:
            obj = XVIZEnvelope(message).to_object()
        else:
            obj = message.to_object()
        fname = self._get_sequential_name(message, index) + '.json'

        # Stream-encode token by token so float values can be truncated.
        result = []
        for part in json.JSONEncoder(separators=(',', ':')).iterencode(obj):
            try:
                value = float(part)
            except ValueError:
                pass  # non-numeric token: punctuation, string, true/false/null
            else:
                # Bug fix: rewrite only genuine float tokens. Rounding integer
                # tokens coerced them to floats ("3" -> "3.0"), silently
                # changing the JSON type of integer-valued fields.
                if not part.lstrip('+-').isdigit():
                    part = str(round(value, self._json_precision))
            result.append(part)
        self._source.write(''.join(result).encode('ascii'), fname)
| 35.741935 | 92 | 0.636282 |
ebe528e746d8fb0d3cbc196c2d24fe020abc3970 | 1,406 | py | Python | src/scripts/cluster_vol.py | flaviovdf/pyksc | 6ba8988c7fad63366dc2b8d005d0779971e129c5 | [
"BSD-3-Clause"
] | 32 | 2015-03-09T12:08:13.000Z | 2021-03-23T09:47:25.000Z | src/scripts/cluster_vol.py | flaviovdf/pyksc | 6ba8988c7fad63366dc2b8d005d0779971e129c5 | [
"BSD-3-Clause"
] | 5 | 2015-04-22T02:46:14.000Z | 2018-07-26T14:17:54.000Z | src/scripts/cluster_vol.py | flaviovdf/pyksc | 6ba8988c7fad63366dc2b8d005d0779971e129c5 | [
"BSD-3-Clause"
] | 13 | 2015-04-15T06:58:33.000Z | 2019-07-26T03:34:12.000Z | # -*- coding: utf8
from __future__ import division, print_function
from scipy import stats
from collections import defaultdict
from matplotlib import pyplot as plt
from scripts import initialize_matplotlib
import numpy as np
import plac
import sys
# Column indices (within the feature matrix) of the statistics reported below.
cols = {'PEAK_VIEWS': 3, 'SUM_VIEWS': -1}


@plac.annotations(features_fpath=plac.Annotation('Features file', type=str),
                  classes_fpath=plac.Annotation('Video classes file', type=str),
                  tseries_fpath=plac.Annotation('Time Series file', type=str))
def main(features_fpath, classes_fpath, tseries_fpath):
    """Print summary statistics per cluster and overall for video features.

    :param features_fpath: whitespace-delimited feature matrix; the first
        column (an id — TODO confirm) is dropped.
    :param classes_fpath: one cluster label per row.
    :param tseries_fpath: time-series matrix; the first column is dropped.
    """
    X = np.genfromtxt(features_fpath)[:, 1:].copy()
    y = np.loadtxt(classes_fpath)
    T = np.genfromtxt(tseries_fpath)[:, 1:].copy()

    # Ratio of the time-series total to the last feature column
    # (presumably total views — confirm against the data pipeline).
    ratio = T.sum(axis=1) / X[:, -1]
    print(np.mean(ratio))
    print(np.median(ratio))
    print(np.std(ratio))
    print(stats.scoreatpercentile(ratio, 25))

    num_clusters = len(set(y))
    # Bug fix: `xrange` does not exist on Python 3; `range` behaves
    # identically here and keeps Python 2 compatibility.
    for k in range(num_clusters):
        print(k, end='\t')
        M = X[y == k]
        for _column, col_num in sorted(cols.items()):
            print(np.mean(M[:, col_num]), end='\t')
        print()

    print('Tot.', end='\t')
    for _column, col_num in sorted(cols.items()):
        print(np.mean(X[:, col_num]), end='\t')
    print()


if __name__ == '__main__':
    sys.exit(plac.call(main))
| 26.037037 | 80 | 0.621622 |
47b2fb1ccce82ce16769810b044e59a7122d35fc | 1,002 | py | Python | isi_sdk_8_2_0/test/test_result_directories_total_usage.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 24 | 2018-06-22T14:13:23.000Z | 2022-03-23T01:21:26.000Z | isi_sdk_8_2_0/test/test_result_directories_total_usage.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 46 | 2018-04-30T13:28:22.000Z | 2022-03-21T21:11:07.000Z | isi_sdk_8_2_0/test/test_result_directories_total_usage.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 29 | 2018-06-19T00:14:04.000Z | 2022-02-08T17:51:19.000Z | # coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 7
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import isi_sdk_8_2_0
from isi_sdk_8_2_0.models.result_directories_total_usage import ResultDirectoriesTotalUsage # noqa: E501
from isi_sdk_8_2_0.rest import ApiException
class TestResultDirectoriesTotalUsage(unittest.TestCase):
    """ResultDirectoriesTotalUsage unit test stubs"""

    # Auto-generated scaffolding: no fixtures are needed yet.
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def testResultDirectoriesTotalUsage(self):
        """Test ResultDirectoriesTotalUsage"""
        # FIXME: construct object with mandatory attributes with example values
        # model = isi_sdk_8_2_0.models.result_directories_total_usage.ResultDirectoriesTotalUsage() # noqa: E501
        pass
if __name__ == '__main__':
    unittest.main()
| 24.439024 | 113 | 0.734531 |
3b2074985a653123f9ef020e7a63d1cec5412844 | 6,906 | py | Python | raven.py | ChenYi015/Raven | e732e03f8dd118ed805a143fc6916f0e5fc53c2c | [
"Apache-2.0"
] | 1 | 2022-03-03T05:54:25.000Z | 2022-03-03T05:54:25.000Z | raven.py | ChenYi015/Raven | e732e03f8dd118ed805a143fc6916f0e5fc53c2c | [
"Apache-2.0"
] | null | null | null | raven.py | ChenYi015/Raven | e732e03f8dd118ed805a143fc6916f0e5fc53c2c | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Raven Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import queue
import threading
from concurrent.futures.thread import ThreadPoolExecutor
from datetime import datetime
from threading import Timer
import yaml
import configs
from benchmark.core.engine import Engine
from benchmark.core.statistic import Collector
from benchmark.core.testplan import Testplan
from benchmark.core.workload import Workload
from benchmark.engine.manager import EngineManager
from benchmark.hook.manager import HookManager
from benchmark.pipeline.pipeline import Pipeline
from benchmark.testplan.timeline import Timeline, Event
from benchmark.workload.manager import WorkloadManager
logger = configs.ROOT_LOGGER
class Raven:
    """
    Raven - a benchmark framework for olap engines within cloud.
    """
    def __init__(self, config):
        self.config = config
        self.cloud = None  # Cloud provider handle (setup not implemented yet)
        self.engine: Engine = None  # OLAP engine under test
        self.workload: Workload = None  # Workload
        self.plan: Testplan = None  # Testplan
        self.collector: Collector = None  # Statistics collector
        # Pool that runs event hooks asynchronously during timeline execution.
        self._hook_exec_pool = ThreadPoolExecutor(max_workers=12, thread_name_prefix='HookExecutor')
    def run(self):
        """Execute the configured testplan (pipeline or timeline)."""
        if self.plan.type == Testplan.Type.PIPELINE:
            self._execute_pipeline(self.plan)
        elif self.plan.type == Testplan.Type.TIMELINE:
            self._execute_timeline(self.plan)
    def setup_cloud(self):
        # Placeholder: cloud provisioning is not implemented yet.
        pass
    def setup_engine(self, engine_name, **kwargs):
        """Create the named OLAP engine and launch it."""
        logger.info('Raven is setting up engine...')
        self.engine = EngineManager.get_engine(engine_name, **kwargs)
        self.engine.launch()
    def setup_workload(self, workload_name: str, workload_type: str, **kwargs):
        """Create the workload; an optional 'Database' kwarg selects the target database."""
        logger.info('Raven is setting up workload...')
        self.workload = WorkloadManager.get_workload(workload_name, workload_type)
        if 'Database' in kwargs:
            self.workload.set_database(database=kwargs['Database'])
    def setup_testplan(self, plan_type: str, plan_path: str):
        """Load the testplan description from a YAML file."""
        logger.info(f'Raven is setting up testplan, type: {plan_type}, path: {plan_path}...')
        if plan_type == Testplan.Type.PIPELINE:
            # NOTE(review): pipeline plans are not loaded yet — self.plan stays None.
            pass
        elif plan_type == Testplan.Type.TIMELINE:
            with open(plan_path, encoding='utf-8') as stream:
                plan_config = yaml.load(stream, yaml.FullLoader)
            self.plan = Timeline(plan_config)
    def setup_collector(self):
        """Create the statistics collector."""
        logger.info('Raven is setting up statistics collector...')
        self.collector = Collector()
    def _execute_pipeline(self, plan: Pipeline):
        # Placeholder: pipeline execution is not implemented yet.
        # database = workload['Database']
        # for query in workload['Queries']:
        #     self.engine.query(database, query['SQL'])
        pass
    def _handle_stage(self, stage):
        # Placeholder: stage handling is not implemented yet.
        pass
    def _execute_timeline(self, timeline: Timeline):
        """Run each timeline event at its scheduled offset and wait for all to finish."""
        logger.info(f'Raven is executing timeline: {timeline.name}...')
        # Arm one timer per event in the timeline; when a timer fires, the
        # corresponding event's hook is invoked to handle it.
        threads = [Timer(event.time, self._handle_event, args=(event,)) for event in timeline.events]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()
        logger.info(f'Raven has finished executing timeline: {timeline.name}.')
    def _handle_event(self, event: Event):
        """
        Dispatch the event to the hook registered under its name.

        :param event: the timeline event to handle.
        :return: None
        """
        logger.info(f'Raven is handling event: {event.name}...')
        hook = HookManager.get_hook(event.name)
        self._hook_exec_pool.submit(hook, self.engine)
        # Query handling runs in three stages chained as a pipeline:
        # Generate -> Execute -> Collect.
        if event.name == Event.Name.ON_QUERY_START:
            # Queries follow a producer/consumer pattern: the workload is the
            # producer, the engine the consumer, and they communicate through
            # bounded request queues. The workload spawns threads that
            # generate query requests into the execute queue.
            execute_queue = queue.Queue(maxsize=3000)
            collect_queue = queue.Queue(maxsize=3000)
            generate_thread = threading.Thread(
                target=self.workload.generate_queries,
                args=(execute_queue,),
                kwargs=self.config['Workload']['Parameters'],
                name='QueryGenerator'
            )
            generate_thread.start()
            # The engine spawns threads that consume and execute the requests.
            execute_thread = threading.Thread(
                target=self.engine.execute_queries,
                args=(execute_queue, collect_queue),
                name='QueryExecutor'
            )
            execute_thread.start()
            # The metric collector spawns threads to gather query metrics.
            # Cost and resource-utilization metrics are collected via AWS
            # CloudWatch (outside this method).
            collect_thread = threading.Thread(
                target=self.collector.collect_queries,
                args=(collect_queue,),
                name='QueryCollector'
            )
            collect_thread.start()
            # Drain the pipeline stage by stage, then shut each stage down.
            generate_thread.join()
            execute_queue.join()
            self.engine.cancel_execute()
            execute_thread.join()
            self.engine.shutdown()
            collect_queue.join()
            self.collector.cancel_collect()
            collect_thread.join()
        logger.info(f'Raven has finished handling event: {event.name}...')
if __name__ == '__main__':
    # Load the top-level Raven configuration from $RAVEN_HOME/config/raven.yaml.
    with open(os.path.join(os.environ['RAVEN_HOME'], 'config', 'raven.yaml'), encoding='utf-8') as file:
        config = yaml.load(file, Loader=yaml.FullLoader)
    raven = Raven(config)
    # Setup engine
    raven.setup_engine(engine_name=config['Engine']['Name'], **config['Engine']['Properties'])
    # Setup workload
    raven.setup_workload(workload_name=config['Workload']['Name'], workload_type=config['Workload']['Type'],
                         **config['Workload']['Parameters'])
    # Setup testplan (the plan path is given relative to RAVEN_HOME, '/'-separated)
    raven.setup_testplan(plan_type=config['Testplan']['Properties']['Type'],
                         plan_path=os.path.join(os.environ['RAVEN_HOME'],
                                                *config['Testplan']['Properties']['Path'].split('/')))
    # Setup statistics collector
    raven.setup_collector()
    # Run the plan, then persist metrics and report on the measured window.
    start = datetime.now()
    raven.run()
    end = datetime.now()
    raven.collector.save_query_metrics()
    raven.collector.generate_report(start=start, end=end)
bd4cb0acf5b320a98c06054fc795f7a931c9d029 | 6,814 | py | Python | tests/cli/test_cli.py | vlanse/plan_b | 59f6b3aa57f73f4406cf5acfc4f0a834d71b0d5e | [
"Apache-2.0"
] | null | null | null | tests/cli/test_cli.py | vlanse/plan_b | 59f6b3aa57f73f4406cf5acfc4f0a834d71b0d5e | [
"Apache-2.0"
] | 10 | 2019-01-03T12:56:24.000Z | 2021-06-01T23:16:05.000Z | tests/cli/test_cli.py | vlanse/plan_b | 59f6b3aa57f73f4406cf5acfc4f0a834d71b0d5e | [
"Apache-2.0"
] | null | null | null | import os
from collections import namedtuple
from unittest import TestCase
from unittest.mock import patch, Mock
from plan_b.cli import do_work
MockIssue = namedtuple('MockIssue', ['key', 'fields'])
MockFields = namedtuple('MockFields', [
'issuetype', 'summary', 'assignee', 'reporter', 'created', 'resolutiondate', 'duedate', 'aggregatetimespent',
'customfield_10073', 'priority', 'components', 'customfield_10180', 'customfield_16390', 'status', 'resolution',
'customfield_13694', 'comments', 'aggregatetimeoriginalestimate', 'timeoriginalestimate'
])
def _make_mock_issue(
key, issue_type, summary, assignee=None, reporter=None, created=None, resolved=None, due=None,
time_spent=None, severity=None, priority=None, components=None, tags=None, qa_advice=None, status='open',
resolution=None, epic_link=None, comments=None, aggregated_orig_estimate=None, orig_estimate=None
):
return MockIssue(
key=key,
fields=MockFields(
issuetype=namedtuple('issuetype', ['name'])(name=issue_type),
summary=summary,
assignee=namedtuple('assignee', ['name'])(name=assignee),
reporter=reporter,
created=created,
resolutiondate=resolved,
duedate=due,
aggregatetimespent=time_spent,
customfield_10073=namedtuple('customfield_10073', ['value'])(value=severity),
priority=priority,
components=components,
customfield_10180=tags,
customfield_16390=qa_advice,
status=namedtuple('issuestatus', ['name'])(name=status),
resolution=resolution,
customfield_13694=epic_link,
comments=comments,
aggregatetimeoriginalestimate=aggregated_orig_estimate,
timeoriginalestimate=orig_estimate
)
)
def _make_comment(author, body):
return namedtuple(
'comment', ['author', 'body']
)(author=namedtuple('author', ['name'])(author), body=body)
def comments_side_effect(issue):
    """Canned jira comments for the mocked client, selected by issue key.

    Issues without canned comments yield an empty list.
    """
    # (author, body) pairs per issue key; order is preserved in the result.
    canned = {
        # A1 project
        'A-00001': (
            ('V.Ivanov', '#plan reqs: hi, design: med, impl: 10d, doc: 1d, arch: 1d'),
            ('B.Smithson', '#plan qa: 1d'),
        ),
        'A-00002': (
            ('J.Smith', '#plan reqs: hi, design: lo, impl: 5d, perf: 1w'),
            ('B.Smithson', '#plan qa: 2d'),
        ),
        'A-00003': (
            ('V.Ivanov', '#plan reqs: lo, design: med, impl: 1d, doc: 1d'),
            ('J.Smith', '#plan reqs: med, design: lo, impl: 1w, doc: 1d'),
            ('B.Smithson', '#plan qa: 5d'),
            ('A.Testerson', '#plan qa: 1d'),
        ),
        'A-00005': (
            ('V.Ivanov', '#plan reqs: lo, design: med, impl: 1d, doc: 1d'),
        ),
        # B2U4 project
        'A-00050': (
            ('J.Smith', '#plan reqs: lo, design: lo, impl: 5w, doc: 1d, arch: 3d'),
            ('A.Testerson', '#plan qa: 3d'),
            ('B.Smithson', '#plan qa: 1d'),
        ),
        'A-00051': (
            ('V.Ivanov', '#plan reqs: hi, design: lo, impl: 10w, doc: 1d, arch: 4d'),
            ('A.Testerson', '#plan qa: 2d'),
            ('B.Smithson', '#plan qa: 2d'),
        ),
        # qa specific epics
        'A-00006': (
            ('B.Smithson', '#plan qa: 1w'),
            ('A.Testerson', '#plan qa: 2w'),
        ),
        'A-00007': (
            ('B.Smithson', '#plan qa: 3w'),
            ('A.Testerson', '#plan qa: 4w'),
        ),
        'A-00008': (
            ('B.Smithson', '#plan qa: 3w'),
        ),
        'A-00009': (
            ('A.Testerson', '#plan qa: 1w'),
        ),
        'A-00010': (
            ('B.Smithson', '#plan qa: 1w'),
        ),
    }
    return [_make_comment(author, body) for author, body in canned.get(issue.key, ())]
def issues_side_effect(data_query, **_):
    """Canned jira search results for the mocked client.

    Returns the A1 fixtures when the query string mentions 'A1', the B2U4
    fixtures for 'B2U4', and (implicitly) None for any other query.
    Extra keyword arguments from jira.search_issues are accepted and ignored.
    """
    if 'A1' in data_query:
        return [
            _make_mock_issue('A-00001', 'Epic', 'Cool backend feature', assignee='V.Ivanov'),
            _make_mock_issue('A-00002', 'Epic', 'Super-duper UI improvement', assignee='J.Smith'),
            _make_mock_issue('A-00003', 'Epic', 'Revolutionary product', assignee='V.Ivanov'),
            _make_mock_issue(
                'A-00004', 'Story', 'Story inside A-00001', epic_link='A-00001', assignee='V.Ivanov'
            ),
            _make_mock_issue(
                'A-00005', 'Story', 'Autonomous story in another team\'s epic', epic_link='A-00100',
                assignee='V.Ivanov'
            ),
            # epics that hold project-wide QA effort rather than features
            _make_mock_issue('A-00006', 'Epic', 'A1 External QA Tasks', assignee='B.Smithson'),
            _make_mock_issue('A-00007', 'Epic', 'A1 Internal QA Tasks', assignee='B.Smithson'),
            _make_mock_issue('A-00008', 'Epic', 'A1 Regress', assignee='B.Smithson'),
            _make_mock_issue('A-00009', 'Epic', 'A1 Verification', assignee='B.Smithson'),
            _make_mock_issue('A-00010', 'Epic', 'A1 Production acceptance', assignee='B.Smithson'),
            # standalone bugs with no epic link
            _make_mock_issue('A-00021', 'Bug', '', assignee='V.Ivanov'),
            _make_mock_issue('A-00022', 'Bug', '', assignee='J.Smith'),
            _make_mock_issue('A-00023', 'Bug', '', assignee='J.Smith'),
            _make_mock_issue('A-00024', 'Bug', '', assignee='J.Smith'),
        ]
    elif 'B2U4' in data_query:
        return [
            _make_mock_issue('A-00050', 'Epic', 'Make go services great again', assignee='V.Ivanov'),
            _make_mock_issue('A-00051', 'Epic', 'Earn 10 billion $$$ for the company', assignee='V.Ivanov'),
            _make_mock_issue('A-00061', 'Bug', '', assignee='V.Ivanov', status='in progress'),
            _make_mock_issue('A-00062', 'Bug', '', assignee='V.Ivanov'),
            _make_mock_issue('A-00063', 'Bug', '', assignee='V.Ivanov'),
            _make_mock_issue('A-00064', 'Bug', '', assignee='J.Smith'),
        ]
class TestPlanExport(TestCase):
    """End-to-end test: run do_work against a fully mocked jira client."""
    @patch('jira.JIRA')
    def test_plan_export(self, jira_mock):
        # route searches and comment lookups to the canned fixtures above
        jira_mock().search_issues = Mock(side_effect=issues_side_effect)
        jira_mock().comments = Mock(side_effect=comments_side_effect)
        # config and output file live in the data/ dir next to this test module
        data_dir_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data')
        do_work(
            os.path.join(data_dir_path, 'config-test.yml'),
            os.path.join(data_dir_path, 'test.xlsx')
        )
| 42.061728 | 116 | 0.58688 |
8d1549fa91c093ee8bfa934bd05de0c8bd316aa0 | 1,393 | py | Python | user-t.py | jamesgathuru001/password-locker | ce545c11d027aa384c4d901aa867651f49dd6797 | [
"Unlicense"
] | null | null | null | user-t.py | jamesgathuru001/password-locker | ce545c11d027aa384c4d901aa867651f49dd6797 | [
"Unlicense"
] | null | null | null | user-t.py | jamesgathuru001/password-locker | ce545c11d027aa384c4d901aa867651f49dd6797 | [
"Unlicense"
] | null | null | null |
import unittest
from user import User
class TestUser(unittest.TestCase):
    '''
    Test class that defines test cases for the User class behaviours.
    Args:
        unittest.TestCase: TestCase class that helps in creating test cases.
    '''
    def setUp(self):
        '''
        Set up method to run before each test case.
        '''
        # fresh User fixture per test: username "qwerty", password "mmmmmm"
        self.new_user = User("qwerty", "mmmmmm")
    def tearDown(self):
        '''
        TearDown method that does clean up after each test case has run.
        '''
        # reset the class-level store so tests stay independent
        User.user_list = []
    def test_init(self):
        '''
        test_init test case to test if the object is initialized properly.
        '''
        self.assertEqual(self.new_user.username, "qwerty")
        self.assertEqual(self.new_user.password, "mmmmmm")
    def test_save_user(self):
        '''
        test_save_user test case to test if the user object is saved into the user list.
        '''
        self.new_user.save_user()
        self.assertEqual(len(User.user_list), 1)
    def test_save_multiple_user(self):
        '''
        test_save_multiple_user to check if we can save multiple user
        objects to our user_list
        '''
        self.new_user.save_user()
        # second user with identical credentials is still a distinct entry
        test_user = User("qwerty", "mmmmmm")
        test_user.save_user()
        self.assertEqual(len(User.user_list), 2)
# run the test suite when this module is executed directly
if __name__ == '__main__':
    unittest.main()
| 26.788462 | 88 | 0.613783 |
96f4c9e70b35e79a43ce56364228e95701ca9fe0 | 5,350 | py | Python | obniz/obniz/libs/embeds/ble/ble_attribute_abstract.py | izm51/obniz-python-sdk | 40a738b5fe2c0a415cdc09f46d28c143982bfb07 | [
"MIT"
] | 11 | 2019-03-22T12:02:11.000Z | 2021-01-21T04:57:18.000Z | obniz/obniz/libs/embeds/ble/ble_attribute_abstract.py | izm51/obniz-python-sdk | 40a738b5fe2c0a415cdc09f46d28c143982bfb07 | [
"MIT"
] | 5 | 2019-03-02T08:28:25.000Z | 2021-02-02T22:06:37.000Z | obniz/obniz/libs/embeds/ble/ble_attribute_abstract.py | izm51/obniz-python-sdk | 40a738b5fe2c0a415cdc09f46d28c143982bfb07 | [
"MIT"
] | 3 | 2019-07-20T06:55:09.000Z | 2019-12-04T05:05:00.000Z | from pyee import AsyncIOEventEmitter
import asyncio
from .ble_helper import BleHelper
from ...utils.util import ObnizUtil
class BleAttributeAbstract:
    """Common base for BLE GATT attributes (service/characteristic/descriptor).

    Holds a filtered uuid, an optional byte payload (``data``), a parent
    attribute and a list of child attributes, plus an event emitter used to
    dispatch notifications coming back from the obniz server.
    Subclasses override ``children_class``/``children_name``/``parent_name``
    so ``set_functions`` can generate convenience accessors dynamically.
    """
    def __init__(self, params):
        """Build the attribute from a dict with 'uuid' and optional payload/children.

        The payload may be given as a raw byte list ('data'), a string
        ('text') or a single value ('value'); the first one present wins.
        """
        self.uuid = BleHelper.uuid_filter(params["uuid"])
        self.parent = None
        self.children = []
        self.isRemote = False
        self.discoverdOnRemote = False
        self.data = params.get("data")
        if not self.data and params.get("text"):
            self.data = ObnizUtil.string2data_array(params["text"])
        if not self.data and params.get("value"):
            # NOTE(review): params is a dict, so params.value raises
            # AttributeError; this looks like it was meant to be
            # params["value"] — confirm before relying on the 'value' path.
            self.data = [params.value]
        if self.children_name in params:
            for child in params[self.children_name]:
                self.add_child(child)
        self.set_functions()
        self.emitter = AsyncIOEventEmitter()
    def set_functions(self):
        """Attach add_/get_/set_ convenience aliases named after the subclass's
        child and parent attribute names (e.g. add_characteristic)."""
        children_name = self.children_name
        if children_name:
            # singularize by dropping the trailing 's' ("services" -> "service")
            child_name = children_name[:-1]
            func_name = "add_" + child_name
            setattr(self, func_name, self.add_child)
            func_name = "get_" + child_name
            setattr(self, func_name, self.get_child)
        parent_name = self.parent_name
        if parent_name:
            func_name = "get_" + parent_name
            setattr(self, func_name, self.get_parent)
            func_name = "set_" + parent_name
            setattr(self, func_name, self.set_parent)
    def get_parent(self):
        """Return the parent attribute (aliased as get_<parent_name>)."""
        return self.parent
    def set_parent(self, new_value):
        """Set the parent attribute (aliased as set_<parent_name>)."""
        self.parent = new_value
    @property
    def children_class(self):
        # subclasses return the class used to wrap raw child params
        return object
    @property
    def children_name(self):
        # subclasses return the plural key for children in the params dict
        return None
    @property
    def parent_name(self):
        # subclasses return the name of the parent attribute
        return None
    def add_child(self, child):
        """Wrap raw child params in children_class if needed, link it to self,
        append it to children and return it."""
        if not isinstance(child, self.children_class):
            children_class = self.children_class
            child = children_class(child)
        child.parent = self
        self.children.append(child)
        return child
    def get_child(self, uuid):
        """Return the first child whose (filtered) uuid matches, or None."""
        uuid = BleHelper.uuid_filter(uuid)
        return next(
            iter(
                [
                    element
                    for element in self.children
                    if BleHelper.uuid_filter(element.uuid) == uuid
                ]
            ),
            None,
        )
    def to_json(self):
        """Serialize to a dict with uuid, children (if any) and data (if any)."""
        obj = {"uuid": BleHelper.uuid_filter(self.uuid)}
        if len(self.children) > 0:
            key = self.children_name
            obj[key] = self.children
        if self.data:
            obj["data"] = self.data
        return obj
    # /**
    # * WS COMMANDS
    # */
    def read(self):
        # no-op here; subclasses issue the actual websocket read command
        pass
    def write(self):
        # no-op here; subclasses issue the actual websocket write command
        pass
    def write_number(self, val, need_response=False):
        """Write a single numeric value as a one-element byte list."""
        self.write([val], need_response)
    # writeText(str, needResponse) {
    # self.write(ObnizUtil.string2dataArray(str), needResponse)
    # }
    def read_wait(self):
        """Trigger a read and return a future resolved with the data on
        success, or None on failure."""
        # get_running_loop() function is preferred on Python >= 3.7
        future = asyncio.get_event_loop().create_future()
        def cb(params):
            if params["result"] == "success":
                future.set_result(params["data"])
            else:
                future.set_result(None)
        self.emitter.once("onread", cb)
        self.read()
        return future
    # writeWait(data, needResponse) {
    # return new Promise(resolve => {
    # self.emitter.once('onwrite', params => {
    # resolve(params.result == 'success')
    # })
    # self.write(data, needResponse)
    # })
    # }
    # writeTextWait(data) {
    # return new Promise(resolve => {
    # self.emitter.once('onwrite', params => {
    # resolve(params.result == 'success')
    # })
    # self.writeText(data)
    # })
    # }
    # writeNumberWait(data) {
    # return new Promise(resolve => {
    # self.emitter.once('onwrite', params => {
    # resolve(params.result == 'success')
    # })
    # self.writeNumber(data)
    # })
    # }
    # readFromRemoteWait() {
    # return new Promise(resolve => {
    # self.emitter.once('onreadfromremote', () => {
    # resolve()
    # })
    # })
    # }
    # writeFromRemoteWait() {
    # return new Promise(resolve => {
    # self.emitter.once('onreadfromremote', params => {
    # resolve(params.data)
    # })
    # })
    # }
    #
    # CALLBACKS
    #
    # NOTE(review): several callbacks below are invoked by notify_from_server
    # with more arguments than their default signatures accept (e.g. onwrite
    # is called with a result argument) — subclasses presumably override them;
    # verify before calling the defaults directly.
    def onwrite(self):
        pass
    def onread(self, data):
        pass
    def onwritefromremote(self):
        pass
    def onreadfromremote(self):
        pass
    def onerror(self, err):
        print(err.message)
    def notify_from_server(self, notify_name, params):
        """Re-emit a server notification on the local emitter and dispatch it
        to the matching on* callback."""
        self.emitter.emit(notify_name, params)
        if notify_name == "onerror":
            self.onerror(params)
        elif notify_name == "onwrite":
            self.onwrite(params["result"])
        elif notify_name == "onread":
            self.onread(params["data"])
        elif notify_name == "onwritefromremote":
            self.onwritefromremote(params["address"], params["data"])
        elif notify_name == "onreadfromremote":
            self.onreadfromremote(params["address"])
| 25.970874 | 69 | 0.54486 |
6139a23285eb479f53d8d18c68235f2cfdebce51 | 3,012 | py | Python | Practice_2/main2.py | jldj1/Practice-2 | 014d63bff36c4ba7a80a32db1da66c397267a205 | [
"MIT"
] | null | null | null | Practice_2/main2.py | jldj1/Practice-2 | 014d63bff36c4ba7a80a32db1da66c397267a205 | [
"MIT"
] | null | null | null | Practice_2/main2.py | jldj1/Practice-2 | 014d63bff36c4ba7a80a32db1da66c397267a205 | [
"MIT"
] | null | null | null | import sys
from typing import Type
import pygame
from pygame.transform import scale
from buttons.button import Button
from buttons.input_box import InputBox
from image_button import ImageButton
# NOTE(review): "login" is bound to the InputBox CLASS, not an instance;
# Blank.start() later calls login.draw(), which looks like it needs an
# instance — confirm intent.
login: Type[InputBox] = InputBox
window = (1150, 1050)  # initial window size (width, height) in pixels
screen = pygame.display.set_mode(window)  # module-level display surface
pygame.font.init()
clock = pygame.time.Clock()  # module-level frame clock
BG_COLOR = (30, 30, 30)      # dark grey background
BLACK_COLOR = (0, 0, 0)
class Blank:
    """Blank template screen with a back button and a start image button."""
    def __init__(self):
        self.width = 600
        self.height = 600
        self.setup_screen()
        self.click = False    # set True on left mouse-button down, reset each frame
        self.running = True   # run() loops while True; cleared by ESC
        self.button = Button(self.screen, self.width // 2 - 100, self.height // 2 - 25, 200, 50, "esc to go back", (BLACK_COLOR))
        # NOTE(review): this instance attribute shadows the start() method
        # defined below — go.start() on an instance resolves to this
        # ImageButton, not the method. Likely unintended; confirm.
        self.start = ImageButton(self.screen, self.width, self.height, "assets/start_btn.png", scale)
        self.clock = pygame.time.Clock()
    def start(self):
        """Event loop showing the cats image; quits on ESC/close/esc-button.

        NOTE(review): shadowed by the self.start attribute set in __init__
        (see above); also loops forever — the trailing pygame.quit() after
        the while is unreachable.
        """
        # uses the module-level screen surface, not self.screen
        cats = ImageButton(screen, 200, 250, "assets/cats.png", 1)
        esc = Button(screen, 50, 40, 800, 600, "esc to go back", (BLACK_COLOR))
        while True:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    pygame.quit()
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_ESCAPE:
                        pygame.quit()
                if event.type == pygame.MOUSEBUTTONDOWN:
                    if esc.collides(pygame.mouse.get_pos()):
                        pygame.quit()
                    elif cats.collides(pygame.mouse.get_pos()):
                        # NOTE(review): login is the InputBox class, so this
                        # calls draw() unbound — presumably needs an instance.
                        login.draw()
            pygame.display.update()
        pygame.quit()
    def draw(self):
        """Repaint the background and flip the display."""
        self.screen.fill(BG_COLOR)
        # screen.fill always in beginning of draw func
        #self.button.draw()
        #self.start.draw()
        # display.update() always in end of draw func
        pygame.display.update()
    #####
    def setup_screen(self):
        """Create this object's own display surface and set the caption."""
        self.screen = pygame.display.set_mode((self.width, self.height))
        pygame.display.set_caption("Blank Template Screen")
    def run(self):
        """Main loop: draw, report clicks on the start button, handle events."""
        while self.running:
            pos = pygame.mouse.get_pos()
            print(pos)
            self.draw()
            if self.start.collides(pos):
                if self.click:
                    print("BUTTON CLICKED")
            self.click = False
            for event in pygame.event.get():
                self.handle_event(event)
            self.clock.tick(60)
    def handle_event(self, event):
        """Track left clicks, stop the loop on ESC, exit on window close."""
        if event.type == pygame.MOUSEBUTTONDOWN:
            if event.button == 1:
                self.click = True
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_ESCAPE:
                self.running = False
        if event.type == pygame.QUIT:
            pygame.quit()
            sys.exit()
# --- module-level demo loop ---
cats = ImageButton(screen, 200, 250, "assets/cats.png", 1)
pygame.display.flip()
done = False
# NOTE(review): "done" is never set True, so this loop only exits via
# pygame.quit()/sys.exit() inside Blank; also a new Blank window is created
# every iteration — confirm this is intended.
while not done:
    cats.draw()
    go = Blank()
    go.start()
    pygame.display.update()
    clock.tick(60)
| 26.421053 | 129 | 0.565737 |
567509ad72310178809745447aeb4c7390821dcd | 4,002 | py | Python | samples/cli/accelbyte_py_sdk_cli/platform/_public_normalize_payment_return_url.py | AccelByte/accelbyte-python-sdk | dcd311fad111c59da828278975340fb92e0f26f7 | [
"MIT"
] | null | null | null | samples/cli/accelbyte_py_sdk_cli/platform/_public_normalize_payment_return_url.py | AccelByte/accelbyte-python-sdk | dcd311fad111c59da828278975340fb92e0f26f7 | [
"MIT"
] | 1 | 2021-10-13T03:46:58.000Z | 2021-10-13T03:46:58.000Z | samples/cli/accelbyte_py_sdk_cli/platform/_public_normalize_payment_return_url.py | AccelByte/accelbyte-python-sdk | dcd311fad111c59da828278975340fb92e0f26f7 | [
"MIT"
] | null | null | null | # Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template_file: python-cli-command.j2
# justice-platform-service (4.10.0)
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
import json
import yaml
from typing import Optional
import click
from .._utils import login_as as login_as_internal
from .._utils import to_dict
from accelbyte_py_sdk.api.platform import public_normalize_payment_return_url as public_normalize_payment_return_url_internal
# Generated CLI wrapper for the publicNormalizePaymentReturnUrl endpoint.
# No docstring is added to the command function on purpose: click would turn
# it into the CLI help text and change the generated command's behavior.
@click.command()
@click.argument("order_no", type=str)
@click.argument("payment_order_no", type=str)
@click.argument("payment_provider", type=str)
@click.argument("return_url", type=str)
@click.option("--payer_id", "payer_id", type=str)
@click.option("--foreinginvoice", "foreinginvoice", type=str)
@click.option("--invoice_id", "invoice_id", type=str)
@click.option("--payload", "payload", type=str)
@click.option("--redirect_result", "redirect_result", type=str)
@click.option("--result_code", "result_code", type=str)
@click.option("--session_id", "session_id", type=str)
@click.option("--status", "status", type=str)
@click.option("--token", "token", type=str)
@click.option("--type", "type_", type=str)
@click.option("--user_id", "user_id", type=str)
@click.option("--namespace", type=str)
@click.option("--login_as", type=click.Choice(["client", "user"], case_sensitive=False))
@click.option("--login_with_auth", type=str)
@click.option("--doc", type=bool)
def public_normalize_payment_return_url(
    order_no: str,
    payment_order_no: str,
    payment_provider: str,
    return_url: str,
    payer_id: Optional[str] = None,
    foreinginvoice: Optional[str] = None,
    invoice_id: Optional[str] = None,
    payload: Optional[str] = None,
    redirect_result: Optional[str] = None,
    result_code: Optional[str] = None,
    session_id: Optional[str] = None,
    status: Optional[str] = None,
    token: Optional[str] = None,
    type_: Optional[str] = None,
    user_id: Optional[str] = None,
    namespace: Optional[str] = None,
    login_as: Optional[str] = None,
    login_with_auth: Optional[str] = None,
    doc: Optional[bool] = None,
):
    # --doc: print the wrapped SDK function's docstring and exit
    if doc:
        click.echo(public_normalize_payment_return_url_internal.__doc__)
        return
    # either pass an explicit Authorization header or log in via the SDK
    x_additional_headers = None
    if login_with_auth:
        x_additional_headers = {
            "Authorization": login_with_auth
        }
    else:
        login_as_internal(login_as)
    result, error = public_normalize_payment_return_url_internal(
        order_no=order_no,
        payment_order_no=payment_order_no,
        payment_provider=payment_provider,
        return_url=return_url,
        payer_id=payer_id,
        foreinginvoice=foreinginvoice,
        invoice_id=invoice_id,
        payload=payload,
        redirect_result=redirect_result,
        result_code=result_code,
        session_id=session_id,
        status=status,
        token=token,
        type_=type_,
        user_id=user_id,
        namespace=namespace,
        x_additional_headers=x_additional_headers,
    )
    if error:
        raise Exception(f"publicNormalizePaymentReturnUrl failed: {str(error)}")
    # dump the SDK result as YAML, preserving field order
    click.echo(yaml.safe_dump(to_dict(result), sort_keys=False))
# metadata consumed by the CLI framework
public_normalize_payment_return_url.operation_id = "publicNormalizePaymentReturnUrl"
public_normalize_payment_return_url.is_deprecated = False
| 35.415929 | 125 | 0.708896 |
eb6557d0e302dfb20a6db8080766312ad518ee93 | 32,635 | py | Python | jams/river_network.py | MuellerSeb/jams_python | 1bca04557da79d8f8a4c447f5ccc517c40ab7dfc | [
"MIT"
] | null | null | null | jams/river_network.py | MuellerSeb/jams_python | 1bca04557da79d8f8a4c447f5ccc517c40ab7dfc | [
"MIT"
] | null | null | null | jams/river_network.py | MuellerSeb/jams_python | 1bca04557da79d8f8a4c447f5ccc517c40ab7dfc | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from __future__ import division, absolute_import, print_function
#
# This script reads a DEM from a netcdf file and calculates the flow direction
# following ArcGis convention:
#
# flow direction is assumed like this
# 64 y-axis
# 32 128 |
# 16 -1 1 |
# 8 2 \|/
# 4 V
# x-axis ------>
# ORIGIN is in the upper left corner
#
# sinks are marked by -1
#
# Nomenclature:
#
# co - channel orders
# fd - flow direction
# fa - flow accumulation
# yy - row index
# xx - column index
#
# author: Stephan Thober, David Schaefer
# created: 01.07.2015
import numpy as np
# global variables for calculating upstream cells:
# offsets of the 8 neighbours (plus the centre) of a cell, in row (yy) and
# column (xx) direction; both are flattened to 9-element vectors
# yy_offset, xx_offset = np.meshgrid(np.arange(-1, 2), np.arange(-1, 2))
yy_offset, xx_offset = np.meshgrid(np.arange(1, -2, -1), np.arange(-1, 2))
yy_offset = yy_offset.ravel() # yy_offset for neighboring cells
xx_offset = xx_offset.ravel() # xx_offset for neighboring cells
# local flow direction sorted in the order of yy_offset and xx_offset
# (ArcGIS power-of-two codes; -1 marks the centre cell / a sink)
local_flow_direction = np.array([8, 16, 32, 4, -1, 64, 2, 1, 128])
# same as local_flow_direction but reverted. This means these are the
# flow directions of neighboring cells flowing into the local cell.
inflow_direction = np.array([128, 1, 2, 64, -9999, 4, 32, 16, 8])
def cal_fdir(locs, fdir, factor):
    """Return the coarse-grid flow directions for the given fine-grid cells.

    locs is a (row_indices, col_indices) pair addressing cells INSIDE a
    factor x factor window of the fine map fdir. Directions follow the
    ArcGIS power-of-two convention with the origin in the upper left
    corner (rows increase downwards):

         32  64  128
         16  -1    1
          8   4    2

    A diagonal direction that would keep the flow inside the window is bent
    to the cardinal direction pointing towards the nearest window edge.
    Returns a float array with one direction per location.
    """
    rows, cols = locs
    coarse = np.zeros(rows.shape[0])
    for idx, (row, col) in enumerate(zip(rows, cols)):
        direction = fdir[row, col]
        if direction == 2:        # SE -> S while inside the window, else E
            if col + 1 < factor:
                direction = 4
            elif row + 1 < factor:
                direction = 1
        elif direction == 8:      # SW -> S when not at the left edge, else W
            if col > 0:
                direction = 4
            elif row + 1 < factor:
                direction = 16
        elif direction == 32:     # NW -> N when not at the left edge, else W
            if col > 0:
                direction = 64
            elif row > 0:
                direction = 16
        elif direction == 128:    # NE -> N while inside the window, else E
            if col + 1 < factor:
                direction = 64
            elif row > 0:
                direction = 1
        coarse[idx] = direction
    return coarse
def upscale_fdir(sn, factor, print_info=False, return_maxlocs=False, do_co=False, redo_fa=True, missing_value=-9999.):
    """
    Upscales a river network by a factor (integer > 1), that has to be a divisible of the
    resolution of the flow direction. Direction is given by the cell with the largest flow
    accumulation. If multiple of these exist, then one is chosen arbitrarily (the first).

    Definition
    ----------
    upscale_fdir(sn, factor, print_info=False, return_maxlocs=False, do_co=False, redo_fa=True)

    Input
    -----
    sn              river_network object containing flow direction, flow accumulation and sinks
    factor          integer indicating by which factor the flow direction should be upscaled

    Optional Input Parameters
    -------------------------
    print_info      flag for printing additional information
    return_maxlocs  flag for returning the fine-scale locations of the cells that
                    determined the coarse flow directions
    do_co           flag for calculating channel order on the coarse network
    redo_fa         flag for recalculating flow accumulation on the coarse network
    missing_value   fill value used for masked coarse cells (default: -9999.)

    Output
    ------
    river_network object at coarser resolution with upscaled flow direction
    (preceded by the maxlocs list if return_maxlocs is True)

    Restrictions
    ------------
    Given river_network object sn has to contain flow direction, accumulation and location of
    sinks following the convention below.
    The origin of the flow direction field is assumed to be located in the upper left corner.
    Then flow directions follow this convention:
         32  64  128          y-axis
         16  -1    1            |
          8   4    2           \\|/
        x-axis ------>
    Sinks are marked by -1.

    Examples
    --------
    >>> # Create some data
    >>> fd = np.ma.array([[ 2, 1, 1, 2, 4, 4, 8, 8, 8],
    ...                   [ 1, 2, 1, 1, 2, 4, 4, 4, 8],
    ...                   [128, 1, 128, 1, 1, 2, 4, 4, 4],
    ...                   [ 1, 128, 64, 128, 128, 2, 4, 4, 8],
    ...                   [128, 64, 64, 64, 1, 2, 4, 4, 4],
    ...                   [ 64, 128, 64, 32, 1, 1, 2, 2, 4],
    ...                   [128, 64, 64, 64, 1, 1, 1, 1, 1],
    ...                   [128, 64, 128, 64, 32, 1, 128, 64, 64],
    ...                   [ 64, 128, 64, 64, 64, 128, 64, 64, 32]])
    >>> sinks = np.array([[6], [8]])
    >>> sn = river_network(fdir=fd, do_fa=True, do_co=False, sinks=sinks)
    >>> print(upscale_fdir(sn, 3).fdir)
    [[1.0 2.0 4.0]
    [64.0 16.0 4.0]
    [64.0 64.0 1.0]]

    License
    -------
    This file is part of the JAMS Python package, distributed under the MIT License.
    Copyright (c) 2016 Stephan Thober

    History
    -------
    Written,  ST, Feb 2016
    Modified, MC, Nov 2016 - ported to Python 3
              bugfix: integer division when upscaling sink coordinates
    """
    # consistency checks
    if not np.ma.isMaskedArray(sn.fa):
        raise ValueError('***ERROR: upscale_fdir requires flow accumulation as masked array in river_network')
    if any(np.array(sn.fdir.shape) % factor != 0):
        raise ValueError('***ERROR: factor: ' + str(factor) + ' is not a divisible of flow direction shape')
    # create upscaled arrays (fully masked, filled with missing_value)
    new_shape = np.array(sn.fdir.shape) // factor
    new_fd = np.ma.masked_array(np.zeros(new_shape) + missing_value,
                                mask=np.ones(new_shape), fill_value=missing_value)
    new_fa = np.ma.masked_array(np.zeros(new_shape) + missing_value,
                                mask=np.ones(new_shape), fill_value=missing_value)
    # fine-scale locations of the outlet cell chosen for each coarse cell
    maxlocs = []
    for ii in range(new_fd.shape[0]):
        for jj in range(new_fd.shape[1]):
            # extract the factor x factor window of the fine-scale maps
            tmp_fa = sn.fa[ii * factor: (ii + 1) * factor, jj * factor: (jj + 1) * factor]
            tmp_fd = sn.fdir[ii * factor: (ii + 1) * factor, jj * factor: (jj + 1) * factor]
            if np.all(tmp_fa.mask):
                # entire window is masked -> coarse cell stays missing
                if print_info: print('cell is masked ', ii, jj)
                # use missing_value for consistency with the array fill value
                # (original hard-coded -9999. here)
                new_fd[ii, jj] = missing_value
                new_fa[ii, jj] = missing_value
            else:
                # determine locations of maximum flow accumulation: the cell
                # with the largest accumulation is the window's outlet
                maxloc = np.ma.where(tmp_fa == np.ma.amax(tmp_fa))
                # calculate coarse scale flow direction at those cells
                coarse_fd = cal_fdir(maxloc, tmp_fd, factor)
                if maxloc[0].shape[0] > 1:
                    # more than one outflow cell: check whether their coarse
                    # flow directions disagree
                    if print_info:
                        print(coarse_fd, ' flow directions of same cells')
                    if np.any(np.diff(coarse_fd) > 0):
                        print('***Warning: multiple cells with same flow accumulation but different flow directions found, arbitrarily choose first one')
                # store flow direction and flow accumulation of the first outlet
                new_fd[ii, jj] = coarse_fd[0]
                new_fa[ii, jj] = tmp_fa[maxloc[0][0], maxloc[1][0]]
                if print_info:
                    print('maxloc = ', maxloc)
                    print('tmp_fd = ', tmp_fd[maxloc], coarse_fd[0])
                    print('tmp_fa = ', tmp_fa[maxloc])
                    print('----------------')
                    print('new_fd = ', new_fd[ii, jj])
                    print('new_fa = ', new_fa[ii, jj])
                    print('================')
                # remember the fine-scale outlet location(s) of this coarse cell
                maxlocs.append([maxloc[0] + ii * factor, maxloc[1] + jj * factor])
    # upscale sinks.
    # BUGFIX: use floor division -- with "from __future__ import division" the
    # original "/" produced FLOAT indices, which cannot be used to index
    # numpy arrays when the coarse river_network is constructed below.
    upscale_sinks = tuple(np.array(sn.sinks) // int(factor))
    # build and return the coarse network
    if return_maxlocs:
        if redo_fa:
            return maxlocs, river_network(fdir=new_fd, do_co=do_co, do_fa=True, sinks=upscale_sinks)
        else:
            return maxlocs, river_network(fdir=new_fd, do_co=do_co, fa=new_fa)
    else:
        if redo_fa:
            return river_network(fdir=new_fd, do_co=do_co, do_fa=True, sinks=upscale_sinks)
        else:
            return river_network(fdir=new_fd, do_co=do_co, fa=new_fa)
class river_network(object):
def __init__(self, dem=None, fdir=None, co=None, do_co=False, fa=None, do_fa=False, print_info=False, missing_value=-9999., sinks=None):
"""
Initializes a river_network object describing the flow path of a river through the landscape.
Definition
----------
river_network(dem=None, fdir=None, co=None, do_co=False, fa=None, do_fa=False, missing_value=-9999., sinks=None):
Input
-----
dem digital elevation model (dem), basis for calculating flow direction fdir
fdir flow direction following convention below.
Optional Input Parameters
-------------------------
co channel order following Strahler 1952
do_co flag for calculating channel order
fa flow accumulation
do_fa flag for calculating flow accumulation
print_info flag for printing additional information
missing_value default: -9999.
sinks location of sinks as two arrays (first/second for y/x coordinate)
Options
-------
Output
------
Restrictions
------------
Either dem or fdir has to be provided, both cannot be omitted.
The origin of the flow direction field is assumed to be located in the upper left corner.
Then flow directions are following this convention:
Flow direction is assumed like this meaning that the ORIGIN IS THE UPPER LEFT CORNER
64 y-axis
32 128 |
16 -1 1 |
8 2 \|/
4 V
x-axis ------>
Sinks are marked by -1.
Examples
--------
>>> # Create some data
>>> fd = np.ma.array([[ 2, 1, 1, 2, 4, 4, 8, 8, 8],
... [ 1, 2, 1, 1, 2, 4, 4, 4, 8],
... [128, 1, 128, 1, 1, 2, 4, 4, 4],
... [ 1, 128, 64, 128, 128, 2, 4, 4, 8],
... [128, 64, 64, 64, 1, 2, 4, 4, 4],
... [ 64, 128, 64, 32, 1, 1, 2, 2, 4],
... [128, 64, 64, 64, 1, 1, 1, 1, 1],
... [128, 64, 128, 64, 32, 1, 128, 64, 64],
... [ 64, 128, 64, 64, 64, 128, 64, 64, 32]])
>>> sinks = np.array([[6], [8]])
>>> print(river_network(fdir=fd, do_fa=True, do_co=False, sinks=sinks).fa)
[[1.0 1.0 2.0 3.0 1.0 1.0 1.0 1.0 1.0]
[1.0 4.0 1.0 32.0 37.0 3.0 2.0 2.0 1.0]
[1.0 1.0 30.0 1.0 4.0 46.0 3.0 4.0 1.0]
[1.0 5.0 19.0 2.0 1.0 1.0 50.0 5.0 2.0]
[2.0 1.0 18.0 1.0 1.0 2.0 52.0 8.0 1.0]
[1.0 6.0 2.0 9.0 1.0 2.0 57.0 9.0 2.0]
[1.0 4.0 1.0 8.0 1.0 2.0 3.0 68.0 81.0]
[2.0 1.0 3.0 2.0 2.0 1.0 4.0 3.0 1.0]
[1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0]]
>>> print(river_network(fdir=fd, do_fa=False, do_co=True, sinks=sinks).co)
[[1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0]
[1.0 2.0 1.0 3.0 3.0 2.0 1.0 1.0 1.0]
[1.0 1.0 3.0 1.0 2.0 3.0 1.0 2.0 1.0]
[1.0 2.0 3.0 1.0 1.0 1.0 3.0 2.0 1.0]
[1.0 1.0 3.0 1.0 1.0 1.0 3.0 2.0 1.0]
[1.0 2.0 1.0 2.0 1.0 1.0 3.0 2.0 1.0]
[1.0 2.0 1.0 2.0 1.0 1.0 1.0 3.0 3.0]
[1.0 1.0 2.0 1.0 1.0 1.0 2.0 2.0 1.0]
[1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0]]
License
-------
This file is part of the JAMS Python package, distributed under the MIT
License. The JAMS Python package originates from the former UFZ Python library,
Department of Computational Hydrosystems, Helmholtz Centre for Environmental
Research - UFZ, Leipzig, Germany.
Copyright (c) 2016 Stephan Thober
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
History
-------
Written, ST, Dec 2015
"""
# initialize all arrays
self.dem = None # digital elevation model
self.fdir = None # flow direction
self.sinks = None # sinks
self.co = None # channel order
self.fa = None # flow accumulation
# consistency check
if fdir is None and dem is None:
raise ValueError('***ERROR: specify either dem or fdir to create a river_network object')
if fdir is None:
if print_info:
print('calculating flow direction')
# create flow direction if necessary
self.dem = dem
self.fdir = self.flow_direction()
if print_info:
print('calculating flow direction... ok')
else:
self.fdir = fdir
# assure that fdir is masked array
if not np.ma.isMaskedArray(self.fdir):
self.fdir = np.ma.array(self.fdir)
self.fdir.mask = False
# assign flow accumulation
if not fa is None:
self.fa = fa
# assign channel order
if not co is None:
self.co = co
# assign sinks
if sinks is None and fa is None:
raise ValueError('***ERROR: for initializing river network either the location of the sinks or flow accumulation has to be given')
elif sinks is None and not fa is None:
self.sinks = self._get_sinks()
elif not sinks is None and fa is None:
self.sinks = sinks
# get channel order and flow accumulation
if do_co and do_fa:
self.co = np.ma.masked_array(np.zeros(self.fdir.shape) + missing_value,
mask=np.ones(self.fdir.shape), fill_value=missing_value)
self.fa = np.ma.masked_array(np.zeros(self.fdir.shape) + missing_value,
mask=np.ones(self.fdir.shape), fill_value=missing_value)
for ii in range(self.sinks[0].shape[0]):
self.co, self.fa = self.network_properties(self.fdir, self.sinks[0][ii], self.sinks[1][ii],
do_co=do_co, co=self.co,
do_fa=do_fa, fa=self.fa,
missing_value=missing_value,
print_info=print_info)
elif do_co and not do_fa:
self.co = np.ma.masked_array(np.zeros(self.fdir.shape) + missing_value,
mask=np.ones(self.fdir.shape), fill_value=missing_value)
for ii in range(self.sinks[0].shape[0]):
self.co = self.network_properties(self.fdir, self.sinks[0][ii], self.sinks[1][ii],
do_co=do_co, co=self.co,
do_fa=do_fa,
missing_value=missing_value,
print_info=print_info)
elif not do_co and do_fa:
self.fa = np.ma.masked_array(np.zeros(self.fdir.shape) + missing_value,
mask=np.ones(self.fdir.shape), fill_value=missing_value)
for ii in range(self.sinks[0].shape[0]):
self.fa = self.network_properties(self.fdir, self.sinks[0][ii], self.sinks[1][ii],
do_co=do_co,
do_fa=do_fa, fa=self.fa,
missing_value=missing_value,
print_info=print_info)
def flow_direction(self, print_info=False):
    """
    Calculates flow direction from a DEM.
    Definition
    ----------
    def flow_direction(self, print_info=False)
    Input
    -----
    self self - river_network object containing a dem array
    Optional Input Parameters
    -------------------------
    print_info - flag for printing additional information
    Options
    -------
    Output
    ------
    fd array containing flow direction with the following convention
    Restrictions
    ------------
    The origin of the flow direction field is assumed to be located in the upper left corner.
    Then flow directions are following this convention:
    flow direction is assumed like this meaning that the ORIGIN IS THE UPPER LEFT CORNER
    64 y-axis
    32 128 |
    16 -1 1 |
    8 2 \|/
    4 V
    x-axis ------>
    Sinks are marked by -1.
    Examples
    --------
    >>> # Create some data
    >>> dem = np.ma.array([[ 30, 2, 1],
    ... [ 5, 10, 25],
    ... [ 15, 23, 24]])
    >>> dem.mask = np.zeros(dem.shape, dtype='bool')
    >>> print(river_network(dem=dem, do_fa=False, do_co=False, sinks=np.array([[0], [2]])).fdir)
    [[1.0 1.0 -1.0]
    [128.0 128.0 64.0]
    [64.0 32.0 32.0]]
    License
    -------
    This file is part of the JAMS Python package, distributed under the MIT
    License. The JAMS Python package originates from the former UFZ Python library,
    Department of Computational Hydrosystems, Helmholtz Centre for Environmental
    Research - UFZ, Leipzig, Germany.
    Copyright (c) 2015 Stephan Thober, David Schaefer
    Permission is hereby granted, free of charge, to any person obtaining a copy
    of this software and associated documentation files (the "Software"), to deal
    in the Software without restriction, including without limitation the rights
    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    copies of the Software, and to permit persons to whom the Software is
    furnished to do so, subject to the following conditions:
    The above copyright notice and this permission notice shall be included in all
    copies or substantial portions of the Software.
    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    SOFTWARE.
    History
    -------
    Written, ST & DS, Dec 2015
    """
    # global variable used: local_flow_direction (module-level lookup table
    # mapping each neighbor position to its direction code; the original
    # comment said "correct_direction", which does not exist in this method)
    fd = np.zeros(self.dem.shape)
    #
    # Visit every DEM cell; masked (invalid) cells keep flow direction 0.
    for ii in range(self.dem.shape[0]):
        for jj in range(self.dem.shape[1]):
            if self.dem.mask[ii, jj]:
                continue
            if print_info:
                print('processing cell: ', ii, jj)
            # get mask of neighbors and y and x locations
            neighbors, yy, xx = self._get_neighbors(self.dem, ii, jj)
            # get position of cell with steepest gradient; subtracting the
            # center elevation does not change argmin but documents that we
            # look for the steepest DOWNWARD drop.  If the center itself is
            # among the candidates and is the minimum, the direction code
            # resolves to -1 (a sink) via local_flow_direction.
            pos_min = np.ma.argmin(self.dem[yy, xx] - self.dem[ii, jj])
            fd[ii, jj] = local_flow_direction[neighbors][pos_min]
    return fd
def network_properties(self, fd, yy, xx, print_info=False, do_co=True, co=None, do_fa=True, fa=None,
                       missing_value=-9999.):
    """
    Calculates channel order number and flow accumulation starting from one sink in a flow direction map
    channel order is calculated following Strahler 1952. It is ONE for headwater. If channels join, the
    channel order of the resulting stream is the highest one of the inflowing streams, if two or more than
    two inflowing streams have the highest channel order, the channel order of the resulting stream is one
    higher than the highest channel order of the inflowing streams.
    Definition
    ----------
    def network_properties(self, fd, yy, xx, print_info=False, do_co=True, co=None, do_fa=True, fa=None, missing_value=-9999.):
    Input
    -----
    self self - river_network object
    fd flow direction field, basically river_network.fd
    yy row coordinate of sink
    xx column coordinate of sink
    Optional Input Parameters
    -------------------------
    print_info write additional info on std_out
    do_co calculate channel order
    co given channel order field
    do_fa calculate flow accumulation
    fa given flow accumulation
    missing_value floating point value for masking
    Options
    -------
    print_info True: write additional information
    False: do not write additional information (default)
    do_co True: calculate channel order (default)
    False: do not channel order
    co None: no channel order field specified, will be created (default)
    do_fa True: calculate flow accumulation (default)
    False: do not flow accumulation
    fa None: no flow accumulation field specified, will be created (default)
    Output
    ------
    Depending on options:
    co, fa if do_co=True and do_fa=True
    co if do_co=True and not do_fa=True
    fa if not do_co=True and do_fa=True
    Raises
    ------
    ValueError if neither do_co nor do_fa is requested.
    Restrictions
    ------------
    The origin of the flow direction field is assumed to be located in the upper left corner.
    Then flow directions are following this convention:
    flow direction is assumed like this meaning that the ORIGIN IS THE UPPER LEFT CORNER
    64 y-axis
    32 128 |
    16 -1 1 |
    8 2 \|/
    4 V
    x-axis ------>
    Sinks are marked by -1.
    Examples
    --------
    License
    -------
    This file is part of the JAMS Python package, distributed under the MIT
    License. The JAMS Python package originates from the former UFZ Python library,
    Department of Computational Hydrosystems, Helmholtz Centre for Environmental
    Research - UFZ, Leipzig, Germany.
    Copyright (c) 2015-2018 Stephan Thober, David Schaefer, Matthias Cuntz
    Permission is hereby granted, free of charge, to any person obtaining a copy
    of this software and associated documentation files (the "Software"), to deal
    in the Software without restriction, including without limitation the rights
    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    copies of the Software, and to permit persons to whom the Software is
    furnished to do so, subject to the following conditions:
    The above copyright notice and this permission notice shall be included in all
    copies or substantial portions of the Software.
    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    SOFTWARE.
    History
    -------
    Written, ST & DS, Dec 2015
    Modified, MC, Nov 2016 - ported to Python 3
    MC, Oct 2018 - int indices in fa[]
    """
    # Fail fast before allocating anything: at least one product must be
    # requested.  BUG FIX: the original raised "ValueERROR", a name that
    # does not exist, which turned the intended ValueError into a NameError.
    if not do_co and not do_fa:
        raise ValueError('***ERROR: neither fa nor co calculated')
    # Allocate fully-masked output fields on demand; cells are unmasked as
    # soon as their value has been computed.
    if co is None and do_co:
        co = np.ma.masked_array(np.zeros(fd.shape) + missing_value,
                                mask=np.ones(fd.shape), fill_value=missing_value)
    if fa is None and do_fa:
        fa = np.ma.masked_array(np.zeros(fd.shape) + missing_value,
                                mask=np.ones(fd.shape), fill_value=missing_value)
    # flow direction stack to emulate recursion (depth-first walk upstream)
    fd_stack = [[yy, xx]]  # start at initial sink
    while fd_stack:
        if print_info:
            print('current flow accumulation stack: ', fd_stack)
        upstream = self._get_upstream(fd, fd_stack[-1])
        if print_info:
            print('upstream locations: ', upstream)
        if do_co:
            # use co for identifying upstream cells that are not processed yet
            ext = [l for l in upstream if co.data[int(l[0]), int(l[1])] == missing_value]
        else:
            # use fa for identifying upstream cells that are not processed yet
            ext = [l for l in upstream if fa.data[int(l[0]), int(l[1])] == missing_value]
        if ext:
            # defer this cell until all its upstream contributors are done
            fd_stack.extend(ext)
            continue
        # all upstream cells are available
        # note that headwaters dont have an upstream cell
        cell = fd_stack.pop()  # save cell
        if do_co:
            co_upstream = [co[loc[0], loc[1]] for loc in upstream]
            co_max = np.amax(co_upstream + [1])
            # Strahler rule: if two (or more) streams of equal highest order
            # merge, the downstream order is incremented by one.
            if len(np.where(co_upstream == co_max)[0]) > 1:
                co_max += 1
            co.data[int(cell[0]), int(cell[1])] = co_max
            co.mask[int(cell[0]), int(cell[1])] = False
            if print_info:
                print('co (channel order) of upstream: ', co_upstream)
        if do_fa:
            fa_upstream = [fa[int(loc[0]), int(loc[1])] for loc in upstream]
            # accumulation = everything draining in, plus the cell itself
            fa.data[int(cell[0]), int(cell[1])] = np.sum(fa_upstream) + 1
            fa.mask[int(cell[0]), int(cell[1])] = False
            if print_info:
                print('sum of upstream: ', np.sum(fa_upstream))
    if do_co and do_fa:
        return co, fa
    elif do_co and not do_fa:
        return co
    elif not do_co and do_fa:
        return fa
def _get_sinks(self):
# set sinks to maximum flow accumulation
return np.ma.where(self.fa == np.amax(self.fa))
def _get_neighbors(self, arr, yy_loc, xx_loc):
    """Return (validity mask, row indices, column indices) of the cells
    surrounding (yy_loc, xx_loc) that fall inside ``arr``.

    Relies on the module-level offset templates ``yy_offset`` and
    ``xx_offset`` describing the neighborhood stencil.
    """
    rows = yy_offset + yy_loc
    cols = xx_offset + xx_loc
    # keep only stencil positions that lie within the array bounds
    inside_rows = (rows >= 0) & (rows < arr.shape[0])
    inside_cols = (cols >= 0) & (cols < arr.shape[1])
    valid = inside_rows & inside_cols
    return valid, rows[valid], cols[valid]
def _get_upstream(self, fd, loc):
    """Return [[y, x], ...] of the neighbor cells whose flow drains INTO ``loc``.

    Uses the module-level ``inflow_direction`` lookup: a neighbor is upstream
    when its flow-direction code equals the inflow code for its position
    relative to ``loc``.
    """
    # global variable used: inflow_direction
    #
    # get mask of neighbors and y and x locations
    neighbors, yy, xx = self._get_neighbors(fd, loc[0], loc[1])
    # mask inflowing cells.  BUG FIX: the deprecated alias ``np.int`` was
    # removed in NumPy 1.20+ (hard error since 1.24); the builtin ``int``
    # is the documented, behaviorally identical replacement.
    upstream_mask = (fd.data[yy.astype(int), xx.astype(int)] == inflow_direction[neighbors])
    yy_upstream = yy[upstream_mask]
    xx_upstream = xx[upstream_mask]
    return [[yy_upstream[ii], xx_upstream[ii]] for ii in range(np.sum(upstream_mask))]
if __name__ == '__main__':
    # Run the doctests embedded in the docstrings above when executed as a
    # script.  NORMALIZE_WHITESPACE lets the masked-array example output
    # match regardless of spacing differences in the printed arrays.
    import doctest
    doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE)
| 45.326389 | 153 | 0.531117 |
468113bfb30ee029a902e0ae0fbe91a4bf9a2ff9 | 912 | py | Python | base/urls.py | omololevy/study-college | 5ce482b4f09314fd370509654337e95ec39c4612 | [
"MIT"
] | 1 | 2022-03-21T08:23:19.000Z | 2022-03-21T08:23:19.000Z | base/urls.py | omololevy/study-college | 5ce482b4f09314fd370509654337e95ec39c4612 | [
"MIT"
] | 1 | 2022-03-21T08:21:27.000Z | 2022-03-21T08:21:27.000Z | base/urls.py | omololevy/study-college | 5ce482b4f09314fd370509654337e95ec39c4612 | [
"MIT"
] | null | null | null | from django.urls import path, include
from . import views
# URL routes for the base app.  Each route's ``name`` is used for reverse()
# lookups in templates and views.
urlpatterns = [
    path('', views.index, name='index'),  # landing page
    path('register/', views.signup, name='signup'),
    # built-in auth views (login, logout, password reset, ...)
    path('account/', include('django.contrib.auth.urls')),
    path('all-cohorts/', views.cohorts, name='cohort'),
    path('new-cohort/', views.create_cohort, name='new-cohort'),
    # user profile pages; <username> is captured as a string path parameter
    path('profile/<username>', views.profile, name='profile'),
    path('profile/<username>/edit/', views.edit_profile, name='edit-profile'),
    # cohort membership actions keyed by cohort id
    path('join_cohort/<id>', views.join_cohort, name='join-cohort'),
    path('leave_cohort/<id>', views.leave_cohort, name='leave-cohort'),
    path('single_cohort/<cohort_id>', views.single_cohort, name='single-cohort'),
    path('<cohort_id>/new-topic', views.create_topic, name='topic'),
    path('<cohort_id>/members', views.cohort_members, name='members'),
    path('search/', views.search_module, name='search'),
]
| 48 | 81 | 0.680921 |
ed7911a1df67f1346d5d024e0dd912013369c8e8 | 68 | py | Python | dictsum.py | sushmitajaiswal/PythonPrograms | d4fb1b36953185e2f8dd866798ca6965a52563a9 | [
"MIT"
] | null | null | null | dictsum.py | sushmitajaiswal/PythonPrograms | d4fb1b36953185e2f8dd866798ca6965a52563a9 | [
"MIT"
] | null | null | null | dictsum.py | sushmitajaiswal/PythonPrograms | d4fb1b36953185e2f8dd866798ca6965a52563a9 | [
"MIT"
] | null | null | null | d=eval(input("enter dictionary:"))
s=sum(d.values())
print("sum=",s) | 22.666667 | 34 | 0.661765 |
9c742845a98a7fec994403fb8fb6ea563ce9b667 | 5,017 | py | Python | JVTC/utils/resnet.py | FlyingRoastDuck/UnrealPerson | 9fd5a882bea16f6289f0353b78140bff36c16609 | [
"Apache-2.0"
] | 62 | 2020-12-09T02:05:32.000Z | 2022-03-30T03:04:25.000Z | JVTC/utils/resnet.py | FlyingRoastDuck/UnrealPerson | 9fd5a882bea16f6289f0353b78140bff36c16609 | [
"Apache-2.0"
] | 8 | 2021-04-16T10:18:44.000Z | 2022-03-24T04:51:29.000Z | JVTC/utils/resnet.py | FlyingRoastDuck/UnrealPerson | 9fd5a882bea16f6289f0353b78140bff36c16609 | [
"Apache-2.0"
] | 7 | 2020-12-09T03:38:25.000Z | 2022-02-05T06:21:33.000Z | import torch.nn as nn
import math, torch
import torch.utils.model_zoo as model_zoo
from torch.nn import init
from torch.nn import functional as F
class Bottleneck(nn.Module):
    """ResNet bottleneck residual block: 1x1 reduce -> 3x3 -> 1x1 expand.

    The output channel count is ``planes * expansion``.  ``momentum=None`` on
    every BatchNorm layer selects the cumulative-moving-average update for the
    running statistics instead of the default exponential one.
    """
    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(Bottleneck, self).__init__()
        # Layer creation order is kept stable so parameter initialization is
        # reproducible under a fixed random seed.
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes, momentum=None)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                               padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes, momentum=None)
        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(planes * 4, momentum=None)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        # Shortcut branch: identity, or a projection when shapes differ.
        identity = x if self.downsample is None else self.downsample(x)
        # Main branch: conv -> BN -> ReLU twice, then conv -> BN.
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.relu(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        # Residual addition followed by the final activation.
        out += identity
        return self.relu(out)
class ResNet(nn.Module):
    """ResNet backbone with an embedding head for person re-identification.

    forward() returns a triple ``(logits, L2-normalized feature, raw feature)``.
    All BatchNorm layers use ``momentum=None`` (cumulative moving average for
    the running statistics).  NOTE(review): ``layer4`` is built with
    ``stride=1`` (instead of the classification-ResNet stride 2), which keeps
    a larger spatial feature map before pooling — presumably the "last stride"
    trick common in re-ID models; confirm against the training recipe.
    """
    def __init__(self, block, layers, num_classes=1000, train=True):
        # ``block`` is the residual block class (e.g. Bottleneck);
        # ``layers`` gives the number of blocks per stage.
        self.inplanes = 64
        super(ResNet, self).__init__()
        self.istrain = train
        # Stem: 7x7 conv, BN, ReLU, 3x3 max-pool.
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False)
        self.bn1 = nn.BatchNorm2d(64, momentum=None)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        # Four residual stages; layer4 deliberately keeps stride=1 (see above).
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
        self.layer4 = self._make_layer(block, 512, layers[3], stride=1)
        #self.avgpool = nn.AvgPool2d((16,8), stride=1)
        # Embedding head: linear projection to 512-d, BN, then classifier.
        self.num_features = 512
        self.feat = nn.Linear(512 * block.expansion, self.num_features)
        init.kaiming_normal_(self.feat.weight, mode='fan_out')
        init.constant_(self.feat.bias, 0)
        self.feat_bn = nn.BatchNorm1d(self.num_features, momentum=None)
        init.constant_(self.feat_bn.weight, 1)
        init.constant_(self.feat_bn.bias, 0)
        self.classifier = nn.Linear(self.num_features, num_classes)
        init.normal_(self.classifier.weight, std=0.001)
        init.constant_(self.classifier.bias, 0)
        # He-style initialization for all conv layers; BN starts as identity.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
    def _make_layer(self, block, planes, blocks, stride=1):
        # Build one residual stage; the first block may downsample/project the
        # shortcut when the stride or channel count changes.
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion, momentum=None),)
        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes))
        return nn.Sequential(*layers)
    def forward(self, x):
        # Stem + residual stages.
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        # Global average pooling over the remaining spatial extent.
        x = F.avg_pool2d(x, x.size()[2:])
        x = x.view(x.size(0), -1)
        # Embedding head.
        x = self.feat(x)
        fea = self.feat_bn(x)
        fea_norm = F.normalize(fea)
        x = F.relu(fea)
        x = self.classifier(x)
        # (class logits, normalized embedding, raw BN'd embedding)
        return x, fea_norm, fea
def resnet50(pretrained=None, num_classes=1000, train=True):
    """Build a ResNet-50 re-ID model, optionally loading pretrained weights.

    Parameters
    ----------
    pretrained : str or None
        Path to a checkpoint loadable with ``torch.load``.  BUG FIX: the
        original called ``torch.load(pretrained)`` unconditionally, so the
        documented default ``None`` always crashed; loading is now skipped
        when no path is given.
    num_classes : int
        Size of the classifier head.
    train : bool
        Forwarded to the ResNet constructor.

    Returns
    -------
    (model, params) where ``params`` is an optimizer parameter-group list:
    parameters restored from the checkpoint get ``lr_mult`` 1, newly
    initialized ones get ``lr_mult`` 10.
    """
    model = ResNet(Bottleneck, [3, 4, 6, 3], num_classes, train)
    base_param = []
    if pretrained is not None:
        weight = torch.load(pretrained)
        static = model.state_dict()
        for name, param in weight.items():
            # skip checkpoint entries that do not exist in this architecture
            if name not in static:
                continue
            # never restore the classifier head (num_classes may differ)
            if 'classifier' in name:
                continue
            if isinstance(param, nn.Parameter):
                param = param.data
            static[name].copy_(param)
            base_param.append(name)
    params = []
    params_dict = dict(model.named_parameters())
    for key, v in params_dict.items():
        if key in base_param:
            params += [{'params': v, 'lr_mult': 1}]
        else:
            # new (randomly initialized) parameters get a larger learning rate
            params += [{'params': v, 'lr_mult': 10}]
    return model, params
| 33.446667 | 85 | 0.587801 |
3f8d00f8f50a25f52eaf189e1f12727b4c34ffe3 | 13,015 | py | Python | tests/components/apns/test_notify.py | alindeman/home-assistant | b274b10f3874c196f0db8f9cfa5f47eb756d1f8e | [
"Apache-2.0"
] | 4 | 2019-07-03T22:36:57.000Z | 2019-08-10T15:33:25.000Z | tests/components/apns/test_notify.py | alindeman/home-assistant | b274b10f3874c196f0db8f9cfa5f47eb756d1f8e | [
"Apache-2.0"
] | 7 | 2019-08-23T05:26:02.000Z | 2022-03-11T23:57:18.000Z | tests/components/apns/test_notify.py | alindeman/home-assistant | b274b10f3874c196f0db8f9cfa5f47eb756d1f8e | [
"Apache-2.0"
] | 2 | 2018-08-15T03:59:35.000Z | 2018-10-18T12:20:05.000Z | """The tests for the APNS component."""
import io
import unittest
from unittest.mock import Mock, patch, mock_open
from apns2.errors import Unregistered
import yaml
import homeassistant.components.notify as notify
from homeassistant.setup import setup_component
import homeassistant.components.apns.notify as apns
from homeassistant.core import State
from tests.common import assert_setup_component, get_test_home_assistant
# Minimal valid apns notify-platform configuration shared by the tests below.
CONFIG = {
    notify.DOMAIN: {
        'platform': 'apns',
        'name': 'test_app',
        'topic': 'testapp.appname',
        'cert_file': 'test_app.pem'
    }
}
# Patch open() inside the apns.notify module so the component never touches
# the real filesystem when it persists its device registry.
@patch('homeassistant.components.apns.notify.open', mock_open(), create=True)
class TestApns(unittest.TestCase):
    """Test the APNS component."""

    def setUp(self):  # pylint: disable=invalid-name
        """Set up things to be run when tests are started."""
        self.hass = get_test_home_assistant()

    def tearDown(self):  # pylint: disable=invalid-name
        """Stop everything that was started."""
        self.hass.stop()

    @patch('os.path.isfile', Mock(return_value=True))
    @patch('os.access', Mock(return_value=True))
    def _setup_notify(self):
        """Helper: set up the notify platform with the shared CONFIG."""
        # Guard against running with the real YAML loader: the registry file
        # does not exist, so load_yaml_config_file must be mocked by callers.
        assert isinstance(apns.load_yaml_config_file, Mock), \
            'Found unmocked load_yaml'
        with assert_setup_component(1) as handle_config:
            assert setup_component(self.hass, notify.DOMAIN, CONFIG)
        assert handle_config[notify.DOMAIN]

    @patch('os.path.isfile', return_value=True)
    @patch('os.access', return_value=True)
    def test_apns_setup_full(self, mock_access, mock_isfile):
        """Test setup with all data."""
        config = {
            'notify': {
                'platform': 'apns',
                'name': 'test_app',
                'sandbox': 'True',
                'topic': 'testapp.appname',
                'cert_file': 'test_app.pem'
            }
        }
        with assert_setup_component(1) as handle_config:
            assert setup_component(self.hass, notify.DOMAIN, config)
        assert handle_config[notify.DOMAIN]

    def test_apns_setup_missing_name(self):
        """Test setup with missing name."""
        config = {
            'notify': {
                'platform': 'apns',
                'topic': 'testapp.appname',
                'cert_file': 'test_app.pem',
            }
        }
        with assert_setup_component(0) as handle_config:
            assert setup_component(self.hass, notify.DOMAIN, config)
        assert not handle_config[notify.DOMAIN]

    def test_apns_setup_missing_certificate(self):
        """Test setup with missing certificate."""
        config = {
            'notify': {
                'platform': 'apns',
                'name': 'test_app',
                'topic': 'testapp.appname',
            }
        }
        with assert_setup_component(0) as handle_config:
            assert setup_component(self.hass, notify.DOMAIN, config)
        assert not handle_config[notify.DOMAIN]

    def test_apns_setup_missing_topic(self):
        """Test setup with missing topic."""
        config = {
            'notify': {
                'platform': 'apns',
                'name': 'test_app',
                'cert_file': 'test_app.pem',
            }
        }
        with assert_setup_component(0) as handle_config:
            assert setup_component(self.hass, notify.DOMAIN, config)
        assert not handle_config[notify.DOMAIN]

    @patch('homeassistant.components.apns.notify._write_device')
    def test_register_new_device(self, mock_write):
        """Test registering a new device with a name."""
        yaml_file = {5678: {'name': 'test device 2'}}
        # Capture every device the component tries to persist.
        written_devices = []
        def fake_write(_out, device):
            """Fake write_device."""
            written_devices.append(device)
        mock_write.side_effect = fake_write
        with patch(
                'homeassistant.components.apns.notify.load_yaml_config_file',
                Mock(return_value=yaml_file)):
            self._setup_notify()
            assert self.hass.services.call(notify.DOMAIN, 'apns_test_app', {
                'push_id': '1234',
                'name': 'test device'
            }, blocking=True)
        # Only the newly registered device should have been written.
        assert len(written_devices) == 1
        assert written_devices[0].name == 'test device'

    @patch('homeassistant.components.apns.notify._write_device')
    def test_register_device_without_name(self, mock_write):
        """Test registering a device without a name."""
        yaml_file = {
            1234: {
                'name': 'test device 1',
                'tracking_device_id': 'tracking123',
            },
            5678: {
                'name': 'test device 2',
                'tracking_device_id': 'tracking456',
            },
        }
        written_devices = []
        def fake_write(_out, device):
            """Fake write_device."""
            written_devices.append(device)
        mock_write.side_effect = fake_write
        with patch(
                'homeassistant.components.apns.notify.load_yaml_config_file',
                Mock(return_value=yaml_file)):
            self._setup_notify()
            assert self.hass.services.call(notify.DOMAIN, 'apns_test_app', {
                'push_id': '1234'
            }, blocking=True)
        devices = {dev.push_id: dev for dev in written_devices}
        test_device = devices.get('1234')
        assert test_device is not None
        # Re-registering without a name clears the stored name.
        assert test_device.name is None

    @patch('homeassistant.components.apns.notify._write_device')
    def test_update_existing_device(self, mock_write):
        """Test updating an existing device."""
        yaml_file = {
            1234: {
                'name': 'test device 1',
            },
            5678: {
                'name': 'test device 2',
            },
        }
        written_devices = []
        def fake_write(_out, device):
            """Fake write_device."""
            written_devices.append(device)
        mock_write.side_effect = fake_write
        with patch(
                'homeassistant.components.apns.notify.load_yaml_config_file',
                Mock(return_value=yaml_file)):
            self._setup_notify()
            assert self.hass.services.call(notify.DOMAIN, 'apns_test_app', {
                'push_id': '1234',
                'name': 'updated device 1'
            }, blocking=True)
        devices = {dev.push_id: dev for dev in written_devices}
        test_device_1 = devices.get('1234')
        test_device_2 = devices.get('5678')
        assert test_device_1 is not None
        assert test_device_2 is not None
        assert 'updated device 1' == test_device_1.name

    @patch('homeassistant.components.apns.notify._write_device')
    def test_update_existing_device_with_tracking_id(self, mock_write):
        """Test updating an existing device that has a tracking id."""
        yaml_file = {
            1234: {
                'name': 'test device 1',
                'tracking_device_id': 'tracking123',
            },
            5678: {
                'name': 'test device 2',
                'tracking_device_id': 'tracking456',
            },
        }
        written_devices = []
        def fake_write(_out, device):
            """Fake write_device."""
            written_devices.append(device)
        mock_write.side_effect = fake_write
        with patch(
                'homeassistant.components.apns.notify.load_yaml_config_file',
                Mock(return_value=yaml_file)):
            self._setup_notify()
            assert self.hass.services.call(notify.DOMAIN, 'apns_test_app', {
                'push_id': '1234',
                'name': 'updated device 1'
            }, blocking=True)
        devices = {dev.push_id: dev for dev in written_devices}
        test_device_1 = devices.get('1234')
        test_device_2 = devices.get('5678')
        assert test_device_1 is not None
        assert test_device_2 is not None
        # The tracking ids must survive the rename / rewrite.
        assert 'tracking123' == \
            test_device_1.tracking_device_id
        assert 'tracking456' == \
            test_device_2.tracking_device_id

    @patch('apns2.client.APNsClient')
    def test_send(self, mock_client):
        """Test sending a notification with badge, sound and category data."""
        send = mock_client.return_value.send_notification
        yaml_file = {1234: {'name': 'test device 1'}}
        with patch(
                'homeassistant.components.apns.notify.load_yaml_config_file',
                Mock(return_value=yaml_file)):
            self._setup_notify()
            assert self.hass.services.call(
                'notify', 'test_app',
                {'message': 'Hello', 'data': {
                    'badge': 1,
                    'sound': 'test.mp3',
                    'category': 'testing'}},
                blocking=True)
        assert send.called
        assert 1 == len(send.mock_calls)
        # Inspect the positional args of the single send_notification call.
        target = send.mock_calls[0][1][0]
        payload = send.mock_calls[0][1][1]
        assert '1234' == target
        assert 'Hello' == payload.alert
        assert 1 == payload.badge
        assert 'test.mp3' == payload.sound
        assert 'testing' == payload.category

    @patch('apns2.client.APNsClient')
    def test_send_when_disabled(self, mock_client):
        """Test that nothing is sent to a device marked as disabled."""
        send = mock_client.return_value.send_notification
        yaml_file = {1234: {
            'name': 'test device 1',
            'disabled': True,
        }}
        with patch(
                'homeassistant.components.apns.notify.load_yaml_config_file',
                Mock(return_value=yaml_file)):
            self._setup_notify()
            assert self.hass.services.call(
                'notify', 'test_app',
                {'message': 'Hello', 'data': {
                    'badge': 1,
                    'sound': 'test.mp3',
                    'category': 'testing'}},
                blocking=True)
        assert not send.called

    @patch('apns2.client.APNsClient')
    def test_send_with_state(self, mock_client):
        """Test sending to targets resolved from device-tracker state."""
        send = mock_client.return_value.send_notification
        yaml_file = {
            1234: {
                'name': 'test device 1',
                'tracking_device_id': 'tracking123',
            },
            5678: {
                'name': 'test device 2',
                'tracking_device_id': 'tracking456',
            },
        }
        with patch(
                'homeassistant.components.apns.notify.load_yaml_config_file',
                Mock(return_value=yaml_file)), \
                patch('os.path.isfile', Mock(return_value=True)):
            notify_service = apns.ApnsNotificationService(
                self.hass,
                'test_app',
                'testapp.appname',
                False,
                'test_app.pem'
            )
        # Simulate device 5678's tracker entering the 'home' state, then
        # target that state: only 5678 should receive the message.
        notify_service.device_state_changed_listener(
            'device_tracker.tracking456',
            State('device_tracker.tracking456', None),
            State('device_tracker.tracking456', 'home'))
        notify_service.send_message(message='Hello', target='home')
        assert send.called
        assert 1 == len(send.mock_calls)
        target = send.mock_calls[0][1][0]
        payload = send.mock_calls[0][1][1]
        assert '5678' == target
        assert 'Hello' == payload.alert

    @patch('apns2.client.APNsClient')
    @patch('homeassistant.components.apns.notify._write_device')
    def test_disable_when_unregistered(self, mock_write, mock_client):
        """Test disabling a device when it is unregistered."""
        send = mock_client.return_value.send_notification
        # APNs reports the push id as no longer registered.
        send.side_effect = Unregistered()
        yaml_file = {
            1234: {
                'name': 'test device 1',
                'tracking_device_id': 'tracking123',
            },
            5678: {
                'name': 'test device 2',
                'tracking_device_id': 'tracking456',
            },
        }
        written_devices = []
        def fake_write(_out, device):
            """Fake write_device."""
            written_devices.append(device)
        mock_write.side_effect = fake_write
        with patch(
                'homeassistant.components.apns.notify.load_yaml_config_file',
                Mock(return_value=yaml_file)):
            self._setup_notify()
            assert self.hass.services.call('notify', 'test_app',
                                           {'message': 'Hello'},
                                           blocking=True)
        devices = {dev.push_id: dev for dev in written_devices}
        test_device_1 = devices.get('1234')
        assert test_device_1 is not None
        # The unregistered device must be persisted as disabled.
        assert test_device_1.disabled is True
def test_write_device():
    """Serializing a device must yield the expected YAML mapping."""
    buffer = io.StringIO()
    apns._write_device(buffer, apns.ApnsDevice('123', 'name', 'track_id', True))
    expected = {
        123: {
            'name': 'name',
            'tracking_device_id': 'track_id',
            'disabled': True
        },
    }
    assert yaml.safe_load(buffer.getvalue()) == expected
| 31.666667 | 77 | 0.57088 |
b13f78c6ba7663fe9fe53ca6a706d6aeb6940a8a | 178 | py | Python | EduRec/meta/ReplayBuffer/__init__.py | tswsxk/EduRec | b7514acc4dea805375fe512534d8ab6ad8916374 | [
"MIT"
] | 2 | 2021-03-03T11:04:53.000Z | 2021-09-03T09:12:32.000Z | EduRec/meta/ReplayBuffer/__init__.py | bigdata-ustc/EduRec | 133f3fd0ce74b7c8f36f0e5a2cf71bbf759ee4c8 | [
"MIT"
] | null | null | null | EduRec/meta/ReplayBuffer/__init__.py | bigdata-ustc/EduRec | 133f3fd0ce74b7c8f36f0e5a2cf71bbf759ee4c8 | [
"MIT"
] | 1 | 2022-03-12T00:27:52.000Z | 2022-03-12T00:27:52.000Z | # coding: utf-8
# 2021/2/13 @ tongshiwei
from .ReplayBuffer import ReplayBuffer, FiniteReplayBuffer, InfinityReplayBuffer
from .CircularReplayBuffer import CircularReplayBuffer | 29.666667 | 80 | 0.837079 |
e65a443276fd4aeffc90475a8a4a2945d4e08a10 | 1,042 | py | Python | parks/tests/views/test_profile.py | bskari/park-stamper | cb347b9fdb7b71bd2cc100547bd171336f7ac272 | [
"MIT"
] | null | null | null | parks/tests/views/test_profile.py | bskari/park-stamper | cb347b9fdb7b71bd2cc100547bd171336f7ac272 | [
"MIT"
] | null | null | null | parks/tests/views/test_profile.py | bskari/park-stamper | cb347b9fdb7b71bd2cc100547bd171336f7ac272 | [
"MIT"
] | null | null | null | from mock import patch
from pyramid import testing
from parks.models import User
from parks.tests.test_base import IntegrationTestBase
from parks.views.profile import profile_user
class ProfileUnitTest(IntegrationTestBase):
    """Tests for the profile views."""

    @patch('parks.logic.user.get_user_by_username_or_email')
    @patch('parks.logic.stamp_collection.get_recent_collections_by_user_id')
    def test_profile_user_view(self, mock_get_recent_collections, mock_get_user):
        """The profile page for a user exposes the expected template keys."""
        # BUG FIX: stacked @patch decorators inject mocks bottom-up, so the
        # FIRST argument corresponds to the decorator closest to the function
        # (get_recent_collections_by_user_id).  The original parameter names
        # were swapped, so the User return value was configured on the wrong
        # mock and get_user_by_username_or_email returned a bare MagicMock.
        mock_get_user.return_value = User(
            username='guest',
            password='123',
            signup_ip=1,
        )
        # NOTE(review): an empty collection list keeps the view deterministic;
        # confirm the view tolerates [] the same way it did a MagicMock.
        mock_get_recent_collections.return_value = []
        request = testing.DummyRequest()
        request.matchdict['username'] = 'guest'
        page = profile_user(request)
        expected_keys = set((
            'stamp_collections',
            'days',
            'personal_profile',
            'username',
        ))
        self.assertEqual(expected_keys, set(page.keys()))

    def test_profile_personal_view(self):
        # TODO (bskari|2013-07-04) Implement this
        pass
| 29.771429 | 76 | 0.671785 |
283eae9acbde0c66c55aedb0c3708736fd6ca494 | 2,202 | py | Python | oncopolicy/models/deterministic_screening.py | yala/Tempo | bf3e0e78d64869bb2079c582a4a35982f78386ad | [
"MIT"
] | 6 | 2022-01-15T11:57:19.000Z | 2022-02-13T21:15:22.000Z | oncopolicy/models/deterministic_screening.py | yala/Tempo | bf3e0e78d64869bb2079c582a4a35982f78386ad | [
"MIT"
] | null | null | null | oncopolicy/models/deterministic_screening.py | yala/Tempo | bf3e0e78d64869bb2079c582a4a35982f78386ad | [
"MIT"
] | 2 | 2022-02-02T13:09:29.000Z | 2022-02-18T07:06:19.000Z | import torch
import torch.nn as nn
from oncopolicy.models.factory import RegisterModel
from oncopolicy.datasets.personalized_screening import PAD_Y_VALUE
import pdb
# Screening intervals are expressed in units of 6-month steps:
# annual = 2 steps, biannual = 4 steps.
ANNUAL_GUIDELINE_IN_6MO_INTERVALS = 2
BIANNUAL_GUIDELINE_IN_6MO_INTERVALS = 4
# Age (in years) at which age-based guidelines switch from annual to
# biannual screening.
SWITCHING_AGE = 55
class AbstractDeterministicGuideline(nn.Module):
    """Base class for fixed, non-learned screening guidelines.

    Deterministic policies have no replay buffer and nothing to optimize, so
    the RL-style hooks below are no-ops with interfaces matching the learned
    agents.
    """

    def __init__(self, args):
        super(AbstractDeterministicGuideline, self).__init__()
        self.args = args

    def save_transition(self, cur_x, next_x, action, reward_vec, preference, cur_is_censored, next_is_censored):
        """No replay buffer to fill; intentionally a no-op."""
        pass

    def reset(self):
        """No per-episode state to clear; intentionally a no-op."""
        pass

    def learn(self):
        """Return a zero 'loss' tensor on the configured device."""
        loss = torch.zeros(1)
        return loss.to(self.args.device)
@RegisterModel("max_frequency_guideline")
class MaxFreqGuideline(AbstractDeterministicGuideline):
    """Always recommends the shortest possible interval: one 6-month step."""

    def __init__(self, args):
        super(MaxFreqGuideline, self).__init__(args)

    def forward(self, x, preference, batch):
        """Return a recommendation of 1 step for every entry of batch['y']."""
        labels = batch['y']
        return torch.ones_like(labels).to(labels.device)
@RegisterModel("annual_guideline")
class AnnualGuideline(AbstractDeterministicGuideline):
    """Recommends annual screening (2 six-month steps) for everyone."""

    def __init__(self, args):
        super(AnnualGuideline, self).__init__(args)

    def forward(self, x, preference, batch):
        """Return the annual interval, shaped like batch['y']."""
        labels = batch['y']
        ones = torch.ones_like(labels).to(labels.device)
        return ones * ANNUAL_GUIDELINE_IN_6MO_INTERVALS
@RegisterModel("biannual_guideline")
class BiAnnualGuideline(AbstractDeterministicGuideline):
    """Recommends biannual screening (4 six-month steps) for everyone."""

    def __init__(self, args):
        super(BiAnnualGuideline, self).__init__(args)

    def forward(self, x, preference, batch):
        """Return the biannual interval, shaped like batch['y']."""
        labels = batch['y']
        ones = torch.ones_like(labels).to(labels.device)
        return ones * BIANNUAL_GUIDELINE_IN_6MO_INTERVALS
@RegisterModel("age_based_guideline")
class AgeBasedGuideline(AbstractDeterministicGuideline):
    """Annual screening below SWITCHING_AGE, biannual at or above it."""

    def __init__(self, args):
        super(AgeBasedGuideline, self).__init__(args)

    def forward(self, x, preference, batch):
        """Return per-sample intervals based on batch['age'].

        CONSISTENCY FIX: the threshold was hard-coded as 55 even though the
        module defines SWITCHING_AGE = 55; using the constant keeps the
        threshold in sync with the rest of the module.
        """
        y = batch['y']
        # (batch, 1) indicator: 1.0 for patients at/above the switching age
        older = (batch['age'] >= SWITCHING_AGE).float().unsqueeze(1)
        annual = torch.ones_like(y).to(y.device) * ANNUAL_GUIDELINE_IN_6MO_INTERVALS
        biannual = torch.ones_like(y).to(y.device) * BIANNUAL_GUIDELINE_IN_6MO_INTERVALS
        # blend: biannual where older, annual otherwise
        recommendation = (older * biannual) + (1 - older) * annual
        return recommendation
| 31.457143 | 112 | 0.721163 |
2400f21b8210508c89f18e8c5c569cea2ab5d504 | 2,889 | py | Python | tests/test_warnings.py | jonathanslenders/cryptography | 2535e557872ac2f94355eb8a91a598f5cfc8afcf | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 1 | 2021-09-14T03:21:30.000Z | 2021-09-14T03:21:30.000Z | tests/test_warnings.py | jonathanslenders/cryptography | 2535e557872ac2f94355eb8a91a598f5cfc8afcf | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 9 | 2022-02-21T07:34:59.000Z | 2022-03-31T07:36:36.000Z | tests/test_warnings.py | jonathanslenders/cryptography | 2535e557872ac2f94355eb8a91a598f5cfc8afcf | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 2 | 2022-02-04T07:50:54.000Z | 2022-02-04T10:21:43.000Z | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import sys
import types
import typing
import warnings
import pytest
from cryptography.utils import deprecated
class TestDeprecated:
    """Tests for the deprecated() module-attribute helper."""

    @typing.no_type_check
    def test_deprecated(self, monkeypatch):
        """Accessing a deprecated attribute returns its value and warns."""
        # Build a throwaway module and register it so deprecated() can wrap
        # the module object in sys.modules; monkeypatch undoes this on exit.
        mod = types.ModuleType("TestDeprecated/test_deprecated")
        monkeypatch.setitem(sys.modules, mod.__name__, mod)
        mod.X = deprecated(
            value=1,
            module_name=mod.__name__,
            message="deprecated message text",
            warning_class=DeprecationWarning,
        )
        mod.Y = deprecated(
            value=2,
            module_name=mod.__name__,
            message="more deprecated text",
            warning_class=PendingDeprecationWarning,
        )
        # deprecated() replaces the entry in sys.modules; re-fetch it.
        mod = sys.modules[mod.__name__]
        mod.Z = 3
        with warnings.catch_warnings(record=True) as log:
            warnings.simplefilter("always", PendingDeprecationWarning)
            warnings.simplefilter("always", DeprecationWarning)
            # Reads of X and Y warn and yield the wrapped values; the plain
            # attribute Z must be untouched and warning-free.
            assert mod.X == 1
            assert mod.Y == 2
            assert mod.Z == 3
        [msg1, msg2] = log
        assert msg1.category is DeprecationWarning
        assert msg1.message.args == ("deprecated message text",)
        assert msg2.category is PendingDeprecationWarning
        assert msg2.message.args == ("more deprecated text",)
        # Deprecated attributes still show up in dir().
        assert "Y" in dir(mod)

    @typing.no_type_check
    def test_deleting_deprecated_members(self, monkeypatch):
        """Deleting a deprecated attribute warns and removes it for good."""
        mod = types.ModuleType("TestDeprecated/test_deprecated")
        monkeypatch.setitem(sys.modules, mod.__name__, mod)
        mod.X = deprecated(
            value=1,
            module_name=mod.__name__,
            message="deprecated message text",
            warning_class=DeprecationWarning,
        )
        mod.Y = deprecated(
            value=2,
            module_name=mod.__name__,
            message="more deprecated text",
            warning_class=PendingDeprecationWarning,
        )
        mod = sys.modules[mod.__name__]
        mod.Z = 3
        with warnings.catch_warnings(record=True) as log:
            warnings.simplefilter("always", PendingDeprecationWarning)
            warnings.simplefilter("always", DeprecationWarning)
            # Deleting deprecated members warns; deleting the plain Z does not.
            del mod.X
            del mod.Y
            del mod.Z
        [msg1, msg2] = log
        assert msg1.category is DeprecationWarning
        assert msg1.message.args == ("deprecated message text",)
        assert msg2.category is PendingDeprecationWarning
        assert msg2.message.args == ("more deprecated text",)
        assert "X" not in dir(mod)
        assert "Y" not in dir(mod)
        assert "Z" not in dir(mod)
        # A second delete of an already-removed attribute must raise.
        with pytest.raises(AttributeError):
            del mod.X
| 31.747253 | 79 | 0.624091 |
7086956d353f6e15ac7c4b80dfce6fea27e7464a | 8,982 | py | Python | src/authorization/web_server.py | rdsea/ZETA | da4410b443ba918870773c43d117267bf4329471 | [
"Apache-2.0"
] | 1 | 2021-11-09T05:32:54.000Z | 2021-11-09T05:32:54.000Z | src/authorization/web_server.py | rdsea/ZETA | da4410b443ba918870773c43d117267bf4329471 | [
"Apache-2.0"
] | 2 | 2021-08-31T21:20:15.000Z | 2021-09-02T16:30:31.000Z | src/authorization/web_server.py | rdsea/ZETA | da4410b443ba918870773c43d117267bf4329471 | [
"Apache-2.0"
] | null | null | null | from flask import Flask, request
from flask_restful import Resource, Api
import psycopg2
import jwt
import os
import requests
from datetime import datetime, timedelta
from enum import Enum
from helpers.custom_logger import CustomLogger
# Flask application and its flask-restful API wrapper.
app = Flask(__name__)
api = Api(app)
logger = CustomLogger().get_logger()
# Signing/verification keys and service endpoints; populated at start-up by
# init_environment_variables() / read_*_key() before any request is served.
PRIVATE_KEY = None
PUBLIC_KEY = None
TRUST_SERVICE_NAME = None
POLICY_EVAL_SERVICE_NAME = None
SERVICE_KNOWLEDGE_NAME = None
SERVICE_KNOWLEDGE_USERNAME = None
SERVICE_KNOWLEDGE_PASSWORD = None
class ServiceDatabaseHandler:
    """Holds a PostgreSQL connection used to record issued elasticity tokens."""

    def __init__(self):
        #Connect to db
        # NOTE(review): the DSN is built from the SERVICE_KNOWLEDGE_* module
        # globals — these must be populated before instantiation or the DSN
        # is "postgres://None:None@None"; verify init_environment_variables()
        # ran first.
        self.conn = psycopg2.connect(f"postgres://{SERVICE_KNOWLEDGE_USERNAME}:{SERVICE_KNOWLEDGE_PASSWORD}@{SERVICE_KNOWLEDGE_NAME}")
        self.cur = self.conn.cursor()
        self.cur.execute('SELECT version()')
        db_version = self.cur.fetchone()
        logger.info(f"Connected to {db_version}")
        # Parameterized insert reused by save_to_db (placeholders avoid SQL injection).
        self.query = """
        INSERT INTO elasticity_token_info (service_name, target_service_name, capabilities, token)
        VALUES (%s, %s, %s, %s)
        """
    def __del__(self):
        # NOTE(review): relying on __del__ for cleanup is best-effort only —
        # interpreter shutdown order is not guaranteed.
        try:
            # Clean up the cursor before destroying the object.
            self.cur.close()
        except (Exception, psycopg2.DatabaseError) as error:
            logger.error(error)

    def save_to_db(self, service_name, capabilities, target_service_name, token):
        """Insert one issued-token record and commit immediately."""
        self.cur.execute(self.query, (service_name, target_service_name, capabilities, token))
        self.conn.commit()
class TrustLevel(Enum):
    """Trust tiers reported by the trust service (lower value = more trusted)."""

    NONE = 4
    LOW = 3
    MEDIUM = 2
    HIGH = 1
# Checks and initializes env variables required for the script
def init_environment_variables():
    """Load keys and required configuration from the environment into module globals.

    Reads the signing key pair from disk, then requires TRUST_SERVICE_NAME,
    POLICY_EVAL_SERVICE_NAME and the SERVICE_KNOWLEDGE_* variables to be set.

    :raises Exception: if any required environment variable is missing.
    """
    read_private_key()
    read_public_key()
    # BUG FIX: the SERVICE_KNOWLEDGE_* names were previously assigned without a
    # `global` declaration, so the assignments created function locals and the
    # module-level globals stayed None — ServiceDatabaseHandler then built the
    # DSN "postgres://None:None@None". Declare every target as global.
    global TRUST_SERVICE_NAME
    global POLICY_EVAL_SERVICE_NAME
    global SERVICE_KNOWLEDGE_USERNAME
    global SERVICE_KNOWLEDGE_NAME
    global SERVICE_KNOWLEDGE_PASSWORD
    TRUST_SERVICE_NAME = os.environ.get('TRUST_SERVICE_NAME')
    if not TRUST_SERVICE_NAME:
        raise Exception('TRUST_SERVICE_NAME not defined')
    POLICY_EVAL_SERVICE_NAME = os.environ.get('POLICY_EVAL_SERVICE_NAME')
    if not POLICY_EVAL_SERVICE_NAME:
        raise Exception('POLICY_EVAL_SERVICE_NAME not defined')
    SERVICE_KNOWLEDGE_USERNAME = os.environ.get('SERVICE_KNOWLEDGE_USERNAME')
    if not SERVICE_KNOWLEDGE_USERNAME:
        raise Exception('SERVICE_KNOWLEDGE_USERNAME not defined')
    SERVICE_KNOWLEDGE_NAME = os.environ.get('SERVICE_KNOWLEDGE_NAME')
    if not SERVICE_KNOWLEDGE_NAME:
        raise Exception('SERVICE_KNOWLEDGE_NAME not defined')
    SERVICE_KNOWLEDGE_PASSWORD = os.environ.get('SERVICE_KNOWLEDGE_PASSWORD')
    if not SERVICE_KNOWLEDGE_PASSWORD:
        raise Exception('SERVICE_KNOWLEDGE_PASSWORD not defined')
def read_public_key():
    """Load the PEM-encoded EC public key into the PUBLIC_KEY module global.

    :raises Exception: if the key could not be read.
    """
    global PUBLIC_KEY
    with open("keys/ec_public.pem", "r") as key_file:
        contents = key_file.read()
    PUBLIC_KEY = str(contents)
    if PUBLIC_KEY is None:
        raise Exception("Encountered problem while reading public key")
def read_private_key():
    """Load the PEM-encoded private signing key into the PRIVATE_KEY module global.

    :raises Exception: if the key could not be read.
    """
    global PRIVATE_KEY
    with open("keys/private.pem", "r") as key_file:
        contents = key_file.read()
    PRIVATE_KEY = str(contents)
    if PRIVATE_KEY is None:
        raise Exception("Encountered problem while reading private key")
class GetAuthToken(Resource):
    """REST endpoint that issues authentication JWTs to named services."""

    def getResponse(self, data, message=None):
        """Build the JSON envelope; success when *message* is None, failure otherwise."""
        # Fixed: identity-style `== None` comparison replaced by `is None`.
        if message is None:
            return {
                'status': 'success',
                'data': {
                    'token': data
                },
                'message': None
            }
        return {
            'status': 'failure',
            'data': None,
            'message': message
        }

    @staticmethod
    def generateJWTTokenClaims(service_name, expiry, claims):
        """Build the claims dict for an auth token valid for *expiry* hours.

        ``cf`` carries the capabilities this token grants for the service.
        """
        # Fixed: compute "now" once so exp and nbf share the same base instant
        # (previously two utcnow() calls introduced a microsecond skew).
        now = datetime.utcnow()
        return {
            "exp": now + timedelta(hours=int(expiry)),
            "nbf": now,
            "aud": [f"tefa:{service_name}"],
            "type": "auth_token",
            "cf": claims
        }

    def generateToken(self, service_name, expiry, claims):
        """Sign the claims with the service private key (ES256)."""
        token_claims = GetAuthToken.generateJWTTokenClaims(service_name, expiry, claims)
        return jwt.encode(token_claims, PRIVATE_KEY, algorithm="ES256")

    def get(self):
        """Handle GET /authtoken?name=&expiry=&claims= and return a signed token."""
        service_name = request.args.get('name')
        expiry = request.args.get('expiry')
        claims = request.args.get('claims')
        # Fixed typo in the log message ("recieved a" -> "received an").
        logger.info(f"received an auth token request for service {service_name}")
        if service_name is None or expiry is None or claims is None:
            return self.getResponse(data=None, message="Missing information")
        token = self.generateToken(service_name, expiry, claims)
        logger.info(f"Generated an auth token for service {service_name}")
        return self.getResponse(data=token), 200
class GetElasticityToken(Resource):
    """REST endpoint that exchanges a valid auth token for a short-lived
    elasticity token, after consulting the trust and policy services."""

    def getResponse(self, data=None, message=None):
        """Build the JSON envelope; success when *message* is None, failure otherwise."""
        if message == None:
            return {
                'status': 'success',
                'data' : {
                    'token': data
                },
                'message': message
            }
        else:
            return {
                'status': 'failure',
                'data' : None,
                'message' : message
            }

    @staticmethod
    def generateJWTTokenClaims(service_name, expiry, claims, value_allowed, target_service_name):
        # Add capabilities to reflect what this JWT token can do
        # So service name and capabilities
        # Elasticity tokens expire in minutes (auth tokens use hours).
        return {
            "exp": datetime.utcnow() + timedelta(minutes=int(expiry)),
            "nbf": datetime.utcnow(),
            "aud": [f"tefa:{service_name}"],
            "type": "elasticity_token",
            "cf": claims,
            "value_allowed": value_allowed,
            "target_service": target_service_name
        }

    def generateToken(self, service_name, expiry, claims, value_allowed, target_service_name):
        """Sign the elasticity claims with the service private key (ES256)."""
        token_claims = GetElasticityToken.generateJWTTokenClaims(service_name, expiry, claims, value_allowed, target_service_name)
        token = jwt.encode(token_claims, PRIVATE_KEY, algorithm="ES256")
        return token

    def check_valid_capabilities(self, decoded, claims):
        """Return True if every requested capability is authorized by the auth token."""
        requested = claims.split(",")
        authorized = decoded["cf"].split(",")
        # Subset check: requested ⊆ authorized.
        return set(requested) <= set(authorized)

    def get(self):
        """Handle GET /elasticitytoken: verify the auth token, score trust,
        evaluate the policy, then issue and persist an elasticity token."""
        service_name = request.args.get('name')
        expiry = request.args.get('expiry')
        auth_token = request.args.get('auth_token')
        claims = request.args.get('claims')
        value_requested = request.args.get('value')
        target_service_name = request.args.get('target')
        if service_name is None or auth_token is None:
            return self.getResponse(data=None, message="Missing information")
        try:
            # The following are checked: time, audience, authenticity
            decoded = jwt.decode(auth_token, PUBLIC_KEY, audience=f"tefa:{service_name}", algorithms="ES256")
        except Exception as e:
            print(e)
            return self.getResponse(data=None, message="Error trying to verify the auth token"), 200
        if not self.check_valid_capabilities(decoded, claims):
            return self.getResponse(data=None, message="Error trying to verify the claims"), 200
        # NOTE(review): minute-of-hour modulo 42 looks like a demo/simulated
        # clock for the trust service — confirm before changing.
        time = datetime.utcnow().minute % 42
        print(time, value_requested)
        # Call trust service
        try:
            r = requests.get(f'http://{TRUST_SERVICE_NAME}:3000/ConfidenceScore?time={time}&required_resource={value_requested}')
            response = r.json()
            # Response value looks like "trust-level-<name>"; [11:] strips the prefix.
            trust_level = TrustLevel[response['data']['trust-level'][11:].upper()]
        except Exception as e:
            print(e)
            return self.getResponse(data=None, message="Error calculating trust level"), 200
        policyData = {
            "data": {
                "trust": trust_level.name,
                "service_name": target_service_name,
                "time": 23
            },
            "policy_name": "client_1_policy", #todo: connect to DB
            "rule":"trust_client_1"
        }
        # Evaluate policy
        # NOTE(review): `bool(policy_resp.text)` is True for ANY non-empty body,
        # including the string "false" — verify the policy service's contract.
        policy_resp = requests.post(f"http://{POLICY_EVAL_SERVICE_NAME}:3001/evaluatePolicy", json = policyData)
        if not bool(policy_resp.text):
            return self.getResponse(data=None, message="Policy Evaluation rejected"), 200
        token = self.generateToken(service_name, expiry, claims, value_requested, target_service_name)
        # save into db
        db.save_to_db(service_name, claims, target_service_name, token)
        return self.getResponse(data=token), 200
# Module start-up: load keys/config, connect to the knowledge database and
# register the two token endpoints.
init_environment_variables()
db = ServiceDatabaseHandler()
api.add_resource(GetAuthToken, '/authtoken')
api.add_resource(GetElasticityToken, '/elasticitytoken')
if __name__ == '__main__':
    # Development server only (debug mode); use a WSGI server in production.
    app.run(debug=True, port=4000)
55f8c049a429c0afdd8c2987e2386391cd9f31cb | 7,028 | py | Python | spikeinterface/sortingcomponents/tests/test_motion_estimation.py | andrei8782/spikeinterface | c9f5672ee142189d149dfd776c51c664693b083a | [
"MIT"
] | null | null | null | spikeinterface/sortingcomponents/tests/test_motion_estimation.py | andrei8782/spikeinterface | c9f5672ee142189d149dfd776c51c664693b083a | [
"MIT"
] | null | null | null | spikeinterface/sortingcomponents/tests/test_motion_estimation.py | andrei8782/spikeinterface | c9f5672ee142189d149dfd776c51c664693b083a | [
"MIT"
] | null | null | null | import pytest
from pathlib import Path
import numpy as np
from spikeinterface import download_dataset
from spikeinterface.extractors import MEArecRecordingExtractor
from spikeinterface.sortingcomponents import detect_peaks
from spikeinterface.sortingcomponents import (estimate_motion, make_motion_histogram,
compute_pairwise_displacement, compute_global_displacement)
# Public test dataset (10 s MEArec recording) fetched on demand.
repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
remote_path = 'mearec/mearec_test_10s.h5'
# Under the project test harness pytest carries a shared scratch folder;
# otherwise fall back to a local cache directory.
if hasattr(pytest, "global_test_folder"):
    cache_folder = pytest.global_test_folder / "sortingcomponents"
else:
    cache_folder = Path("cache_folder") / "sortingcomponents"
def setup_module():
    """Download the test recording, detect/localize peaks once, and cache them
    to disk for the test functions in this module."""
    local_path = download_dataset(
        repo=repo, remote_path=remote_path, local_folder=None)
    recording = MEArecRecordingExtractor(local_path)

    # detect and localize
    peaks = detect_peaks(recording,
                         method='locally_exclusive',
                         peak_sign='neg', detect_threshold=5, n_shifts=2,
                         chunk_size=10000, verbose=1, progress_bar=True,
                         localization_dict=dict(method='center_of_mass', local_radius_um=150,
                                                ms_before=0.1, ms_after=0.3),
                         )
    # Cached once here; every test below reloads this file instead of re-detecting.
    np.save(cache_folder / 'mearec_detected_peaks.npy', peaks)
def test_motion_functions():
    """Smoke-test the low-level motion pipeline: histogram -> pairwise
    displacement -> global displacement."""
    local_path = download_dataset(
        repo=repo, remote_path=remote_path, local_folder=None)
    recording = MEArecRecordingExtractor(local_path)

    # Peaks cached by setup_module().
    peaks = np.load(cache_folder / 'mearec_detected_peaks.npy')

    bin_um = 2
    motion_histogram, temporal_bins, spatial_bins = make_motion_histogram(
        recording, peaks, bin_um=bin_um)
    # print(motion_histogram.shape, temporal_bins.size, spatial_bins.size)

    pairwise_displacement = compute_pairwise_displacement(
        motion_histogram, bin_um, method='conv2d')

    motion = compute_global_displacement(pairwise_displacement)

    # # DEBUG
    # import matplotlib.pyplot as plt
    # fig, ax = plt.subplots()
    # extent = (temporal_bins[0], temporal_bins[-1], spatial_bins[0], spatial_bins[-1])
    # im = ax.imshow(motion_histogram.T, interpolation='nearest',
    #                     origin='lower', aspect='auto', extent=extent)

    # fig, ax = plt.subplots()
    # ax.scatter(peaks['sample_ind'] / recording.get_sampling_frequency(),peaks['y'], color='r')

    # fig, ax = plt.subplots()
    # extent = None
    # im = ax.imshow(pairwise_displacement, interpolation='nearest',
    #                     cmap='PiYG', origin='lower', aspect='auto', extent=extent)
    # im.set_clim(-40, 40)
    # ax.set_aspect('equal')
    # fig.colorbar(im)

    # fig, ax = plt.subplots()
    # ax.plot(temporal_bins[:-1], motion)

    # plt.show()
def test_estimate_motion_rigid():
    """Rigid estimation (non_rigid_kwargs=None) must return no spatial bins."""
    local_path = download_dataset(
        repo=repo, remote_path=remote_path, local_folder=None)
    recording = MEArecRecordingExtractor(local_path)
    print(recording)

    # Peaks cached by setup_module().
    peaks = np.load(cache_folder / 'mearec_detected_peaks.npy')

    motion, temporal_bins, spatial_bins, extra_check = estimate_motion(recording, peaks, peak_locations=None,
                                                                       direction='y', bin_duration_s=1., bin_um=10.,
                                                                       margin_um=5,
                                                                       method='decentralized_registration',
                                                                       method_kwargs={},
                                                                       non_rigid_kwargs=None,
                                                                       output_extra_check=True, progress_bar=True,
                                                                       verbose=True)
    # print(motion)
    # print(extra_check)
    print(spatial_bins)
    # Rigid mode: a single motion trace, so no spatial binning is reported.
    assert spatial_bins is None

    # # DEBUG
    # import matplotlib.pyplot as plt
    # fig, ax = plt.subplots()
    # ax.plot(temporal_bins[:-1], motion)

    # motion_histogram = extra_check['motion_histogram']
    # spatial_hist_bins = extra_check['spatial_hist_bins']
    # fig, ax = plt.subplots()
    # extent = (temporal_bins[0], temporal_bins[-1], spatial_hist_bins[0], spatial_hist_bins[-1])
    # im = ax.imshow(motion_histogram.T, interpolation='nearest',
    #                     origin='lower', aspect='auto', extent=extent)

    # fig, ax = plt.subplots()
    # pairwise_displacement = extra_check['pairwise_displacement_list'][0]
    # im = ax.imshow(pairwise_displacement, interpolation='nearest',
    #                     cmap='PiYG', origin='lower', aspect='auto', extent=None)
    # im.set_clim(-40, 40)
    # ax.set_aspect('equal')
    # fig.colorbar(im)

    # plt.show()
def test_estimate_motion_non_rigid():
    """Non-rigid estimation must return one motion trace per spatial bin."""
    local_path = download_dataset(
        repo=repo, remote_path=remote_path, local_folder=None)
    recording = MEArecRecordingExtractor(local_path)
    print(recording)

    # Peaks cached by setup_module().
    peaks = np.load(cache_folder / 'mearec_detected_peaks.npy')

    motion, temporal_bins, spatial_bins, extra_check = estimate_motion(recording, peaks, peak_locations=None,
                                                                       direction='y', bin_duration_s=1., bin_um=10.,
                                                                       margin_um=5,
                                                                       method='decentralized_registration',
                                                                       method_kwargs={},
                                                                       non_rigid_kwargs={
                                                                           'bin_step_um': 50},
                                                                       output_extra_check=True, progress_bar=True,
                                                                       verbose=True)
    # print(motion)
    # print(extra_check.keys())
    # print(spatial_bins)
    # Non-rigid mode: motion is (time bins, spatial bins) and the bins are reported.
    assert spatial_bins is not None
    assert len(spatial_bins) == motion.shape[1]

    # # # DEBUG
    # import matplotlib.pyplot as plt
    # probe = recording.get_probe()

    # from probeinterface.plotting import plot_probe
    # fig, ax = plt.subplots()
    # plot_probe(probe, ax=ax)

    # non_rigid_windows = extra_check['non_rigid_windows']
    # spatial_hist_bins = extra_check['spatial_hist_bins']
    # fig, ax = plt.subplots()
    # for w, win in enumerate(non_rigid_windows):
    #     ax.plot(win, spatial_hist_bins[:-1])
    #     ax.axhline(spatial_bins[w])

    # fig, ax = plt.subplots()
    # for w, win in enumerate(non_rigid_windows):
    #     ax.plot(temporal_bins[:-1], motion[:, w])

    # plt.show()
if __name__ == '__main__':
    # Manual entry point for local debugging; uncomment the steps to rerun
    # peak detection or the other scenarios.
    # setup_module()
    # test_motion_functions()
    # test_estimate_motion_rigid()
    test_estimate_motion_non_rigid()
| 40.16 | 117 | 0.577832 |
17de7f4d5ac0109e303cb6cfb13392b07f803160 | 859 | py | Python | paper_recommender/urls.py | kzkadc/paper-recommender | 7b47638ae5a3d5b45cdcb585adabc1ce6ae5f1b1 | [
"MIT"
] | null | null | null | paper_recommender/urls.py | kzkadc/paper-recommender | 7b47638ae5a3d5b45cdcb585adabc1ce6ae5f1b1 | [
"MIT"
] | 2 | 2022-03-29T12:28:36.000Z | 2022-03-30T14:01:23.000Z | paper_recommender/urls.py | kzkadc/paper-recommender | 7b47638ae5a3d5b45cdcb585adabc1ce6ae5f1b1 | [
"MIT"
] | null | null | null | """paper_recommender URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/4.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
    # Django admin site.
    path('admin/', admin.site.urls),
    # Account management routes delegated to the 'user' app.
    path("user/", include("user.urls")),
    # Paper recommendation routes delegated to the 'recommend' app.
    path("recommend/", include("recommend.urls"))
]
| 35.791667 | 77 | 0.703143 |
88f4b436e6f5a61201c75f32d35042f1ce79603d | 339 | py | Python | TD/TD3/Code/Python/4.py | sumaxime/LIFAP1 | b5da0395de9775cd8b319b56af7a6d0257d542db | [
"MIT"
] | 1 | 2019-06-19T08:40:24.000Z | 2019-06-19T08:40:24.000Z | TD/TD3/Code/Python/4.py | sumaxime/LIFAP1 | b5da0395de9775cd8b319b56af7a6d0257d542db | [
"MIT"
] | 13 | 2016-11-15T16:44:23.000Z | 2016-12-16T09:42:48.000Z | TD/TD3/Code/Python/4.py | sumaxime/LIFAP1 | b5da0395de9775cd8b319b56af7a6d0257d542db | [
"MIT"
] | 1 | 2019-12-06T11:48:09.000Z | 2019-12-06T11:48:09.000Z | #!/usr/bin/python
# Demander à USER une valeur entre 0 et 20, la retourner après
def retour():
    """Prompt the user for an integer between 0 and 20 and return it.

    Keeps re-prompting until the typed value is inside [0, 20].
    """
    print('Donne moi une valeur entre 0 et 20 : ', end='')
    val = int(input())
    while not (0 <= val <= 20):
        print('La valeur doit être entre 0 et 20. Recommence : ', end='')
        val = int(input())
    return val

retour()
| 19.941176 | 73 | 0.59292 |
5e8515383ea78804178206a5cb434137868f8858 | 557 | py | Python | legacy/regenesis/test_variable.py | crijke/regenesis | e53a0c6302aa458ff9ae95f573d5594351e5434c | [
"MIT"
] | 16 | 2015-04-09T14:40:53.000Z | 2021-07-13T15:03:35.000Z | legacy/regenesis/test_variable.py | crijke/regenesis | e53a0c6302aa458ff9ae95f573d5594351e5434c | [
"MIT"
] | 1 | 2018-06-25T07:51:18.000Z | 2018-06-25T07:51:18.000Z | legacy/regenesis/test_variable.py | crijke/regenesis | e53a0c6302aa458ff9ae95f573d5594351e5434c | [
"MIT"
] | 3 | 2015-12-20T18:24:21.000Z | 2018-06-24T16:57:25.000Z | import unittest
import variable
import mongo
class variableTest(unittest.TestCase):
    """Smoke test over the 'variables' MongoDB collection."""

    def setUp(self):
        # Shared MongoDB handle used by the test methods.
        self.db = mongo.get_db()

    def test_types(self):
        """Collect and display the distinct 'typ' values of the stored variables."""
        types = set()
        # Fixed: the loop variable was named 'variable', shadowing the imported
        # 'variable' module; also dropped the unused 'types_ausp' local.
        for record in self.db.variables.find():
            types.add(record.get('typ'))
        # print(x) with one argument behaves identically under Python 2 and 3,
        # unlike the previous Python-2-only `print types` statement.
        print(types)
if __name__ == '__main__':
    # Run this module's test cases directly from the command line.
    unittest.main()
863f01e0dae83bf2224a4d2da417a0f93f5a00a7 | 4,863 | py | Python | data/process_data.py | paulobueno90/Disaster-Response | 7786bb3cd90e37519b66d978b4a28239c1c515d4 | [
"MIT"
] | null | null | null | data/process_data.py | paulobueno90/Disaster-Response | 7786bb3cd90e37519b66d978b4a28239c1c515d4 | [
"MIT"
] | null | null | null | data/process_data.py | paulobueno90/Disaster-Response | 7786bb3cd90e37519b66d978b4a28239c1c515d4 | [
"MIT"
] | null | null | null | import os
from os.path import isfile, join, isdir
import sys
import pandas as pd
from sqlalchemy import create_engine
def load_data(messages_filepath, categories_filepath):
    """Load the messages and categories CSV files and join them on 'id'.

    :param messages_filepath: string. Path to csv with messages data
    :param categories_filepath: string. Path to csv with categories data
    :return: Pandas Dataframe with messages and categories merged on 'id'
    """
    messages_df = pd.read_csv(messages_filepath)
    categories_df = pd.read_csv(categories_filepath)
    # Inner join on the shared 'id' key (pandas merge default).
    return messages_df.merge(categories_df, on='id')
def clean_data(df):
    """Expand the raw 'categories' column into one binary column per category.

    :param df: Pandas Dataframe with a 'categories' column of the form
        "cat_a-1;cat_b-0;..."
    :return: Pandas Dataframe with 'categories' replaced by one 0/1 integer
        column per category and duplicate rows removed.
    """
    # One column per category; cell values look like "related-1".
    categories = df['categories'].str.split(';', expand=True)

    # Column names are the labels without the trailing "-<value>" suffix.
    # Fixed: the previous replace('-1','')/replace('-0','') derivation broke
    # whenever a value other than 0/1 appeared in the first row.
    category_colnames = [entry.rsplit('-', 1)[0]
                         for entry in df['categories'].values[0].split(';')]
    categories.columns = category_colnames

    for column in categories:
        # Keep only the trailing digit of each cell and convert to int.
        categories[column] = categories[column].str[-1].astype(int)
        # Clamp every value to {0, 1}. Fixed: the original only mapped 2 -> 1
        # when a column had more than two unique values, so e.g. a column
        # containing only {1, 2} kept the 2s.
        categories[column] = categories[column].map(lambda v: 1 if v > 1 else v)

    # Replace the raw column with the expanded ones (without mutating the input).
    df = df.drop('categories', axis=1)
    df = pd.concat([df, categories], axis=1)

    # drop duplicates
    df.drop_duplicates(inplace=True)

    return df
def save_data(df, database_filename):
    """Persist the cleaned dataframe into a SQLite database table 'message'.

    :param df: Pandas Dataframe. Data to save in a SQL database
    :param database_filename: String. Name/path of the SQLite database file
    :return: None
    """
    engine = create_engine(f"sqlite:///{database_filename}")
    # Replace any previous run's table so the script stays idempotent.
    df.to_sql('message', engine, index=False, if_exists='replace')
def main():
    """Locate the input CSVs and output database, then run the ETL pipeline.

    Searches ../data for *messages.csv / *categories.csv / *data.db and
    ../models for *classifier.pkl, reports what was found, then loads,
    cleans and saves the data.

    :return: None
    """
    messages_filepath = ''
    categories_filepath = ''
    database_filepath = ''
    model_filepath = ''
    data_path = ''

    # Discover sibling 'data' and 'models' directories relative to the cwd.
    basedir = [f for f in os.listdir("../") if isdir(join("../", f))]

    if 'data' in basedir:
        data_path = "../data"
        data_files = [f for f in os.listdir(data_path) if isfile(join(data_path, f))]
        for file in data_files:
            if "messages.csv" in file:
                messages_filepath = f"{data_path}/{file}"
            elif "categories.csv" in file:
                categories_filepath = f"{data_path}/{file}"
            elif "data.db" in file:
                database_filepath = f"{data_path}/{file}"

    if 'models' in basedir:
        models_path = "../models"
        models_files = [f for f in os.listdir(models_path) if isfile(join(models_path, f))]
        for file in models_files:
            if "classifier.pkl" in file:
                model_filepath = f"{models_path}/{file}"

    # Report which inputs were located.
    if model_filepath:
        print("Model Path: Ok")
        print(model_filepath)
    if messages_filepath:
        print("Messages Data Path: Ok")
        print(messages_filepath)
    if categories_filepath:
        print("Categories Data Path: Ok")
        print(categories_filepath)
    if database_filepath:
        print("Database Path: Ok")
        print(database_filepath)
    else:
        # No existing database found: create one with a default name.
        database_filepath = f"{data_path}/disaster_data.db"

    try:
        print('Loading data...\n    MESSAGES: {}\n    CATEGORIES: {}'
              .format(messages_filepath, categories_filepath))
        df = load_data(messages_filepath, categories_filepath)

        print('Cleaning data...')
        df = clean_data(df)

        print('Saving data...\n    DATABASE: {}'.format(database_filepath))
        save_data(df, database_filepath)

        print('Cleaned data saved to database!')
    except Exception as err:
        # Fixed: a bare `except:` previously swallowed every error (including
        # SystemExit/KeyboardInterrupt) without showing the cause; surface it
        # before printing the usage hint.
        print(f'Error: {err}')
        print('Please provide the filepaths of the messages and categories '\
              'datasets as the first and second argument respectively, as '\
              'well as the filepath of the database to save the cleaned data '\
              'to as the third argument. \n\nExample: python process_data.py '\
              'disaster_messages.csv disaster_categories.csv '\
              'DisasterResponse.db')
if __name__ == '__main__':
    # Script entry point: run the full load/clean/save pipeline.
    main()
| 30.39375 | 97 | 0.641785 |
7125dbb8b9e35c108be9b82d8de3de4974ce635b | 6,501 | py | Python | plugins/ldap/plugin_tests/ldap_test.py | adsorensen/girder | ae461d1198e6173f36168a71d4f7a9a5f66e6b70 | [
"Apache-2.0"
] | null | null | null | plugins/ldap/plugin_tests/ldap_test.py | adsorensen/girder | ae461d1198e6173f36168a71d4f7a9a5f66e6b70 | [
"Apache-2.0"
] | null | null | null | plugins/ldap/plugin_tests/ldap_test.py | adsorensen/girder | ae461d1198e6173f36168a71d4f7a9a5f66e6b70 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the 'License' );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import ldap
import mock
from girder.models.model_base import ValidationException
from tests import base
def setUpModule():
    # Enable the ldap plugin and start the test server once for this module.
    base.enabledPlugins.append('ldap')
    base.startServer()
def tearDownModule():
    # Stop the test server after every test in the module has run.
    base.stopServer()
class MockLdap(object):
    """Stand-in for a python-ldap connection object.

    ``bindFail`` makes bind_s raise; ``searchFail`` makes search_s return no
    entries; ``record`` overrides the default directory entry returned by
    search_s.
    """

    def __init__(self, bindFail=False, searchFail=False, record=None):
        self.bindFail = bindFail
        self.searchFail = searchFail
        self.record = record

    def bind_s(self, *args, **kwargs):
        # Simulate a connection failure when configured to do so.
        if not self.bindFail:
            return
        raise ldap.LDAPError({'desc': 'failed to connect'})

    def search_s(self, *args, **kwargs):
        # Either no match, the caller-supplied record, or a canned default user.
        if self.searchFail:
            return []
        default_record = {
            'distinguishedName': [b'foobar'],
            'uid': [b'foobar'],
            'sn': [b'Bar'],
            'givenName': [b'Foo'],
            'mail': [b'foo@bar.com'],
        }
        return [(None, self.record or default_record)]

    def set_option(self, *args, **kwargs):
        pass

    def unbind_s(self, *args, **kwargs):
        pass
class LdapTestCase(base.TestCase):
    """End-to-end tests of the girder ldap plugin against MockLdap."""

    def testLdapLogin(self):
        """Cover LDAP registration/login, failure paths, core-auth fallback,
        and name derivation from cn-only records."""
        from girder.plugins.ldap.constants import PluginSettings

        settings = self.model('setting')
        self.assertEqual(settings.get(PluginSettings.LDAP_SERVERS), [])

        # The servers setting must be a list, not a bare dict.
        with self.assertRaises(ValidationException):
            settings.set(PluginSettings.LDAP_SERVERS, {})

        settings.set(PluginSettings.LDAP_SERVERS, [{
            'baseDn': 'cn=Users,dc=foo,dc=bar,dc=org',
            'bindName': 'cn=foo,cn=Users,dc=foo,dc=bar,dc=org',
            'password': 'foo',
            'searchField': 'mail',
            'uri': 'foo.bar.org:389'
        }])

        with mock.patch('ldap.initialize', return_value=MockLdap()) as ldapInit:
            resp = self.request('/user/authentication', basicAuth='hello:world')
            self.assertEqual(len(ldapInit.mock_calls), 1)
            self.assertStatusOk(resp)

            # Register a new user
            user = resp.json['user']
            self.assertEqual(user['email'], 'foo@bar.com')
            self.assertEqual(user['firstName'], 'Foo')
            self.assertEqual(user['lastName'], 'Bar')
            self.assertEqual(user['login'], 'foobar')

            # Login as an existing user
            resp = self.request('/user/authentication', basicAuth='hello:world')
            self.assertStatusOk(resp)
            self.assertEqual(resp.json['user']['_id'], user['_id'])

        # A failed bind or an empty search yields an authentication failure.
        with mock.patch('ldap.initialize', return_value=MockLdap(bindFail=True)):
            resp = self.request('/user/authentication', basicAuth='hello:world')
            self.assertStatus(resp, 401)

        with mock.patch('ldap.initialize', return_value=MockLdap(searchFail=True)):
            resp = self.request('/user/authentication', basicAuth='hello:world')
            self.assertStatus(resp, 401)

        # Test fallback to logging in with core auth
        normalUser = self.model('user').createUser(
            login='normal', firstName='Normal', lastName='User', email='normal@user.com',
            password='normaluser')

        with mock.patch('ldap.initialize', return_value=MockLdap(searchFail=True)):
            resp = self.request('/user/authentication', basicAuth='normal:normaluser')
            self.assertStatusOk(resp)
            self.assertEqual(str(normalUser['_id']), resp.json['user']['_id'])

        # Test registering from a record that only has a cn, no sn/givenName
        record = {
            'cn': [b'Fizz Buzz'],
            'mail': [b'fizz@buzz.com'],
            'distinguishedName': [b'shouldbeignored']
        }
        with mock.patch('ldap.initialize', return_value=MockLdap(record=record)):
            resp = self.request('/user/authentication', basicAuth='fizzbuzz:foo')
            self.assertStatusOk(resp)
            self.assertEqual(resp.json['user']['login'], 'fizz')
            self.assertEqual(resp.json['user']['firstName'], 'Fizz')
            self.assertEqual(resp.json['user']['lastName'], 'Buzz')

        # Test falling back to other name generation behavior (first+last name)
        record = {
            'cn': [b'Fizz Buzz'],
            'mail': [b'fizz@buzz2.com'],
            'distinguishedName': [b'shouldbeignored']
        }
        with mock.patch('ldap.initialize', return_value=MockLdap(record=record)):
            resp = self.request('/user/authentication', basicAuth='fizzbuzz:foo')
            self.assertStatusOk(resp)
            self.assertEqual(resp.json['user']['login'], 'fizzbuzz')
            self.assertEqual(resp.json['user']['firstName'], 'Fizz')
            self.assertEqual(resp.json['user']['lastName'], 'Buzz')

    def testLdapStatusCheck(self):
        """The admin status endpoint reports connection success and errors."""
        admin = self.model('user').createUser(
            login='admin', email='a@a.com', firstName='admin', lastName='admin',
            password='passwd', admin=True)

        params = {
            'bindName': 'cn=foo,cn=Users,dc=foo,dc=bar,dc=org',
            'password': 'foo',
            'uri': 'ldap://foo.bar.org:389'
        }

        with mock.patch('ldap.initialize', return_value=MockLdap(bindFail=True)):
            resp = self.request('/system/ldap_server/status', user=admin, params=params)
            self.assertStatusOk(resp)
            self.assertFalse(resp.json['connected'])
            self.assertEqual(resp.json['error'], 'LDAP connection error: failed to connect')

        with mock.patch('ldap.initialize', return_value=MockLdap(bindFail=False)):
            resp = self.request('/system/ldap_server/status', user=admin, params=params)
            self.assertStatusOk(resp)
            self.assertTrue(resp.json['connected'])
            self.assertNotIn('error', resp.json)
| 38.928144 | 92 | 0.593909 |
5e2a615c084cbfd08c822be5d3459873fdff3695 | 23,593 | py | Python | src/radon/cli.py | radon-provenance/radon-lib | 30e0486ed5d0b63c46b139b6e35f687fe3d91402 | [
"Apache-2.0"
] | null | null | null | src/radon/cli.py | radon-provenance/radon-lib | 30e0486ed5d0b63c46b139b6e35f687fe3d91402 | [
"Apache-2.0"
] | 2 | 2020-06-08T10:11:10.000Z | 2020-06-11T15:54:12.000Z | src/radon/cli.py | radon-provenance/radon-lib | 30e0486ed5d0b63c46b139b6e35f687fe3d91402 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__doc_opt__ = """
Radon Admin Command Line Interface.
Usage:
radmin init
radmin drop
radmin ls [<path>] [-a] [--v=<VERSION>]
radmin cd [<path>]
radmin mkdir <path>
radmin get <src> [<dest>] [--force]
radmin put <src> [<dest>] [--mimetype=<MIME>]
radmin put --ref <url> <dest> [--mimetype=<MIME>]
radmin pwd
radmin rm <path>
radmin lu [<name>]
radmin lg [<name>]
radmin mkuser [<name>]
radmin mkldapuser [<name>]
radmin moduser <name> (email | administrator | active | password | ldap) [<value>]
radmin rmuser [<name>]
radmin mkgroup [<name>]
radmin atg <name> <userlist> ...
radmin rfg <name> <userlist> ...
radmin rmgroup [<name>]
Options:
-h --help Show this screen.
--version Show version.
"""
import errno
from getpass import getpass
import logging
import blessings
from operator import methodcaller
import docopt
import os
import pickle
import radon
from radon.database import (
create_default_users,
create_root,
create_tables,
destroy,
initialise
)
from radon.model import (
Collection,
Group,
Resource,
User
)
from radon.model.errors import (
NoSuchCollectionError
)
from radon.util import (
guess_mimetype,
random_password,
split
)
# Pickled session file storing CLI state (e.g. the current working collection).
SESSION_PATH = os.path.join(os.path.expanduser("~/.radon"), "session.pickle")

# docopt argument dictionary keys.
ARG_NAME = "<name>"
ARG_PATH = "<path>"
ARG_USERLIST = "<userlist>"

# User-facing message templates ({} placeholders filled with str.format).
MSG_GROUP_EXIST = "Group {} already exists"
MSG_GROUP_NOT_EXIST = "Group {} doesn't exist"
MSG_GROUP_CREATED = "Group {} has been created"
MSG_GROUP_DELETED = "Group {} has been deleted"
MSG_ADD_USER = "Added {} to the group {}"
MSG_USER_IN_GROUP = "{} {} already in the group {}"
MSG_USER_NOT_EXIST = "User {} doesn't exist"
MSG_USERS_NOT_EXIST = "Users {} don't exist"
MSG_USER_CREATED = "User {} has been created"
MSG_USER_MODIFIED = "User {} has been modified"
MSG_USER_DELETED = "User {} has been deleted"
MSG_PROMPT_USER = "Please enter the username: "
MSG_PROMPT_GROUP = "Please enter the group name: "
MSG_COLL_EXIST = "Collection {} already exists"
MSG_COLL_NOT_EXIST = "Collection {} doesn't exist"
MSG_COLL_WRONG_NAME = "cdmi_ prefix is not a valid prefix for the name of a container"
MSG_RESC_EXIST = "Resource {} already exists"
MSG_RESC_NOT_EXIST = "Resource {} doesn't exists"
MSG_NO_OBJECT = "No object found at path {}"
class RadonApplication():
    """Implementation of the ``radmin`` CLI sub-commands.

    Each public method implements one sub-command and receives the parsed
    docopt ``args`` dictionary. Output goes to the terminal (colored via
    blessings); state (the current working collection) is persisted in a
    pickled session file.
    """

    def __init__(self, session_path):
        self.terminal = blessings.Terminal()
        # File where the pickled session (current working collection) lives.
        self.session_path = session_path
        # Set up the radon database layer.
        initialise()

    def add_to_group(self, args):
        """Add user(s) to a group."""
        groupname = args[ARG_NAME]
        ls_users = args[ARG_USERLIST]

        group = Group.find(groupname)
        if not group:
            self.print_error(MSG_GROUP_NOT_EXIST.format(groupname))
            return
        added, not_added, already_there = group.add_users(ls_users)

        if added:
            self.print_success(
                MSG_ADD_USER.format(", ".join(added), group.name)
            )
        if already_there:
            # Agree the verb with the number of users already in the group.
            if len(already_there) == 1:
                verb = "is"
            else:
                verb = "are"
            self.print_error(
                MSG_USER_IN_GROUP.format(
                    ", ".join(already_there), verb, group.name
                )
            )
        if not_added:
            if len(not_added) == 1:
                msg = MSG_USER_NOT_EXIST
            else:
                msg = MSG_USERS_NOT_EXIST
            self.print_error(msg.format(", ".join(not_added)))

    def init(self):
        """Create the tables, the root collection and the default users."""
        create_tables()
        create_root()
        create_default_users()
        # Start over with a fresh session (cwd back to "/").
        session = self.create_session()
        self.save_session(session)

    def drop(self):
        """Remove the keyspace, after interactive confirmation."""
        print("*********************************")
        print("** WARNING **")
        print("*********************************")
        print("This will remove every data stored in the database.")
        confirm = input("Are you sure you want to continue ? [y/N] ")
        if confirm.lower() in ["true", "y", "yes"]:
            destroy()
            session = self.create_session()
            self.save_session(session)

    def change_dir(self, args):
        """Move into a different container."""
        session = self.get_session()
        cwd = session.get('cwd', '/')

        if args[ARG_PATH]:
            path = args[ARG_PATH]
        else:
            path = "/"
        if not path.startswith("/"):
            # relative path
            path = "{}{}".format(cwd, path)
        # Collection paths always end with a '/'.
        if not path.endswith("/"):
            path = path + '/'

        col = Collection.find(path)
        if not col:
            self.print_error(MSG_COLL_NOT_EXIST.format(path))
            return
        session['cwd'] = path
        # Save the client for persistent use
        self.save_session(session)
        return 0

    def create_session(self):
        """Return a new session dictionary (cwd at the root)."""
        # The session is a dictionary that stores the current status
        return {"cwd": "/"}

    def get(self, args):
        """Fetch a data object from the archive to a local file."""
        src = args["<src>"]

        # Determine local filename
        if args["<dest>"]:
            localpath = args["<dest>"]
        else:
            localpath = src.rsplit("/")[-1]
        # Get the full source path of the resource
        src = self.get_full_path(src)

        # Check for overwrite of existing file, directory, link
        if os.path.isfile(localpath):
            if not args["--force"]:
                # (Dropped a stray "" literal that was concatenated here.)
                self.print_error(
                    "File '{0}' exists, --force option not used".format(localpath)
                )
                return errno.EEXIST
        elif os.path.isdir(localpath):
            self.print_error("'{0}' is a directory".format(localpath))
            return errno.EISDIR
        elif os.path.exists(localpath):
            # BUG FIX: the original message lacked a space after the name.
            self.print_error("'{0}' exists but not a file".format(localpath))
            return errno.EEXIST

        resc = Resource.find(src)
        if not resc:
            self.print_error(MSG_RESC_NOT_EXIST.format(src))
            return

        dest_folder = os.path.dirname(localpath)
        if dest_folder and not os.path.isdir(dest_folder):
            os.makedirs(dest_folder)
        # BUG FIX: use a context manager so the handle is closed even if a
        # chunk read raises (the original leaked the file object on error).
        with open(localpath, "wb") as lfh:
            for chunk in resc.chunk_content():
                lfh.write(chunk)
        return 0

    def get_full_path(self, path):
        """Return the full (absolute) path in Radon."""
        session = self.get_session()
        cwd = session.get('cwd', '/')
        if not path.startswith("/"):
            # relative path
            path = "{}{}".format(cwd, path)
        return path

    def get_session(self):
        """Return the persistent session stored in the session_path file."""
        try:
            # Load existing session, so as to keep current dir etc.
            with open(self.session_path, "rb") as fhandle:
                session = pickle.load(fhandle)
        except (IOError, pickle.PickleError):
            # Create a new session
            session = self.create_session()
        return session

    def ls(self, args):
        """List a container."""
        session = self.get_session()
        cwd = session.get('cwd', '/')
        if args[ARG_PATH]:
            path = args[ARG_PATH]
            if not path.startswith("/"):
                # relative path
                path = "{}{}".format(cwd, path)
        else:
            # Get the current working dir from the session file
            path = cwd

        # --v option specify the version we want to display
        if args["--v"]:
            version = int(args["--v"])
            col = Collection.find(path, version)
        else:
            col = Collection.find(path)
        if not col:
            self.print_error(MSG_COLL_NOT_EXIST.format(path))
            return

        # Display name of the collection
        if path == "/":
            print("Root:")
        else:
            print("{}:".format(col.path))
        # Display Acl
        if args["-a"]:
            acl = col.get_acl_dict()
            if acl:
                for gid in acl:
                    print(" ACL - {}: {}".format(gid, acl[gid]))
            else:
                print(" ACL: No ACE defined")
        # Display children: sub-collections first (in blue), then objects.
        c_colls, c_objs = col.get_child()
        for child in sorted(c_colls, key=methodcaller("lower")):
            print(self.terminal.blue(child))
        for child in sorted(c_objs, key=methodcaller("lower")):
            print(child)

    def list_groups(self, args):
        """List all groups or a specific group if the name is specified."""
        if args[ARG_NAME]:
            name = args[ARG_NAME]
            group = Group.find(name)
            if group:
                group_info = group.to_dict()
                members = ", ".join(group_info.get("members", []))
                print(
                    "{0.bold}Group name{0.normal}: {1}".format(
                        self.terminal, group_info.get("name", name)
                    )
                )
                print(
                    "{0.bold}Group id{0.normal}: {1}".format(
                        self.terminal, group_info.get("uuid", "")
                    )
                )
                print("{0.bold}Members{0.normal}: {1}".format(self.terminal, members))
            else:
                self.print_error(MSG_GROUP_NOT_EXIST.format(name))
        else:
            for group in Group.objects.all():
                print(group.name)

    def list_users(self, args):
        """List all users or a specific user if the name is specified."""
        if not args[ARG_NAME]:
            for user in User.objects.all():
                print(user.name)
            return
        name = args[ARG_NAME]
        user = User.find(name)
        if not user:
            self.print_error(MSG_USER_NOT_EXIST.format(name))
            return
        user_info = user.to_dict()
        groups = ", ".join([el["name"] for el in user_info.get("groups", [])])
        # Fields specific to local vs LDAP-backed users.
        if not user_info.get("ldap"):
            print(
                "{0.bold}User name{0.normal}: {1}".format(
                    self.terminal, user_info.get("username", name)
                )
            )
            print(
                "{0.bold}Email{0.normal}: {1}".format(
                    self.terminal, user_info.get("email", "")
                )
            )
            print(
                "{0.bold}User id{0.normal}: {1}".format(
                    self.terminal, user_info.get("uuid", "")
                )
            )
        else:
            print(
                "{0.bold}User name (ldap){0.normal}: {1}".format(
                    self.terminal, user_info.get("username", name)
                )
            )
        # Fields common to both kinds of user (deduplicated from the two
        # branches of the original implementation; output is identical).
        print(
            "{0.bold}Administrator{0.normal}: {1}".format(
                self.terminal, user_info.get("administrator", False)
            )
        )
        print(
            "{0.bold}Active{0.normal}: {1}".format(
                self.terminal, user_info.get("active", False)
            )
        )
        print("{0.bold}Groups{0.normal}: {1}".format(self.terminal, groups))

    def mkdir(self, args):
        """Create a new container."""
        session = self.get_session()
        cwd = session.get('cwd', '/')
        path = args[ARG_PATH]
        # Collections names should end with a '/'
        if not path.endswith("/"):
            path += '/'
        if not path.startswith("/"):
            # relative path
            path = "{}{}".format(cwd, path)
        col = Collection.find(path)
        if col:
            self.print_error(MSG_COLL_EXIST.format(path))
            return
        parent, name = split(path)
        if name.startswith("cdmi_"):
            # 'cdmi_' is a reserved prefix for container names.
            self.print_error(MSG_COLL_WRONG_NAME.format(name))
            return
        p_coll = Collection.find(parent)
        if not p_coll:
            # BUG FIX: report the *parent* that is missing, not the full
            # path the user asked to create.
            self.print_error(MSG_COLL_NOT_EXIST.format(parent))
            return
        Collection.create(name=name, container=parent)

    def mk_group(self, args):
        """Create a new group. Ask in the terminal for mandatory fields."""
        if not args[ARG_NAME]:
            # Same text as the inline literal previously used here.
            name = input(MSG_PROMPT_GROUP)
        else:
            name = args[ARG_NAME]
        group = Group.find(name)
        if group:
            self.print_error(MSG_GROUP_EXIST.format(name))
            return
        group = Group.create(name=name)
        print(MSG_GROUP_CREATED.format(group.name))

    def mk_ldap_user(self, args):
        """Create a new ldap user. Ask in the terminal for mandatory fields."""
        if not args[ARG_NAME]:
            name = input("Please enter the user's username: ")
        else:
            name = args[ARG_NAME]
        if User.find(name):
            self.print_error("Username {} already exists".format(name))
            return
        admin = input("Is this an administrator? [y/N] ")
        # The password is never used (credentials live in LDAP) but the
        # model requires one, so generate a random throwaway value.
        pwd = random_password(20)
        User.create(
            name=name,
            password=pwd,
            email="STORED_IN_LDAP",
            ldap=True,
            administrator=(admin.lower() in ["true", "y", "yes"]),
        )
        print(MSG_USER_CREATED.format(name))

    def mk_user(self, args):
        """Create a new user. Ask in the terminal for mandatory fields."""
        if not args[ARG_NAME]:
            name = input("Please enter the user's username: ")
        else:
            name = args[ARG_NAME]
        if User.find(name):
            self.print_error("Username {} already exists".format(name))
            return
        admin = input("Is this an administrator? [y/N] ")
        email = ""
        while not email:
            email = input("Please enter the user's email address: ")
        pwd = ""
        while not pwd:
            # getpass hides the input while typing.
            pwd = getpass("Please enter the user's password: ")
        User.create(
            name=name,
            password=pwd,
            email=email,
            ldap=False,
            administrator=(admin.lower() in ["true", "y", "yes"]),
        )
        print(MSG_USER_CREATED.format(name))

    def mod_user(self, args):
        """Modify a user. Ask in the terminal if the value isn't provided."""
        name = args[ARG_NAME]
        user = User.find(name)
        if not user:
            # Same text as the inline literal previously used here.
            self.print_error(MSG_USER_NOT_EXIST.format(name))
            return
        value = args["<value>"]
        if not value:
            if args["password"]:
                while not value:
                    value = getpass("Please enter the new password: ")
            else:
                while not value:
                    value = input("Please enter the new value: ")
        if args["email"]:
            user.update(email=value)
        elif args["administrator"]:
            user.update(administrator=value.lower() in ["true", "y", "yes"])
        elif args["active"]:
            user.update(active=value.lower() in ["true", "y", "yes"])
        elif args["ldap"]:
            user.update(ldap=value.lower() in ["true", "y", "yes"])
        elif args["password"]:
            user.update(password=value)
        print(MSG_USER_MODIFIED.format(name))

    def print_error(self, msg):
        """Display an error message."""
        print("{0.bold_red}Error{0.normal} - {1}".format(self.terminal, msg))

    def print_success(self, msg):
        """Display a success message."""
        print("{0.bold_green}Success{0.normal} - {1}".format(self.terminal, msg))

    def put(self, args):
        """Put a file (or a reference URL) to a path in the archive."""
        is_reference = args["--ref"]
        if is_reference:
            url = args["<url>"]
            dest_path = args["<dest>"]
            # Get the full destination path of the new resource
            dest_path = self.get_full_path(dest_path)
        else:
            src = args["<src>"]
            # Absolutize local path
            local_path = os.path.abspath(src)
            # Check that local file exists
            if not os.path.exists(local_path):
                self.print_error("File '{}' doesn't exist".format(local_path))
                return errno.ENOENT
            if args["<dest>"]:
                dest_path = args["<dest>"]
                # We try to put the new file in a subcollection
                if dest_path.endswith('/'):
                    dest_path = "{}{}".format(dest_path,
                                              os.path.basename(local_path))
            else:
                # PUT to same name in pwd on server
                dest_path = os.path.basename(local_path)
            # Get the full destination path of the new resource
            dest_path = self.get_full_path(dest_path)
        # Check resource objects on the database
        resc = Resource.find(dest_path)
        if resc:
            self.print_error(MSG_RESC_EXIST.format(dest_path))
            return
        parent, name = split(dest_path)
        try:
            if is_reference:
                resc = Resource.create(parent, name, url=url)
            else:
                resc = Resource.create(parent, name)
                with open(local_path, "rb") as fh:
                    resc.put(fh)
            print(resc)
        except NoSuchCollectionError:
            self.print_error(MSG_COLL_NOT_EXIST.format(os.path.dirname(dest_path)))

    def pwd(self, args):
        """Print working directory."""
        session = self.get_session()
        print(session.get('cwd', '/'))

    def rm(self, args):
        """Remove a data object or a collection."""
        path = args["<path>"]
        # Get the full path of the object to delete
        path = self.get_full_path(path)
        resc = Resource.find(path)
        if resc:
            resc.delete()
            return
        coll = Collection.find(path)
        if coll:
            coll.delete()
            return
        self.print_error(MSG_NO_OBJECT.format(path))

    def rm_from_group(self, args):
        """Remove user(s) from a group."""
        groupname = args[ARG_NAME]
        group = Group.find(groupname)
        if not group:
            self.print_error(MSG_GROUP_NOT_EXIST.format(groupname))
            return
        ls_users = args[ARG_USERLIST]
        removed, not_there, not_exist = group.rm_users(ls_users)
        if removed:
            self.print_success(
                "Removed {} from the group {}".format(", ".join(removed), group.name)
            )
        if not_there:
            if len(not_there) == 1:
                verb = "isn't"
            else:
                verb = "aren't"
            self.print_error(
                "{} {} in the group {}".format(", ".join(not_there), verb, group.name)
            )
        if not_exist:
            if len(not_exist) == 1:
                msg = "{} doesn't exist"
            else:
                msg = "{} don't exist"
            self.print_error(msg.format(", ".join(not_exist)))

    def rm_group(self, args):
        """Remove a group."""
        if not args[ARG_NAME]:
            name = input(MSG_PROMPT_GROUP)
        else:
            name = args[ARG_NAME]
        group = Group.find(name)
        if not group:
            self.print_error(MSG_GROUP_NOT_EXIST.format(name))
            return
        group.delete()
        print(MSG_GROUP_DELETED.format(name))

    def rm_user(self, args):
        """Remove a user."""
        if not args[ARG_NAME]:
            name = input(MSG_PROMPT_USER)
        else:
            name = args[ARG_NAME]
        user = User.find(name)
        if not user:
            self.print_error(MSG_USER_NOT_EXIST.format(name))
            return
        user.delete()
        print(MSG_USER_DELETED.format(name))

    def save_session(self, session):
        """Save the status of the session for subsequent use."""
        if not os.path.exists(os.path.dirname(self.session_path)):
            os.makedirs(os.path.dirname(self.session_path))
        # Save existing session, so as to keep current dir etc.
        with open(self.session_path, "wb") as fh:
            pickle.dump(session, fh, pickle.HIGHEST_PROTOCOL)
def main():
    """Entry point of the ``radmin`` command-line interface."""
    # Quieten the (chatty) DSE/Cassandra driver loggers.
    logging.basicConfig(level=logging.WARNING)
    logging.getLogger("dse.policies").setLevel(logging.WARNING)
    logging.getLogger("dse.cluster").setLevel(logging.WARNING)
    logging.getLogger("dse.cqlengine.management").setLevel(logging.WARNING)

    # (Removed an unused function-local ``import sys``.)
    # NOTE(review): assumes ``__doc_opt__`` (the docopt usage string) is
    # defined earlier in this module -- confirm.
    arguments = docopt.docopt(
        __doc_opt__, version="Radon Admin CLI {}".format(radon.__version__)
    )

    app = RadonApplication(SESSION_PATH)

    # Dispatch on the sub-command chosen by docopt. ``init`` returns
    # unconditionally, so the following checks are mutually exclusive.
    if arguments["init"]:
        return app.init()
    if arguments["drop"]:
        return app.drop()
    elif arguments["ls"]:
        return app.ls(arguments)
    elif arguments["mkdir"]:
        return app.mkdir(arguments)
    elif arguments["pwd"]:
        return app.pwd(arguments)
    elif arguments["cd"]:
        return app.change_dir(arguments)
    elif arguments["put"]:
        return app.put(arguments)
    elif arguments["get"]:
        return app.get(arguments)
    elif arguments["rm"]:
        return app.rm(arguments)
    elif arguments["lg"]:
        return app.list_groups(arguments)
    elif arguments["lu"]:
        return app.list_users(arguments)
    elif arguments["atg"]:
        return app.add_to_group(arguments)
    elif arguments["mkgroup"]:
        return app.mk_group(arguments)
    elif arguments["mkldapuser"]:
        return app.mk_ldap_user(arguments)
    elif arguments["mkuser"]:
        return app.mk_user(arguments)
    elif arguments["moduser"]:
        return app.mod_user(arguments)
    elif arguments["rfg"]:
        return app.rm_from_group(arguments)
    elif arguments["rmgroup"]:
        return app.rm_group(arguments)
    elif arguments["rmuser"]:
        return app.rm_user(arguments)
# Allow running the admin CLI directly as a script.
if __name__ == "__main__":
    main()
| 32.90516 | 88 | 0.535455 |
cd875ba0723bc934b78a7d8d29dd16f1cfdef470 | 6,224 | py | Python | src/auditlog_tests/models.py | mathspace/django-auditlog | 7bfe1897c53aff77c13c1e3858d3712c294f3027 | [
"MIT"
] | null | null | null | src/auditlog_tests/models.py | mathspace/django-auditlog | 7bfe1897c53aff77c13c1e3858d3712c294f3027 | [
"MIT"
] | null | null | null | src/auditlog_tests/models.py | mathspace/django-auditlog | 7bfe1897c53aff77c13c1e3858d3712c294f3027 | [
"MIT"
] | null | null | null | import uuid
from django.contrib.postgres.fields import ArrayField
from django.db import models
from auditlog.models import AuditlogHistoryField
from auditlog.registry import auditlog
from multiselectfield import MultiSelectField
@auditlog.register()
class SimpleModel(models.Model):
    """
    A simple model with no special things going on.
    """

    text = models.TextField(blank=True)
    boolean = models.BooleanField(default=False)
    integer = models.IntegerField(blank=True, null=True)
    # auto_now: updated on every save, so each save produces a field change.
    datetime = models.DateTimeField(auto_now=True)

    # Reverse accessor to the audit log entries recorded for this instance.
    history = AuditlogHistoryField()
class AltPrimaryKeyModel(models.Model):
    """
    A model with a non-standard (CharField) primary key.
    """

    key = models.CharField(max_length=100, primary_key=True)

    text = models.TextField(blank=True)
    boolean = models.BooleanField(default=False)
    integer = models.IntegerField(blank=True, null=True)
    datetime = models.DateTimeField(auto_now=True)

    # The pk is not an integer, so log entries cannot index it numerically.
    history = AuditlogHistoryField(pk_indexable=False)
class UUIDPrimaryKeyModel(models.Model):
    """
    A model with a UUID primary key.
    """

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)

    text = models.TextField(blank=True)
    boolean = models.BooleanField(default=False)
    integer = models.IntegerField(blank=True, null=True)
    datetime = models.DateTimeField(auto_now=True)

    # UUID pk is not an integer, so log entries cannot index it numerically.
    history = AuditlogHistoryField(pk_indexable=False)
class ProxyModel(SimpleModel):
    """
    A model that is a proxy for another model (shares SimpleModel's table).
    """

    class Meta:
        proxy = True
class RelatedModel(models.Model):
    """
    A model with a (self-referencing) foreign key.
    """

    related = models.ForeignKey(to='self', on_delete=models.CASCADE)

    history = AuditlogHistoryField()
class ManyRelatedModel(models.Model):
    """
    A model with a (self-referencing) many to many relation.
    """

    related = models.ManyToManyField('self')

    history = AuditlogHistoryField()
@auditlog.register(include_fields=['label'])
class SimpleIncludeModel(models.Model):
    """
    A simple model used for register's include_fields kwarg
    """

    # Only 'label' changes are logged; 'text' is outside include_fields.
    label = models.CharField(max_length=100)
    text = models.TextField(blank=True)

    history = AuditlogHistoryField()
class SimpleExcludeModel(models.Model):
    """
    A simple model used for register's exclude_fields kwarg
    """

    label = models.CharField(max_length=100)
    # Excluded from logging via auditlog.register(..., exclude_fields=['text'])
    # at the bottom of this module.
    text = models.TextField(blank=True)

    history = AuditlogHistoryField()
class SimpleMappingModel(models.Model):
    """
    A simple model used for register's mapping_fields kwarg
    """

    # Mapped to the display name 'Product No.' at registration time.
    sku = models.CharField(max_length=100)
    vtxt = models.CharField(verbose_name='Version', max_length=100)
    not_mapped = models.CharField(max_length=100)

    history = AuditlogHistoryField()
class AdditionalDataIncludedModel(models.Model):
    """
    A model where get_additional_data is defined which allows for logging extra
    information about the model in JSON
    """

    label = models.CharField(max_length=100)
    text = models.TextField(blank=True)
    related = models.ForeignKey(to=SimpleModel, on_delete=models.CASCADE)

    history = AuditlogHistoryField()

    def get_additional_data(self):
        """
        Returns JSON that captures a snapshot of additional details of the
        model instance. This method, if defined, is accessed by auditlog
        manager and added to each logentry instance on creation.
        """
        object_details = {
            'related_model_id': self.related.id,
            'related_model_text': self.related.text
        }
        return object_details
class DateTimeFieldModel(models.Model):
    """
    A model with a DateTimeField, used to test DateTimeField
    changes are detected properly.
    """

    label = models.CharField(max_length=100)
    timestamp = models.DateTimeField()
    date = models.DateField()
    time = models.TimeField()
    # Nullable field for testing naive (timezone-unaware) datetimes.
    naive_dt = models.DateTimeField(null=True, blank=True)

    history = AuditlogHistoryField()
class ChoicesFieldModel(models.Model):
    """
    A model with a CharField restricted to a set of choices.

    This model is used to test the changes_display_dict method.
    """

    RED = 'r'
    YELLOW = 'y'
    GREEN = 'g'

    # (stored value, human-readable label) pairs.
    STATUS_CHOICES = (
        (RED, 'Red'),
        (YELLOW, 'Yellow'),
        (GREEN, 'Green'),
    )

    status = models.CharField(max_length=1, choices=STATUS_CHOICES)
    # Third-party MultiSelectField: stores several choices in one column.
    multiselect = MultiSelectField(max_length=3, choices=STATUS_CHOICES, max_choices=3)
    multiplechoice = models.CharField(max_length=3, choices=STATUS_CHOICES)

    history = AuditlogHistoryField()
class CharfieldTextfieldModel(models.Model):
    """
    A model with a max length CharField and a Textfield.

    This model is used to test the changes_display_dict
    method's ability to truncate long text.
    """

    longchar = models.CharField(max_length=255)
    longtextfield = models.TextField()

    history = AuditlogHistoryField()
class PostgresArrayFieldModel(models.Model):
    """
    Test auditlog with Postgres's ArrayField
    """

    RED = 'r'
    YELLOW = 'y'
    GREEN = 'g'

    STATUS_CHOICES = (
        (RED, 'Red'),
        (YELLOW, 'Yellow'),
        (GREEN, 'Green'),
    )

    # Postgres-only: an array of up to 3 single-character choice values.
    arrayfield = ArrayField(models.CharField(max_length=1, choices=STATUS_CHOICES), size=3)

    history = AuditlogHistoryField()
class PostgresDecimalFieldModel(models.Model):
    """
    Test auditlog with DecimalField on Postgres
    """

    decimal = models.DecimalField(decimal_places=3, max_digits=10, null=True, blank=True)

    history = AuditlogHistoryField()
# Register the remaining models with auditlog (the ones above that are not
# registered via the @auditlog.register decorator form).
auditlog.register(AltPrimaryKeyModel)
auditlog.register(UUIDPrimaryKeyModel)
auditlog.register(ProxyModel)
auditlog.register(RelatedModel)
auditlog.register(ManyRelatedModel)
# Also track the auto-generated m2m through table.
auditlog.register(ManyRelatedModel.related.through)
auditlog.register(SimpleExcludeModel, exclude_fields=['text'])
auditlog.register(SimpleMappingModel, mapping_fields={'sku': 'Product No.'})
auditlog.register(AdditionalDataIncludedModel)
auditlog.register(DateTimeFieldModel)
auditlog.register(ChoicesFieldModel)
auditlog.register(CharfieldTextfieldModel)
auditlog.register(PostgresArrayFieldModel)
auditlog.register(PostgresDecimalFieldModel)
| 26.485106 | 91 | 0.711279 |
da242ab51b146fa05a0b65849ce69197407aef49 | 16,694 | py | Python | oscar/lib/python2.7/site-packages/flake8/processor.py | sainjusajan/django-oscar | 466e8edc807be689b0a28c9e525c8323cc48b8e1 | [
"BSD-3-Clause"
] | null | null | null | oscar/lib/python2.7/site-packages/flake8/processor.py | sainjusajan/django-oscar | 466e8edc807be689b0a28c9e525c8323cc48b8e1 | [
"BSD-3-Clause"
] | null | null | null | oscar/lib/python2.7/site-packages/flake8/processor.py | sainjusajan/django-oscar | 466e8edc807be689b0a28c9e525c8323cc48b8e1 | [
"BSD-3-Clause"
] | null | null | null | """Module containing our file processor that tokenizes a file for checks."""
import contextlib
import io
import logging
import sys
import tokenize
import flake8
from flake8 import defaults
from flake8 import exceptions
from flake8 import utils
LOG = logging.getLogger(__name__)
#: Flag for compile() that requests an AST instead of a code object
#: (same value as ast.PyCF_ONLY_AST).
PyCF_ONLY_AST = 1024
#: Token types that end a line.
NEWLINE = frozenset([tokenize.NL, tokenize.NEWLINE])
# Work around Python < 2.6 behaviour, which does not generate NL after
# a comment which is on a line by itself.
COMMENT_WITH_NL = tokenize.generate_tokens(['#\n'].pop).send(None)[1] == '#\n'
#: Token types that contribute nothing to a logical line.
SKIP_TOKENS = frozenset([tokenize.NL, tokenize.NEWLINE, tokenize.INDENT,
                         tokenize.DEDENT])
class FileProcessor(object):
    """Processes a file and holds state.

    This processes a file by generating tokens, logical and physical lines,
    and AST trees. This also provides a way of passing state about the file
    to checks expecting that state. Any public attribute on this object can
    be requested by a plugin. The known public attributes are:

    - :attr:`blank_before`
    - :attr:`blank_lines`
    - :attr:`checker_state`
    - :attr:`indent_char`
    - :attr:`indent_level`
    - :attr:`line_number`
    - :attr:`logical_line`
    - :attr:`max_line_length`
    - :attr:`multiline`
    - :attr:`noqa`
    - :attr:`previous_indent_level`
    - :attr:`previous_logical`
    - :attr:`previous_unindented_logical_line`
    - :attr:`tokens`
    - :attr:`file_tokens`
    - :attr:`total_lines`
    - :attr:`verbose`
    """

    def __init__(self, filename, options, lines=None):
        """Initialize our file processor.

        :param str filename:
            Name of the file to process
        """
        self.options = options
        self.filename = filename
        self.lines = lines
        if lines is None:
            self.lines = self.read_lines()
        self.strip_utf_bom()

        # Defaults for public attributes
        #: Number of preceding blank lines
        self.blank_before = 0
        #: Number of blank lines
        self.blank_lines = 0
        #: Checker states for each plugin?
        self._checker_states = {}
        #: Current checker state
        self.checker_state = None
        #: User provided option for hang closing
        self.hang_closing = options.hang_closing
        #: Character used for indentation
        self.indent_char = None
        #: Current level of indentation
        self.indent_level = 0
        #: Line number in the file
        self.line_number = 0
        #: Current logical line
        self.logical_line = ''
        #: Maximum line length as configured by the user
        self.max_line_length = options.max_line_length
        #: Whether the current physical line is multiline
        self.multiline = False
        #: Whether or not we're observing NoQA
        self.noqa = False
        #: Previous level of indentation
        self.previous_indent_level = 0
        #: Previous logical line
        self.previous_logical = ''
        #: Previous unindented (i.e. top-level) logical line
        self.previous_unindented_logical_line = ''
        #: Current set of tokens
        self.tokens = []
        #: Total number of lines in the file
        self.total_lines = len(self.lines)
        #: Verbosity level of Flake8
        self.verbose = options.verbose
        #: Statistics dictionary
        self.statistics = {
            'logical lines': 0,
        }
        # Lazily populated by the file_tokens property below.
        self._file_tokens = None

    @property
    def file_tokens(self):
        """The complete set of tokens for a file.

        Accessing this attribute *may* raise an InvalidSyntax exception.

        :raises: flake8.exceptions.InvalidSyntax
        """
        if self._file_tokens is None:
            line_iter = iter(self.lines)
            try:
                self._file_tokens = list(tokenize.generate_tokens(
                    lambda: next(line_iter)
                ))
            except tokenize.TokenError as exc:
                # NOTE(review): exc.message exists on Python 2 only; on
                # Python 3 this attribute access would itself raise --
                # confirm the intended support matrix.
                raise exceptions.InvalidSyntax(exc.message, exception=exc)

        return self._file_tokens

    @contextlib.contextmanager
    def inside_multiline(self, line_number):
        """Context-manager to toggle the multiline attribute."""
        self.line_number = line_number
        self.multiline = True
        yield
        self.multiline = False

    def reset_blank_before(self):
        """Reset the blank_before attribute to zero."""
        self.blank_before = 0

    def delete_first_token(self):
        """Delete the first token in the list of tokens."""
        del self.tokens[0]

    def visited_new_blank_line(self):
        """Note that we visited a new blank line."""
        self.blank_lines += 1

    def update_state(self, mapping):
        """Update the indent level based on the logical line mapping."""
        (start_row, start_col) = mapping[0][1]
        start_line = self.lines[start_row - 1]
        self.indent_level = expand_indent(start_line[:start_col])
        if self.blank_before < self.blank_lines:
            self.blank_before = self.blank_lines

    def update_checker_state_for(self, plugin):
        """Update the checker_state attribute for the plugin."""
        if 'checker_state' in plugin['parameters']:
            self.checker_state = self._checker_states.setdefault(
                plugin['name'], {}
            )

    def next_logical_line(self):
        """Record the previous logical line.

        This also resets the tokens list and the blank_lines count.
        """
        if self.logical_line:
            self.previous_indent_level = self.indent_level
            self.previous_logical = self.logical_line
            if not self.indent_level:
                self.previous_unindented_logical_line = self.logical_line
        self.blank_lines = 0
        self.tokens = []
        self.noqa = False

    def build_logical_line_tokens(self):
        """Build the mapping, comments, and logical line lists."""
        logical = []
        comments = []
        length = 0
        previous_row = previous_column = mapping = None
        for token_type, text, start, end, line in self.tokens:
            if token_type in SKIP_TOKENS:
                continue
            if not mapping:
                mapping = [(0, start)]
            if token_type == tokenize.COMMENT:
                comments.append(text)
                continue
            if token_type == tokenize.STRING:
                # Replace string contents so checks can't match inside them.
                text = mutate_string(text)
            if previous_row:
                (start_row, start_column) = start
                if previous_row != start_row:
                    # Token starts on a new physical line: decide whether a
                    # separating space is needed in the logical line.
                    row_index = previous_row - 1
                    column_index = previous_column - 1
                    previous_text = self.lines[row_index][column_index]
                    if (previous_text == ',' or
                            (previous_text not in '{[(' and
                                text not in '}])')):
                        text = ' ' + text
                elif previous_column != start_column:
                    # Preserve intra-line whitespace between tokens.
                    text = line[previous_column:start_column] + text
            logical.append(text)
            length += len(text)
            mapping.append((length, end))
            (previous_row, previous_column) = end
        return comments, logical, mapping

    def build_ast(self):
        """Build an abstract syntax tree from the list of lines."""
        return compile(''.join(self.lines), '', 'exec', PyCF_ONLY_AST)

    def build_logical_line(self):
        """Build a logical line from the current tokens list."""
        comments, logical, mapping_list = self.build_logical_line_tokens()
        joined_comments = ''.join(comments)
        self.logical_line = ''.join(logical)
        if defaults.NOQA_INLINE_REGEXP.search(joined_comments):
            self.noqa = True
        self.statistics['logical lines'] += 1
        return joined_comments, self.logical_line, mapping_list

    def split_line(self, token):
        """Split a physical line's line based on new-lines.

        This also auto-increments the line number for the caller.
        """
        for line in token[1].split('\n')[:-1]:
            yield line
            self.line_number += 1

    def keyword_arguments_for(self, parameters, arguments=None):
        """Generate the keyword arguments for a list of parameters."""
        if arguments is None:
            arguments = {}
        for param, required in parameters.items():
            if param in arguments:
                continue
            try:
                arguments[param] = getattr(self, param)
            except AttributeError as exc:
                if required:
                    LOG.exception(exc)
                    raise
                else:
                    LOG.warning('Plugin requested optional parameter "%s" '
                                'but this is not an available parameter.',
                                param)
        return arguments

    def check_physical_error(self, error_code, line):
        """Update attributes based on error code and line."""
        if error_code == 'E101':
            self.indent_char = line[0]

    def generate_tokens(self):
        """Tokenize the file and yield the tokens.

        :raises flake8.exceptions.InvalidSyntax:
            If a :class:`tokenize.TokenError` is raised while generating
            tokens.
        """
        try:
            for token in tokenize.generate_tokens(self.next_line):
                if token[2][0] > self.total_lines:
                    break
                self.tokens.append(token)
                yield token
        except (tokenize.TokenError, SyntaxError) as exc:
            raise exceptions.InvalidSyntax(exception=exc)

    def line_for(self, line_number):
        """Retrieve the physical line at the specified line number."""
        adjusted_line_number = line_number - 1
        # NOTE(sigmavirus24): Some plugins choose to report errors for empty
        # files on Line 1. In those cases, we shouldn't bother trying to
        # retrieve a physical line (since none exist).
        if 0 <= adjusted_line_number < len(self.lines):
            return self.lines[adjusted_line_number]
        return None

    def next_line(self):
        """Get the next line from the list."""
        if self.line_number >= self.total_lines:
            return ''
        line = self.lines[self.line_number]
        self.line_number += 1
        if self.indent_char is None and line[:1] in defaults.WHITESPACE:
            self.indent_char = line[0]
        return line

    def read_lines(self):
        # type: () -> List[str]
        """Read the lines for this file checker."""
        if self.filename is None or self.filename == '-':
            self.filename = self.options.stdin_display_name or 'stdin'
            lines = self.read_lines_from_stdin()
        else:
            lines = self.read_lines_from_filename()
        return lines

    def _readlines_py2(self):
        # type: () -> List[str]
        # 'rU' enables universal-newline mode on Python 2.
        with open(self.filename, 'rU') as fd:
            return fd.readlines()

    def _readlines_py3(self):
        # type: () -> List[str]
        try:
            with open(self.filename, 'rb') as fd:
                (coding, lines) = tokenize.detect_encoding(fd.readline)
                textfd = io.TextIOWrapper(fd, coding, line_buffering=True)
                # ``lines`` holds the raw bytes consumed while detecting the
                # encoding; decode them and prepend to the remaining text.
                return ([l.decode(coding) for l in lines] +
                        textfd.readlines())
        except (LookupError, SyntaxError, UnicodeError):
            # If we can't detect the codec with tokenize.detect_encoding, or
            # the detected encoding is incorrect, just fallback to latin-1.
            with open(self.filename, encoding='latin-1') as fd:
                return fd.readlines()

    def read_lines_from_filename(self):
        # type: () -> List[str]
        """Read the lines for a file."""
        if (2, 6) <= sys.version_info < (3, 0):
            readlines = self._readlines_py2
        elif (3, 0) <= sys.version_info < (4, 0):
            readlines = self._readlines_py3
        return readlines()

    def read_lines_from_stdin(self):
        # type: () -> List[str]
        """Read the lines from standard in."""
        return utils.stdin_get_value().splitlines(True)

    def should_ignore_file(self):
        # type: () -> bool
        """Check if ``# flake8: noqa`` is in the file to be ignored.

        :returns:
            True if a line matches :attr:`defaults.NOQA_FILE`,
            otherwise False
        :rtype:
            bool
        """
        ignore_file = defaults.NOQA_FILE.search
        return any(ignore_file(line) for line in self.lines)

    def strip_utf_bom(self):
        # type: () -> NoneType
        """Strip the UTF bom from the lines of the file."""
        if not self.lines:
            # If we have nothing to analyze quit early
            return

        # 0xEF is the first raw byte of a UTF-8 BOM (Python 2 byte strings);
        # 0xFEFF is the decoded BOM code point (Python 3 text).
        first_byte = ord(self.lines[0][0])
        if first_byte not in (0xEF, 0xFEFF):
            return

        # If the first byte of the file is a UTF-8 BOM, strip it
        if first_byte == 0xFEFF:
            self.lines[0] = self.lines[0][1:]
        elif self.lines[0][:3] == '\xEF\xBB\xBF':
            self.lines[0] = self.lines[0][3:]
def is_eol_token(token):
    """Check if the token is an end-of-line token."""
    if token[0] in NEWLINE:
        return True
    # A backslash continuation at the end of the physical line also counts.
    rest_of_line = token[4][token[3][1]:]
    return rest_of_line.lstrip() == '\\\n'
# On Python 2.6 a comment on a line by itself produces no trailing NL token,
# so rebind is_eol_token to a wrapper that also treats such a comment as EOL.
if COMMENT_WITH_NL:  # If on Python 2.6
    def is_eol_token(token, _is_eol_token=is_eol_token):
        """Check if the token is an end-of-line token."""
        # The default argument captures the original function defined above.
        return (_is_eol_token(token) or
                (token[0] == tokenize.COMMENT and token[1] == token[4]))
def is_multiline_string(token):
    """Check if this is a multiline string."""
    token_type, text = token[0], token[1]
    return token_type == tokenize.STRING and '\n' in text
def token_is_newline(token):
    """Check if the token type is a newline token type."""
    tok_type = token[0]
    return tok_type in NEWLINE
def token_is_comment(token):
    """Check if the token type is a comment."""
    # Only meaningful when the tokenizer emits comments with trailing
    # newlines attached (COMMENT_WITH_NL); otherwise short-circuit.
    if not COMMENT_WITH_NL:
        return COMMENT_WITH_NL
    return token[0] == tokenize.COMMENT
def count_parentheses(current_parentheses_count, token_text):
    """Return the running open-bracket count updated for this token."""
    # Treat a missing running count (None) as zero.
    count = current_parentheses_count or 0
    if token_text in '([{':
        count += 1
    elif token_text in '}])':
        count -= 1
    return count
def log_token(log, token):
    """Log a token to a provided logging object."""
    # token[2] is the (row, col) start position; token[3] the end position.
    if token[2][0] == token[3][0]:
        # Token starts and ends on the same row: show the column span.
        pos = '[%s:%s]' % (token[2][1] or '', token[3][1])
    else:
        # Multi-line token: show only the ending row.
        pos = 'l.%s' % token[3][0]
    # _EXTRA_VERBOSE is presumably a custom low-priority log level defined
    # by the flake8 package -- confirm against flake8/__init__.py.
    log.log(flake8._EXTRA_VERBOSE, 'l.%s\t%s\t%s\t%r' %
            (token[2][0], pos, tokenize.tok_name[token[0]],
             token[1]))
# NOTE(sigmavirus24): This was taken wholesale from
# https://github.com/PyCQA/pycodestyle
def expand_indent(line):
    r"""Return the amount of indentation of ``line``.

    Tabs are expanded to the next multiple of 8.

    >>> expand_indent('    ')
    4
    >>> expand_indent('\t')
    8
    >>> expand_indent('   \t')
    8
    >>> expand_indent('        \t')
    16
    """
    if '\t' not in line:
        # Fast path: spaces only, so the indent is the stripped prefix length.
        return len(line) - len(line.lstrip())
    width = 0
    for character in line:
        if character == '\t':
            # A tab advances to the next multiple of 8.
            width = (width // 8 + 1) * 8
        elif character == ' ':
            width += 1
        else:
            break
    return width
# NOTE(sigmavirus24): This was taken wholesale from
# https://github.com/PyCQA/pycodestyle. The in-line comments were edited to be
# more descriptive.
def mutate_string(text):
    """Replace string contents with 'xxx' to prevent syntax matching.

    >>> mutate_string('"abc"')
    '"xxx"'
    >>> mutate_string("'''abc'''")
    "'''xxx'''"
    >>> mutate_string("r'abc'")
    "r'xxx'"
    """
    # The last character is always the closing quote; its first occurrence
    # in the text marks the end of any prefix modifiers (b, u, r) plus the
    # opening quote itself.
    quote = text[-1]
    start = text.index(quote) + 1
    end = len(text) - 1
    if text.endswith('"""') or text.endswith("'''"):
        # Triple-quoted string: the delimiters are two characters wider
        # on each side.
        start += 2
        end -= 2
    body = 'x' * (end - start)
    return text[:start] + body + text[end:]
| 35.824034 | 79 | 0.574997 |
e6663448e51189fccb952c112ccc0dea551582a8 | 449 | py | Python | webempresa/social/admin.py | tomapkiewicz/web-empresa-curso-django-2 | 175a32b617157f24687e8a5b68c062d80edb1731 | [
"Apache-2.0"
] | null | null | null | webempresa/social/admin.py | tomapkiewicz/web-empresa-curso-django-2 | 175a32b617157f24687e8a5b68c062d80edb1731 | [
"Apache-2.0"
] | null | null | null | webempresa/social/admin.py | tomapkiewicz/web-empresa-curso-django-2 | 175a32b617157f24687e8a5b68c062d80edb1731 | [
"Apache-2.0"
] | null | null | null | from django.contrib import admin
from .models import Link
# Register your models here.
class LinkAdmin(admin.ModelAdmin):
readonly_fields = ('created', 'updated')
def get_readonly_fields(self, request, obj=None):
# if request.user.groups.filter(name='admin').exists():
if request.user.is_superuser:
return ('created', 'updated')
else:
return ('key','name')
admin.site.register(Link,LinkAdmin) | 32.071429 | 62 | 0.668151 |
e3e9b8477e234b9dd55c3ab9faa01d21c05d7454 | 335 | py | Python | peples_heigth/solution.py | andriiglukhyi/leetcode | 22be8c8417b28b2888be5aee82ccfe47f57f1945 | [
"MIT"
] | 1 | 2018-08-16T09:42:44.000Z | 2018-08-16T09:42:44.000Z | peples_heigth/solution.py | andriiglukhyi/leetcode | 22be8c8417b28b2888be5aee82ccfe47f57f1945 | [
"MIT"
] | null | null | null | peples_heigth/solution.py | andriiglukhyi/leetcode | 22be8c8417b28b2888be5aee82ccfe47f57f1945 | [
"MIT"
] | null | null | null | class Solution:
def reconstructQueue(self, people):
"""
:type people: List[List[int]]
:rtype: List[List[int]]
"""
people.sort(key = lambda person: [-person[0],person[1]])
newlist = []
for person in people:
newlist.insert(person[1],person)
return newlist | 30.454545 | 65 | 0.540299 |
117700c216a16e3ab9a6bf4a58b54b4a93055bc0 | 7,907 | py | Python | messenger/envs/stage_one.py | vzhong/messenger-emma | ecf94af47b30718edf419928e0bbebda9a21a87f | [
"MIT"
] | null | null | null | messenger/envs/stage_one.py | vzhong/messenger-emma | ecf94af47b30718edf419928e0bbebda9a21a87f | [
"MIT"
] | null | null | null | messenger/envs/stage_one.py | vzhong/messenger-emma | ecf94af47b30718edf419928e0bbebda9a21a87f | [
"MIT"
] | null | null | null | '''
Classes that follows a gym-like interface and implements stage one of the Messenger
environment.
'''
import json
import random
from collections import namedtuple
from pathlib import Path
import numpy as np
from.base import MessengerEnv, Position
from . import config
from .manual import TextManual
from .utils import games_from_json
# Used to track sprites in StageOne, where we do not use VGDL to handle sprites.
Sprite = namedtuple("Sprite", ["name", "id", "position"])
class StageOne(MessengerEnv):
    """Stage one of the Messenger environment.

    All non-avatar entities are immovable and episodes are short, so the
    sprites are tracked directly (no VGDL engine) for efficiency.
    """

    def __init__(self, split, message_prob=0.2, shuffle_obs=True):
        '''
        Stage one where objects are all immovable. Since the episode length is
        short and entities do not move, we do not use VGDL engine for
        efficiency.

        message_prob:
            the probability that the avatar starts with the message
        shuffle_obs:
            shuffle the observation including the text manual
        '''
        super().__init__()
        self.message_prob = message_prob
        self.shuffle_obs = shuffle_obs
        this_folder = Path(__file__).parent

        # Select the games and manual texts matching the requested split.
        games_json_path = this_folder.joinpath("games.json")
        if "train" in split and "mc" in split:  # multi-combination games
            game_split = "train_multi_comb"
            text_json_path = this_folder.joinpath("texts", "text_train.json")
        elif "train" in split and "sc" in split:  # single-combination games
            game_split = "train_single_comb"
            text_json_path = this_folder.joinpath("texts", "text_train.json")
        elif "val" in split:
            game_split = "val"
            text_json_path = this_folder.joinpath("texts", "text_val.json")
        elif "test" in split:
            game_split = "test"
            text_json_path = this_folder.joinpath("texts", "text_test.json")
        else:
            raise Exception(f"Split: {split} not understood.")

        # List of Game namedtuples for the chosen split.
        self.all_games = games_from_json(
            json_path=games_json_path, split=game_split)

        # Only the "immovable" and "unknown" descriptions apply in stage
        # one, so extract just those sentences for each entity and role.
        with text_json_path.open(mode="r") as f:
            descrip = json.load(f)
        self.descriptors = {}
        for entity in descrip:
            self.descriptors[entity] = {}
            for role in ("enemy", "message", "goal"):
                self.descriptors[entity][role] = (
                    list(descrip[entity][role]["immovable"])
                    + list(descrip[entity][role]["unknown"])
                )

        self.positions = [  # all possible entity locations
            Position(y=3, x=5),
            Position(y=5, x=3),
            Position(y=5, x=7),
            Position(y=7, x=5),
        ]
        self.avatar_start_pos = Position(y=5, x=5)
        self.avatar = None
        self.enemy = None
        self.message = None
        self.neutral = None
        self.goal = None

    def _get_manual(self):
        """Sample one description per entity role; shuffled if configured."""
        enemy_str = random.choice(self.descriptors[self.enemy.name]["enemy"])
        key_str = random.choice(self.descriptors[self.message.name]["message"])
        goal_str = random.choice(self.descriptors[self.goal.name]["goal"])
        manual = [enemy_str, key_str, goal_str]
        if self.shuffle_obs:
            random.shuffle(manual)
        return manual

    def _get_obs(self):
        """Return grid observations for the entities and the avatar.

        Each grid is (STATE_HEIGHT, STATE_WIDTH, 1) with entity ids at
        the cells the sprites occupy.
        """
        entities = np.zeros((config.STATE_HEIGHT, config.STATE_WIDTH, 1))
        avatar = np.zeros((config.STATE_HEIGHT, config.STATE_WIDTH, 1))
        for sprite in (self.enemy, self.message, self.goal):
            entities[sprite.position.y, sprite.position.x, 0] = sprite.id
        avatar[self.avatar.position.y, self.avatar.position.x, 0] = \
            self.avatar.id
        return {"entities": entities, "avatar": avatar}

    def reset(self):
        """Start a new episode; return (observation, manual)."""
        self.game = random.choice(self.all_games)
        enemy, message, goal = self.game.enemy, self.game.message, \
            self.game.goal

        # Randomly assign the enemy, message and goal to distinct cells.
        shuffled_pos = random.sample(self.positions, 4)
        self.enemy = Sprite(
            name=enemy.name, id=enemy.id, position=shuffled_pos[0])
        self.message = Sprite(
            name=message.name, id=message.id, position=shuffled_pos[1])
        self.goal = Sprite(
            name=goal.name, id=goal.id, position=shuffled_pos[2])

        # Decide whether the avatar starts already holding the message.
        if random.random() < self.message_prob:
            avatar_cfg = config.WITH_MESSAGE
        else:
            avatar_cfg = config.NO_MESSAGE
        self.avatar = Sprite(
            name=avatar_cfg.name,
            id=avatar_cfg.id,
            position=self.avatar_start_pos,
        )

        obs = self._get_obs()
        manual = self._get_manual()
        return obs, manual

    def _move_avatar(self, action):
        """Move the avatar one cell; moving off the grid is a no-op.

        Raises
        ------
        Exception
            If ``action`` is not one of ``config.ACTIONS``.
        """
        if action == config.ACTIONS.stay:
            return
        deltas = {
            config.ACTIONS.up: (-1, 0),
            config.ACTIONS.down: (1, 0),
            config.ACTIONS.left: (0, -1),
            config.ACTIONS.right: (0, 1),
        }
        if action not in deltas:
            raise Exception(f"{action} is not a valid action.")
        dy, dx = deltas[action]
        new_y = self.avatar.position.y + dy
        new_x = self.avatar.position.x + dx
        if not (0 <= new_y < config.STATE_HEIGHT
                and 0 <= new_x < config.STATE_WIDTH):
            return  # would leave the grid; stay put
        self.avatar = Sprite(
            name=self.avatar.name,
            id=self.avatar.id,
            position=Position(y=new_y, x=new_x),
        )

    def _overlap(self, sprite_1, sprite_2):
        """Return True when the two sprites occupy the same cell."""
        return (sprite_1.position.x == sprite_2.position.x
                and sprite_1.position.y == sprite_2.position.y)

    def step(self, action):
        """Advance one timestep; return (obs, reward, done, info).

        Touching the enemy always loses. Touching the message wins only
        if the avatar does not yet carry it; touching the goal wins only
        if it does.
        """
        self._move_avatar(action)
        obs = self._get_obs()
        if self._overlap(self.avatar, self.enemy):
            return obs, -1.0, True, None  # state, reward, done, info
        if self._overlap(self.avatar, self.message):
            if self.avatar.name == config.WITH_MESSAGE.name:
                return obs, -1.0, True, None
            elif self.avatar.name == config.NO_MESSAGE.name:
                return obs, 1.0, True, None
            else:
                # BUG FIX: this message previously lacked the f-prefix and
                # referenced an undefined local name ``avatar``.
                raise Exception(f"Unknown avatar name {self.avatar.name}")
        if self._overlap(self.avatar, self.goal):
            if self.avatar.name == config.WITH_MESSAGE.name:
                return obs, 1.0, True, None
            elif self.avatar.name == config.NO_MESSAGE.name:
                return obs, -1.0, True, None
            else:
                raise Exception(f"Unknown avatar name {self.avatar.name}")
        return obs, 0.0, False, None
| 37.29717 | 97 | 0.563298 |
16e328f912f8530fa9a64a538e5c0d855fad9df7 | 27,394 | py | Python | resources/usr/lib/python2.7/dist-packages/numpy/polynomial/polytemplate.py | edawson/parliament2 | 2632aa3484ef64c9539c4885026b705b737f6d1e | [
"Apache-2.0"
] | null | null | null | resources/usr/lib/python2.7/dist-packages/numpy/polynomial/polytemplate.py | edawson/parliament2 | 2632aa3484ef64c9539c4885026b705b737f6d1e | [
"Apache-2.0"
] | null | null | null | resources/usr/lib/python2.7/dist-packages/numpy/polynomial/polytemplate.py | edawson/parliament2 | 2632aa3484ef64c9539c4885026b705b737f6d1e | [
"Apache-2.0"
] | 1 | 2020-05-28T23:01:44.000Z | 2020-05-28T23:01:44.000Z | """
Template for the Chebyshev and Polynomial classes.
This module houses a Python string module Template object (see, e.g.,
http://docs.python.org/library/string.html#template-strings) used by
the `polynomial` and `chebyshev` modules to implement their respective
`Polynomial` and `Chebyshev` classes. It provides a mechanism for easily
creating additional specific polynomial classes (e.g., Legendre, Jacobi,
etc.) in the future, such that all these classes will have a common API.
"""
import string
import sys
if sys.version_info[0] >= 3:
rel_import = "from . import"
else:
rel_import = "import"
polytemplate = string.Template('''
from __future__ import division
import numpy as np
import warnings
REL_IMPORT polyutils as pu
class $name(pu.PolyBase) :
"""A $name series class.
$name instances provide the standard Python numerical methods '+',
'-', '*', '//', '%', 'divmod', '**', and '()' as well as the listed
methods.
Parameters
----------
coef : array_like
$name coefficients, in increasing order. For example,
``(1, 2, 3)`` implies ``P_0 + 2P_1 + 3P_2`` where the
``P_i`` are a graded polynomial basis.
domain : (2,) array_like, optional
Domain to use. The interval ``[domain[0], domain[1]]`` is mapped to
the interval ``[window[0], window[1]]`` by shifting and scaling.
The default value is $domain.
window : (2,) array_like, optional
Window, see ``domain`` for its use. The default value is $domain.
.. versionadded:: 1.6.0
Attributes
----------
coef : (N,) array
$name coefficients, from low to high.
domain : (2,) array
Domain that is mapped to ``window``.
window : (2,) array
Window that ``domain`` is mapped to.
Class Attributes
----------------
maxpower : int
Maximum power allowed, i.e., the largest number ``n`` such that
``p(x)**n`` is allowed. This is to limit runaway polynomial size.
domain : (2,) ndarray
Default domain of the class.
window : (2,) ndarray
Default window of the class.
Notes
-----
It is important to specify the domain in many cases, for instance in
fitting data, because many of the important properties of the
polynomial basis only hold in a specified interval and consequently
the data must be mapped into that interval in order to benefit.
Examples
--------
"""
# Limit runaway size. T_n^m has degree n*2^m
maxpower = 16
# Default domain
domain = np.array($domain)
# Default window
window = np.array($domain)
# Don't let participate in array operations. Value doesn't matter.
__array_priority__ = 0
def has_samecoef(self, other):
"""Check if coefficients match.
Parameters
----------
other : class instance
The other class must have the ``coef`` attribute.
Returns
-------
bool : boolean
True if the coefficients are the same, False otherwise.
Notes
-----
.. versionadded:: 1.6.0
"""
if len(self.coef) != len(other.coef):
return False
elif not np.all(self.coef == other.coef):
return False
else:
return True
def has_samedomain(self, other):
"""Check if domains match.
Parameters
----------
other : class instance
The other class must have the ``domain`` attribute.
Returns
-------
bool : boolean
True if the domains are the same, False otherwise.
Notes
-----
.. versionadded:: 1.6.0
"""
return np.all(self.domain == other.domain)
def has_samewindow(self, other):
"""Check if windows match.
Parameters
----------
other : class instance
The other class must have the ``window`` attribute.
Returns
-------
bool : boolean
True if the windows are the same, False otherwise.
Notes
-----
.. versionadded:: 1.6.0
"""
return np.all(self.window == other.window)
def has_sametype(self, other):
"""Check if types match.
Parameters
----------
other : object
Class instance.
Returns
-------
bool : boolean
True if other is same class as self
Notes
-----
.. versionadded:: 1.7.0
"""
return isinstance(other, self.__class__)
def __init__(self, coef, domain=$domain, window=$domain) :
[coef, dom, win] = pu.as_series([coef, domain, window], trim=False)
if len(dom) != 2 :
raise ValueError("Domain has wrong number of elements.")
if len(win) != 2 :
raise ValueError("Window has wrong number of elements.")
self.coef = coef
self.domain = dom
self.window = win
def __repr__(self):
format = "%s(%s, %s, %s)"
coef = repr(self.coef)[6:-1]
domain = repr(self.domain)[6:-1]
window = repr(self.window)[6:-1]
return format % ('$name', coef, domain, window)
def __str__(self) :
format = "%s(%s)"
coef = str(self.coef)
return format % ('$nick', coef)
# Pickle and copy
def __getstate__(self) :
ret = self.__dict__.copy()
ret['coef'] = self.coef.copy()
ret['domain'] = self.domain.copy()
ret['window'] = self.window.copy()
return ret
def __setstate__(self, dict) :
self.__dict__ = dict
# Call
def __call__(self, arg) :
off, scl = pu.mapparms(self.domain, self.window)
arg = off + scl*arg
return ${nick}val(arg, self.coef)
def __iter__(self) :
return iter(self.coef)
def __len__(self) :
return len(self.coef)
# Numeric properties.
def __neg__(self) :
return self.__class__(-self.coef, self.domain, self.window)
def __pos__(self) :
return self
def __add__(self, other) :
"""Returns sum"""
if isinstance(other, pu.PolyBase):
if not self.has_sametype(other):
raise TypeError("Polynomial types differ")
elif not self.has_samedomain(other):
raise TypeError("Domains differ")
elif not self.has_samewindow(other):
raise TypeError("Windows differ")
else:
coef = ${nick}add(self.coef, other.coef)
else :
try :
coef = ${nick}add(self.coef, other)
except :
return NotImplemented
return self.__class__(coef, self.domain, self.window)
def __sub__(self, other) :
"""Returns difference"""
if isinstance(other, pu.PolyBase):
if not self.has_sametype(other):
raise TypeError("Polynomial types differ")
elif not self.has_samedomain(other):
raise TypeError("Domains differ")
elif not self.has_samewindow(other):
raise TypeError("Windows differ")
else:
coef = ${nick}sub(self.coef, other.coef)
else :
try :
coef = ${nick}sub(self.coef, other)
except :
return NotImplemented
return self.__class__(coef, self.domain, self.window)
def __mul__(self, other) :
"""Returns product"""
if isinstance(other, pu.PolyBase):
if not self.has_sametype(other):
raise TypeError("Polynomial types differ")
elif not self.has_samedomain(other):
raise TypeError("Domains differ")
elif not self.has_samewindow(other):
raise TypeError("Windows differ")
else:
coef = ${nick}mul(self.coef, other.coef)
else :
try :
coef = ${nick}mul(self.coef, other)
except :
return NotImplemented
return self.__class__(coef, self.domain, self.window)
def __div__(self, other):
# set to __floordiv__, /, for now.
return self.__floordiv__(other)
def __truediv__(self, other) :
# there is no true divide if the rhs is not a scalar, although it
# could return the first n elements of an infinite series.
# It is hard to see where n would come from, though.
if np.isscalar(other) :
# this might be overly restrictive
coef = self.coef/other
return self.__class__(coef, self.domain, self.window)
else :
return NotImplemented
def __floordiv__(self, other) :
"""Returns the quotient."""
if isinstance(other, pu.PolyBase):
if not self.has_sametype(other):
raise TypeError("Polynomial types differ")
elif not self.has_samedomain(other):
raise TypeError("Domains differ")
elif not self.has_samewindow(other):
raise TypeError("Windows differ")
else:
quo, rem = ${nick}div(self.coef, other.coef)
else :
try :
quo, rem = ${nick}div(self.coef, other)
except :
return NotImplemented
return self.__class__(quo, self.domain, self.window)
def __mod__(self, other) :
"""Returns the remainder."""
if isinstance(other, pu.PolyBase):
if not self.has_sametype(other):
raise TypeError("Polynomial types differ")
elif not self.has_samedomain(other):
raise TypeError("Domains differ")
elif not self.has_samewindow(other):
raise TypeError("Windows differ")
else:
quo, rem = ${nick}div(self.coef, other.coef)
else :
try :
quo, rem = ${nick}div(self.coef, other)
except :
return NotImplemented
return self.__class__(rem, self.domain, self.window)
def __divmod__(self, other) :
"""Returns quo, remainder"""
if isinstance(other, self.__class__) :
if not self.has_samedomain(other):
raise TypeError("Domains are not equal")
elif not self.has_samewindow(other):
raise TypeError("Windows are not equal")
else:
quo, rem = ${nick}div(self.coef, other.coef)
else :
try :
quo, rem = ${nick}div(self.coef, other)
except :
return NotImplemented
quo = self.__class__(quo, self.domain, self.window)
rem = self.__class__(rem, self.domain, self.window)
return quo, rem
def __pow__(self, other) :
try :
coef = ${nick}pow(self.coef, other, maxpower = self.maxpower)
except :
raise
return self.__class__(coef, self.domain, self.window)
def __radd__(self, other) :
try :
coef = ${nick}add(other, self.coef)
except :
return NotImplemented
return self.__class__(coef, self.domain, self.window)
def __rsub__(self, other):
try :
coef = ${nick}sub(other, self.coef)
except :
return NotImplemented
return self.__class__(coef, self.domain, self.window)
def __rmul__(self, other) :
try :
coef = ${nick}mul(other, self.coef)
except :
return NotImplemented
return self.__class__(coef, self.domain, self.window)
def __rdiv__(self, other):
# set to __floordiv__ /.
return self.__rfloordiv__(other)
def __rtruediv__(self, other) :
# there is no true divide if the rhs is not a scalar, although it
# could return the first n elements of an infinite series.
# It is hard to see where n would come from, though.
if len(self.coef) == 1 :
try :
quo, rem = ${nick}div(other, self.coef[0])
except :
return NotImplemented
return self.__class__(quo, self.domain, self.window)
def __rfloordiv__(self, other) :
try :
quo, rem = ${nick}div(other, self.coef)
except :
return NotImplemented
return self.__class__(quo, self.domain, self.window)
def __rmod__(self, other) :
try :
quo, rem = ${nick}div(other, self.coef)
except :
return NotImplemented
return self.__class__(rem, self.domain, self.window)
def __rdivmod__(self, other) :
try :
quo, rem = ${nick}div(other, self.coef)
except :
return NotImplemented
quo = self.__class__(quo, self.domain, self.window)
rem = self.__class__(rem, self.domain, self.window)
return quo, rem
# Enhance me
# some augmented arithmetic operations could be added here
def __eq__(self, other) :
res = isinstance(other, self.__class__) \
and self.has_samecoef(other) \
and self.has_samedomain(other) \
and self.has_samewindow(other)
return res
def __ne__(self, other) :
return not self.__eq__(other)
#
# Extra methods.
#
def copy(self) :
"""Return a copy.
Return a copy of the current $name instance.
Returns
-------
new_instance : $name
Copy of current instance.
"""
return self.__class__(self.coef, self.domain, self.window)
def degree(self) :
"""The degree of the series.
Notes
-----
.. versionadded:: 1.5.0
"""
return len(self) - 1
def cutdeg(self, deg) :
"""Truncate series to the given degree.
Reduce the degree of the $name series to `deg` by discarding the
high order terms. If `deg` is greater than the current degree a
copy of the current series is returned. This can be useful in least
squares where the coefficients of the high degree terms may be very
small.
Parameters
----------
deg : non-negative int
The series is reduced to degree `deg` by discarding the high
order terms. The value of `deg` must be a non-negative integer.
Returns
-------
new_instance : $name
New instance of $name with reduced degree.
Notes
-----
.. versionadded:: 1.5.0
"""
return self.truncate(deg + 1)
def trim(self, tol=0) :
"""Remove small leading coefficients
Remove leading coefficients until a coefficient is reached whose
absolute value greater than `tol` or the beginning of the series is
reached. If all the coefficients would be removed the series is set to
``[0]``. A new $name instance is returned with the new coefficients.
The current instance remains unchanged.
Parameters
----------
tol : non-negative number.
All trailing coefficients less than `tol` will be removed.
Returns
-------
new_instance : $name
Contains the new set of coefficients.
"""
coef = pu.trimcoef(self.coef, tol)
return self.__class__(coef, self.domain, self.window)
def truncate(self, size) :
"""Truncate series to length `size`.
Reduce the $name series to length `size` by discarding the high
degree terms. The value of `size` must be a positive integer. This
can be useful in least squares where the coefficients of the
high degree terms may be very small.
Parameters
----------
size : positive int
The series is reduced to length `size` by discarding the high
degree terms. The value of `size` must be a positive integer.
Returns
-------
new_instance : $name
New instance of $name with truncated coefficients.
"""
isize = int(size)
if isize != size or isize < 1 :
raise ValueError("size must be a positive integer")
if isize >= len(self.coef) :
coef = self.coef
else :
coef = self.coef[:isize]
return self.__class__(coef, self.domain, self.window)
def convert(self, domain=None, kind=None, window=None) :
"""Convert to different class and/or domain.
Parameters
----------
domain : array_like, optional
The domain of the converted series. If the value is None,
the default domain of `kind` is used.
kind : class, optional
The polynomial series type class to which the current instance
should be converted. If kind is None, then the class of the
current instance is used.
window : array_like, optional
The window of the converted series. If the value is None,
the default window of `kind` is used.
Returns
-------
new_series_instance : `kind`
The returned class can be of different type than the current
instance and/or have a different domain.
Notes
-----
Conversion between domains and class types can result in
numerically ill defined series.
Examples
--------
"""
if kind is None:
kind = $name
if domain is None:
domain = kind.domain
if window is None:
window = kind.window
return self(kind.identity(domain, window=window))
def mapparms(self) :
"""Return the mapping parameters.
The returned values define a linear map ``off + scl*x`` that is
applied to the input arguments before the series is evaluated. The
map depends on the ``domain`` and ``window``; if the current
``domain`` is equal to the ``window`` the resulting map is the
identity. If the coeffients of the ``$name`` instance are to be
used by themselves outside this class, then the linear function
must be substituted for the ``x`` in the standard representation of
the base polynomials.
Returns
-------
off, scl : floats or complex
The mapping function is defined by ``off + scl*x``.
Notes
-----
If the current domain is the interval ``[l_1, r_1]`` and the window
is ``[l_2, r_2]``, then the linear mapping function ``L`` is
defined by the equations::
L(l_1) = l_2
L(r_1) = r_2
"""
return pu.mapparms(self.domain, self.window)
def integ(self, m=1, k=[], lbnd=None) :
"""Integrate.
Return an instance of $name that is the definite integral of the
current series. Refer to `${nick}int` for full documentation.
Parameters
----------
m : non-negative int
The number of integrations to perform.
k : array_like
Integration constants. The first constant is applied to the
first integration, the second to the second, and so on. The
list of values must less than or equal to `m` in length and any
missing values are set to zero.
lbnd : Scalar
The lower bound of the definite integral.
Returns
-------
integral : $name
The integral of the series using the same domain.
See Also
--------
${nick}int : similar function.
${nick}der : similar function for derivative.
"""
off, scl = self.mapparms()
if lbnd is None :
lbnd = 0
else :
lbnd = off + scl*lbnd
coef = ${nick}int(self.coef, m, k, lbnd, 1./scl)
return self.__class__(coef, self.domain, self.window)
def deriv(self, m=1):
"""Differentiate.
Return an instance of $name that is the derivative of the current
series. Refer to `${nick}der` for full documentation.
Parameters
----------
m : non-negative int
The number of integrations to perform.
Returns
-------
derivative : $name
The derivative of the series using the same domain.
See Also
--------
${nick}der : similar function.
${nick}int : similar function for integration.
"""
off, scl = self.mapparms()
coef = ${nick}der(self.coef, m, scl)
return self.__class__(coef, self.domain, self.window)
def roots(self) :
"""Return list of roots.
Return ndarray of roots for this series. See `${nick}roots` for
full documentation. Note that the accuracy of the roots is likely to
decrease the further outside the domain they lie.
See Also
--------
${nick}roots : similar function
${nick}fromroots : function to go generate series from roots.
"""
roots = ${nick}roots(self.coef)
return pu.mapdomain(roots, self.window, self.domain)
def linspace(self, n=100, domain=None):
"""Return x,y values at equally spaced points in domain.
Returns x, y values at `n` equally spaced points across domain.
Here y is the value of the polynomial at the points x. This is
intended as a plotting aid.
Parameters
----------
n : int, optional
Number of point pairs to return. The default value is 100.
Returns
-------
x, y : ndarrays
``x`` is equal to linspace(self.domain[0], self.domain[1], n)
``y`` is the polynomial evaluated at ``x``.
.. versionadded:: 1.5.0
"""
if domain is None:
domain = self.domain
x = np.linspace(domain[0], domain[1], n)
y = self(x)
return x, y
@staticmethod
def fit(x, y, deg, domain=None, rcond=None, full=False, w=None,
window=$domain):
"""Least squares fit to data.
Return a `$name` instance that is the least squares fit to the data
`y` sampled at `x`. Unlike `${nick}fit`, the domain of the returned
instance can be specified and this will often result in a superior
fit with less chance of ill conditioning. See `${nick}fit` for full
documentation of the implementation.
Parameters
----------
x : array_like, shape (M,)
x-coordinates of the M sample points ``(x[i], y[i])``.
y : array_like, shape (M,) or (M, K)
y-coordinates of the sample points. Several data sets of sample
points sharing the same x-coordinates can be fitted at once by
passing in a 2D-array that contains one dataset per column.
deg : int
Degree of the fitting polynomial.
domain : {None, [beg, end], []}, optional
Domain to use for the returned $name instance. If ``None``,
then a minimal domain that covers the points `x` is chosen. If
``[]`` the default domain ``$domain`` is used. The default
value is $domain in numpy 1.4.x and ``None`` in later versions.
The ``'[]`` value was added in numpy 1.5.0.
rcond : float, optional
Relative condition number of the fit. Singular values smaller
than this relative to the largest singular value will be
ignored. The default value is len(x)*eps, where eps is the
relative precision of the float type, about 2e-16 in most
cases.
full : bool, optional
Switch determining nature of return value. When it is False
(the default) just the coefficients are returned, when True
diagnostic information from the singular value decomposition is
also returned.
w : array_like, shape (M,), optional
Weights. If not None the contribution of each point
``(x[i],y[i])`` to the fit is weighted by `w[i]`. Ideally the
weights are chosen so that the errors of the products
``w[i]*y[i]`` all have the same variance. The default value is
None.
.. versionadded:: 1.5.0
window : {[beg, end]}, optional
Window to use for the returned $name instance. The default
value is ``$domain``
.. versionadded:: 1.6.0
Returns
-------
least_squares_fit : instance of $name
The $name instance is the least squares fit to the data and
has the domain specified in the call.
[residuals, rank, singular_values, rcond] : only if `full` = True
Residuals of the least-squares fit, the effective rank of the
scaled Vandermonde matrix and its singular values, and the
specified value of `rcond`. For more details, see
`linalg.lstsq`.
See Also
--------
${nick}fit : similar function
"""
if domain is None:
domain = pu.getdomain(x)
elif domain == []:
domain = $domain
if window == []:
window = $domain
xnew = pu.mapdomain(x, domain, window)
res = ${nick}fit(xnew, y, deg, w=w, rcond=rcond, full=full)
if full :
[coef, status] = res
return $name(coef, domain=domain, window=window), status
else :
coef = res
return $name(coef, domain=domain, window=window)
@staticmethod
def fromroots(roots, domain=$domain, window=$domain) :
"""Return $name instance with specified roots.
Returns an instance of $name representing the product
``(x - r[0])*(x - r[1])*...*(x - r[n-1])``, where ``r`` is the
list of roots.
Parameters
----------
roots : array_like
List of roots.
Returns
-------
object : $name
Series with the specified roots.
See Also
--------
${nick}fromroots : equivalent function
"""
if domain is None :
domain = pu.getdomain(roots)
rnew = pu.mapdomain(roots, domain, window)
coef = ${nick}fromroots(rnew)
return $name(coef, domain=domain, window=window)
@staticmethod
def identity(domain=$domain, window=$domain) :
"""Identity function.
If ``p`` is the returned $name object, then ``p(x) == x`` for all
values of x.
Parameters
----------
domain : array_like
The resulting array must be if the form ``[beg, end]``, where
``beg`` and ``end`` are the endpoints of the domain.
window : array_like
The resulting array must be if the form ``[beg, end]``, where
``beg`` and ``end`` are the endpoints of the window.
Returns
-------
identity : $name object
"""
off, scl = pu.mapparms(window, domain)
coef = ${nick}line(off, scl)
return $name(coef, domain, window)
'''.replace('REL_IMPORT', rel_import))
| 32.304245 | 78 | 0.562787 |
c62751905d32efe77afdeb0c1d02caff0e3ca9bd | 5,003 | py | Python | pythran/dist.py | PierreBlancfat/pythran2 | 37869bc73aae1054253c2b1643aee5c63f11b7e8 | [
"BSD-3-Clause"
] | null | null | null | pythran/dist.py | PierreBlancfat/pythran2 | 37869bc73aae1054253c2b1643aee5c63f11b7e8 | [
"BSD-3-Clause"
] | null | null | null | pythran/dist.py | PierreBlancfat/pythran2 | 37869bc73aae1054253c2b1643aee5c63f11b7e8 | [
"BSD-3-Clause"
] | null | null | null | '''
This modules contains a distutils extension mechanism for Pythran
* PythranExtension: is used as distutils's Extension
'''
import pythran.config as cfg
from collections import defaultdict, Iterable
import os.path
import os
import sys
from distutils.command.build_ext import build_ext as LegacyBuildExt
from numpy.distutils.extension import Extension
class PythranBuildExt(LegacyBuildExt, object):
"""Subclass of `distutils.command.build_ext.build_ext` which is required to
build `PythranExtension` with the configured C++ compiler. It may also be
subclassed if you want to combine with another build_ext class (NumPy,
Cython implementations).
"""
def build_extension(self, ext):
StringTypes = (str, unicode) if sys.version_info[0] == 2 else (str,)
def get_value(obj, key):
var = getattr(obj, key)
if isinstance(var, Iterable) and not isinstance(var, StringTypes):
return var[0]
else:
return var
def set_value(obj, key, value):
var = getattr(obj, key)
if isinstance(var, Iterable) and not isinstance(var, StringTypes):
var[0] = value
else:
setattr(obj, key, value)
prev = {
# linux-like
'preprocessor': None,
'compiler_cxx': None,
'compiler_so': None,
'compiler': None,
'linker_exe': None,
'linker_so': None,
# Windows-like
'cc': None,
'linker': None,
'lib': None,
'rc': None,
'mc': None,
}
# Backup compiler settings
for key in list(prev.keys()):
if hasattr(self.compiler, key):
prev[key] = get_value(self.compiler, key)
else:
del prev[key]
# try hard to modify the compiler
if getattr(ext, 'cxx', None) is not None:
for comp in prev:
if hasattr(self.compiler, comp):
set_value(self.compiler, comp, ext.cxx)
# In general, distutils uses -Wstrict-prototypes, but this option
# is not valid for C++ code, only for C. Remove it if it's there
# to avoid a spurious warning on every compilation.
for flag in cfg.cfg.get('compiler', "ignoreflags").split():
for target in ('compiler_so', 'linker_so'):
try:
getattr(self.compiler, target).remove(flag)
except (AttributeError, ValueError):
pass
# Remove -arch i386 if 'x86_64' is specified, otherwise incorrect
# code is generated, at least on OSX
if hasattr(self.compiler, 'compiler_so'):
archs = defaultdict(list)
for i, flag in enumerate(self.compiler.compiler_so[1:]):
if self.compiler.compiler_so[i] == '-arch':
archs[flag].append(i + 1)
if 'x86_64' in archs and 'i386' in archs:
for i in archs['i386']:
self.compiler.compiler_so[i] = 'x86_64'
try:
return super(PythranBuildExt, self).build_extension(ext)
finally:
# Revert compiler settings
for key in prev.keys():
set_value(self.compiler, key, prev[key])
class PythranExtension(Extension):
'''
Description of a Pythran extension
Similar to distutils.core.Extension except that the sources are .py files
They must be processable by pythran, of course.
The compilation process ends up in a native Python module.
'''
def __init__(self, name, sources, *args, **kwargs):
cfg_ext = cfg.make_extension(python=True, **kwargs)
self.cxx = cfg_ext.pop('cxx', None)
self._sources = sources
Extension.__init__(self, name, sources, *args, **cfg_ext)
self.__dict__.pop("sources", None)
@property
def sources(self):
import pythran.toolchain as tc
cxx_sources = []
for source in self._sources:
base, ext = os.path.splitext(source)
if ext != '.py':
cxx_sources.append(source)
continue
output_file = base + '.cpp' # target name
if os.path.exists(source) and (not os.path.exists(output_file)
or os.stat(output_file) < os.stat(source)):
# get the last name in the path
if '.' in self.name:
module_name = os.path.splitext(self.name)[-1][1:]
else:
module_name = self.name
tc.compile_pythranfile(source, output_file,
module_name, cpponly=True)
cxx_sources.append(output_file)
return cxx_sources
@sources.setter
def sources(self, sources):
self._sources = sources
| 35.232394 | 79 | 0.563862 |
372c0f0fe3b374aa7d15fd83db35a0ff0c84f106 | 27,147 | py | Python | ambari-server/src/main/resources/stacks/HDP/3.0/services/HIVE/package/scripts/hive.py | niuchp/ambari-2.7.3 | 12e67c143b7eb07bec692edf2b8349ccda99f3c6 | [
"Apache-2.0"
] | null | null | null | ambari-server/src/main/resources/stacks/HDP/3.0/services/HIVE/package/scripts/hive.py | niuchp/ambari-2.7.3 | 12e67c143b7eb07bec692edf2b8349ccda99f3c6 | [
"Apache-2.0"
] | null | null | null | ambari-server/src/main/resources/stacks/HDP/3.0/services/HIVE/package/scripts/hive.py | niuchp/ambari-2.7.3 | 12e67c143b7eb07bec692edf2b8349ccda99f3c6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Python Imports
import os
import glob
import traceback
from urlparse import urlparse
# Ambari Commons & Resource Management Imports
from ambari_commons.constants import SERVICE
from resource_management.core import utils
from resource_management.core.resources.system import File, Execute, Directory
from resource_management.core.logger import Logger
from resource_management.core.shell import as_user, quote_bash_args
from resource_management.core.source import StaticFile, Template, DownloadSource, InlineTemplate
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.generate_logfeeder_input_config import generate_logfeeder_input_config
from resource_management.libraries.functions.get_config import get_config
from resource_management.libraries.functions.get_user_call_output import get_user_call_output
from resource_management.libraries.functions.is_empty import is_empty
from resource_management.libraries.functions.security_commons import update_credential_provider_path
from resource_management.libraries.functions.setup_atlas_hook import setup_atlas_hook
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.resources.xml_config import XmlConfig
from resource_management.libraries.functions.lzo_utils import install_lzo_if_needed
def hive(name=None):
import params
install_lzo_if_needed()
# We should change configurations for client as well as for server.
# The reason is that stale-configs are service-level, not component.
Logger.info("Directories to fill with configs: %s" % str(params.hive_conf_dirs_list))
for conf_dir in params.hive_conf_dirs_list:
fill_conf_dir(conf_dir)
params.hive_site_config = update_credential_provider_path(params.hive_site_config,
'hive-site',
os.path.join(params.hive_config_dir, 'hive-site.jceks'),
params.hive_user,
params.user_group
)
XmlConfig("hive-site.xml",
conf_dir = params.hive_config_dir,
configurations = params.hive_site_config,
configuration_attributes = params.config['configurationAttributes']['hive-site'],
owner = params.hive_user,
group = params.user_group,
mode = 0644)
# Generate atlas-application.properties.xml file
if params.enable_atlas_hook:
atlas_hook_filepath = os.path.join(params.hive_config_dir, params.atlas_hook_filename)
setup_atlas_hook(SERVICE.HIVE, params.hive_atlas_application_properties, atlas_hook_filepath, params.hive_user, params.user_group)
File(format("{hive_config_dir}/hive-env.sh"),
owner=params.hive_user,
group=params.user_group,
content=InlineTemplate(params.hive_env_sh_template),
mode=0755
)
# On some OS this folder could be not exists, so we will create it before pushing there files
Directory(params.limits_conf_dir,
create_parents = True,
owner='root',
group='root'
)
File(os.path.join(params.limits_conf_dir, 'hive.conf'),
owner='root',
group='root',
mode=0644,
content=Template("hive.conf.j2")
)
if params.security_enabled:
File(os.path.join(params.hive_config_dir, 'zkmigrator_jaas.conf'),
owner=params.hive_user,
group=params.user_group,
content=Template("zkmigrator_jaas.conf.j2")
)
File(format("/usr/lib/ambari-agent/{check_db_connection_jar_name}"),
content = DownloadSource(format("{jdk_location}/{check_db_connection_jar_name}")),
mode = 0644,
)
if params.hive_jdbc_target is not None and not os.path.exists(params.hive_jdbc_target):
jdbc_connector(params.hive_jdbc_target, params.hive_previous_jdbc_jar)
if name != "client":
setup_non_client()
if name == 'hiveserver2':
setup_hiveserver2()
if name == 'metastore':
setup_metastore()
def setup_hiveserver2():
import params
File(params.start_hiveserver2_path,
mode=0755,
content=Template(format('{start_hiveserver2_script}'))
)
File(os.path.join(params.hive_server_conf_dir, "hadoop-metrics2-hiveserver2.properties"),
owner=params.hive_user,
group=params.user_group,
content=Template("hadoop-metrics2-hiveserver2.properties.j2"),
mode=0600
)
XmlConfig("hiveserver2-site.xml",
conf_dir=params.hive_server_conf_dir,
configurations=params.config['configurations']['hiveserver2-site'],
configuration_attributes=params.config['configurationAttributes']['hiveserver2-site'],
owner=params.hive_user,
group=params.user_group,
mode=0600)
# ****** Begin Copy Tarballs ******
# *********************************
# if copy tarball to HDFS feature supported copy mapreduce.tar.gz and tez.tar.gz to HDFS
if params.stack_version_formatted_major and check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, params.stack_version_formatted_major):
copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
copy_to_hdfs("tez", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
# Always copy pig.tar.gz and hive.tar.gz using the appropriate mode.
# This can use a different source and dest location to account
copy_to_hdfs("pig",
params.user_group,
params.hdfs_user,
file_mode=params.tarballs_mode,
custom_source_file=params.pig_tar_source,
custom_dest_file=params.pig_tar_dest_file,
skip=params.sysprep_skip_copy_tarballs_hdfs)
copy_to_hdfs("hive",
params.user_group,
params.hdfs_user,
file_mode=params.tarballs_mode,
custom_source_file=params.hive_tar_source,
custom_dest_file=params.hive_tar_dest_file,
skip=params.sysprep_skip_copy_tarballs_hdfs)
wildcard_tarballs = ["sqoop", "hadoop_streaming"]
for tarball_name in wildcard_tarballs:
source_file_pattern = eval("params." + tarball_name + "_tar_source")
dest_dir = eval("params." + tarball_name + "_tar_dest_dir")
if source_file_pattern is None or dest_dir is None:
continue
source_files = glob.glob(source_file_pattern) if "*" in source_file_pattern else [source_file_pattern]
for source_file in source_files:
src_filename = os.path.basename(source_file)
dest_file = os.path.join(dest_dir, src_filename)
copy_to_hdfs(tarball_name,
params.user_group,
params.hdfs_user,
file_mode=params.tarballs_mode,
custom_source_file=source_file,
custom_dest_file=dest_file,
skip=params.sysprep_skip_copy_tarballs_hdfs)
# ******* End Copy Tarballs *******
# *********************************
# if warehouse directory is in DFS
if not params.whs_dir_protocol or params.whs_dir_protocol == urlparse(params.default_fs).scheme:
if not is_empty(params.tez_hook_proto_base_directory):
params.HdfsResource(params.tez_hook_proto_base_directory,
type = "directory",
action = "create_on_execute",
owner = params.hive_user,
mode = 01755
)
if not is_empty(params.hive_hook_proto_base_directory):
params.HdfsResource(params.hive_hook_proto_base_directory,
type = "directory",
action = "create_on_execute",
owner = params.hive_user,
mode = 01777
)
dag_meta = params.tez_hook_proto_base_directory + "dag_meta"
params.HdfsResource(dag_meta,
type = "directory",
action = "create_on_execute",
owner = params.hive_user,
mode = 01777
)
dag_data = params.tez_hook_proto_base_directory + "dag_data"
params.HdfsResource(dag_data,
type = "directory",
action = "create_on_execute",
owner = params.hive_user,
mode = 01777
)
app_data = params.tez_hook_proto_base_directory + "app_data"
params.HdfsResource(app_data,
type = "directory",
action = "create_on_execute",
owner = params.hive_user,
mode = 01777
)
if not is_empty(params.hive_exec_scratchdir) and not urlparse(params.hive_exec_scratchdir).path.startswith("/tmp"):
params.HdfsResource(params.hive_exec_scratchdir,
type="directory",
action="create_on_execute",
owner=params.hive_user,
group=params.hdfs_user,
mode=0777) # Hive expects this dir to be writeable by everyone as it is used as a temp dir
if params.hive_repl_cmrootdir is not None and params.hive_repl_cmrootdir.strip() != "":
params.HdfsResource(params.hive_repl_cmrootdir,
type = "directory",
action = "create_on_execute",
owner = params.hive_user,
group=params.user_group,
mode = 01777)
if params.hive_repl_rootdir is not None and params.hive_repl_rootdir.strip() != "":
params.HdfsResource(params.hive_repl_rootdir,
type = "directory",
action = "create_on_execute",
owner = params.hive_user,
group=params.user_group,
mode = 0700)
params.HdfsResource(None, action="execute")
generate_logfeeder_input_config('hive', Template("input.config-hive.json.j2", extra_imports=[default]))
def create_hive_hdfs_dirs():
import params
# Create Hive User Dir
params.HdfsResource(params.hive_hdfs_user_dir,
type="directory",
action="create_on_execute",
owner=params.hive_user,
mode=params.hive_hdfs_user_mode
)
# if warehouse directory is in DFS
if not params.whs_dir_protocol or params.whs_dir_protocol == urlparse(params.default_fs).scheme:
# Create Hive Metastore Warehouse Dir
external_dir = params.hive_metastore_warehouse_external_dir
managed_dir = params.hive_metastore_warehouse_dir
params.HdfsResource(external_dir,
type = "directory",
action = "create_on_execute",
owner = params.hive_user,
group = params.user_group,
mode = 01777
)
params.HdfsResource(managed_dir,
type = "directory",
action = "create_on_execute",
owner = params.hive_user,
group = params.user_group,
mode = 0700
)
if __is_hdfs_acls_enabled():
if params.security_enabled:
kinit_cmd = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}; ")
Execute(kinit_cmd, user=params.hdfs_user)
Execute(format("hdfs dfs -setfacl -m default:user:{hive_user}:rwx {external_dir}"),
user = params.hdfs_user)
Execute(format("hdfs dfs -setfacl -m default:user:{hive_user}:rwx {managed_dir}"),
user = params.hdfs_user)
else:
Logger.info(format("Could not set default ACLs for HDFS directories {external_dir} and {managed_dir} as ACLs are not enabled!"))
else:
Logger.info(format("Not creating warehouse directory '{hive_metastore_warehouse_dir}', as the location is not in DFS."))
params.HdfsResource(None, action = "execute")
def __is_hdfs_acls_enabled():
import params
hdfs_protocol = params.fs_root.startswith("hdfs://")
return_code, stdout, _ = get_user_call_output("hdfs getconf -confKey dfs.namenode.acls.enabled",
user = params.hdfs_user)
acls_enabled = stdout == "true"
return_code, stdout, _ = get_user_call_output("hdfs getconf -confKey dfs.namenode.posix.acl.inheritance.enabled",
user = params.hdfs_user)
acls_inheritance_enabled = stdout == "true"
return hdfs_protocol and acls_enabled and acls_inheritance_enabled
def setup_non_client():
import params
Directory(params.hive_pid_dir,
create_parents = True,
cd_access='a',
owner=params.hive_user,
group=params.user_group,
mode=0755)
Directory(params.hive_log_dir,
create_parents = True,
cd_access='a',
owner=params.hive_user,
group=params.user_group,
mode=0755)
Directory(params.hive_var_lib,
create_parents = True,
cd_access='a',
owner=params.hive_user,
group=params.user_group,
mode=0755)
def setup_metastore():
import params
if params.hive_metastore_site_supported:
hivemetastore_site_config = get_config("hivemetastore-site")
if hivemetastore_site_config:
XmlConfig("hivemetastore-site.xml",
conf_dir=params.hive_server_conf_dir,
configurations=params.config['configurations']['hivemetastore-site'],
configuration_attributes=params.config['configurationAttributes']['hivemetastore-site'],
owner=params.hive_user,
group=params.user_group,
mode=0600)
File(os.path.join(params.hive_server_conf_dir, "hadoop-metrics2-hivemetastore.properties"),
owner=params.hive_user,
group=params.user_group,
content=Template("hadoop-metrics2-hivemetastore.properties.j2"),
mode=0600
)
File(params.start_metastore_path,
mode=0755,
content=StaticFile('startMetastore.sh')
)
if params.hive_repl_cmrootdir is not None and params.hive_repl_cmrootdir.strip() != "":
params.HdfsResource(params.hive_repl_cmrootdir,
type = "directory",
action = "create_on_execute",
owner = params.hive_user,
group=params.user_group,
mode = 01777)
if params.hive_repl_rootdir is not None and params.hive_repl_rootdir.strip() != "":
params.HdfsResource(params.hive_repl_rootdir,
type = "directory",
action = "create_on_execute",
owner = params.hive_user,
group=params.user_group,
mode = 0700)
params.HdfsResource(None, action="execute")
generate_logfeeder_input_config('hive', Template("input.config-hive.json.j2", extra_imports=[default]))
def refresh_yarn():
import params
if params.enable_ranger_hive or not params.doAs:
return
YARN_REFRESHED_FILE = "/etc/hive/yarn.refreshed"
if os.path.isfile(YARN_REFRESHED_FILE):
Logger.info("Yarn already refreshed")
return
if params.security_enabled:
Execute(params.yarn_kinit_cmd, user = params.yarn_user)
Execute("yarn rmadmin -refreshSuperUserGroupsConfiguration", user = params.yarn_user)
Execute("touch " + YARN_REFRESHED_FILE, user = "root")
def create_hive_metastore_schema():
import params
SYS_DB_CREATED_FILE = "/etc/hive/sys.db.created"
if os.path.isfile(SYS_DB_CREATED_FILE):
Logger.info("Sys DB is already created")
return
create_hive_schema_cmd = format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
"{hive_schematool_bin}/schematool -initSchema "
"-dbType hive "
"-metaDbType {hive_metastore_db_type} "
"-userName {hive_metastore_user_name} "
"-passWord {hive_metastore_user_passwd!p} "
"-verbose")
check_hive_schema_created_cmd = as_user(format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
"{hive_schematool_bin}/schematool -info "
"-dbType hive "
"-metaDbType {hive_metastore_db_type} "
"-userName {hive_metastore_user_name} "
"-passWord {hive_metastore_user_passwd!p} "
"-verbose"), params.hive_user)
# HACK: in cases with quoted passwords and as_user (which does the quoting as well) !p won't work for hiding passwords.
# Fixing it with the hack below:
quoted_hive_metastore_user_passwd = quote_bash_args(quote_bash_args(params.hive_metastore_user_passwd))
if quoted_hive_metastore_user_passwd.startswith("'") and quoted_hive_metastore_user_passwd.endswith("'") \
or quoted_hive_metastore_user_passwd.startswith('"') and quoted_hive_metastore_user_passwd.endswith('"'):
quoted_hive_metastore_user_passwd = quoted_hive_metastore_user_passwd[1:-1]
Logger.sensitive_strings[repr(create_hive_schema_cmd)] = repr(create_hive_schema_cmd.replace(
format("-passWord {quoted_hive_metastore_user_passwd}"), "-passWord " + utils.PASSWORDS_HIDE_STRING))
Logger.sensitive_strings[repr(check_hive_schema_created_cmd)] = repr(check_hive_schema_created_cmd.replace(
format("-passWord {quoted_hive_metastore_user_passwd}"), "-passWord " + utils.PASSWORDS_HIDE_STRING))
try:
if params.security_enabled:
hive_kinit_cmd = format("{kinit_path_local} -kt {hive_server2_keytab} {hive_principal}; ")
Execute(hive_kinit_cmd, user=params.hive_user)
Execute(create_hive_schema_cmd,
not_if = check_hive_schema_created_cmd,
user = params.hive_user
)
Execute("touch " + SYS_DB_CREATED_FILE, user = "root")
Logger.info("Sys DB is set up")
except:
Logger.error("Could not create Sys DB.")
Logger.error(traceback.format_exc())
def create_metastore_schema():
import params
create_schema_cmd = format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
"{hive_schematool_bin}/schematool -initSchema "
"-dbType {hive_metastore_db_type} "
"-userName {hive_metastore_user_name} "
"-passWord {hive_metastore_user_passwd!p} -verbose")
check_schema_created_cmd = as_user(format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
"{hive_schematool_bin}/schematool -info "
"-dbType {hive_metastore_db_type} "
"-userName {hive_metastore_user_name} "
"-passWord {hive_metastore_user_passwd!p} -verbose"), params.hive_user)
# HACK: in cases with quoted passwords and as_user (which does the quoting as well) !p won't work for hiding passwords.
# Fixing it with the hack below:
quoted_hive_metastore_user_passwd = quote_bash_args(quote_bash_args(params.hive_metastore_user_passwd))
if quoted_hive_metastore_user_passwd[0] == "'" and quoted_hive_metastore_user_passwd[-1] == "'" \
or quoted_hive_metastore_user_passwd[0] == '"' and quoted_hive_metastore_user_passwd[-1] == '"':
quoted_hive_metastore_user_passwd = quoted_hive_metastore_user_passwd[1:-1]
Logger.sensitive_strings[repr(check_schema_created_cmd)] = repr(check_schema_created_cmd.replace(
format("-passWord {quoted_hive_metastore_user_passwd}"), "-passWord " + utils.PASSWORDS_HIDE_STRING))
Execute(create_schema_cmd,
not_if = check_schema_created_cmd,
user = params.hive_user
)
"""
Writes configuration files required by Hive.
"""
def fill_conf_dir(component_conf_dir):
import params
hive_client_conf_path = os.path.realpath(format("{stack_root}/current/{component_directory}/conf"))
component_conf_dir = os.path.realpath(component_conf_dir)
mode_identified_for_file = 0644 if component_conf_dir == hive_client_conf_path else 0600
mode_identified_for_dir = 0755 if component_conf_dir == hive_client_conf_path else 0700
Directory(component_conf_dir,
owner=params.hive_user,
group=params.user_group,
create_parents = True,
mode=mode_identified_for_dir
)
XmlConfig("mapred-site.xml",
conf_dir=component_conf_dir,
configurations=params.config['configurations']['mapred-site'],
configuration_attributes=params.config['configurationAttributes']['mapred-site'],
owner=params.hive_user,
group=params.user_group,
mode=mode_identified_for_file)
File(format("{component_conf_dir}/hive-default.xml.template"),
owner=params.hive_user,
group=params.user_group,
mode=mode_identified_for_file
)
File(format("{component_conf_dir}/hive-env.sh.template"),
owner=params.hive_user,
group=params.user_group,
mode=0755
)
# Create properties files under conf dir
# llap-daemon-log4j2.properties
# llap-cli-log4j2.properties
# hive-log4j2.properties
# hive-exec-log4j2.properties
# beeline-log4j2.properties
llap_daemon_log4j_filename = 'llap-daemon-log4j2.properties'
File(format("{component_conf_dir}/{llap_daemon_log4j_filename}"),
mode=mode_identified_for_file,
group=params.user_group,
owner=params.hive_user,
content=InlineTemplate(params.llap_daemon_log4j))
llap_cli_log4j2_filename = 'llap-cli-log4j2.properties'
File(format("{component_conf_dir}/{llap_cli_log4j2_filename}"),
mode=mode_identified_for_file,
group=params.user_group,
owner=params.hive_user,
content=InlineTemplate(params.llap_cli_log4j2))
hive_log4j2_filename = 'hive-log4j2.properties'
File(format("{component_conf_dir}/{hive_log4j2_filename}"),
mode=mode_identified_for_file,
group=params.user_group,
owner=params.hive_user,
content=InlineTemplate(params.hive_log4j2))
hive_exec_log4j2_filename = 'hive-exec-log4j2.properties'
File(format("{component_conf_dir}/{hive_exec_log4j2_filename}"),
mode=mode_identified_for_file,
group=params.user_group,
owner=params.hive_user,
content=InlineTemplate(params.hive_exec_log4j2))
beeline_log4j2_filename = 'beeline-log4j2.properties'
File(format("{component_conf_dir}/{beeline_log4j2_filename}"),
mode=mode_identified_for_file,
group=params.user_group,
owner=params.hive_user,
content=InlineTemplate(params.beeline_log4j2))
XmlConfig("beeline-site.xml",
conf_dir=component_conf_dir,
configurations=params.beeline_site_config,
owner=params.hive_user,
group=params.user_group,
mode=mode_identified_for_file)
if params.parquet_logging_properties is not None:
File(format("{component_conf_dir}/parquet-logging.properties"),
mode = mode_identified_for_file,
group = params.user_group,
owner = params.hive_user,
content = params.parquet_logging_properties)
def jdbc_connector(target, hive_previous_jdbc_jar):
"""
Shared by Hive Batch, Hive Metastore, and Hive Interactive
:param target: Target of jdbc jar name, which could be for any of the components above.
"""
import params
if not params.jdbc_jar_name:
return
if params.hive_jdbc_driver in params.hive_jdbc_drivers_list and params.hive_use_existing_db:
environment = {
"no_proxy": format("{ambari_server_hostname}")
}
if hive_previous_jdbc_jar and os.path.isfile(hive_previous_jdbc_jar):
File(hive_previous_jdbc_jar, action='delete')
# TODO: should be removed after ranger_hive_plugin will not provide jdbc
if params.prepackaged_jdbc_name != params.jdbc_jar_name:
Execute(('rm', '-f', params.prepackaged_ojdbc_symlink),
path=["/bin", "/usr/bin/"],
sudo = True)
File(params.downloaded_custom_connector,
content = DownloadSource(params.driver_curl_source))
# maybe it will be more correcvly to use db type
if params.sqla_db_used:
untar_sqla_type2_driver = ('tar', '-xvf', params.downloaded_custom_connector, '-C', params.tmp_dir)
Execute(untar_sqla_type2_driver, sudo = True)
Execute(format("yes | {sudo} cp {jars_path_in_archive} {hive_lib}"))
Directory(params.jdbc_libs_dir,
create_parents = True)
Execute(format("yes | {sudo} cp {libs_path_in_archive} {jdbc_libs_dir}"))
Execute(format("{sudo} chown -R {hive_user}:{user_group} {hive_lib}/*"))
else:
Execute(('cp', '--remove-destination', params.downloaded_custom_connector, target),
#creates=target, TODO: uncomment after ranger_hive_plugin will not provide jdbc
path=["/bin", "/usr/bin/"],
sudo = True)
else:
#for default hive db (Mysql)
File(params.downloaded_custom_connector, content = DownloadSource(params.driver_curl_source))
Execute(('cp', '--remove-destination', params.downloaded_custom_connector, target),
#creates=target, TODO: uncomment after ranger_hive_plugin will not provide jdbc
path=["/bin", "/usr/bin/"],
sudo=True
)
pass
File(target,
mode = 0644,
)
| 42.351014 | 139 | 0.659336 |
a760eaad6c434c094becc50e93c6d801ba0e8e42 | 72,685 | py | Python | IPython/frontend/qt/console/console_widget.py | btel/ipython | 6ecc466f4571913479172d974a6dd8bea5658941 | [
"BSD-3-Clause-Clear"
] | null | null | null | IPython/frontend/qt/console/console_widget.py | btel/ipython | 6ecc466f4571913479172d974a6dd8bea5658941 | [
"BSD-3-Clause-Clear"
] | null | null | null | IPython/frontend/qt/console/console_widget.py | btel/ipython | 6ecc466f4571913479172d974a6dd8bea5658941 | [
"BSD-3-Clause-Clear"
] | null | null | null | """ An abstract base class for console-type widgets.
"""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import os
from os.path import commonprefix
import re
import sys
from textwrap import dedent
from unicodedata import category
# System library imports
from IPython.external.qt import QtCore, QtGui
# Local imports
from IPython.config.configurable import Configurable
from IPython.frontend.qt.rich_text import HtmlExporter
from IPython.frontend.qt.util import MetaQObjectHasTraits, get_font
from IPython.utils.text import columnize
from IPython.utils.traitlets import Bool, Enum, Int, Unicode
from ansi_code_processor import QtAnsiCodeProcessor
from completion_widget import CompletionWidget
from kill_ring import QtKillRing
#-----------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------
def is_letter_or_number(char):
""" Returns whether the specified unicode character is a letter or a number.
"""
cat = category(char)
return cat.startswith('L') or cat.startswith('N')
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class ConsoleWidget(Configurable, QtGui.QWidget):
""" An abstract base class for console-type widgets. This class has
functionality for:
* Maintaining a prompt and editing region
* Providing the traditional Unix-style console keyboard shortcuts
* Performing tab completion
* Paging text
* Handling ANSI escape codes
ConsoleWidget also provides a number of utility methods that will be
convenient to implementors of a console-style widget.
"""
__metaclass__ = MetaQObjectHasTraits
#------ Configuration ------------------------------------------------------
ansi_codes = Bool(True, config=True,
help="Whether to process ANSI escape codes."
)
buffer_size = Int(500, config=True,
help="""
The maximum number of lines of text before truncation. Specifying a
non-positive number disables text truncation (not recommended).
"""
)
gui_completion = Bool(False, config=True,
help="""
Use a list widget instead of plain text output for tab completion.
"""
)
# NOTE: this value can only be specified during initialization.
kind = Enum(['plain', 'rich'], default_value='plain', config=True,
help="""
The type of underlying text widget to use. Valid values are 'plain',
which specifies a QPlainTextEdit, and 'rich', which specifies a
QTextEdit.
"""
)
# NOTE: this value can only be specified during initialization.
paging = Enum(['inside', 'hsplit', 'vsplit', 'custom', 'none'],
default_value='inside', config=True,
help="""
The type of paging to use. Valid values are:
'inside' : The widget pages like a traditional terminal.
'hsplit' : When paging is requested, the widget is split
horizontally. The top pane contains the console, and the
bottom pane contains the paged text.
'vsplit' : Similar to 'hsplit', except that a vertical splitter
used.
'custom' : No action is taken by the widget beyond emitting a
'custom_page_requested(str)' signal.
'none' : The text is written directly to the console.
""")
font_family = Unicode(config=True,
help="""The font family to use for the console.
On OSX this defaults to Monaco, on Windows the default is
Consolas with fallback of Courier, and on other platforms
the default is Monospace.
""")
def _font_family_default(self):
if sys.platform == 'win32':
# Consolas ships with Vista/Win7, fallback to Courier if needed
return 'Consolas'
elif sys.platform == 'darwin':
# OSX always has Monaco, no need for a fallback
return 'Monaco'
else:
# Monospace should always exist, no need for a fallback
return 'Monospace'
font_size = Int(config=True,
help="""The font size. If unconfigured, Qt will be entrusted
with the size of the font.
""")
# Whether to override ShortcutEvents for the keybindings defined by this
# widget (Ctrl+n, Ctrl+a, etc). Enable this if you want this widget to take
# priority (when it has focus) over, e.g., window-level menu shortcuts.
override_shortcuts = Bool(False)
#------ Signals ------------------------------------------------------------
# Signals that indicate ConsoleWidget state.
copy_available = QtCore.Signal(bool)
redo_available = QtCore.Signal(bool)
undo_available = QtCore.Signal(bool)
# Signal emitted when paging is needed and the paging style has been
# specified as 'custom'.
custom_page_requested = QtCore.Signal(object)
# Signal emitted when the font is changed.
font_changed = QtCore.Signal(QtGui.QFont)
#------ Protected class variables ------------------------------------------
# When the control key is down, these keys are mapped.
_ctrl_down_remap = { QtCore.Qt.Key_B : QtCore.Qt.Key_Left,
QtCore.Qt.Key_F : QtCore.Qt.Key_Right,
QtCore.Qt.Key_A : QtCore.Qt.Key_Home,
QtCore.Qt.Key_P : QtCore.Qt.Key_Up,
QtCore.Qt.Key_N : QtCore.Qt.Key_Down,
QtCore.Qt.Key_H : QtCore.Qt.Key_Backspace,
QtCore.Qt.Key_D : QtCore.Qt.Key_Delete, }
if not sys.platform == 'darwin':
# On OS X, Ctrl-E already does the right thing, whereas End moves the
# cursor to the bottom of the buffer.
_ctrl_down_remap[QtCore.Qt.Key_E] = QtCore.Qt.Key_End
# The shortcuts defined by this widget. We need to keep track of these to
# support 'override_shortcuts' above.
_shortcuts = set(_ctrl_down_remap.keys() +
[ QtCore.Qt.Key_C, QtCore.Qt.Key_G, QtCore.Qt.Key_O,
QtCore.Qt.Key_V ])
#---------------------------------------------------------------------------
# 'QObject' interface
#---------------------------------------------------------------------------
    def __init__(self, parent=None, **kw):
        """ Create a ConsoleWidget.

        Parameters:
        -----------
        parent : QWidget, optional [default None]
            The parent for this widget.
        **kw
            Remaining keyword arguments are forwarded to Configurable and
            may set configurable traits (e.g. 'font_size', 'paging').
        """
        QtGui.QWidget.__init__(self, parent)
        Configurable.__init__(self, **kw)
        # Create the layout and underlying text widget.
        layout = QtGui.QStackedLayout(self)
        layout.setContentsMargins(0, 0, 0, 0)
        self._control = self._create_control()
        self._page_control = None
        self._splitter = None
        # 'hsplit'/'vsplit' paging shows the pager next to the console in a
        # splitter; other styles live directly in the stacked layout.
        if self.paging in ('hsplit', 'vsplit'):
            self._splitter = QtGui.QSplitter()
            if self.paging == 'hsplit':
                self._splitter.setOrientation(QtCore.Qt.Horizontal)
            else:
                self._splitter.setOrientation(QtCore.Qt.Vertical)
            self._splitter.addWidget(self._control)
            layout.addWidget(self._splitter)
        else:
            layout.addWidget(self._control)
        # Create the paging widget, if necessary.
        if self.paging in ('inside', 'hsplit', 'vsplit'):
            self._page_control = self._create_page_control()
            if self._splitter:
                self._page_control.hide()
                self._splitter.addWidget(self._page_control)
            else:
                layout.addWidget(self._page_control)
        # Initialize protected variables. Some variables contain useful state
        # information for subclasses; they should be considered read-only.
        self._append_before_prompt_pos = 0
        self._ansi_processor = QtAnsiCodeProcessor()
        self._completion_widget = CompletionWidget(self._control)
        self._continuation_prompt = '> '
        self._continuation_prompt_html = None
        self._executing = False
        self._filter_drag = False
        self._filter_resize = False
        self._html_exporter = HtmlExporter(self._control)
        self._input_buffer_executing = ''
        self._input_buffer_pending = ''
        self._kill_ring = QtKillRing(self._control)
        self._prompt = ''
        self._prompt_html = None
        self._prompt_pos = 0
        self._prompt_sep = ''
        self._reading = False
        self._reading_callback = None
        self._tab_width = 8
        self._text_completing_pos = 0
        # Set a monospaced font.
        self.reset_font()
        # Configure actions.
        action = QtGui.QAction('Print', None)
        action.setEnabled(True)
        printkey = QtGui.QKeySequence(QtGui.QKeySequence.Print)
        if printkey.matches("Ctrl+P") and sys.platform != 'darwin':
            # Only override the default if there is a collision.
            # Qt ctrl = cmd on OSX, so the match gets a false positive on OSX.
            printkey = "Ctrl+Shift+P"
        action.setShortcut(printkey)
        action.triggered.connect(self.print_)
        self.addAction(action)
        self._print_action = action
        action = QtGui.QAction('Save as HTML/XML', None)
        action.setShortcut(QtGui.QKeySequence.Save)
        action.triggered.connect(self.export_html)
        self.addAction(action)
        self._export_action = action
        action = QtGui.QAction('Select All', None)
        action.setEnabled(True)
        action.setShortcut(QtGui.QKeySequence.SelectAll)
        action.triggered.connect(self.select_all)
        self.addAction(action)
        self._select_all_action = action
    def eventFilter(self, obj, event):
        """ Reimplemented to ensure a console-like behavior in the underlying
        text widgets.

        Installed on both the control/page-control widgets and their
        viewports; returns True when the event has been consumed.
        """
        etype = event.type()
        if etype == QtCore.QEvent.KeyPress:
            # Re-map keys for all filtered widgets.
            key = event.key()
            if self._control_key_down(event.modifiers()) and \
                    key in self._ctrl_down_remap:
                # Resend the remapped key (e.g. Ctrl-B -> Left) without the
                # Control modifier so the widget sees a plain navigation key.
                new_event = QtGui.QKeyEvent(QtCore.QEvent.KeyPress,
                                            self._ctrl_down_remap[key],
                                            QtCore.Qt.NoModifier)
                QtGui.qApp.sendEvent(obj, new_event)
                return True
            elif obj == self._control:
                return self._event_filter_console_keypress(event)
            elif obj == self._page_control:
                return self._event_filter_page_keypress(event)
        # Make middle-click paste safe.
        elif etype == QtCore.QEvent.MouseButtonRelease and \
                event.button() == QtCore.Qt.MidButton and \
                obj == self._control.viewport():
            cursor = self._control.cursorForPosition(event.pos())
            self._control.setTextCursor(cursor)
            self.paste(QtGui.QClipboard.Selection)
            return True
        # Manually adjust the scrollbars *after* a resize event is dispatched.
        elif etype == QtCore.QEvent.Resize and not self._filter_resize:
            # '_filter_resize' guards against re-entrancy, since sendEvent
            # routes the event back through this filter.
            self._filter_resize = True
            QtGui.qApp.sendEvent(obj, event)
            self._adjust_scrollbars()
            self._filter_resize = False
            return True
        # Override shortcuts for all filtered widgets.
        elif etype == QtCore.QEvent.ShortcutOverride and \
                self.override_shortcuts and \
                self._control_key_down(event.modifiers()) and \
                event.key() in self._shortcuts:
            event.accept()
        # Ensure that drags are safe. The problem is that the drag starting
        # logic, which determines whether the drag is a Copy or Move, is locked
        # down in QTextControl. If the widget is editable, which it must be if
        # we're not executing, the drag will be a Move. The following hack
        # prevents QTextControl from deleting the text by clearing the selection
        # when a drag leave event originating from this widget is dispatched.
        # The fact that we have to clear the user's selection is unfortunate,
        # but the alternative--trying to prevent Qt from using its hardwired
        # drag logic and writing our own--is worse.
        elif etype == QtCore.QEvent.DragEnter and \
                obj == self._control.viewport() and \
                event.source() == self._control.viewport():
            self._filter_drag = True
        elif etype == QtCore.QEvent.DragLeave and \
                obj == self._control.viewport() and \
                self._filter_drag:
            cursor = self._control.textCursor()
            cursor.clearSelection()
            self._control.setTextCursor(cursor)
            self._filter_drag = False
        # Ensure that drops are safe.
        elif etype == QtCore.QEvent.Drop and obj == self._control.viewport():
            cursor = self._control.cursorForPosition(event.pos())
            # Only accept drops that land inside the editable input region.
            if self._in_buffer(cursor.position()):
                text = event.mimeData().text()
                self._insert_plain_text_into_buffer(cursor, text)
            # Qt is expecting to get something here--drag and drop occurs in its
            # own event loop. Send a DragLeave event to end it.
            QtGui.qApp.sendEvent(obj, QtGui.QDragLeaveEvent())
            return True
        return super(ConsoleWidget, self).eventFilter(obj, event)
#---------------------------------------------------------------------------
# 'QWidget' interface
#---------------------------------------------------------------------------
    def sizeHint(self):
        """ Reimplemented to suggest a size that is 80 characters wide and
        25 lines high.

        Returns a QSize; split-paging modes double the corresponding
        dimension so each pane gets the full 80x25.
        """
        font_metrics = QtGui.QFontMetrics(self.font)
        # Frame and document margins apply on both sides, hence the * 2.
        margin = (self._control.frameWidth() +
                  self._control.document().documentMargin()) * 2
        style = self.style()
        splitwidth = style.pixelMetric(QtGui.QStyle.PM_SplitterWidth)
        # Note 1: Despite my best efforts to take the various margins into
        # account, the width is still coming out a bit too small, so we include
        # a fudge factor of one character here.
        # Note 2: QFontMetrics.maxWidth is not used here or anywhere else due
        # to a Qt bug on certain Mac OS systems where it returns 0.
        width = font_metrics.width(' ') * 81 + margin
        width += style.pixelMetric(QtGui.QStyle.PM_ScrollBarExtent)
        if self.paging == 'hsplit':
            width = width * 2 + splitwidth
        height = font_metrics.height() * 25 + margin
        if self.paging == 'vsplit':
            height = height * 2 + splitwidth
        return QtCore.QSize(width, height)
#---------------------------------------------------------------------------
# 'ConsoleWidget' public interface
#---------------------------------------------------------------------------
def can_copy(self):
""" Returns whether text can be copied to the clipboard.
"""
return self._control.textCursor().hasSelection()
def can_cut(self):
""" Returns whether text can be cut to the clipboard.
"""
cursor = self._control.textCursor()
return (cursor.hasSelection() and
self._in_buffer(cursor.anchor()) and
self._in_buffer(cursor.position()))
def can_paste(self):
""" Returns whether text can be pasted from the clipboard.
"""
if self._control.textInteractionFlags() & QtCore.Qt.TextEditable:
return bool(QtGui.QApplication.clipboard().text())
return False
def clear(self, keep_input=True):
""" Clear the console.
Parameters:
-----------
keep_input : bool, optional (default True)
If set, restores the old input buffer if a new prompt is written.
"""
if self._executing:
self._control.clear()
else:
if keep_input:
input_buffer = self.input_buffer
self._control.clear()
self._show_prompt()
if keep_input:
self.input_buffer = input_buffer
def copy(self):
""" Copy the currently selected text to the clipboard.
"""
self._control.copy()
def cut(self):
""" Copy the currently selected text to the clipboard and delete it
if it's inside the input buffer.
"""
self.copy()
if self.can_cut():
self._control.textCursor().removeSelectedText()
    def execute(self, source=None, hidden=False, interactive=False):
        """ Executes source or the input buffer, possibly prompting for more
        input.

        Parameters:
        -----------
        source : str, optional
            The source to execute. If not specified, the input buffer will be
            used. If specified and 'hidden' is False, the input buffer will be
            replaced with the source before execution.
        hidden : bool, optional (default False)
            If set, no output will be shown and the prompt will not be modified.
            In other words, it will be completely invisible to the user that
            an execution has occurred.
        interactive : bool, optional (default False)
            Whether the console is to treat the source as having been manually
            entered by the user. The effect of this parameter depends on the
            subclass implementation.

        Raises:
        -------
        RuntimeError
            If incomplete input is given and 'hidden' is True. In this case,
            it is not possible to prompt for more input.

        Returns:
        --------
        A boolean indicating whether the source was executed.
        """
        # WARNING: The order in which things happen here is very particular, in
        # large part because our syntax highlighting is fragile. If you change
        # something, test carefully!
        # Decide what to execute.
        if source is None:
            source = self.input_buffer
            if not hidden:
                # A newline is appended later, but it should be considered part
                # of the input buffer.
                source += '\n'
        elif not hidden:
            self.input_buffer = source
        # Execute the source or show a continuation prompt if it is incomplete.
        # Completeness is decided by the subclass via '_is_complete'.
        complete = self._is_complete(source, interactive)
        if hidden:
            if complete:
                self._execute(source, hidden)
            else:
                error = 'Incomplete noninteractive input: "%s"'
                raise RuntimeError(error % source)
        else:
            if complete:
                self._append_plain_text('\n')
                self._input_buffer_executing = self.input_buffer
                self._executing = True
                self._prompt_finished()
                # The maximum block count is only in effect during execution.
                # This ensures that _prompt_pos does not become invalid due to
                # text truncation.
                self._control.document().setMaximumBlockCount(self.buffer_size)
                # Setting a positive maximum block count will automatically
                # disable the undo/redo history, but just to be safe:
                self._control.setUndoRedoEnabled(False)
                # Perform actual execution.
                self._execute(source, hidden)
            else:
                # Do this inside an edit block so continuation prompts are
                # removed seamlessly via undo/redo.
                cursor = self._get_end_cursor()
                cursor.beginEditBlock()
                cursor.insertText('\n')
                self._insert_continuation_prompt(cursor)
                cursor.endEditBlock()
                # Do not do this inside the edit block. It works as expected
                # when using a QPlainTextEdit control, but does not have an
                # effect when using a QTextEdit. I believe this is a Qt bug.
                self._control.moveCursor(QtGui.QTextCursor.End)
        return complete
def export_html(self):
""" Shows a dialog to export HTML/XML in various formats.
"""
self._html_exporter.export()
def _get_input_buffer(self, force=False):
""" The text that the user has entered entered at the current prompt.
If the console is currently executing, the text that is executing will
always be returned.
"""
# If we're executing, the input buffer may not even exist anymore due to
# the limit imposed by 'buffer_size'. Therefore, we store it.
if self._executing and not force:
return self._input_buffer_executing
cursor = self._get_end_cursor()
cursor.setPosition(self._prompt_pos, QtGui.QTextCursor.KeepAnchor)
input_buffer = cursor.selection().toPlainText()
# Strip out continuation prompts.
return input_buffer.replace('\n' + self._continuation_prompt, '\n')
    def _set_input_buffer(self, string):
        """ Sets the text in the input buffer.

        Parameters:
        -----------
        string : str
            The new contents of the input buffer.

        If the console is currently executing, this call has no *immediate*
        effect. When the execution is finished, the input buffer will be updated
        appropriately.
        """
        # If we're executing, store the text for later.
        if self._executing:
            self._input_buffer_pending = string
            return
        # Remove old text.
        cursor = self._get_end_cursor()
        cursor.beginEditBlock()
        cursor.setPosition(self._prompt_pos, QtGui.QTextCursor.KeepAnchor)
        cursor.removeSelectedText()
        # Insert new text with continuation prompts.
        self._insert_plain_text_into_buffer(self._get_prompt_cursor(), string)
        cursor.endEditBlock()
        self._control.moveCursor(QtGui.QTextCursor.End)
    # 'input_buffer' is the public accessor for the editable input region.
    input_buffer = property(_get_input_buffer, _set_input_buffer)
def _get_font(self):
""" The base font being used by the ConsoleWidget.
"""
return self._control.document().defaultFont()
def _set_font(self, font):
""" Sets the base font for the ConsoleWidget to the specified QFont.
"""
font_metrics = QtGui.QFontMetrics(font)
self._control.setTabStopWidth(self.tab_width * font_metrics.width(' '))
self._completion_widget.setFont(font)
self._control.document().setDefaultFont(font)
if self._page_control:
self._page_control.document().setDefaultFont(font)
self.font_changed.emit(font)
font = property(_get_font, _set_font)
def paste(self, mode=QtGui.QClipboard.Clipboard):
""" Paste the contents of the clipboard into the input region.
Parameters:
-----------
mode : QClipboard::Mode, optional [default QClipboard::Clipboard]
Controls which part of the system clipboard is used. This can be
used to access the selection clipboard in X11 and the Find buffer
in Mac OS. By default, the regular clipboard is used.
"""
if self._control.textInteractionFlags() & QtCore.Qt.TextEditable:
# Make sure the paste is safe.
self._keep_cursor_in_buffer()
cursor = self._control.textCursor()
# Remove any trailing newline, which confuses the GUI and forces the
# user to backspace.
text = QtGui.QApplication.clipboard().text(mode).rstrip()
self._insert_plain_text_into_buffer(cursor, dedent(text))
def print_(self, printer = None):
""" Print the contents of the ConsoleWidget to the specified QPrinter.
"""
if (not printer):
printer = QtGui.QPrinter()
if(QtGui.QPrintDialog(printer).exec_() != QtGui.QDialog.Accepted):
return
self._control.print_(printer)
def prompt_to_top(self):
""" Moves the prompt to the top of the viewport.
"""
if not self._executing:
prompt_cursor = self._get_prompt_cursor()
if self._get_cursor().blockNumber() < prompt_cursor.blockNumber():
self._set_cursor(prompt_cursor)
self._set_top_cursor(prompt_cursor)
def redo(self):
""" Redo the last operation. If there is no operation to redo, nothing
happens.
"""
self._control.redo()
def reset_font(self):
""" Sets the font to the default fixed-width font for this platform.
"""
if sys.platform == 'win32':
# Consolas ships with Vista/Win7, fallback to Courier if needed
fallback = 'Courier'
elif sys.platform == 'darwin':
# OSX always has Monaco
fallback = 'Monaco'
else:
# Monospace should always exist
fallback = 'Monospace'
font = get_font(self.font_family, fallback)
if self.font_size:
font.setPointSize(self.font_size)
else:
font.setPointSize(QtGui.qApp.font().pointSize())
font.setStyleHint(QtGui.QFont.TypeWriter)
self._set_font(font)
def change_font_size(self, delta):
"""Change the font size by the specified amount (in points).
"""
font = self.font
size = max(font.pointSize() + delta, 1) # minimum 1 point
font.setPointSize(size)
self._set_font(font)
def select_all(self):
""" Selects all the text in the buffer.
"""
self._control.selectAll()
def _get_tab_width(self):
""" The width (in terms of space characters) for tab characters.
"""
return self._tab_width
def _set_tab_width(self, tab_width):
""" Sets the width (in terms of space characters) for tab characters.
"""
font_metrics = QtGui.QFontMetrics(self.font)
self._control.setTabStopWidth(tab_width * font_metrics.width(' '))
self._tab_width = tab_width
tab_width = property(_get_tab_width, _set_tab_width)
def undo(self):
""" Undo the last operation. If there is no operation to undo, nothing
happens.
"""
self._control.undo()
#---------------------------------------------------------------------------
# 'ConsoleWidget' abstract interface
#---------------------------------------------------------------------------
    def _is_complete(self, source, interactive):
        """ Returns whether 'source' can be executed. When triggered by an
        Enter/Return key press, 'interactive' is True; otherwise, it is
        False.

        Abstract: subclasses must implement this.
        """
        raise NotImplementedError
    def _execute(self, source, hidden):
        """ Execute 'source'. If 'hidden', do not show any output.

        Abstract: subclasses must implement this.
        """
        raise NotImplementedError
    def _prompt_started_hook(self):
        """ Called immediately after a new prompt is displayed.

        Optional hook for subclasses; the default does nothing.
        """
        pass
    def _prompt_finished_hook(self):
        """ Called immediately after a prompt is finished, i.e. when some input
        will be processed and a new prompt displayed.

        Optional hook for subclasses; the default does nothing.
        """
        pass
    def _up_pressed(self, shift_modifier):
        """ Called when the up key is pressed. Returns whether to continue
        processing the event.

        Subclasses may override (e.g. for history navigation); the default
        always continues.
        """
        return True
    def _down_pressed(self, shift_modifier):
        """ Called when the down key is pressed. Returns whether to continue
        processing the event.

        Subclasses may override (e.g. for history navigation); the default
        always continues.
        """
        return True
    def _tab_pressed(self):
        """ Called when the tab key is pressed. Returns whether to continue
        processing the event.

        Subclasses may override (e.g. for completion); the default never
        continues, so the tab is swallowed.
        """
        return False
#--------------------------------------------------------------------------
# 'ConsoleWidget' protected interface
#--------------------------------------------------------------------------
    def _append_custom(self, insert, input, before_prompt=False):
        """ A low-level method for appending content to the end of the buffer.

        Parameters:
        -----------
        insert : callable(cursor, input)
            The insertion primitive to invoke (e.g. _insert_html,
            _insert_plain_text); its return value is passed through.
        input : object
            The content handed to 'insert'.
        before_prompt : bool, optional (default False)
            If set, the content will be inserted before the current prompt,
            if there is one.
        """
        # Determine where to insert the content.
        cursor = self._control.textCursor()
        if before_prompt and not self._executing:
            cursor.setPosition(self._append_before_prompt_pos)
        else:
            cursor.movePosition(QtGui.QTextCursor.End)
        start_pos = cursor.position()
        # Perform the insertion.
        result = insert(cursor, input)
        # Adjust the prompt position if we have inserted before it. This is safe
        # because buffer truncation is disabled when not executing.
        if before_prompt and not self._executing:
            diff = cursor.position() - start_pos
            self._append_before_prompt_pos += diff
            self._prompt_pos += diff
        return result
def _append_html(self, html, before_prompt=False):
""" Appends HTML at the end of the console buffer.
"""
self._append_custom(self._insert_html, html, before_prompt)
def _append_html_fetching_plain_text(self, html, before_prompt=False):
""" Appends HTML, then returns the plain text version of it.
"""
return self._append_custom(self._insert_html_fetching_plain_text,
html, before_prompt)
def _append_plain_text(self, text, before_prompt=False):
""" Appends plain text, processing ANSI codes if enabled.
"""
self._append_custom(self._insert_plain_text, text, before_prompt)
def _cancel_text_completion(self):
""" If text completion is progress, cancel it.
"""
if self._text_completing_pos:
self._clear_temporary_buffer()
self._text_completing_pos = 0
    def _clear_temporary_buffer(self):
        """ Clears the "temporary text" buffer, i.e. all the text following
        the prompt region.
        """
        # Select and remove all text below the input buffer.
        cursor = self._get_prompt_cursor()
        prompt = self._continuation_prompt.lstrip()
        # Walk block by block past the input buffer: input lines start with
        # the continuation prompt, so the first line that does not is the
        # start of the temporary text.
        while cursor.movePosition(QtGui.QTextCursor.NextBlock):
            temp_cursor = QtGui.QTextCursor(cursor)
            temp_cursor.select(QtGui.QTextCursor.BlockUnderCursor)
            text = temp_cursor.selection().toPlainText().lstrip()
            if not text.startswith(prompt):
                break
        else:
            # while/else: the loop ran off the end of the document, so
            # we've reached the end of the input buffer and no text follows.
            return
        cursor.movePosition(QtGui.QTextCursor.Left) # Grab the newline.
        cursor.movePosition(QtGui.QTextCursor.End,
                            QtGui.QTextCursor.KeepAnchor)
        cursor.removeSelectedText()
        # After doing this, we have no choice but to clear the undo/redo
        # history. Otherwise, the text is not "temporary" at all, because it
        # can be recalled with undo/redo. Unfortunately, Qt does not expose
        # fine-grained control to the undo/redo system.
        if self._control.isUndoRedoEnabled():
            self._control.setUndoRedoEnabled(False)
            self._control.setUndoRedoEnabled(True)
    def _complete_with_items(self, cursor, items):
        """ Performs completion with 'items' at the specified cursor location.

        A single item is inserted directly; multiple items insert their
        common prefix, then are shown either in the GUI completion popup or
        paged as text columns, depending on 'gui_completion'.
        """
        self._cancel_text_completion()
        if len(items) == 1:
            cursor.setPosition(self._control.textCursor().position(),
                               QtGui.QTextCursor.KeepAnchor)
            cursor.insertText(items[0])
        elif len(items) > 1:
            current_pos = self._control.textCursor().position()
            prefix = commonprefix(items)
            if prefix:
                cursor.setPosition(current_pos, QtGui.QTextCursor.KeepAnchor)
                cursor.insertText(prefix)
                current_pos = cursor.position()
            if self.gui_completion:
                # Anchor the popup at the start of the completed prefix.
                cursor.movePosition(QtGui.QTextCursor.Left, n=len(prefix))
                self._completion_widget.show_items(cursor, items)
            else:
                cursor.beginEditBlock()
                self._append_plain_text('\n')
                self._page(self._format_as_columns(items))
                cursor.endEditBlock()
                cursor.setPosition(current_pos)
                self._control.moveCursor(QtGui.QTextCursor.End)
                self._control.setTextCursor(cursor)
                # Remember where text-mode completion started so the paged
                # listing can be cleared later.
                self._text_completing_pos = current_pos
def _context_menu_make(self, pos):
""" Creates a context menu for the given QPoint (in widget coordinates).
"""
menu = QtGui.QMenu(self)
cut_action = menu.addAction('Cut', self.cut)
cut_action.setEnabled(self.can_cut())
cut_action.setShortcut(QtGui.QKeySequence.Cut)
copy_action = menu.addAction('Copy', self.copy)
copy_action.setEnabled(self.can_copy())
copy_action.setShortcut(QtGui.QKeySequence.Copy)
paste_action = menu.addAction('Paste', self.paste)
paste_action.setEnabled(self.can_paste())
paste_action.setShortcut(QtGui.QKeySequence.Paste)
menu.addSeparator()
menu.addAction(self._select_all_action)
menu.addSeparator()
menu.addAction(self._export_action)
menu.addAction(self._print_action)
return menu
    def _control_key_down(self, modifiers, include_command=False):
        """ Given a KeyboardModifiers flags object, return whether the Control
        key is down.

        Parameters:
        -----------
        include_command : bool, optional (default False)
            Whether to treat the Command key as a (mutually exclusive) synonym
            for Control when in Mac OS.
        """
        # Note that on Mac OS, ControlModifier corresponds to the Command key
        # while MetaModifier corresponds to the Control key.
        if sys.platform == 'darwin':
            down = include_command and (modifiers & QtCore.Qt.ControlModifier)
            # XOR: Command (when included) and Control are mutually exclusive.
            return bool(down) ^ bool(modifiers & QtCore.Qt.MetaModifier)
        else:
            return bool(modifiers & QtCore.Qt.ControlModifier)
    def _create_control(self):
        """ Creates and connects the underlying text widget.

        Returns a QPlainTextEdit or QTextEdit depending on 'self.kind',
        configured read-only with console-style event filtering installed.
        """
        # Create the underlying control.
        if self.kind == 'plain':
            control = QtGui.QPlainTextEdit()
        elif self.kind == 'rich':
            control = QtGui.QTextEdit()
            control.setAcceptRichText(False)
        # Install event filters. The filter on the viewport is needed for
        # mouse events and drag events.
        control.installEventFilter(self)
        control.viewport().installEventFilter(self)
        # Connect signals.
        control.cursorPositionChanged.connect(self._cursor_position_changed)
        control.customContextMenuRequested.connect(
            self._custom_context_menu_requested)
        control.copyAvailable.connect(self.copy_available)
        control.redoAvailable.connect(self.redo_available)
        control.undoAvailable.connect(self.undo_available)
        # Hijack the document size change signal to prevent Qt from adjusting
        # the viewport's scrollbar. We are relying on an implementation detail
        # of Q(Plain)TextEdit here, which is potentially dangerous, but without
        # this functionality we cannot create a nice terminal interface.
        layout = control.document().documentLayout()
        layout.documentSizeChanged.disconnect()
        layout.documentSizeChanged.connect(self._adjust_scrollbars)
        # Configure the control.
        control.setAttribute(QtCore.Qt.WA_InputMethodEnabled, True)
        control.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
        control.setReadOnly(True)
        control.setUndoRedoEnabled(False)
        control.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOn)
        return control
def _create_page_control(self):
""" Creates and connects the underlying paging widget.
"""
if self.kind == 'plain':
control = QtGui.QPlainTextEdit()
elif self.kind == 'rich':
control = QtGui.QTextEdit()
control.installEventFilter(self)
control.setReadOnly(True)
control.setUndoRedoEnabled(False)
control.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOn)
return control
    def _event_filter_console_keypress(self, event):
        """ Filter key events for the underlying text widget to create a
        console-like interface.

        Returns whether the event was intercepted (True means the widget
        should not process the key itself).
        """
        intercepted = False
        cursor = self._control.textCursor()
        position = cursor.position()
        key = event.key()
        ctrl_down = self._control_key_down(event.modifiers())
        alt_down = event.modifiers() & QtCore.Qt.AltModifier
        shift_down = event.modifiers() & QtCore.Qt.ShiftModifier
        #------ Special sequences ----------------------------------------------
        if event.matches(QtGui.QKeySequence.Copy):
            self.copy()
            intercepted = True
        elif event.matches(QtGui.QKeySequence.Cut):
            self.cut()
            intercepted = True
        elif event.matches(QtGui.QKeySequence.Paste):
            self.paste()
            intercepted = True
        #------ Special modifier logic -----------------------------------------
        elif key in (QtCore.Qt.Key_Return, QtCore.Qt.Key_Enter):
            intercepted = True
            # Special handling when tab completing in text mode.
            self._cancel_text_completion()
            if self._in_buffer(position):
                # Special handling when a reading a line of raw input.
                if self._reading:
                    self._append_plain_text('\n')
                    self._reading = False
                    if self._reading_callback:
                        self._reading_callback()
                # If the input buffer is a single line or there is only
                # whitespace after the cursor, execute. Otherwise, split the
                # line with a continuation prompt.
                elif not self._executing:
                    cursor.movePosition(QtGui.QTextCursor.End,
                                        QtGui.QTextCursor.KeepAnchor)
                    at_end = len(cursor.selectedText().strip()) == 0
                    single_line = (self._get_end_cursor().blockNumber() ==
                                   self._get_prompt_cursor().blockNumber())
                    if (at_end or shift_down or single_line) and not ctrl_down:
                        self.execute(interactive = not shift_down)
                    else:
                        # Do this inside an edit block for clean undo/redo.
                        cursor.beginEditBlock()
                        cursor.setPosition(position)
                        cursor.insertText('\n')
                        self._insert_continuation_prompt(cursor)
                        cursor.endEditBlock()
                        # Ensure that the whole input buffer is visible.
                        # FIXME: This will not be usable if the input buffer is
                        # taller than the console widget.
                        self._control.moveCursor(QtGui.QTextCursor.End)
                        self._control.setTextCursor(cursor)
        #------ Control/Cmd modifier -------------------------------------------
        elif ctrl_down:
            # Emacs-style editing shortcuts.
            if key == QtCore.Qt.Key_G:
                self._keyboard_quit()
                intercepted = True
            elif key == QtCore.Qt.Key_K:
                # Kill to end of line.
                if self._in_buffer(position):
                    cursor.movePosition(QtGui.QTextCursor.EndOfLine,
                                        QtGui.QTextCursor.KeepAnchor)
                    if not cursor.hasSelection():
                        # Line deletion (remove continuation prompt)
                        cursor.movePosition(QtGui.QTextCursor.NextBlock,
                                            QtGui.QTextCursor.KeepAnchor)
                        cursor.movePosition(QtGui.QTextCursor.Right,
                                            QtGui.QTextCursor.KeepAnchor,
                                            len(self._continuation_prompt))
                    self._kill_ring.kill_cursor(cursor)
                intercepted = True
            elif key == QtCore.Qt.Key_L:
                self.prompt_to_top()
                intercepted = True
            elif key == QtCore.Qt.Key_O:
                if self._page_control and self._page_control.isVisible():
                    self._page_control.setFocus()
                intercepted = True
            elif key == QtCore.Qt.Key_U:
                # Kill to start of line (skipping the prompt text).
                if self._in_buffer(position):
                    start_line = cursor.blockNumber()
                    if start_line == self._get_prompt_cursor().blockNumber():
                        offset = len(self._prompt)
                    else:
                        offset = len(self._continuation_prompt)
                    cursor.movePosition(QtGui.QTextCursor.StartOfBlock,
                                        QtGui.QTextCursor.KeepAnchor)
                    cursor.movePosition(QtGui.QTextCursor.Right,
                                        QtGui.QTextCursor.KeepAnchor, offset)
                    self._kill_ring.kill_cursor(cursor)
                intercepted = True
            elif key == QtCore.Qt.Key_Y:
                self._keep_cursor_in_buffer()
                self._kill_ring.yank()
                intercepted = True
            elif key in (QtCore.Qt.Key_Backspace, QtCore.Qt.Key_Delete):
                # Kill the word before/after the cursor.
                if key == QtCore.Qt.Key_Backspace:
                    cursor = self._get_word_start_cursor(position)
                else: # key == QtCore.Qt.Key_Delete
                    cursor = self._get_word_end_cursor(position)
                cursor.setPosition(position, QtGui.QTextCursor.KeepAnchor)
                self._kill_ring.kill_cursor(cursor)
                intercepted = True
            elif key in (QtCore.Qt.Key_Plus, QtCore.Qt.Key_Equal):
                self.change_font_size(1)
                intercepted = True
            elif key == QtCore.Qt.Key_Minus:
                self.change_font_size(-1)
                intercepted = True
            elif key == QtCore.Qt.Key_0:
                self.reset_font()
                intercepted = True
        #------ Alt modifier ---------------------------------------------------
        elif alt_down:
            if key == QtCore.Qt.Key_B:
                self._set_cursor(self._get_word_start_cursor(position))
                intercepted = True
            elif key == QtCore.Qt.Key_F:
                self._set_cursor(self._get_word_end_cursor(position))
                intercepted = True
            elif key == QtCore.Qt.Key_Y:
                self._kill_ring.rotate()
                intercepted = True
            elif key == QtCore.Qt.Key_Backspace:
                cursor = self._get_word_start_cursor(position)
                cursor.setPosition(position, QtGui.QTextCursor.KeepAnchor)
                self._kill_ring.kill_cursor(cursor)
                intercepted = True
            elif key == QtCore.Qt.Key_D:
                cursor = self._get_word_end_cursor(position)
                cursor.setPosition(position, QtGui.QTextCursor.KeepAnchor)
                self._kill_ring.kill_cursor(cursor)
                intercepted = True
            elif key == QtCore.Qt.Key_Delete:
                intercepted = True
            elif key == QtCore.Qt.Key_Greater:
                self._control.moveCursor(QtGui.QTextCursor.End)
                intercepted = True
            elif key == QtCore.Qt.Key_Less:
                self._control.setTextCursor(self._get_prompt_cursor())
                intercepted = True
        #------ No modifiers ---------------------------------------------------
        else:
            # Shift extends the selection for the navigation keys below.
            if shift_down:
                anchormode = QtGui.QTextCursor.KeepAnchor
            else:
                anchormode = QtGui.QTextCursor.MoveAnchor
            if key == QtCore.Qt.Key_Escape:
                self._keyboard_quit()
                intercepted = True
            elif key == QtCore.Qt.Key_Up:
                if self._reading or not self._up_pressed(shift_down):
                    intercepted = True
                else:
                    prompt_line = self._get_prompt_cursor().blockNumber()
                    intercepted = cursor.blockNumber() <= prompt_line
            elif key == QtCore.Qt.Key_Down:
                if self._reading or not self._down_pressed(shift_down):
                    intercepted = True
                else:
                    end_line = self._get_end_cursor().blockNumber()
                    intercepted = cursor.blockNumber() == end_line
            elif key == QtCore.Qt.Key_Tab:
                if not self._reading:
                    intercepted = not self._tab_pressed()
            elif key == QtCore.Qt.Key_Left:
                # Move to the previous line
                line, col = cursor.blockNumber(), cursor.columnNumber()
                if line > self._get_prompt_cursor().blockNumber() and \
                        col == len(self._continuation_prompt):
                    self._control.moveCursor(QtGui.QTextCursor.PreviousBlock,
                                             mode=anchormode)
                    self._control.moveCursor(QtGui.QTextCursor.EndOfBlock,
                                             mode=anchormode)
                    intercepted = True
                # Regular left movement
                else:
                    intercepted = not self._in_buffer(position - 1)
            elif key == QtCore.Qt.Key_Right:
                original_block_number = cursor.blockNumber()
                cursor.movePosition(QtGui.QTextCursor.Right,
                                    mode=anchormode)
                if cursor.blockNumber() != original_block_number:
                    # Skip over the continuation prompt at line start.
                    cursor.movePosition(QtGui.QTextCursor.Right,
                                        n=len(self._continuation_prompt),
                                        mode=anchormode)
                self._set_cursor(cursor)
                intercepted = True
            elif key == QtCore.Qt.Key_Home:
                # Home goes to the start of the input text, not column 0.
                start_line = cursor.blockNumber()
                if start_line == self._get_prompt_cursor().blockNumber():
                    start_pos = self._prompt_pos
                else:
                    cursor.movePosition(QtGui.QTextCursor.StartOfBlock,
                                        QtGui.QTextCursor.KeepAnchor)
                    start_pos = cursor.position()
                    start_pos += len(self._continuation_prompt)
                    cursor.setPosition(position)
                if shift_down and self._in_buffer(position):
                    cursor.setPosition(start_pos, QtGui.QTextCursor.KeepAnchor)
                else:
                    cursor.setPosition(start_pos)
                self._set_cursor(cursor)
                intercepted = True
            elif key == QtCore.Qt.Key_Backspace:
                # Line deletion (remove continuation prompt)
                line, col = cursor.blockNumber(), cursor.columnNumber()
                if not self._reading and \
                        col == len(self._continuation_prompt) and \
                        line > self._get_prompt_cursor().blockNumber():
                    cursor.beginEditBlock()
                    cursor.movePosition(QtGui.QTextCursor.StartOfBlock,
                                        QtGui.QTextCursor.KeepAnchor)
                    cursor.removeSelectedText()
                    cursor.deletePreviousChar()
                    cursor.endEditBlock()
                    intercepted = True
                # Regular backwards deletion
                else:
                    anchor = cursor.anchor()
                    if anchor == position:
                        intercepted = not self._in_buffer(position - 1)
                    else:
                        intercepted = not self._in_buffer(min(anchor, position))
            elif key == QtCore.Qt.Key_Delete:
                # Line deletion (remove continuation prompt)
                if not self._reading and self._in_buffer(position) and \
                        cursor.atBlockEnd() and not cursor.hasSelection():
                    cursor.movePosition(QtGui.QTextCursor.NextBlock,
                                        QtGui.QTextCursor.KeepAnchor)
                    cursor.movePosition(QtGui.QTextCursor.Right,
                                        QtGui.QTextCursor.KeepAnchor,
                                        len(self._continuation_prompt))
                    cursor.removeSelectedText()
                    intercepted = True
                # Regular forwards deletion:
                else:
                    anchor = cursor.anchor()
                    intercepted = (not self._in_buffer(anchor) or
                                   not self._in_buffer(position))
        # Don't move the cursor if Control/Cmd is pressed to allow copy-paste
        # using the keyboard in any part of the buffer. Also, permit scrolling
        # with Page Up/Down keys. Finally, if we're executing, don't move the
        # cursor (if even this made sense, we can't guarantee that the prompt
        # position is still valid due to text truncation).
        if not (self._control_key_down(event.modifiers(), include_command=True)
                or key in (QtCore.Qt.Key_PageUp, QtCore.Qt.Key_PageDown)
                or (self._executing and not self._reading)):
            self._keep_cursor_in_buffer()
        return intercepted
def _event_filter_page_keypress(self, event):
""" Filter key events for the paging widget to create console-like
interface.
"""
key = event.key()
ctrl_down = self._control_key_down(event.modifiers())
alt_down = event.modifiers() & QtCore.Qt.AltModifier
if ctrl_down:
if key == QtCore.Qt.Key_O:
self._control.setFocus()
intercept = True
elif alt_down:
if key == QtCore.Qt.Key_Greater:
self._page_control.moveCursor(QtGui.QTextCursor.End)
intercepted = True
elif key == QtCore.Qt.Key_Less:
self._page_control.moveCursor(QtGui.QTextCursor.Start)
intercepted = True
elif key in (QtCore.Qt.Key_Q, QtCore.Qt.Key_Escape):
if self._splitter:
self._page_control.hide()
else:
self.layout().setCurrentWidget(self._control)
return True
elif key in (QtCore.Qt.Key_Enter, QtCore.Qt.Key_Return):
new_event = QtGui.QKeyEvent(QtCore.QEvent.KeyPress,
QtCore.Qt.Key_PageDown,
QtCore.Qt.NoModifier)
QtGui.qApp.sendEvent(self._page_control, new_event)
return True
elif key == QtCore.Qt.Key_Backspace:
new_event = QtGui.QKeyEvent(QtCore.QEvent.KeyPress,
QtCore.Qt.Key_PageUp,
QtCore.Qt.NoModifier)
QtGui.qApp.sendEvent(self._page_control, new_event)
return True
return False
def _format_as_columns(self, items, separator=' '):
""" Transform a list of strings into a single string with columns.
Parameters
----------
items : sequence of strings
The strings to process.
separator : str, optional [default is two spaces]
The string that separates columns.
Returns
-------
The formatted string.
"""
# Calculate the number of characters available.
width = self._control.viewport().width()
char_width = QtGui.QFontMetrics(self.font).width(' ')
displaywidth = max(10, (width / char_width) - 1)
return columnize(items, separator, displaywidth)
def _get_block_plain_text(self, block):
""" Given a QTextBlock, return its unformatted text.
"""
cursor = QtGui.QTextCursor(block)
cursor.movePosition(QtGui.QTextCursor.StartOfBlock)
cursor.movePosition(QtGui.QTextCursor.EndOfBlock,
QtGui.QTextCursor.KeepAnchor)
return cursor.selection().toPlainText()
    def _get_cursor(self):
        """ Convenience method that returns a cursor for the current position.
        """
        # textCursor() returns a copy; mutating it does not move the visible
        # caret until _set_cursor() is called with it.
        return self._control.textCursor()
    def _get_end_cursor(self):
        """ Convenience method that returns a cursor for the last character.
        """
        cursor = self._control.textCursor()
        # Move the copied cursor to the very end of the document.
        cursor.movePosition(QtGui.QTextCursor.End)
        return cursor
def _get_input_buffer_cursor_column(self):
""" Returns the column of the cursor in the input buffer, excluding the
contribution by the prompt, or -1 if there is no such column.
"""
prompt = self._get_input_buffer_cursor_prompt()
if prompt is None:
return -1
else:
cursor = self._control.textCursor()
return cursor.columnNumber() - len(prompt)
def _get_input_buffer_cursor_line(self):
""" Returns the text of the line of the input buffer that contains the
cursor, or None if there is no such line.
"""
prompt = self._get_input_buffer_cursor_prompt()
if prompt is None:
return None
else:
cursor = self._control.textCursor()
text = self._get_block_plain_text(cursor.block())
return text[len(prompt):]
def _get_input_buffer_cursor_prompt(self):
""" Returns the (plain text) prompt for line of the input buffer that
contains the cursor, or None if there is no such line.
"""
if self._executing:
return None
cursor = self._control.textCursor()
if cursor.position() >= self._prompt_pos:
if cursor.blockNumber() == self._get_prompt_cursor().blockNumber():
return self._prompt
else:
return self._continuation_prompt
else:
return None
    def _get_prompt_cursor(self):
        """ Convenience method that returns a cursor for the prompt position.
        """
        cursor = self._control.textCursor()
        # _prompt_pos marks the first character position after the prompt.
        cursor.setPosition(self._prompt_pos)
        return cursor
    def _get_selection_cursor(self, start, end):
        """ Convenience method that returns a cursor with text selected between
            the positions 'start' and 'end'.
        """
        cursor = self._control.textCursor()
        cursor.setPosition(start)
        # Keeping the anchor at 'start' while moving to 'end' selects the
        # intervening text.
        cursor.setPosition(end, QtGui.QTextCursor.KeepAnchor)
        return cursor
    def _get_word_start_cursor(self, position):
        """ Find the start of the word to the left the given position. If a
            sequence of non-word characters precedes the first word, skip over
            them. (This emulates the behavior of bash, emacs, etc.)
        """
        document = self._control.document()
        position -= 1
        # First skip any run of non-word characters to the left, without
        # crossing the prompt boundary...
        while position >= self._prompt_pos and \
                not is_letter_or_number(document.characterAt(position)):
            position -= 1
        # ...then skip backwards over the word itself.
        while position >= self._prompt_pos and \
                is_letter_or_number(document.characterAt(position)):
            position -= 1
        cursor = self._control.textCursor()
        # +1 because the loops above overshoot by one character.
        cursor.setPosition(position + 1)
        return cursor
    def _get_word_end_cursor(self, position):
        """ Find the end of the word to the right the given position. If a
            sequence of non-word characters precedes the first word, skip over
            them. (This emulates the behavior of bash, emacs, etc.)
        """
        document = self._control.document()
        end = self._get_end_cursor().position()
        # First skip any run of non-word characters to the right, without
        # running past the end of the document...
        while position < end and \
                not is_letter_or_number(document.characterAt(position)):
            position += 1
        # ...then advance through the word itself.
        while position < end and \
                is_letter_or_number(document.characterAt(position)):
            position += 1
        cursor = self._control.textCursor()
        cursor.setPosition(position)
        return cursor
def _insert_continuation_prompt(self, cursor):
""" Inserts new continuation prompt using the specified cursor.
"""
if self._continuation_prompt_html is None:
self._insert_plain_text(cursor, self._continuation_prompt)
else:
self._continuation_prompt = self._insert_html_fetching_plain_text(
cursor, self._continuation_prompt_html)
    def _insert_html(self, cursor, html):
        """ Inserts HTML using the specified cursor in such a way that future
            formatting is unaffected.
        """
        cursor.beginEditBlock()
        cursor.insertHtml(html)
        # After inserting HTML, the text document "remembers" it's in "html
        # mode", which means that subsequent calls adding plain text will result
        # in unwanted formatting, lost tab characters, etc. The following code
        # hacks around this behavior, which I consider to be a bug in Qt, by
        # (crudely) resetting the document's style state.
        cursor.movePosition(QtGui.QTextCursor.Left,
                            QtGui.QTextCursor.KeepAnchor)
        # If the insertion already ends with a space, consume it; otherwise
        # step back over the selected character. Either way, a space with a
        # default (empty) char format is written to reset the styling.
        if cursor.selection().toPlainText() == ' ':
            cursor.removeSelectedText()
        else:
            cursor.movePosition(QtGui.QTextCursor.Right)
        cursor.insertText(' ', QtGui.QTextCharFormat())
        cursor.endEditBlock()
    def _insert_html_fetching_plain_text(self, cursor, html):
        """ Inserts HTML using the specified cursor, then returns its plain text
            version.
        """
        cursor.beginEditBlock()
        cursor.removeSelectedText()
        # Remember where the insertion starts so the inserted span can be
        # re-selected and read back as plain text afterwards.
        start = cursor.position()
        self._insert_html(cursor, html)
        end = cursor.position()
        cursor.setPosition(start, QtGui.QTextCursor.KeepAnchor)
        text = cursor.selection().toPlainText()
        # Leave the cursor at the end of the inserted text.
        cursor.setPosition(end)
        cursor.endEditBlock()
        return text
    def _insert_plain_text(self, cursor, text):
        """ Inserts plain text using the specified cursor, processing ANSI codes
            if enabled.
        """
        cursor.beginEditBlock()
        if self.ansi_codes:
            # split_string() yields the text between escape sequences;
            # 'actions' holds the control operations implied by the escape
            # codes that preceded each substring.
            for substring in self._ansi_processor.split_string(text):
                for act in self._ansi_processor.actions:
                    # Unlike real terminal emulators, we don't distinguish
                    # between the screen and the scrollback buffer. A screen
                    # erase request clears everything.
                    if act.action == 'erase' and act.area == 'screen':
                        cursor.select(QtGui.QTextCursor.Document)
                        cursor.removeSelectedText()
                    # Simulate a form feed by scrolling just past the last line.
                    elif act.action == 'scroll' and act.unit == 'page':
                        cursor.insertText('\n')
                        cursor.endEditBlock()
                        self._set_top_cursor(cursor)
                        cursor.joinPreviousEditBlock()
                        cursor.deletePreviousChar()
                # Render the substring with the char format accumulated from
                # the ANSI color/style codes seen so far.
                format = self._ansi_processor.get_format()
                cursor.insertText(substring, format)
        else:
            cursor.insertText(text)
        cursor.endEditBlock()
def _insert_plain_text_into_buffer(self, cursor, text):
""" Inserts text into the input buffer using the specified cursor (which
must be in the input buffer), ensuring that continuation prompts are
inserted as necessary.
"""
lines = text.splitlines(True)
if lines:
cursor.beginEditBlock()
cursor.insertText(lines[0])
for line in lines[1:]:
if self._continuation_prompt_html is None:
cursor.insertText(self._continuation_prompt)
else:
self._continuation_prompt = \
self._insert_html_fetching_plain_text(
cursor, self._continuation_prompt_html)
cursor.insertText(line)
cursor.endEditBlock()
def _in_buffer(self, position=None):
""" Returns whether the current cursor (or, if specified, a position) is
inside the editing region.
"""
cursor = self._control.textCursor()
if position is None:
position = cursor.position()
else:
cursor.setPosition(position)
line = cursor.blockNumber()
prompt_line = self._get_prompt_cursor().blockNumber()
if line == prompt_line:
return position >= self._prompt_pos
elif line > prompt_line:
cursor.movePosition(QtGui.QTextCursor.StartOfBlock)
prompt_pos = cursor.position() + len(self._continuation_prompt)
return position >= prompt_pos
return False
def _keep_cursor_in_buffer(self):
""" Ensures that the cursor is inside the editing region. Returns
whether the cursor was moved.
"""
moved = not self._in_buffer()
if moved:
cursor = self._control.textCursor()
cursor.movePosition(QtGui.QTextCursor.End)
self._control.setTextCursor(cursor)
return moved
def _keyboard_quit(self):
""" Cancels the current editing task ala Ctrl-G in Emacs.
"""
if self._text_completing_pos:
self._cancel_text_completion()
else:
self.input_buffer = ''
    def _page(self, text, html=False):
        """ Displays text using the pager if it exceeds the height of the
            viewport.
        Parameters:
        -----------
        html : bool, optional (default False)
            If set, the text will be interpreted as HTML instead of plain text.
        """
        line_height = QtGui.QFontMetrics(self.font).height()
        minlines = self._control.viewport().height() / line_height
        # Page only when paging is enabled and the text has at least
        # 'minlines' newline-terminated lines, i.e. would overflow the view.
        if self.paging != 'none' and \
                re.match("(?:[^\n]*\n){%i}" % minlines, text):
            if self.paging == 'custom':
                # Delegate paging to an external handler via the signal.
                self.custom_page_requested.emit(text)
            else:
                self._page_control.clear()
                cursor = self._page_control.textCursor()
                if html:
                    self._insert_html(cursor, text)
                else:
                    self._insert_plain_text(cursor, text)
                self._page_control.moveCursor(QtGui.QTextCursor.Start)
                self._page_control.viewport().resize(self._control.size())
                # Either reveal the pager pane or swap it into the layout.
                if self._splitter:
                    self._page_control.show()
                    self._page_control.setFocus()
                else:
                    self.layout().setCurrentWidget(self._page_control)
        elif html:
            self._append_plain_html(text)
        else:
            self._append_plain_text(text)
    def _prompt_finished(self):
        """ Called immediately after a prompt is finished, i.e. when some input
            will be processed and a new prompt displayed.
        """
        # Lock the control while the submitted input is being processed.
        self._control.setReadOnly(True)
        self._prompt_finished_hook()
    def _prompt_started(self):
        """ Called immediately after a new prompt is displayed.
        """
        # Temporarily disable the maximum block count to permit undo/redo and
        # to ensure that the prompt position does not change due to truncation.
        self._control.document().setMaximumBlockCount(0)
        self._control.setUndoRedoEnabled(True)
        # Work around bug in QPlainTextEdit: input method is not re-enabled
        # when read-only is disabled.
        self._control.setReadOnly(False)
        self._control.setAttribute(QtCore.Qt.WA_InputMethodEnabled, True)
        # Reading user input does not count as executing.
        if not self._reading:
            self._executing = False
        self._prompt_started_hook()
        # If the input buffer has changed while executing, load it.
        if self._input_buffer_pending:
            self.input_buffer = self._input_buffer_pending
            self._input_buffer_pending = ''
        self._control.moveCursor(QtGui.QTextCursor.End)
    def _readline(self, prompt='', callback=None):
        """ Reads one line of input from the user.
        Parameters
        ----------
        prompt : str, optional
            The prompt to print before reading the line.
        callback : callable, optional
            A callback to execute with the read line. If not specified, input is
            read *synchronously* and this method does not return until it has
            been read.
        Returns
        -------
        If a callback is specified, returns nothing. Otherwise, returns the
        input string with the trailing newline stripped.

        Raises
        ------
        RuntimeError
            If a read is already in progress, or if a synchronous read is
            requested while the widget is not visible.
        """
        if self._reading:
            raise RuntimeError('Cannot read a line. Widget is already reading.')
        if not callback and not self.isVisible():
            # If the user cannot see the widget, this function cannot return.
            raise RuntimeError('Cannot synchronously read a line if the widget '
                               'is not visible!')
        self._reading = True
        self._show_prompt(prompt, newline=False)
        if callback is None:
            # Synchronous mode: spin the Qt event loop until the input
            # handler clears self._reading.
            self._reading_callback = None
            while self._reading:
                QtCore.QCoreApplication.processEvents()
            return self._get_input_buffer(force=True).rstrip('\n')
        else:
            # Asynchronous mode: defer delivery of the line to the callback.
            self._reading_callback = lambda: \
                callback(self._get_input_buffer(force=True).rstrip('\n'))
def _set_continuation_prompt(self, prompt, html=False):
""" Sets the continuation prompt.
Parameters
----------
prompt : str
The prompt to show when more input is needed.
html : bool, optional (default False)
If set, the prompt will be inserted as formatted HTML. Otherwise,
the prompt will be treated as plain text, though ANSI color codes
will be handled.
"""
if html:
self._continuation_prompt_html = prompt
else:
self._continuation_prompt = prompt
self._continuation_prompt_html = None
    def _set_cursor(self, cursor):
        """ Convenience method to set the current cursor.
        """
        # Makes the given cursor the widget's visible caret.
        self._control.setTextCursor(cursor)
    def _set_top_cursor(self, cursor):
        """ Scrolls the viewport so that the specified cursor is at the top.
        """
        # Scroll all the way down first; ensureCursorVisible() then only has
        # to scroll back up, which leaves the target cursor at the top edge.
        scrollbar = self._control.verticalScrollBar()
        scrollbar.setValue(scrollbar.maximum())
        original_cursor = self._control.textCursor()
        self._control.setTextCursor(cursor)
        self._control.ensureCursorVisible()
        # Restore the user's cursor; only the scroll position was wanted.
        self._control.setTextCursor(original_cursor)
    def _show_prompt(self, prompt=None, html=False, newline=True):
        """ Writes a new prompt at the end of the buffer.
        Parameters
        ----------
        prompt : str, optional
            The prompt to show. If not specified, the previous prompt is used.
        html : bool, optional (default False)
            Only relevant when a prompt is specified. If set, the prompt will
            be inserted as formatted HTML. Otherwise, the prompt will be treated
            as plain text, though ANSI color codes will be handled.
        newline : bool, optional (default True)
            If set, a new line will be written before showing the prompt if
            there is not already a newline at the end of the buffer.
        """
        # Save the current end position to support _append*(before_prompt=True).
        cursor = self._get_end_cursor()
        self._append_before_prompt_pos = cursor.position()
        # Insert a preliminary newline, if necessary.
        if newline and cursor.position() > 0:
            cursor.movePosition(QtGui.QTextCursor.Left,
                                QtGui.QTextCursor.KeepAnchor)
            if cursor.selection().toPlainText() != '\n':
                self._append_plain_text('\n')
        # Write the prompt.
        self._append_plain_text(self._prompt_sep)
        if prompt is None:
            # Re-display the previous prompt in its richest available form.
            if self._prompt_html is None:
                self._append_plain_text(self._prompt)
            else:
                self._append_html(self._prompt_html)
        else:
            if html:
                # Remember both the HTML form and its rendered plain text.
                self._prompt = self._append_html_fetching_plain_text(prompt)
                self._prompt_html = prompt
            else:
                self._append_plain_text(prompt)
                self._prompt = prompt
                self._prompt_html = None
        # Record where the editable buffer begins, then unlock the widget.
        self._prompt_pos = self._get_end_cursor().position()
        self._prompt_started()
#------ Signal handlers ----------------------------------------------------
    def _adjust_scrollbars(self):
        """ Expands the vertical scrollbar beyond the range set by Qt.
        """
        # This code is adapted from _q_adjustScrollbars in qplaintextedit.cpp
        # and qtextedit.cpp.
        document = self._control.document()
        scrollbar = self._control.verticalScrollBar()
        viewport_height = self._control.viewport().height()
        if isinstance(self._control, QtGui.QPlainTextEdit):
            # Line-based layout: scroll in units of lines.
            maximum = max(0, document.lineCount() - 1)
            step = viewport_height / self._control.fontMetrics().lineSpacing()
        else:
            # QTextEdit does not do line-based layout and blocks will not in
            # general have the same height. Therefore it does not make sense to
            # attempt to scroll in line height increments.
            maximum = document.size().height()
            step = viewport_height
        diff = maximum - scrollbar.maximum()
        scrollbar.setRange(0, maximum)
        scrollbar.setPageStep(step)
        # Compensate for undesirable scrolling that occurs automatically due to
        # maximumBlockCount() text truncation.
        if diff < 0 and document.blockCount() == document.maximumBlockCount():
            scrollbar.setValue(scrollbar.value() + diff)
def _cursor_position_changed(self):
""" Clears the temporary buffer based on the cursor position.
"""
if self._text_completing_pos:
document = self._control.document()
if self._text_completing_pos < document.characterCount():
cursor = self._control.textCursor()
pos = cursor.position()
text_cursor = self._control.textCursor()
text_cursor.setPosition(self._text_completing_pos)
if pos < self._text_completing_pos or \
cursor.blockNumber() > text_cursor.blockNumber():
self._clear_temporary_buffer()
self._text_completing_pos = 0
else:
self._clear_temporary_buffer()
self._text_completing_pos = 0
    def _custom_context_menu_requested(self, pos):
        """ Shows a context menu at the given QPoint (in widget coordinates).
        """
        menu = self._context_menu_make(pos)
        # exec_() blocks until the menu is dismissed; the position must be
        # mapped to global (screen) coordinates for display.
        menu.exec_(self._control.mapToGlobal(pos))
| 40.765564 | 80 | 0.579927 |
f66ea62a6a3a2f0b0e2ceaa04ff3482dd8ce7761 | 2,757 | py | Python | djangocms_text_ckeditor/migrations/0002_rename_plugin.py | webu/djangocms-text-ckeditor | 03e3139d6264d2ec5e7a61ad37c231fbdd502e0b | [
"BSD-3-Clause"
] | 1 | 2015-11-05T17:56:07.000Z | 2015-11-05T17:56:07.000Z | djangocms_text_ckeditor/migrations/0002_rename_plugin.py | webu/djangocms-text-ckeditor | 03e3139d6264d2ec5e7a61ad37c231fbdd502e0b | [
"BSD-3-Clause"
] | null | null | null | djangocms_text_ckeditor/migrations/0002_rename_plugin.py | webu/djangocms-text-ckeditor | 03e3139d6264d2ec5e7a61ad37c231fbdd502e0b | [
"BSD-3-Clause"
] | 1 | 2019-08-17T20:46:59.000Z | 2019-08-17T20:46:59.000Z | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models, connections
class Migration(SchemaMigration):
    # South schema migration: renames the legacy 'cmsplugin_text' table to
    # the app-namespaced 'djangocms_text_ckeditor_text'.
    def forwards(self, orm):
        # Only rename when the legacy table actually exists in this database
        # (fresh installs create the new table name directly).
        table_names = connections[db.db_alias].introspection.table_names()
        if 'cmsplugin_text' in table_names:
            db.rename_table('cmsplugin_text', 'djangocms_text_ckeditor_text')
    def backwards(self, orm):
        # Unconditional reverse rename back to the legacy table name.
        db.rename_table('djangocms_text_ckeditor_text', 'cmsplugin_text')
    # Frozen ORM model snapshot used by South while running this migration.
    models = {
        'cms.cmsplugin': {
            'Meta': {'object_name': 'CMSPlugin'},
            'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
            'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
            'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        'cms.placeholder': {
            'Meta': {'object_name': 'Placeholder'},
            'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        u'djangocms_text_ckeditor.text': {
            'Meta': {'object_name': 'Text', 'db_table': "u'cmsplugin_text'"},
            'body': ('django.db.models.fields.TextField', [], {}),
            u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'})
        }
    }
    complete_apps = ['djangocms_text_ckeditor']
| 57.4375 | 156 | 0.598114 |
d5788bac20b923c0313dad6cc2f7d37fbbeab972 | 5,247 | py | Python | env/lib/python3.6/site-packages/appconf/base.py | anthowen/duplify | 846d01c1b21230937fdf0281b0cf8c0b08a8c24e | [
"MIT"
] | 10 | 2016-09-14T21:58:49.000Z | 2019-01-28T21:56:35.000Z | env/lib/python3.6/site-packages/appconf/base.py | anthowen/duplify | 846d01c1b21230937fdf0281b0cf8c0b08a8c24e | [
"MIT"
] | 10 | 2020-06-06T01:43:38.000Z | 2022-03-12T00:23:20.000Z | env/lib/python3.6/site-packages/appconf/base.py | anthowen/duplify | 846d01c1b21230937fdf0281b0cf8c0b08a8c24e | [
"MIT"
] | 1 | 2020-11-04T08:39:22.000Z | 2020-11-04T08:39:22.000Z | import sys
from django.core.exceptions import ImproperlyConfigured
from django.utils import six
from .utils import import_attribute
class AppConfOptions(object):
    """Per-class option container built from an AppConf's inner ``Meta``."""
    def __init__(self, meta, prefix=None):
        self.prefix = prefix
        # Dotted path of the settings object that holds configured values
        # (defaults to Django's global settings module).
        self.holder_path = getattr(meta, 'holder', 'django.conf.settings')
        self.holder = import_attribute(self.holder_path)
        # When proxying, unknown attribute access falls through to the holder.
        self.proxy = getattr(meta, 'proxy', False)
        # Names of settings that must already exist on the holder.
        self.required = getattr(meta, 'required', [])
        self.configured_data = {}
    def prefixed_name(self, name):
        """Return NAME upper-cased with the app prefix prepended, unless it
        already carries the prefix."""
        if name.startswith(self.prefix.upper()):
            return name
        return "%s_%s" % (self.prefix.upper(), name.upper())
    def contribute_to_class(self, cls, name):
        # Hook invoked via add_to_class(): attach this options object as
        # cls._meta and initialize the name/default registries.
        cls._meta = self
        self.names = {}
        self.defaults = {}
class AppConfMetaClass(type):
    """Metaclass that collects upper-case class attributes as app settings,
    resolves their values against the settings holder, and writes the
    configured values back onto both the holder and the class."""
    def __new__(cls, name, bases, attrs):
        super_new = super(AppConfMetaClass, cls).__new__
        parents = [b for b in bases if isinstance(b, AppConfMetaClass)]
        if not parents:
            # The AppConf base class itself: create it unmodified.
            return super_new(cls, name, bases, attrs)
        # Create the class.
        module = attrs.pop('__module__')
        new_class = super_new(cls, name, bases, {'__module__': module})
        attr_meta = attrs.pop('Meta', None)
        if attr_meta:
            meta = attr_meta
        else:
            # No Meta declared on this class: fall back to any inherited one.
            attr_meta = type('Meta', (object,), {})
            meta = getattr(new_class, 'Meta', None)
        prefix = getattr(meta, 'prefix', getattr(meta, 'app_label', None))
        if prefix is None:
            # Figure out the prefix by looking one level up.
            # For 'django.contrib.sites.models', this would be 'sites'.
            model_module = sys.modules[new_class.__module__]
            prefix = model_module.__name__.split('.')[-2]
        new_class.add_to_class('_meta', AppConfOptions(meta, prefix))
        new_class.add_to_class('Meta', attr_meta)
        # Inherit names, defaults and configured data from parent AppConfs
        # (reversed so closer parents win).
        for parent in parents[::-1]:
            if hasattr(parent, '_meta'):
                new_class._meta.names.update(parent._meta.names)
                new_class._meta.defaults.update(parent._meta.defaults)
                new_class._meta.configured_data.update(
                    parent._meta.configured_data)
        # Upper-case attributes are setting declarations: register their
        # prefixed names and declared defaults.
        for name in filter(str.isupper, list(attrs.keys())):
            prefixed_name = new_class._meta.prefixed_name(name)
            new_class._meta.names[name] = prefixed_name
            new_class._meta.defaults[prefixed_name] = attrs.pop(name)
        # Add all attributes to the class.
        for name, value in attrs.items():
            new_class.add_to_class(name, value)
        new_class._configure()
        # Push the configured values onto the holder and the class itself.
        for name, value in six.iteritems(new_class._meta.configured_data):
            prefixed_name = new_class._meta.prefixed_name(name)
            setattr(new_class._meta.holder, prefixed_name, value)
            new_class.add_to_class(name, value)
        # Confirm presence of required settings.
        for name in new_class._meta.required:
            prefixed_name = new_class._meta.prefixed_name(name)
            if not hasattr(new_class._meta.holder, prefixed_name):
                raise ImproperlyConfigured('The required setting %s is'
                                           ' missing.' % prefixed_name)
        return new_class
    def add_to_class(cls, name, value):
        # Mirror Django's Model metaclass hook: values that know how to
        # contribute install themselves; everything else is set directly.
        if hasattr(value, 'contribute_to_class'):
            value.contribute_to_class(cls, name)
        else:
            setattr(cls, name, value)
    def _configure(cls):
        # the ad-hoc settings class instance used to configure each value
        obj = cls()
        for name, prefixed_name in six.iteritems(obj._meta.names):
            default_value = obj._meta.defaults.get(prefixed_name)
            # The holder's value (if set) wins over the declared default.
            value = getattr(obj._meta.holder, prefixed_name, default_value)
            # Optional per-setting hook: configure_<lowercase name>(value).
            callback = getattr(obj, "configure_%s" % name.lower(), None)
            if callable(callback):
                value = callback(value)
            cls._meta.configured_data[name] = value
        # The class-level configure() hook gets the final word.
        cls._meta.configured_data = obj.configure()
class AppConf(six.with_metaclass(AppConfMetaClass)):
    """
    An app setting object to be used for handling app setting defaults
    gracefully and providing a nice API for them.
    """
    def __init__(self, **kwargs):
        # Allow ad-hoc attribute overrides at instantiation time.
        for name, value in six.iteritems(kwargs):
            setattr(self, name, value)
    def __dir__(self):
        # Expose only the declared setting names (deduplicated, sorted).
        return sorted(list(set(self._meta.names.keys())))
    # For instance access..
    @property
    def configured_data(self):
        return self._meta.configured_data
    def __getattr__(self, name):
        # Only reached for attributes not found normally; proxy to the
        # settings holder when Meta.proxy is enabled, else fail loudly.
        if self._meta.proxy:
            return getattr(self._meta.holder, name)
        raise AttributeError("%s not found. Use '%s' instead." %
                             (name, self._meta.holder_path))
    def __setattr__(self, name, value):
        # Upper-case assignments are settings: mirror them (with the app
        # prefix) onto the holder as well as onto the instance.
        if name == name.upper():
            setattr(self._meta.holder,
                    self._meta.prefixed_name(name), value)
        object.__setattr__(self, name, value)
    def configure(self):
        """
        Hook for doing any extra configuration, returning a dictionary
        containing the configured data.
        """
        return self.configured_data
| 36.692308 | 75 | 0.618258 |
63ca86ca502af280ab1243932cde81a9b3530503 | 11,249 | py | Python | systems/iodine/dimerHighlyCoupled.py | jgoodknight/spectroscopy | 5c41852dff8d02fe49395ecc5cd36902ec867033 | [
"MIT"
] | 6 | 2017-04-06T00:47:24.000Z | 2020-05-12T19:33:39.000Z | systems/iodine/dimerHighlyCoupled.py | jgoodknight/spectroscopy | 5c41852dff8d02fe49395ecc5cd36902ec867033 | [
"MIT"
] | null | null | null | systems/iodine/dimerHighlyCoupled.py | jgoodknight/spectroscopy | 5c41852dff8d02fe49395ecc5cd36902ec867033 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Fri Jan 04 13:23:27 2013
@author: Joey
"""
import copy
import os
import time
import multiprocessing
import numpy as np
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import scipy.integrate
import spectroscopy.Spacetime as Spacetime
import spectroscopy.TimeElectronicWavefunction as TimeElectronicWavefunction
import spectroscopy.ElectronicOperator as ElectronicOperator
import spectroscopy.TimeFunction as TimeFunction
import spectroscopy.NuclearWavefunction as NuclearWavefunction
import spectroscopy.NuclearOperator as NuclearOperator
import spectroscopy.ElectronicWavefunction as ElectronicWavefunction
import spectroscopy.TimePerturbation as TimePerturbation
#import spectroscopy.Experiments as Experiments
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 04 13:23:27 2013
@author: Joey
"""
import numpy as np
import spectroscopy.Spacetime as Spacetime
import spectroscopy.ElectronicOperator as ElectronicOperator
import spectroscopy.NuclearWavefunction as NuclearWavefunction
import spectroscopy.NuclearOperator as NuclearOperator
import spectroscopy.ElectronicWavefunction as ElectronicWavefunction
#FROM HERZBERG SPECTRA OF DIATOMIC MOLECULES
reducedMassIodineAMU = 63.466 #amu
reducedMassIodine = 1.0538E-25 #kg
betaCoefficient = 5.976917E21 #1/sqrt(kg * m)
betaCoefficient = betaCoefficient / 10.0 #1/sqrt(kg * cm)
#ground state
#looked up values
ground_omega_e = 214.57 #wavenumbers
ground_omega_e_chi_e = .6127 #wavenumbers
ground_T_e_ = 0.0 #wavenumbers
ground_radius_e = 2.6666E-10 #meters
#calculated values
ground_chi_e = ground_omega_e_chi_e / ground_omega_e #unitless
ground_D_e = ground_omega_e / (4.0 * ground_chi_e) #wavenumbers
ground_beta = 100 * betaCoefficient * np.sqrt(reducedMassIodine * ground_omega_e_chi_e) # 1/m
ground_zpe = ground_omega_e * .5 + ground_omega_e_chi_e * .25
#excited state
#looked up values
excited_omega_e = 128.0 #wavenumbers
excited_omega_e_chi_e = .834 #wavenumbers
excited_T_e = 15641.6 #wavenumbers, 0-->0 transition energy
excited_radius_e = 3.016E-10 #meters
#calculated values
excited_chi_e = excited_omega_e_chi_e / excited_omega_e #unitless
excited_D_e = excited_omega_e / (4.0 * excited_chi_e) #wavenumbers
excited_beta = 100 * betaCoefficient * np.sqrt(reducedMassIodine * excited_omega_e_chi_e) # 1/m
excited_zpe = excited_omega_e * .5 + excited_omega_e_chi_e * .25 #wavenumbers
excited_energy_gap = excited_T_e + ground_zpe - excited_zpe #wavenumbers
excited_center_gap = excited_radius_e - ground_radius_e #meters
##all units from here on out are defined by mySpace
mySpace = Spacetime.Spacetime(xMax = 15.0,
numberOfNuclearDimenions = 2,
numberOfElectronicDimensions = 4,
numberOfSimulationSpacePointsPerNuclearDimension = 100,
dt_SECONDS = .50000E-15,
UnityMassInElectronMasses = 10.0)
iodineGroundStateDeValue = mySpace.unitHandler.energyUnitsFromWavenumbers(ground_D_e)
iodineExcitedStateDeValue = mySpace.unitHandler.energyUnitsFromWavenumbers(excited_D_e)
iodineExcitedStateCenterOffset = mySpace.unitHandler.lengthUnitsFromMeters(excited_center_gap)
iodineExcitedStateEnergyOffset = mySpace.unitHandler.energyUnitsFromWavenumbers(excited_energy_gap)
iodineGroundStateBetaValue = 1.0 / mySpace.unitHandler.lengthUnitsFromMeters(1.0 / ground_beta)
iodineExcitedStateBetaValue = 1.0 / mySpace.unitHandler.lengthUnitsFromMeters(1.0 / excited_beta)
iodineReducedMass = mySpace.unitHandler.massUnitsFromAmu(reducedMassIodineAMU)
pulse_carrier_frequency = 2500.0 #wavenumbers
pulse_carrier_frequency = mySpace.unitHandler.energyUnitsFromWavenumbers(pulse_carrier_frequency)
#startingPoint = -mySpace.xMax + 10
groundCenter = - iodineExcitedStateCenterOffset / 2
excitedCenter = iodineExcitedStateCenterOffset / 2
opticalGap = iodineExcitedStateEnergyOffset
energyOffset = 0 #for more accurate calculation
omega_0_ground = iodineGroundStateBetaValue * np.sqrt(2 * iodineGroundStateDeValue / iodineReducedMass )
omega_0_ground_wavenumbers = mySpace.unitHandler.wavenumbersFromEnergyUnits(omega_0_ground)
omega_0_excited = iodineExcitedStateBetaValue * np.sqrt(2 * iodineExcitedStateDeValue / iodineReducedMass )
omega_0_excited_wavenumbers = mySpace.unitHandler.wavenumbersFromEnergyUnits(omega_0_excited)
HUANG_RHYS = iodineReducedMass * iodineExcitedStateCenterOffset**2 * omega_0_ground / ( 2* mySpace.hbar )
iodineGroundMorse = NuclearOperator.morsePotential(mySpace,
a= iodineGroundStateBetaValue,
De=iodineGroundStateDeValue,
mass=iodineReducedMass,
center=groundCenter,
energyOffset = 0.0 )
iodineExcitedMorse = NuclearOperator.morsePotential(mySpace,
a= iodineExcitedStateBetaValue,
De= iodineExcitedStateDeValue,
mass=iodineReducedMass,
center=excitedCenter,
energyOffset = energyOffset)
# --- Harmonic-oscillator approximations of the Morse potentials ---
# NOTE(review): relies on names defined earlier in the file (mySpace,
# iodineGroundMorse, iodineExcitedMorse, omega_0_ground, iodineReducedMass,
# NuclearOperator, NuclearWavefunction, ElectronicOperator,
# ElectronicWavefunction, np) -- not visible in this chunk.
iodineGroundHO = iodineGroundMorse.correspondingHarmonicOscillator()
iodineExcitedHO = iodineExcitedMorse.correspondingHarmonicOscillator()
# 0-0 transition energy between the two harmonic surfaces.
lowestEnergyTransitionHO = iodineExcitedHO.energyEigenvalue(0) - iodineGroundHO.energyEigenvalue(0)
# Reference transition energies given in wavenumbers, converted into the
# internal energy units managed by mySpace.unitHandler.
mostImportantTransitionHO_wavenumbers = 1875.0
mostImportantTransitionHO = mySpace.unitHandler.energyUnitsFromWavenumbers(mostImportantTransitionHO_wavenumbers)
mostImportantTransitionMorse_wavenumbers = 3100.0
mostImportantTransitionMorse = mySpace.unitHandler.energyUnitsFromWavenumbers( mostImportantTransitionMorse_wavenumbers )
#HARMONIC OSCILLATOR
# Two-mode nuclear Hamiltonians, one per combination of per-mode surfaces.
groundGroundStateHO = NuclearOperator.nuclearHamiltonian(mySpace, listOfOneDimensionalHamiltonians = [iodineGroundHO, iodineGroundHO] )
excitedGroundStateHO = NuclearOperator.nuclearHamiltonian(mySpace, listOfOneDimensionalHamiltonians = [iodineExcitedHO, iodineGroundHO ] )
groundExcitedStateHO = NuclearOperator.nuclearHamiltonian(mySpace, listOfOneDimensionalHamiltonians = [iodineGroundHO, iodineExcitedHO ] )
excitedExcitedStateHO = NuclearOperator.nuclearHamiltonian(mySpace, listOfOneDimensionalHamiltonians = [iodineExcitedHO, iodineExcitedHO ] )
# Electronic coupling strength between the two singly-excited states.
J = 3.2 * omega_0_ground
couplingOperator = NuclearOperator.constantPositionNuclearOperator(mySpace, J)
groundStateNuclearWFHO = NuclearWavefunction.nuclearWavefunction(mySpace, groundStateNuclearHamiltonian = groundGroundStateHO )
# Four-level electronic Hamiltonian (states 0..3) with J coupling the two
# singly-excited states (1 and 2).
ElectronicHamiltonianHO = ElectronicOperator.ElectronicHamiltonian(mySpace, [(0,0, groundGroundStateHO),
                                                                             (1,1, excitedGroundStateHO),(2,2, groundExcitedStateHO),
                                                                             (1,2, couplingOperator), (2,1, couplingOperator),
                                                                             (3,3, excitedExcitedStateHO),])
# Initial state: nuclear ground state on the lowest electronic surface.
initialEWFHO = ElectronicWavefunction.electronicWavefunction(mySpace,
                                                             listOfNuclearWavefunctions = [groundStateNuclearWFHO, 0, 0, 0],
                                                             Normalize=True)
#MORSE
#groundStateMorse = NuclearOperator.nuclearHamiltonian(mySpace, listOfOneDimensionalHamiltonians = [iodineGroundMorse] )
#
#excitedStateMorse = NuclearOperator.nuclearHamiltonian(mySpace, listOfOneDimensionalHamiltonians = [iodineExcitedMorse ] )
#
#
#groundStateNuclearWFMorse = NuclearWavefunction.nuclearWavefunction(mySpace, groundStateNuclearHamiltonian = groundStateMorse )
#
#
#ElectronicHamiltonianMorse = ElectronicOperator.ElectronicHamiltonian(mySpace, [(0,0, groundStateMorse),
#                                                                                (1,1, excitedStateMorse)])
#
#
#initialEWFMorse = ElectronicWavefunction.electronicWavefunction(mySpace,
#                                                                listOfNuclearWavefunctions = [groundStateNuclearWFMorse, 0],
#                                                                Normalize=True)
# --- Transition dipole expansion coefficients (atomic units) ---
#calculated using def2-TZVP basis and PBE0 function
#Provided by Dimitrij Rappaport
muGtoEValueConstant = -.165270 #e * r_b
muGtoEValueConstant = mySpace.unitHandler.lengthUnitsFromBohrRadii(muGtoEValueConstant)
muGtoEValueLinearCo = -.0548235 #e * r_b / r_b
muEtoGValueConstant = -.132477 #e * r_b
muEtoGValueConstant = mySpace.unitHandler.lengthUnitsFromBohrRadii(muEtoGValueConstant)
muEtoGValueLinearCo = -.04555 #e * r_b / r_b
# Averaged constant dipole term used in the linear expansion below.
mu_0 = (muGtoEValueConstant + muEtoGValueConstant ) / 2.0
constantMu = NuclearOperator.constantPositionNuclearOperator(mySpace, mu_0)
mu_double_prime = muGtoEValueLinearCo + muEtoGValueLinearCo
mu_prime = mu_double_prime * np.sqrt(mySpace.hbar / ( 2.0 * iodineReducedMass * omega_0_ground))
# NOTE(review): this second assignment overwrites the mu_0-based constantMu
# above with one based on muEtoGValueConstant only -- confirm intentional.
constantMu = NuclearOperator.constantPositionNuclearOperator(mySpace, muEtoGValueConstant)
# Linear (coordinate-dependent) dipole: mu(x, y) = mu_0 + mu'' * (x + y).
linear_mu_function = lambda x, y: mu_0 + mu_double_prime * (x + y)
linearMu = NuclearOperator.functionalPositionNuclearOperator(mySpace, lambda x, y: linear_mu_function(x, y) )
# Condon (constant-dipole) transition operators along x; y and z are empty.
xTransitionDipole_FC = ElectronicOperator.ElectronicPositionOperator(mySpace, [(0, 1, constantMu), (1, 0, constantMu),
                                                                               (0, 2, constantMu), (2, 0, constantMu),
                                                                               (1, 3, constantMu), (3, 1, constantMu),
                                                                               (2, 3, constantMu), (3, 2, constantMu)])
yTransitionDipole_FC = ElectronicOperator.ElectronicPositionOperator(mySpace, [])
zTransitionDipole_FC = ElectronicOperator.ElectronicPositionOperator(mySpace, [])
transitionDipoleTuple_FC = (xTransitionDipole_FC, yTransitionDipole_FC, zTransitionDipole_FC)
# Non-Condon (linear-dipole) variants of the same operators.
xTransitionDipole_linear = ElectronicOperator.ElectronicPositionOperator(mySpace, [(0, 1, linearMu), (1, 0, linearMu),
                                                                                   (0, 2, linearMu), (2, 0, linearMu),
                                                                                   (1, 3, linearMu), (3, 1, linearMu),
                                                                                   (2, 3, linearMu), (3, 2, linearMu)])
yTransitionDipole_linear = ElectronicOperator.ElectronicPositionOperator(mySpace, [])
zTransitionDipole_linear = ElectronicOperator.ElectronicPositionOperator(mySpace, [])
transitionDipoleTuple_linear = (xTransitionDipole_linear, yTransitionDipole_linear, zTransitionDipole_linear)
| 46.292181 | 141 | 0.68335 |
90f8f23e1a23dbe2b2827f5667d68ed058d6cf3d | 1,549 | py | Python | src/fluxgui/fluxcontroller.py | 0xflotus/fluxgui | a3b55da976053fc978b29d191db52dfb8da2f8ee | [
"MIT"
] | 381 | 2015-01-02T01:11:19.000Z | 2021-11-08T11:55:12.000Z | src/fluxgui/fluxcontroller.py | JasonMetal/fluxgui | 23ac6af778ab259edb67972e9cc997370d810f39 | [
"MIT"
] | 49 | 2015-01-04T21:08:52.000Z | 2022-01-05T08:28:13.000Z | src/fluxgui/fluxcontroller.py | JasonMetal/fluxgui | 23ac6af778ab259edb67972e9cc997370d810f39 | [
"MIT"
] | 78 | 2015-01-07T17:07:48.000Z | 2019-10-20T15:23:45.000Z | from fluxgui import xfluxcontroller
class FluxController(xfluxcontroller.XfluxController):
    """XfluxController variant bound to a Settings instance.

    Behaves exactly like XfluxController, but keeps the supplied Settings
    object in sync whenever a relevant controller call is made.
    """

    def __init__(self, settings):
        self.settings = settings
        kwargs = self.settings.xflux_settings_dict()
        super(FluxController, self).__init__(**kwargs)

    def start(self):
        # A location (zipcode or latitude) is required before xflux can run.
        missing_location = (self.settings.zipcode == ""
                            and self.settings.latitude == "")
        if missing_location:
            raise ValueError("Cannot start xflux, missing zipcode and latitude")
        super(FluxController, self).start()

    # --- Controller methods that don't touch xflux ---
    def set_autostart(self, autos):
        self.settings.autostart = autos

    # --- xflux methods that must also persist into settings ---
    def set_xflux_latitude(self, lat):
        self.settings.latitude = lat
        super(FluxController, self).set_xflux_latitude(lat)

    def set_xflux_longitude(self, longit):
        self.settings.longitude = longit
        super(FluxController, self).set_xflux_longitude(longit)

    def set_xflux_zipcode(self, zipc):
        self.settings.zipcode = zipc
        super(FluxController, self).set_xflux_zipcode(zipc)

    def _set_xflux_color(self, col):
        self.settings.color = col
        super(FluxController, self)._set_xflux_color(col)

    def _get_xflux_color(self):
        return super(FluxController, self)._get_xflux_color()

    color = property(_get_xflux_color, _set_xflux_color)
| 33.673913 | 80 | 0.699161 |
2b1b32794cb2132eafa4df8f17ceb4b9a9b587d6 | 3,636 | py | Python | tdmclient/tools/list.py | epfl-mobots/tdm-python | 80eea3a0907c0fec4c19cc12e75bfffacf2eed43 | [
"BSD-3-Clause"
] | 2 | 2022-01-26T09:54:45.000Z | 2022-03-30T07:05:37.000Z | tdmclient/tools/list.py | epfl-mobots/tdm-python | 80eea3a0907c0fec4c19cc12e75bfffacf2eed43 | [
"BSD-3-Clause"
] | null | null | null | tdmclient/tools/list.py | epfl-mobots/tdm-python | 80eea3a0907c0fec4c19cc12e75bfffacf2eed43 | [
"BSD-3-Clause"
] | 1 | 2022-01-26T09:38:51.000Z | 2022-01-26T09:38:51.000Z | # This file is part of tdmclient.
# Copyright 2021 ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE,
# Miniature Mobile Robots group, Switzerland
# Author: Yves Piguet
#
# SPDX-License-Identifier: BSD-3-Clause
import sys
import os
import getopt
from time import sleep
from tdmclient import ClientAsync
def help():
    """Print the command-line usage message for this tool."""
    usage = """Usage: python3 -m tdmclient.tools.list [options]
Run program on robot, from file or stdin
Options:
--debug n display diagnostic information (0=none, 1=basic, 2=more, 3=verbose)
--help display this help message and exit
--password=PWD specify password for remote tdm
--robotid=I robot id; default=any
--robotname=N robot name; default=any
--tdmaddr=H tdm address (default: localhost or from zeroconf)
--tdmport=P tdm port (default: from zeroconf)
"""
    print(usage)
if __name__ == "__main__":
    # Defaults: no debug output, discover the TDM via zeroconf, match any robot.
    debug = 0
    tdm_addr = None
    tdm_port = None
    password = None
    robot_id = None
    robot_name = None
    # Parse long options only (no short options).
    try:
        arguments, values = getopt.getopt(sys.argv[1:],
                                          "",
                                          [
                                              "debug=",
                                              "help",
                                              "password=",
                                              "robotid=",
                                              "robotname=",
                                              "tdmaddr=",
                                              "tdmport=",
                                          ])
    except getopt.error as err:
        print(str(err))
        sys.exit(1)
    for arg, val in arguments:
        if arg == "--help":
            help()
            sys.exit(0)
        elif arg == "--debug":
            debug = int(val)
        elif arg == "--password":
            password = val
        elif arg == "--robotid":
            robot_id = val
        elif arg == "--robotname":
            robot_name = val
        elif arg == "--tdmaddr":
            tdm_addr = val
        elif arg == "--tdmport":
            tdm_port = int(val)
    with ClientAsync(tdm_addr=tdm_addr, tdm_port=tdm_port,
                     password=password,
                     debug=debug) as client:
        # Poll for up to ~5 s (50 x 0.1 s) until at least one node appears.
        for _ in range(50):
            client.process_waiting_messages()
            if len(client.nodes) > 0:
                break
            sleep(0.1)
        # Print one description block per matching node.
        for node in client.filter_nodes(client.nodes,
                                        node_id=robot_id, node_name=robot_name):
            print(f"id: {node.id_str}")
            if "group_id_str" in node.props and node.props["group_id_str"] is not None:
                print(f"group id: {node.props['group_id_str']}")
            if "name" in node.props:
                print(f"name: {node.props['name']}")
            if "status" in node.props:
                # Map the numeric status to a human-readable label.
                status_str = {
                    ClientAsync.NODE_STATUS_UNKNOWN: "unknown",
                    ClientAsync.NODE_STATUS_CONNECTED: "connected",
                    ClientAsync.NODE_STATUS_AVAILABLE: "available",
                    ClientAsync.NODE_STATUS_BUSY: "busy",
                    ClientAsync.NODE_STATUS_READY: "ready",
                    ClientAsync.NODE_STATUS_DISCONNECTED: "disconnected",
                }[node.status]
                print(f"status: {node.status} ({status_str})")
            if "capabilities" in node.props:
                print(f"cap: {node.props['capabilities']}")
            if "fw_version" in node.props:
                print(f"firmware: {node.props['fw_version']}")
            # Blank line between nodes.
            print()
| 34.961538 | 87 | 0.492574 |
c52265cd2218a25ec803d0703f6cfb58ab870921 | 11,047 | py | Python | care/hook_events/purchase_receipt.py | mohsinalimat/care | 5b2f85839c5fa9882eb0d0097825e149402a6a8c | [
"MIT"
] | null | null | null | care/hook_events/purchase_receipt.py | mohsinalimat/care | 5b2f85839c5fa9882eb0d0097825e149402a6a8c | [
"MIT"
] | null | null | null | care/hook_events/purchase_receipt.py | mohsinalimat/care | 5b2f85839c5fa9882eb0d0097825e149402a6a8c | [
"MIT"
] | null | null | null |
import frappe
import erpnext
from frappe.utils import flt
from erpnext.controllers.taxes_and_totals import get_itemised_tax_breakup_data
from erpnext.stock.doctype.purchase_receipt.purchase_receipt import PurchaseReceipt
from erpnext.stock.get_item_details import get_item_details
# Item fields that are always overwritten from get_item_details(), even when
# the child row already carries a value (see set_missing_item_details below).
force_item_fields = (
    "item_group",
    "brand",
    "stock_uom",
    "is_fixed_asset",
    "item_tax_rate",
    "pricing_rules",
    "weight_per_unit",
    "weight_uom",
    "total_weight",
)
class OverridePurchaseReceipt(PurchaseReceipt):
    """Purchase Receipt override that swaps in a custom tax calculation and
    customizes how missing item details are filled from get_item_details."""

    def calculate_taxes_and_totals(self):
        # Delegate to the app-local implementation instead of erpnext's.
        from care.hook_events.taxes_and_total import calculate_taxes_and_totals
        calculate_taxes_and_totals(self)
        if self.doctype in (
            "Sales Order",
            "Delivery Note",
            "Sales Invoice",
            "POS Invoice",
        ):
            self.calculate_commission()
            self.calculate_contribution()

    def set_missing_item_details(self, for_validate=False):
        """set missing item values"""
        from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos
        if hasattr(self, "items"):
            # Copy all parent-level values so get_item_details sees them.
            parent_dict = {}
            for fieldname in self.meta.get_valid_columns():
                parent_dict[fieldname] = self.get(fieldname)
            if self.doctype in ["Quotation", "Sales Order", "Delivery Note", "Sales Invoice"]:
                document_type = "{} Item".format(self.doctype)
                parent_dict.update({"document_type": document_type})
            # party_name field used for customer in quotation
            if (
                self.doctype == "Quotation"
                and self.quotation_to == "Customer"
                and parent_dict.get("party_name")
            ):
                parent_dict.update({"customer": parent_dict.get("party_name")})
            self.pricing_rules = []
            for item in self.get("items"):
                if item.get("item_code"):
                    args = parent_dict.copy()
                    args.update(item.as_dict())
                    args["doctype"] = self.doctype
                    args["name"] = self.name
                    args["child_docname"] = item.name
                    args["ignore_pricing_rule"] = (
                        self.ignore_pricing_rule if hasattr(self, "ignore_pricing_rule") else 0
                    )
                    if not args.get("transaction_date"):
                        args["transaction_date"] = args.get("posting_date")
                    if self.get("is_subcontracted"):
                        args["is_subcontracted"] = self.is_subcontracted
                    ret = get_item_details(args, self, for_validate=True, overwrite_warehouse=False)
                    for fieldname, value in ret.items():
                        if item.meta.get_field(fieldname) and value is not None:
                            if item.get(fieldname) is None or fieldname in force_item_fields:
                                # CUSTOMIZATION: for rows created from an Order
                                # Receiving, only overwrite tax fields on new
                                # docs when the receiving row has its own
                                # item_tax_template.
                                if fieldname in ['item_tax_template','item_tax_rate'] and item.order_receiving_item and self.get("__islocal"):
                                    if frappe.get_value("Order Receiving Item", item.order_receiving_item, 'item_tax_template'):
                                        item.set(fieldname, value)
                                else:
                                    item.set(fieldname, value)
                            elif fieldname in ["cost_center", "conversion_factor"] and not item.get(fieldname):
                                item.set(fieldname, value)
                            elif fieldname == "serial_no":
                                # Ensure that serial numbers are matched against Stock UOM
                                item_conversion_factor = item.get("conversion_factor") or 1.0
                                item_qty = abs(item.get("qty")) * item_conversion_factor
                                if item_qty != len(get_serial_nos(item.get("serial_no"))):
                                    item.set(fieldname, value)
                            elif (
                                ret.get("pricing_rule_removed")
                                and value is not None
                                and fieldname
                                in [
                                    "discount_percentage",
                                    "discount_amount",
                                    "rate",
                                    "margin_rate_or_amount",
                                    "margin_type",
                                    "remove_free_item",
                                ]
                            ):
                                # reset pricing rule fields if pricing_rule_removed
                                item.set(fieldname, value)
                    if self.doctype in ["Purchase Invoice", "Sales Invoice"] and item.meta.get_field(
                        "is_fixed_asset"
                    ):
                        item.set("is_fixed_asset", ret.get("is_fixed_asset", 0))
                    # Double check for cost center
                    # Items add via promotional scheme may not have cost center set
                    if hasattr(item, "cost_center") and not item.get("cost_center"):
                        item.set(
                            "cost_center", self.get("cost_center") or erpnext.get_default_cost_center(self.company)
                        )
                    if ret.get("pricing_rules"):
                        self.apply_pricing_rule_on_items(item, ret)
                        self.set_pricing_rule_details(item, ret)
            if self.doctype == "Purchase Invoice":
                self.set_expense_account(for_validate)
def update_p_r_c_tool_status(doc, method):
    """Hook: mark the linked Purchase Invoice Creation Tool as processed.

    Sets its status to "Receipt Created" once a receipt generated by the
    tool is submitted. ``method`` is the hook event name (unused).
    """
    tool_name = doc.purchase_invoice_creation_tool
    if not tool_name:
        return
    tool = frappe.get_doc("Purchase Invoice Creation Tool", tool_name)
    tool.status = "Receipt Created"
    tool.db_update()
def cancel_update_p_r_c_tool_status(doc, method):
    """Hook: revert the linked Purchase Invoice Creation Tool on cancel.

    Resets its status back to "Success". ``method`` is the hook event
    name (unused).
    """
    tool_name = doc.purchase_invoice_creation_tool
    if not tool_name:
        return
    tool = frappe.get_doc("Purchase Invoice Creation Tool", tool_name)
    tool.status = "Success"
    tool.db_update()
def update_md_status(doc, method):
    """Hook: add this receipt's quantities to the linked Material Demands.

    For each receipt row linked to a Material Demand Item, increment that
    item's received_qty, then recompute per_received and status on every
    touched Material Demand. ``method`` is the hook event name (unused).
    """
    touched_demands = []
    for row in doc.items:
        if not (row.material_demand_item and row.material_demand):
            continue
        if frappe.db.exists("Material Demand Item", row.material_demand_item):
            md_item = frappe.get_doc("Material Demand Item", row.material_demand_item)
            md_item.received_qty = float(md_item.received_qty) + float(row.qty)
            md_item.db_update()
        if row.material_demand not in touched_demands:
            touched_demands.append(row.material_demand)
    for demand_name in touched_demands:
        md = frappe.get_doc("Material Demand", demand_name)
        received = 0
        for item in md.items:
            received += item.received_qty
        per = round((received / md.total_qty) * 100, 2)
        md.per_received = per
        # Treat anything >= 99.99% as fully received.
        md.status = "Partially Received" if per < 99.99 else "Received"
        md.db_update()
def cancel_update_md_status(doc, method):
    """Hook: remove this receipt's quantities from the linked Material Demands.

    Mirror of update_md_status for cancellation: decrement received_qty on
    each linked Material Demand Item, then recompute per_received and status
    on every touched Material Demand. ``method`` is the hook event name
    (unused).
    """
    touched_demands = []
    for row in doc.items:
        if not (row.material_demand_item and row.material_demand):
            continue
        if frappe.db.exists("Material Demand Item", row.material_demand_item):
            md_item = frappe.get_doc("Material Demand Item", row.material_demand_item)
            md_item.received_qty = float(md_item.received_qty) - float(row.qty)
            md_item.db_update()
        if row.material_demand not in touched_demands:
            touched_demands.append(row.material_demand)
    for demand_name in touched_demands:
        md = frappe.get_doc("Material Demand", demand_name)
        received = 0
        for item in md.items:
            received += item.received_qty
        per = round((received / md.total_qty) * 100, 2)
        md.per_received = per
        # Treat anything >= 99.99% as fully received.
        md.status = "Partially Received" if per < 99.99 else "Received"
        md.db_update()
def calculate_item_level_tax_breakup(doc, method):
    """Hook: copy the itemised tax breakup onto each item row.

    Fills sales_tax / further_tax / advance_tax / total_includetaxes per row
    from erpnext's itemised tax breakup; zeroes them when no breakup exists.
    ``method`` is the hook event name (unused).
    """
    if not doc:
        return
    itemised_tax, itemised_taxable_amount = get_itemised_tax_breakup_data(doc)
    if not itemised_tax:
        # No breakup available: zero out all per-row tax fields.
        for row in doc.items:
            row.sales_tax = row.further_tax = row.advance_tax = row.total_includetaxes = 0
        return
    for row in doc.items:
        total = 0
        if row.item_code not in itemised_tax:
            continue
        breakup = itemised_tax[row.item_code]
        for key in breakup:
            amount = flt(breakup[key]['tax_amount']) if breakup[key]['tax_amount'] else 0
            if 'Sales Tax' in key:
                row.sales_tax = amount
                total += flt(row.sales_tax)
            if 'Further Tax' in key:
                row.further_tax = amount
                total += flt(row.further_tax)
            if 'Advance Tax' in key:
                row.advance_tax = amount
        row.total_includetaxes = flt(row.sales_tax + row.further_tax + row.advance_tax) + row.amount
def calculate_line_level_tax(doc, method):
    """Hook: compute per-row taxes from each item's Item Tax Template.

    For every row that carries an item_tax_template, applies each template
    rate to the row amount and totals them into total_includetaxes.
    ``method`` is the hook event name (unused).
    """
    for row in doc.items:
        if not row.item_tax_template:
            continue
        template = frappe.get_doc('Item Tax Template', row.item_tax_template)
        for tax in template.taxes:
            rate_amount = row.amount * (tax.tax_rate / 100)
            if 'Sales Tax' in tax.tax_type:
                row.sales_tax = rate_amount
            if 'Further Tax' in tax.tax_type:
                row.further_tax = rate_amount
            if 'Advance Tax' in tax.tax_type:
                row.advance_tax = rate_amount
        row.total_includetaxes = flt(row.sales_tax + row.further_tax + row.advance_tax) + row.amount
def calculate_taxes(doc, method):
    """Hook: rebuild this document's taxes table from its Order Receiving.

    Each tax row of the Order Receiving is scaled by doc.total / order.total
    and appended with a running total. ``method`` is the hook event name
    (unused).
    """
    order_doc = frappe.get_doc('Order Receiving', doc.order_receiving)
    if not order_doc:
        return
    doc.set("taxes", [])
    running_total = 0
    first_row = True
    for tax_row in order_doc.taxes:
        share = (tax_row.tax_amount / order_doc.total) * doc.total
        if first_row and doc.total > 0:
            # First row starts the running total from the document net total.
            first_row = False
            running_total = doc.total + share
        else:
            running_total += share
        doc.append('taxes', {
            'category': tax_row.category,
            'add_deduct_tax': tax_row.add_deduct_tax,
            'charge_type': tax_row.charge_type,
            'account_head': tax_row.account_head,
            'description': tax_row.description,
            'tax_amount': share,
            'total': running_total
        })
cbe63681074ec3cfb70720a32870fadb2114a5bf | 1,268 | py | Python | app/core/tests/test_models.py | ghoshrohit72/Recipe-API | d0946671a62c8ad0b38dd5f63fcc35e4953c0f99 | [
"MIT"
] | null | null | null | app/core/tests/test_models.py | ghoshrohit72/Recipe-API | d0946671a62c8ad0b38dd5f63fcc35e4953c0f99 | [
"MIT"
] | null | null | null | app/core/tests/test_models.py | ghoshrohit72/Recipe-API | d0946671a62c8ad0b38dd5f63fcc35e4953c0f99 | [
"MIT"
] | null | null | null | from django.test import TestCase
from django.contrib.auth import get_user_model
class ModelTests(TestCase):
    """Unit tests for the project's custom user model manager."""

    def test_create_user_with_email_successful(self):
        """A user created with email + password stores both correctly."""
        email = "rohitofficial95@outlook.com"
        password = "1234"
        user_model = get_user_model()
        user = user_model.objects.create_user(
            email=email,
            password=password,
        )
        self.assertEqual(user.email, email)
        self.assertTrue(user.check_password(password))

    def test_new_user_email_normalized(self):
        """Test the email for a new user is normalized"""
        email = 'test@GMAIL.com'
        user_model = get_user_model()
        user = user_model.objects.create_user(email, 'test123')
        self.assertEqual(user.email, email.lower())

    def test_new_user_invalid_email(self):
        """Test user with no email gets error"""
        user_model = get_user_model()
        with self.assertRaises(ValueError):
            user_model.objects.create_user(None, 'test123')

    def test_create_new_superuser(self):
        """ Test Creating new super user"""
        user_model = get_user_model()
        user = user_model.objects.create_superuser(
            'admin@gmail.com',
            'test1234'
        )
        self.assertTrue(user.is_superuser)
        self.assertTrue(user.is_staff)
ae3352cdabc2d171fbc5e383ba71a98b45bf98b2 | 578 | py | Python | t2s/src/text_preprocessing.py | grestonian/Sign-Language-Translator | 05467000aff9e85da04d1f8ff6e5a92cf30b060d | [
"MIT"
] | null | null | null | t2s/src/text_preprocessing.py | grestonian/Sign-Language-Translator | 05467000aff9e85da04d1f8ff6e5a92cf30b060d | [
"MIT"
] | null | null | null | t2s/src/text_preprocessing.py | grestonian/Sign-Language-Translator | 05467000aff9e85da04d1f8ff6e5a92cf30b060d | [
"MIT"
] | 1 | 2020-12-07T15:55:43.000Z | 2020-12-07T15:55:43.000Z | import spacy
from pprint import pprint
import string
import pickle
# Load the spaCy English pipeline once at import time.
nlp = spacy.load("en_core_web_sm")
# Custom stop-word list used by this module (not spaCy's built-in set).
stop_words = {'a', 'am', 'are', 'the', 'am', 'to', 'for', 'of'}
# Pickled gloss dictionary loaded from disk at import time
# (ISL = Indian Sign Language, presumably -- TODO confirm).
with open('isl_dictionary', 'rb') as dic:
    isl_dict = pickle.load(dic)
# print(isl_dict)
def preprocess_text(text):
    """Parse *text* with spaCy and pretty-print each token's attributes.

    Currently returns an empty list; the per-token output goes to stdout.
    """
    processed = []
    parsed = nlp(text)
    for token in parsed:
        token_details = (token.text, token.pos_, token.tag_, token.dep_,
                         token.shape_, token.is_alpha, token.is_stop)
        pprint(token_details, indent=4)
    return processed
# Example sentence kept for manual testing of preprocess_text.
text = 'we are waiting for gabriel'
# preprocess_text(text)
f3e99c6ca95b5644b7ee928570eea48a9f25fdc9 | 2,928 | py | Python | tests/conftest.py | avosper-intellaegis/runway | 757d4e7db269ec16479b044ac82a69f25fa2a450 | [
"Apache-2.0"
] | 134 | 2018-02-26T21:35:23.000Z | 2022-03-03T00:30:27.000Z | tests/conftest.py | asksmruti/runway | 8aca76df9372e3d13eb35e12f81758f618e89e74 | [
"Apache-2.0"
] | 937 | 2018-03-08T22:04:35.000Z | 2022-03-30T12:21:47.000Z | tests/conftest.py | asksmruti/runway | 8aca76df9372e3d13eb35e12f81758f618e89e74 | [
"Apache-2.0"
] | 70 | 2018-02-26T23:48:11.000Z | 2022-03-02T18:44:30.000Z | """Pytest configuration, fixtures, and plugins."""
# pylint: disable=redefined-outer-name
from __future__ import annotations
import os
from pathlib import Path
from typing import TYPE_CHECKING, Generator, Iterator
import pytest
from .factories import cli_runner_factory
if TYPE_CHECKING:
from _pytest.config import Config
from _pytest.config.argparsing import Parser
from _pytest.fixtures import SubRequest
from click.testing import CliRunner
from pytest import TempPathFactory
def pytest_configure(config: Config) -> None:
    """Configure pytest."""
    # Register the custom ``cli_runner`` marker so pytest does not warn
    # about an unknown mark.
    marker_doc = (
        "cli_runner(charset:='utf-8', env=None, echo_stdin=False, mix_stderr=True): "
        "Pass kwargs to `click.testing.CliRunner` initialization."
    )
    config.addinivalue_line("markers", marker_doc)  # cspell:ignore addinivalue
def pytest_addoption(parser: Parser) -> None:
    """Add pytest CLI options."""
    # All three options are boolean flags defaulting to False.
    flag_specs = [
        ("--functional", "run only functional tests"),
        ("--integration", "include integration tests in regular testing"),
        ("--integration-only", "run only integration tests"),
    ]
    for flag, help_text in flag_specs:
        parser.addoption(
            flag,
            action="store_true",
            default=False,
            help=help_text,
        )
@pytest.fixture(scope="function")
def cli_runner(request: SubRequest) -> CliRunner:
    """Initialize instance of `click.testing.CliRunner`."""
    # The factory reads kwargs from the ``cli_runner`` marker registered in
    # pytest_configure and forwards them to CliRunner.
    return cli_runner_factory(request)
@pytest.fixture(scope="function")
def cli_runner_isolated(cli_runner: CliRunner) -> Generator[CliRunner, None, None]:
    """Initialize instance of `click.testing.CliRunner` with `isolate_filesystem()` called."""
    # The isolated filesystem lasts for the duration of the test, then is
    # cleaned up by click.
    with cli_runner.isolated_filesystem():
        yield cli_runner
@pytest.fixture(scope="function")
def cd_tmp_path(tmp_path: Path) -> Iterator[Path]:
    """Yield ``tmp_path`` after making it the current working directory.

    The previous working directory is always restored afterwards, even if
    the test raises.
    """
    starting_dir = Path.cwd()
    os.chdir(tmp_path)
    try:
        yield tmp_path
    finally:
        os.chdir(starting_dir)
@pytest.fixture(scope="function")
def root_dir() -> Path:
    """Return a path object to the root directory."""
    # tests/conftest.py -> tests/ -> repository root.
    return Path(__file__).parent.parent
@pytest.fixture(scope="session", autouse=True)
def sanitize_environment() -> None:
    """Remove variables from the environment that could interfere with tests."""
    # autouse + session scope: runs once before any test in the session.
    for var in (
        "CI",
        "DEBUG",
        "DEPLOY_ENVIRONMENT",
        "CFNGIN_STACK_POLL_TIME",
        "RUNWAY_MAX_CONCURRENT_MODULES",
        "RUNWAY_MAX_CONCURRENT_REGIONS",
    ):
        # pop with default: absent variables are ignored.
        os.environ.pop(var, None)
@pytest.fixture(scope="session")
def tfenv_dir(tmp_path_factory: TempPathFactory) -> Path:
    """Directory for storing tfenv between tests."""
    # Session-scoped so the same directory is reused across tests.
    return tmp_path_factory.mktemp(".tfenv", numbered=True)
| 27.622642 | 94 | 0.681011 |
4c2b09964fadeda985de08868f2eb8009fba4603 | 3,269 | py | Python | models/pointnet2_part_seg_msg_one_hot.py | zazgf/Tensorflow1.15-pointNet- | a21425c0066c83215d91e1e810418161f2500125 | [
"MIT"
] | 1 | 2021-08-20T10:45:10.000Z | 2021-08-20T10:45:10.000Z | models/pointnet2_part_seg_msg_one_hot.py | zazgf/Tensorflow1.15-pointNet | a21425c0066c83215d91e1e810418161f2500125 | [
"MIT"
] | null | null | null | models/pointnet2_part_seg_msg_one_hot.py | zazgf/Tensorflow1.15-pointNet | a21425c0066c83215d91e1e810418161f2500125 | [
"MIT"
] | null | null | null | import os
import sys
# Make sibling modules (tf_util) and ../utils (pointnet_util) importable
# regardless of the current working directory.
BASE_DIR = os.path.dirname(__file__)
sys.path.append(BASE_DIR)
sys.path.append(os.path.join(BASE_DIR, '../utils'))
import tensorflow as tf
import numpy as np
import tf_util
from pointnet_util import pointnet_sa_module, pointnet_sa_module_msg, pointnet_fp_module
def placeholder_inputs(batch_size, num_point):
    """Build the tf.compat.v1 placeholders fed to the network.

    Returns (point cloud BxNx6, per-point labels BxN, per-cloud class label B).
    """
    points_ph = tf.compat.v1.placeholder(tf.float32, shape=(batch_size, num_point, 6))
    seg_labels_ph = tf.compat.v1.placeholder(tf.int32, shape=(batch_size, num_point))
    cls_labels_ph = tf.compat.v1.placeholder(tf.int32, shape=(batch_size))
    return points_ph, seg_labels_ph, cls_labels_ph
# Size of the one-hot object-category label concatenated into the features
# (presumably the 16 ShapeNet-part categories -- TODO confirm).
NUM_CATEGORIES = 16
def get_model(point_cloud, cls_label, is_training, bn_decay=None):
    """ Classification PointNet, input is BxNx3, output Bx40 """
    # NOTE(review): docstring appears copied from the classification model;
    # this function actually builds a part-segmentation network -- input is
    # BxNx6 (3 coords + 3 extra channels) and output is BxNx50 per-point
    # logits. Confirm and update the docstring.
    batch_size = point_cloud.get_shape()[0].value
    num_point = point_cloud.get_shape()[1].value
    end_points = {}
    # Split input into xyz coordinates and the remaining 3 feature channels.
    l0_xyz = tf.slice(point_cloud, [0,0,0], [-1,-1,3])
    l0_points = tf.slice(point_cloud, [0,0,3], [-1,-1,3])
    # Set abstraction layers
    l1_xyz, l1_points = pointnet_sa_module_msg(l0_xyz, l0_points, 512, [0.1,0.2,0.4], [32,64,128], [[32,32,64], [64,64,128], [64,96,128]], is_training, bn_decay, scope='layer1')
    l2_xyz, l2_points = pointnet_sa_module_msg(l1_xyz, l1_points, 128, [0.4,0.8], [64,128], [[128,128,256],[128,196,256]], is_training, bn_decay, scope='layer2')
    l3_xyz, l3_points, l3_indices = pointnet_sa_module(l2_xyz, l2_points, npoint=None, radius=None, nsample=None, mlp=[256,512,1024], mlp2=None, group_all=True, is_training=is_training, bn_decay=bn_decay, scope='layer3')
    # Feature propagation layers
    l2_points = pointnet_fp_module(l2_xyz, l3_xyz, l2_points, l3_points, [256,256], is_training, bn_decay, scope='fa_layer1')
    l1_points = pointnet_fp_module(l1_xyz, l2_xyz, l1_points, l2_points, [256,128], is_training, bn_decay, scope='fa_layer2')
    # Tile the one-hot object-category label onto every point before the
    # last propagation step.
    cls_label_one_hot = tf.one_hot(cls_label, depth=NUM_CATEGORIES, on_value=1.0, off_value=0.0)
    cls_label_one_hot = tf.reshape(cls_label_one_hot, [batch_size, 1, NUM_CATEGORIES])
    cls_label_one_hot = tf.tile(cls_label_one_hot, [1,num_point,1])
    l0_points = pointnet_fp_module(l0_xyz, l1_xyz, tf.concat([cls_label_one_hot, l0_xyz, l0_points],axis=-1), l1_points, [128,128], is_training, bn_decay, scope='fp_layer3')
    # FC layers
    net = tf_util.conv1d(l0_points, 128, 1, padding='VALID', bn=True, is_training=is_training, scope='fc1', bn_decay=bn_decay)
    end_points['feats'] = net
    net = tf_util.dropout(net, keep_prob=0.5, is_training=is_training, scope='dp1')
    net = tf_util.conv1d(net, 50, 1, padding='VALID', activation_fn=None, scope='fc2')
    return net, end_points
def get_loss(pred, label):
    """ pred: BxNxC,
        label: BxN, """
    # Mean per-point softmax cross-entropy over the whole batch.
    per_point_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=pred, labels=label)
    classify_loss = tf.reduce_mean(per_point_loss)
    tf.summary.scalar('classify loss', classify_loss)
    tf.add_to_collection('losses', classify_loss)
    return classify_loss
if __name__=='__main__':
    # Smoke test: build the graph with a dummy batch and print the output op.
    with tf.Graph().as_default():
        inputs = tf.zeros((32,2048,6))
        cls_labels = tf.zeros((32),dtype=tf.int32)
        output, ep = get_model(inputs, cls_labels, tf.constant(True))
        print(output)
| 48.791045 | 220 | 0.722545 |
eef2909a7b5f075cb46feef98377db0144e23f9e | 3,543 | py | Python | script.module.fantastic/lib/resources/lib/sources/gr/gamatotv.py | TheWardoctor/wardoctors-repo | 893f646d9e27251ffc00ca5f918e4eb859a5c8f0 | [
"Apache-2.0"
] | 1 | 2019-03-05T09:38:10.000Z | 2019-03-05T09:38:10.000Z | script.module.fantastic/lib/resources/lib/sources/gr/gamatotv.py | TheWardoctor/wardoctors-repo | 893f646d9e27251ffc00ca5f918e4eb859a5c8f0 | [
"Apache-2.0"
] | null | null | null | script.module.fantastic/lib/resources/lib/sources/gr/gamatotv.py | TheWardoctor/wardoctors-repo | 893f646d9e27251ffc00ca5f918e4eb859a5c8f0 | [
"Apache-2.0"
] | 1 | 2021-11-05T22:16:08.000Z | 2021-11-05T22:16:08.000Z | # -*- coding: utf-8 -*-
'''
Fantastic Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib, urlparse, re
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import source_utils
from resources.lib.modules import dom_parser
from resources.lib.modules import trakt
from resources.lib.modules import tvmaze
class source:
    """Scraper source for gamatotv.me (Python 2 code: uses urllib.quote_plus
    and urlparse). Broad ``except`` clauses are the add-on's convention:
    any scraping failure degrades to "no results" instead of raising."""

    def __init__(self):
        self.priority = 1
        self.language = ['gr']
        self.domains = ['gamatotv.me']
        self.base_link = 'http://gamatotv.me/'
        self.search_link = '/groups/group/search?q=%s'

    def movie(self, imdb, title, localtitle, aliases, year):
        # Resolve a movie page URL, trying the local title first, then the
        # original title, then the Greek translation from trakt.
        try:
            url = self.__search([localtitle] + source_utils.aliases_to_array(aliases), year)
            if not url and title != localtitle: url = self.__search([title] + source_utils.aliases_to_array(aliases),year)
            if not url: url = self.__search(self.search_link + trakt.getMovieTranslation(imdb, 'el'), year)
            return url
        except:
            return

    def __search(self, titles, year):
        # Search the site and return the relative URL of the first result
        # whose cleaned title and year match.
        try:
            query = self.search_link % (urllib.quote_plus(cleantitle.getsearch(titles[0]+' '+year)))
            query = urlparse.urljoin(self.base_link, query)
            t = [cleantitle.get(i) for i in set(titles) if i][0]
            r = client.request(query)
            r = client.parseDOM(r, 'div', attrs={'class': 'bd'})
            for i in r:
                # NOTE(review): the loop variable chain rebinds ``r`` inside
                # the loop, so only the first iteration sees the full list.
                r = dom_parser.parse_dom(i, 'h3')
                r = dom_parser.parse_dom(r, 'a')
                title = r[0][1]
                y = re.findall('(\d{4})', title, re.DOTALL)[0]
                title = cleantitle.get(title.split('(')[0])
                if title in t and year == y:
                    return source_utils.strip_domain(r[0][0]['href'])
            return
        except:
            return

    def sources(self, url, hostDict, hostprDict):
        # Collect hoster links from a movie page; returns a (possibly empty)
        # list of source dicts.
        sources = []
        try:
            if not url:
                return sources
            query = urlparse.urljoin(self.base_link, url)
            r = client.request(query)
            links = client.parseDOM(r, 'div', attrs={'class': 'xg_user_generated'})
            links = dom_parser.parse_dom(links, 'a')
            for i in links:
                url = i[0]['href']
                if 'youtube' in url: continue
                quality = 'SD'
                lang, info = 'gr', 'SUB'
                valid, host = source_utils.is_host_valid(url, hostDict)
                # hdvid is accepted even when not in the host whitelist.
                if 'hdvid' in host: valid = True
                if not valid: continue
                sources.append({'source': host, 'quality': quality, 'language': lang, 'url': url, 'info': info,
                                'direct':False,'debridonly': False})
            return sources
        except:
            return sources

    def resolve(self, url):
        # Links are already direct hoster URLs; nothing to resolve.
        return url
683d318d2afceb214ebf5bd82464aa014255a891 | 1,334 | py | Python | feedjack/urls.py | elpaso/feedjack | 5c1eb2f58fbf2c9dcb120a73f2b4a4aa0d911a2b | [
"BSD-3-Clause"
] | null | null | null | feedjack/urls.py | elpaso/feedjack | 5c1eb2f58fbf2c9dcb120a73f2b4a4aa0d911a2b | [
"BSD-3-Clause"
] | null | null | null | feedjack/urls.py | elpaso/feedjack | 5c1eb2f58fbf2c9dcb120a73f2b4a4aa0d911a2b | [
"BSD-3-Clause"
] | 1 | 2020-01-15T08:58:53.000Z | 2020-01-15T08:58:53.000Z | # -*- coding: utf-8 -*-
"""
feedjack
Gustavo Picón
urls.py
"""
from django.conf.urls import patterns
from django.views.generic.base import RedirectView
from feedjack import views
# URL routes for the feedjack app. Legacy redirect paths are forwarded to
# their canonical /feed/... equivalents via RedirectView.
urlpatterns = patterns('',
    (r'^rss20.xml$', RedirectView.as_view(url='/feed/rss/')),
    # BUG FIX: the class itself was previously passed with a kwargs dict
    # ((r'^feed/$', RedirectView, {'url': ...})), which never produces a
    # response -- class-based views must be wired up with .as_view(), as
    # every other redirect entry in this list already does.
    (r'^feed/$', RedirectView.as_view(url='/feed/atom/')),
    (r'^feed/rss/$', views.rssfeed),
    (r'^feed/atom/$', views.atomfeed),
    (r'^feed/user/(?P<user>\d+)/tag/(?P<tag>.*)/$', RedirectView.as_view(url='/feed/atom/user/%(user)s/tag/%(tag)s/')),
    (r'^feed/user/(?P<user>\d+)/$', RedirectView.as_view(url='/feed/atom/user/%(user)s/')),
    (r'^feed/tag/(?P<tag>.*)/$', RedirectView.as_view(url='/feed/atom/tag/%(tag)s/')),
    (r'^feed/atom/user/(?P<user>\d+)/tag/(?P<tag>.*)/$', views.atomfeed),
    (r'^feed/atom/user/(?P<user>\d+)/$', views.atomfeed),
    (r'^feed/atom/tag/(?P<tag>.*)/$', views.atomfeed),
    (r'^feed/rss/user/(?P<user>\d+)/tag/(?P<tag>.*)/$', views.rssfeed),
    (r'^feed/rss/user/(?P<user>\d+)/$', views.rssfeed),
    (r'^feed/rss/tag/(?P<tag>.*)/$', views.rssfeed),
    (r'^user/(?P<user>\d+)/tag/(?P<tag>.*)/$', views.mainview),
    (r'^user/(?P<user>\d+)/$', views.mainview),
    (r'^tag/(?P<tag>.*)/$', views.mainview),
    (r'^opml/$', views.opml),
    (r'^foaf/$', views.foaf),
    (r'^$', views.mainview),
)
#~
| 30.318182 | 119 | 0.558471 |
b5597c216167416f214b30d574a6d85dd43dda29 | 7,268 | py | Python | benchmarks/f3_wrong_hints_permutations/scaling_ltl_infinite_state/7-extending_bound_29.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 3 | 2021-04-23T23:29:26.000Z | 2022-03-23T10:00:30.000Z | benchmarks/f3_wrong_hints_permutations/scaling_ltl_infinite_state/7-extending_bound_29.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | null | null | null | benchmarks/f3_wrong_hints_permutations/scaling_ltl_infinite_state/7-extending_bound_29.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 1 | 2021-11-17T22:02:56.000Z | 2021-11-17T22:02:56.000Z | from typing import Tuple, FrozenSet
from collections import Iterable
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_integer_type, msat_get_rational_type, msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or
from mathsat import msat_make_leq, msat_make_equal
from mathsat import msat_make_number, msat_make_plus
from pysmt.environment import Environment as PysmtEnv
import pysmt.typing as types
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next, symb_to_next
from hint import Hint, Location
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Build ``arg0 < arg1`` as the negation of ``arg0 >= arg1``."""
    return msat_make_not(menv, msat_make_geq(menv, arg0, arg1))
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
    # a >= b is encoded as b <= a (mathsat only exposes leq natively here).
    return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Build ``arg0 > arg1`` as the negation of ``arg0 <= arg1``."""
    return msat_make_not(menv, msat_make_leq(menv, arg0, arg1))
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Return the implication ``arg0 -> arg1`` as ``(not arg0) or arg1``."""
    return msat_make_or(menv, msat_make_not(menv, arg0), arg1)
def check_ltl(menv: msat_env, enc: LTLEncoder) -> Tuple[Iterable, msat_term,
                                                        msat_term, msat_term]:
    """Build the transition system and LTL property for this benchmark.

    Declares rational symbols ``i``, ``r``, ``l`` and boolean ``inc_i``
    (plus their next-state versions) and returns the tuple
    ``(curr2next map, init, trans, ltl)`` expected by the checker.
    """
    assert menv
    assert isinstance(menv, msat_env)
    assert enc
    assert isinstance(enc, LTLEncoder)
    bool_type = msat_get_bool_type(menv)
    real_type = msat_get_rational_type(menv)
    # Current-state symbols: each is declared then wrapped as a constant term.
    i = msat_declare_function(menv, "i", real_type)
    i = msat_make_constant(menv, i)
    r = msat_declare_function(menv, "r", real_type)
    r = msat_make_constant(menv, r)
    l = msat_declare_function(menv, "l", real_type)
    l = msat_make_constant(menv, l)
    inc_i = msat_declare_function(menv, "inc_i", bool_type)
    inc_i = msat_make_constant(menv, inc_i)
    # Next-state symbols (primed versions via name_next).
    x_i = msat_declare_function(menv, name_next("i"), real_type)
    x_i = msat_make_constant(menv, x_i)
    x_r = msat_declare_function(menv, name_next("r"), real_type)
    x_r = msat_make_constant(menv, x_r)
    x_l = msat_declare_function(menv, name_next("l"), real_type)
    x_l = msat_make_constant(menv, x_l)
    x_inc_i = msat_declare_function(menv, name_next("inc_i"), bool_type)
    x_inc_i = msat_make_constant(menv, x_inc_i)
    curr2next = {i: x_i, r: x_r, l: x_l, inc_i: x_inc_i}
    zero = msat_make_number(menv, "0")
    one = msat_make_number(menv, "1")
    # init: 0 < r < l  &  i >= 0  &  !inc_i  &  l > 0
    r_gt_0 = msat_make_gt(menv, r, zero)
    r_lt_l = msat_make_lt(menv, r, l)
    i_geq_0 = msat_make_geq(menv, i, zero)
    init = msat_make_and(menv, r_gt_0, r_lt_l)
    init = msat_make_and(menv, init,
                         msat_make_and(menv, i_geq_0,
                                       msat_make_not(menv, inc_i)))
    init = msat_make_and(menv, init, msat_make_gt(menv, l, zero))
    # r' = r
    trans = msat_make_equal(menv, x_r, r)
    # i < l -> ((inc_i' & i' = i + 1) | (!inc_i' & i' = i)) & l' = l
    i_lt_l = msat_make_lt(menv, i, l)
    x_i_eq_i_p_1 = msat_make_and(menv, x_inc_i,
                                 msat_make_equal(menv, x_i,
                                                 msat_make_plus(menv, i, one)))
    x_i_eq_i = msat_make_and(menv, msat_make_not(menv, x_inc_i),
                             msat_make_equal(menv, x_i, i))
    x_i_eq_i_p_1_or_i = msat_make_or(menv, x_i_eq_i_p_1, x_i_eq_i)
    x_l_eq_l = msat_make_equal(menv, x_l, l)
    x_i_eq_i_p_1_or_i_and_x_l_eq_l = msat_make_and(menv, x_i_eq_i_p_1_or_i,
                                                   x_l_eq_l)
    trans = msat_make_and(menv, trans,
                          msat_make_impl(menv, i_lt_l,
                                         x_i_eq_i_p_1_or_i_and_x_l_eq_l))
    # i >= l -> i' = 0 & l' = l + 1 & !inc_i'
    i_geq_l = msat_make_geq(menv, i, l)
    x_i_eq_0 = msat_make_equal(menv, x_i, zero)
    x_l_eq_l_p_1 = msat_make_equal(menv, x_l, msat_make_plus(menv, l, one))
    x_i_eq_0_and_x_l_eq_l_p_1 = msat_make_and(menv,
                                              msat_make_and(menv, x_i_eq_0,
                                                            x_l_eq_l_p_1),
                                              msat_make_not(menv, x_inc_i))
    trans = msat_make_and(menv, trans,
                          msat_make_impl(menv, i_geq_l,
                                         x_i_eq_0_and_x_l_eq_l_p_1))
    # LTL property: (G F inc_i) -> ! G F (r > i)
    # NOTE(review): the name G_F_x_i_gt_i is misleading -- the term is
    # actually G F inc_i, as built below.
    G_F_x_i_gt_i = enc.make_G(enc.make_F(inc_i))
    r_gt_i = msat_make_gt(menv, r, i)
    n_G_F_r_gt_i = msat_make_not(menv, enc.make_G(enc.make_F(r_gt_i)))
    ltl = msat_make_impl(menv, G_F_x_i_gt_i, n_G_F_r_gt_i)
    return TermMap(curr2next), init, trans, ltl
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
    """Build the benchmark's set of Hint objects over symbols i, r, l, inc_i.

    Each Hint owns one or more Locations whose progress conditions
    constrain the next-state value of a single symbol.
    """
    assert isinstance(env, PysmtEnv)
    mgr = env.formula_manager
    # Current-state symbols.
    i = mgr.Symbol("i", types.REAL)
    r = mgr.Symbol("r", types.REAL)
    l = mgr.Symbol("l", types.REAL)
    inc_i = mgr.Symbol("inc_i", types.BOOL)
    symbs = frozenset([i, r, l, inc_i])
    # Next-state symbols.
    x_i = symb_to_next(mgr, i)
    x_r = symb_to_next(mgr, r)
    x_l = symb_to_next(mgr, l)
    x_inc_i = symb_to_next(mgr, inc_i)  # declared for symmetry; unused below
    res = []
    n0 = mgr.Real(0)
    n1 = mgr.Real(1)
    # h_l2: l alternates between incrementing by 1 and staying constant.
    loc0 = Location(env, mgr.GE(l, n0))
    loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
    loc1 = Location(env, mgr.GE(l, n0))
    loc1.set_progress(0, mgr.Equals(x_l, l))
    h_l = Hint("h_l2", env, frozenset([l]), symbs)
    h_l.set_locs([loc0, loc1])
    res.append(h_l)
    # h_i0: i may stutter (i' = i); on progress, i increments by 1.
    stutter = mgr.Equals(x_i, i)
    loc = Location(env, mgr.GE(i, n0), stutterT=stutter)
    loc.set_progress(0, mgr.Equals(x_i, mgr.Plus(i, n1)))
    h_i = Hint("h_i0", env, frozenset([i]), symbs)
    h_i.set_locs([loc])
    res.append(h_i)
    # h_r2: r alternates between staying constant and incrementing by 1.
    loc0 = Location(env, mgr.GE(r, n0))
    loc0.set_progress(1, mgr.Equals(x_r, r))
    loc1 = Location(env, mgr.GE(r, n0))
    loc1.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
    h_r = Hint("h_r2", env, frozenset([r]), symbs)
    h_r.set_locs([loc0, loc1])
    res.append(h_r)
    # h_i1: under region i <= 0, i may stutter; on progress, i decrements.
    stutter = mgr.Equals(x_i, i)
    loc = Location(env, mgr.LE(i, n0), stutterT=stutter)
    loc.set_progress(0, mgr.Equals(x_i, mgr.Minus(i, n1)))
    h_i = Hint("h_i1", env, frozenset([i]), symbs)
    h_i.set_locs([loc])
    res.append(h_i)
    # h_r4: three-location cycle on r: keep, increment by 1, keep.
    loc0 = Location(env, mgr.GE(r, n0))
    loc0.set_progress(1, mgr.Equals(x_r, r))
    loc1 = Location(env, mgr.GE(r, n0))
    loc1.set_progress(2, mgr.Equals(x_r, mgr.Plus(r, n1)))
    loc2 = Location(env, mgr.GE(r, n0))
    loc2.set_progress(0, mgr.Equals(x_r, r))
    h_r = Hint("h_r4", env, frozenset([r]), symbs)
    h_r.set_locs([loc0, loc1, loc2])
    res.append(h_r)
    # h_l4: three-location cycle on l: increment by 1, keep, keep.
    loc0 = Location(env, mgr.GE(l, n0))
    loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
    loc1 = Location(env, mgr.GE(l, n0))
    loc1.set_progress(2, mgr.Equals(x_l, l))
    loc2 = Location(env, mgr.GE(l, n0))
    loc2.set_progress(0, mgr.Equals(x_l, l))
    h_l = Hint("h_l4", env, frozenset([l]), symbs)
    h_l.set_locs([loc0, loc1, loc2])
    res.append(h_l)
    # h_l0: single location; l always increments by 1.
    loc = Location(env, mgr.GE(l, n0))
    loc.set_progress(0, mgr.Equals(x_l, mgr.Plus(l, n1)))
    h_l = Hint("h_l0", env, frozenset([l]), symbs)
    h_l.set_locs([loc])
    res.append(h_l)
    return frozenset(res)
| 36.893401 | 89 | 0.629196 |
532876415b0e26318b817696f8d4476fc49ecf64 | 5,777 | py | Python | pyreportjasper/jasperpy.py | HamburgerJungeJr/pyreportjasper | 131f4371be3b297a779473812e164f29295f6a07 | [
"MIT"
] | null | null | null | pyreportjasper/jasperpy.py | HamburgerJungeJr/pyreportjasper | 131f4371be3b297a779473812e164f29295f6a07 | [
"MIT"
] | null | null | null | pyreportjasper/jasperpy.py | HamburgerJungeJr/pyreportjasper | 131f4371be3b297a779473812e164f29295f6a07 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2017 Jadson Bonfim Ribeiro <contato@jadsonbr.com.br>
#
import os
import subprocess
import re
import xml.etree.ElementTree as ET
# Output formats accepted by the JasperStarter "process" sub-command.
FORMATS = (
    'pdf',
    'rtf',
    'xls',
    'xlsx',
    'docx',
    'odt',
    'ods',
    'pptx',
    'csv',
    'html',
    'xhtml',
    'xml',
    'jrprint',
)

# Name of the JasperStarter binary shipped under jasperstarter/bin.
EXECUTABLE = 'jasperstarter'
class JasperPy:
    """Thin wrapper around the JasperStarter command-line tool.

    Builds a ``jasperstarter`` shell command for the ``compile`` and
    ``process`` sub-commands and executes it, returning the exit code.
    """

    def __init__(self, resource_dir=False):
        """Initialize paths.

        :param resource_dir: report resource directory (or jar file);
            defaults to the bundled ``jasperstarter/bin`` directory.
        :raises NameError: if an explicit ``resource_dir`` does not exist.
        """
        self.path_executable = os.path.dirname(os.path.abspath(__file__)) \
            + '/jasperstarter/bin'
        self.windows = True if os.name == 'nt' else False
        self._command = ''
        if not resource_dir:
            resource_dir = os.path.dirname(os.path.abspath(__file__)) \
                + '/jasperstarter/bin'
        elif not os.path.exists(resource_dir):
            raise NameError('Invalid resource directory!')
        # Path to report resource dir or jar file
        self.resource_directory = resource_dir

    def compile(self, input_file, output_file=False):
        """Compile a .jrxml report file; return the process exit code.

        :raises NameError: if ``input_file`` is falsy.
        """
        if not input_file:
            raise NameError('No input file!')
        command = self.path_executable + '/' + EXECUTABLE
        command += ' compile '
        command += "\"%s\"" % input_file
        if output_file:
            command += ' -o ' + "\"%s\"" % output_file
        self._command = command
        return self.execute()

    def process(self, input_file, output_file=False, format_list=None,
                parameters=None, db_connection=None, locale='pt_BR',
                resource=""):
        """Render a report to one or more output formats.

        :param format_list: list drawn from FORMATS (default ``['pdf']``).
        :param parameters: report parameters passed via ``-P key="value"``.
        :param db_connection: datasource options keyed by JasperStarter
            flag names (driver, username, host, jdbc_url, ...).
        :return: process exit code.
        :raises NameError: on missing input, bad format list, or failure.
        """
        # None-defaults avoid the mutable-default-argument pitfall while
        # keeping the original behavior for callers that pass nothing.
        if format_list is None:
            format_list = ['pdf']
        if parameters is None:
            parameters = {}
        if db_connection is None:
            db_connection = {}
        if not input_file:
            raise NameError('No input file!')
        if isinstance(format_list, list):
            if any([key not in FORMATS for key in format_list]):
                raise NameError('Invalid format!')
        else:
            raise NameError("'format_list' value is not list!")
        command = self.path_executable + '/' + EXECUTABLE
        command += " --locale %s" % locale
        command += ' process '
        command += "\"%s\"" % input_file
        if output_file:
            command += ' -o ' + "\"%s\"" % output_file
        command += ' -f ' + ' '.join(format_list)
        if len(parameters) > 0:
            command += ' -P '
            for key, value in parameters.items():
                param = key + '="' + value + '" '
                command += " " + param + " "
        if len(db_connection) > 0:
            command += ' -t ' + db_connection['driver']
            if 'username' in db_connection:
                command += " -u " + db_connection['username']
            if 'password' in db_connection:
                command += ' -p ' + db_connection['password']
            if 'host' in db_connection:
                command += ' -H ' + db_connection['host']
            if 'database' in db_connection:
                command += ' -n ' + db_connection['database']
            if 'port' in db_connection:
                command += ' --db-port ' + db_connection['port']
            if 'jdbc_driver' in db_connection:
                command += ' --db-driver ' + db_connection['jdbc_driver']
            if 'jdbc_url' in db_connection:
                command += ' --db-url ' + db_connection['jdbc_url']
            if 'jdbc_dir' in db_connection:
                command += ' --jdbc-dir ' + db_connection['jdbc_dir']
            if 'db_sid' in db_connection:
                command += ' --db-sid ' + db_connection['db_sid']
            if 'xml_xpath' in db_connection:
                command += ' --xml-xpath ' + db_connection['xml_xpath']
            if 'data_file' in db_connection:
                command += ' --data-file ' + db_connection['data_file']
            if 'json_query' in db_connection:
                command += ' --json-query ' + db_connection['json_query']
            if 'jsonql_query' in db_connection:
                command += ' --jsonql-query ' + db_connection['jsonql_query']
        if resource != "":
            if resource == ".":
                command += " -r "
            else:
                command += " -r " + resource
        self._command = command
        return self.execute()

    @staticmethod
    def list_parameters(input_xml):
        """Parse a .jrxml file and return its declared parameters.

        :return: dict mapping parameter name -> [java class,
            default-value expression text (or '' when absent)].
        :raises NameError: if ``input_xml`` is falsy.
        """
        if not input_xml:
            raise NameError('No input file!')
        with open(input_xml, 'r') as f:
            f_content = f.read()
        # Strip the default xmlns so findall('parameter') matches.
        xmlstring = re.sub(' xmlns="[^"]+"', '', f_content, count=1)
        param_dic = {}
        tree = ET.fromstring(xmlstring)
        for item in tree.findall('parameter'):
            if item.get('name'):
                param_dic.update({item.get('name'): [item.get('class')]})
                if list(item):
                    # First child is the default-value expression element.
                    param_dic[item.get('name')].append(list(item)[0].text)
                else:
                    param_dic[item.get('name')].append('')
        return param_dic

    @property
    def command(self):
        """The last command string built by compile()/process()."""
        return self._command

    def execute(self, run_as_user=False):
        """Run the previously built command; return its exit code.

        :raises NameError: if the executable directory is missing or the
            report fails to process.
        """
        if run_as_user and (not self.windows):
            # BUGFIX: standard `su` takes the user as a positional
            # argument (`su <user> -c "<cmd>"`); the former `-u` flag is
            # not a valid su option.
            self._command = 'su ' + run_as_user + " -c \"" + \
                            self.command + "\""
        if not os.path.isdir(self.path_executable):
            raise NameError('Invalid resource directory!')
        try:
            output = subprocess.run(
                self.command, shell=True, check=True, encoding='utf-8',
                stderr=subprocess.PIPE)
        except AttributeError:
            # Python < 3.5 has no subprocess.run; check_call returns the
            # exit status as an int directly (the original code wrongly
            # accessed .returncode on it, raising AttributeError).
            return subprocess.check_call(self.command, shell=True)
        except subprocess.CalledProcessError as e:
            raise NameError('Your report has an error and couldn\'t be processed!\n' + e.stderr)
        return output.returncode
| 30.088542 | 100 | 0.525359 |
1aecfb03f2a098e9e5ac7a43ae9fee5bb3dd912d | 1,475 | py | Python | tests/hunting/test_cvehunting.py | maorkuriel/kube-hunter | e0bacd6e7bd5e728ad3c6010904574adb1687dc5 | [
"Apache-2.0"
] | 1 | 2019-09-25T12:31:33.000Z | 2019-09-25T12:31:33.000Z | tests/hunting/test_cvehunting.py | maorkuriel/kube-hunter | e0bacd6e7bd5e728ad3c6010904574adb1687dc5 | [
"Apache-2.0"
] | null | null | null | tests/hunting/test_cvehunting.py | maorkuriel/kube-hunter | e0bacd6e7bd5e728ad3c6010904574adb1687dc5 | [
"Apache-2.0"
] | null | null | null | import time
import requests_mock
from src.core.events import handler
from src.core.events.types import K8sVersionDisclosure
from src.modules.hunting.cves import K8sClusterCveHunter, ServerApiVersionEndPointAccessPE, ServerApiVersionEndPointAccessDos
# Shared counter incremented by the subscriber classes defined below.
cve_counter = 0
def test_K8sCveHunter():
    """Run K8sClusterCveHunter against several version strings and check,
    via the subscriber-maintained counter, how many CVE events it publishes.
    """
    global cve_counter
    # because the hunter unregisters itself, we manually remove this option, so we can test it
    K8sClusterCveHunter.__new__ = lambda self, cls: object.__new__(self)
    # Plain vulnerable version: expect both CVE events (counter == 2).
    e = K8sVersionDisclosure(version="1.10.1", from_endpoint="/version")
    h = K8sClusterCveHunter(e)
    h.execute()
    time.sleep(0.01)  # give the event handler time to dispatch
    assert cve_counter == 2
    cve_counter = 0
    # test complex version
    e = K8sVersionDisclosure(version="1.10.1-gke-1", from_endpoint="/version")
    h = K8sClusterCveHunter(e)
    h.execute()
    time.sleep(0.01)
    assert cve_counter == 2
    cve_counter = 0
    # test complex version
    # Patched GKE version: expect no CVE events.
    e = K8sVersionDisclosure(version="v1.13.6-gke.13", from_endpoint="/version")
    h = K8sClusterCveHunter(e)
    h.execute()
    time.sleep(0.01)
    assert cve_counter == 0
    cve_counter = 0
@handler.subscribe(ServerApiVersionEndPointAccessPE)
class test_CVE_2018_1002105(object):
    # Subscriber that bumps the shared counter whenever the hunter
    # publishes a ServerApiVersionEndPointAccessPE event.
    def __init__(self, event):
        global cve_counter
        cve_counter += 1
@handler.subscribe(ServerApiVersionEndPointAccessDos)
class test_CVE_2019_1002100(object):
    # Subscriber that bumps the shared counter whenever the hunter
    # publishes a ServerApiVersionEndPointAccessDos event.
    def __init__(self, event):
        global cve_counter
        cve_counter += 1
09d36c04c5f137c3b31774e8311f2d1498b30cbb | 619 | py | Python | examples/zeromq_receiver.py | msabramo/diesel | a1ed7ed0358d0fec8592e23aafc3b7ec167ab649 | [
"BSD-3-Clause"
] | 224 | 2015-01-03T06:33:05.000Z | 2021-11-22T03:19:02.000Z | examples/zeromq_receiver.py | dowski/diesel | d9824e467805caf40e0ba21b88a27db38e64c352 | [
"BSD-3-Clause"
] | 12 | 2015-01-01T03:35:15.000Z | 2021-05-22T23:37:28.000Z | examples/zeromq_receiver.py | dowski/diesel | d9824e467805caf40e0ba21b88a27db38e64c352 | [
"BSD-3-Clause"
] | 37 | 2015-01-04T01:47:55.000Z | 2022-03-03T02:04:15.000Z | from diesel import quickstart, quickstop, sleep
from diesel.protocols.zeromq import DieselZMQSocket, zctx, zmq
import time
def get_messages():
    # Bind a ZeroMQ DEALER socket, receive 500000 sequenced messages,
    # then print the measured throughput and stop the diesel app.
    outsock = DieselZMQSocket(zctx.socket(zmq.DEALER), bind="tcp://127.0.0.1:5000")
    t = time.time()
    for x in xrange(500000):
        msg = outsock.recv()
        assert msg == "yo dawg %s" % x
        # Yield to the diesel scheduler every 1000 messages so other
        # green threads (tick) get a chance to run.
        if x % 1000 == 0:
            sleep()
    delt = time.time() - t
    print "500000 messages in %ss (%.1f/s)" % (delt, 500000.0 / delt)
    quickstop()
def tick():
    # Heartbeat loop: prints once per second to show the scheduler is
    # still servicing other green threads while messages are received.
    while True:
        print "Other diesel stuff"
        sleep(1)
# Run the receiver and the heartbeat concurrently under diesel's loop.
quickstart(get_messages, tick)
| 24.76 | 83 | 0.612278 |
46ac063bbc8983a5349cb4240567eae80164a2ec | 1,549 | py | Python | nssrc/com/citrix/netscaler/nitro/resource/config/dns/dnssrvrec_args.py | benfinke/ns_python | d651d7aa01d7dc63c1cd435c7b3314d7f5b26659 | [
"Apache-2.0"
] | 1 | 2015-04-05T21:21:26.000Z | 2015-04-05T21:21:26.000Z | nssrc/com/citrix/netscaler/nitro/resource/config/dns/dnssrvrec_args.py | benfinke/ns_python | d651d7aa01d7dc63c1cd435c7b3314d7f5b26659 | [
"Apache-2.0"
] | 1 | 2017-01-20T22:56:58.000Z | 2017-01-20T22:56:58.000Z | nssrc/com/citrix/netscaler/nitro/resource/config/dns/dnssrvrec_args.py | benfinke/ns_python | d651d7aa01d7dc63c1cd435c7b3314d7f5b26659 | [
"Apache-2.0"
] | 6 | 2015-04-21T13:14:08.000Z | 2020-12-03T07:27:52.000Z | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class dnssrvrec_args :
    ur""" Provides additional arguments required for fetching the dnssrvrec resource.
    """
    def __init__(self) :
        # Backing field for the `type` property; empty string means unset.
        self._type = ""

    @property
    def type(self) :
        ur"""Type of records to display. Available settings function as follows:
        * ADNS - Display all authoritative address records.
        * PROXY - Display all proxy address records.
        * ALL - Display all address records.<br/>Possible values = ALL, ADNS, PROXY.
        """
        # try/except that merely re-raises is generated NITRO SDK
        # boilerplate; kept for consistency with sibling resources.
        try :
            return self._type
        except Exception as e:
            raise e

    @type.setter
    def type(self, type) :
        ur"""Type of records to display. Available settings function as follows:
        * ADNS - Display all authoritative address records.
        * PROXY - Display all proxy address records.
        * ALL - Display all address records.<br/>Possible values = ALL, ADNS, PROXY.
        """
        try :
            self._type = type
        except Exception as e:
            raise e

    class Type:
        # Allowed values for the `type` argument.
        ALL = "ALL"
        ADNS = "ADNS"
        PROXY = "PROXY"
| 29.226415 | 82 | 0.707553 |
6c2303731040cbfef1209578429e4516f0e2880d | 763 | py | Python | src/assignments/assignment11/good_customer.py | acc-cosc-1336/cosc-1336-spring-2018-Aronpond | b37a6be8c0b909859ccf5ac2ce5eaf82c4ba741b | [
"MIT"
] | null | null | null | src/assignments/assignment11/good_customer.py | acc-cosc-1336/cosc-1336-spring-2018-Aronpond | b37a6be8c0b909859ccf5ac2ce5eaf82c4ba741b | [
"MIT"
] | null | null | null | src/assignments/assignment11/good_customer.py | acc-cosc-1336/cosc-1336-spring-2018-Aronpond | b37a6be8c0b909859ccf5ac2ce5eaf82c4ba741b | [
"MIT"
] | null | null | null | #create import statement for Customer class
from src.assignments.assignment11.customer import Customer
'''
Create a GoodCustomer class that inherits from Customer class with parameters first_name, last_name, and phone_number.
Call the Customer(superclass) constructor method and pass all the parameters including self to the method.
In the constructor method create a new class attribute named discount and set value to .10 .
Create a get_discount_rate method that returns the attribute discount.
'''
class GoodCustomer(Customer):
    """A customer entitled to a fixed 10% discount.

    BUGFIX: the class now actually inherits from Customer (the module's
    own instructions require it, but the original declared no base), and
    get_discount_rate takes `self` (it previously had no parameters yet
    returned self.discount, so calling it on an instance raised).
    """

    def __init__(self, first_name, last_name, phone_number):
        # Delegate name/phone initialization to the Customer superclass.
        Customer.__init__(self, first_name, last_name, phone_number)
        # Discount rate for good customers: 10%.
        self.discount = .10

    def get_discount_rate(self):
        """Return the customer's discount rate."""
        return self.discount
| 40.157895 | 119 | 0.760157 |
165c52408e97054f59d3b70cf74e46104f53a45b | 5,275 | py | Python | tensorflow_probability/python/math/__init__.py | wataruhashimoto52/probability | 12e3f256544eadea6e863868da825614f4423eb0 | [
"Apache-2.0"
] | null | null | null | tensorflow_probability/python/math/__init__.py | wataruhashimoto52/probability | 12e3f256544eadea6e863868da825614f4423eb0 | [
"Apache-2.0"
] | null | null | null | tensorflow_probability/python/math/__init__.py | wataruhashimoto52/probability | 12e3f256544eadea6e863868da825614f4423eb0 | [
"Apache-2.0"
] | 1 | 2020-06-04T23:26:31.000Z | 2020-06-04T23:26:31.000Z | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""TensorFlow Probability math functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_probability.python.math import ode
from tensorflow_probability.python.math import psd_kernels
from tensorflow_probability.python.math.custom_gradient import custom_gradient
from tensorflow_probability.python.math.diag_jacobian import diag_jacobian
from tensorflow_probability.python.math.generic import log1mexp
from tensorflow_probability.python.math.generic import log_add_exp
from tensorflow_probability.python.math.generic import log_combinations
from tensorflow_probability.python.math.generic import log_cosh
from tensorflow_probability.python.math.generic import log_cumsum_exp
from tensorflow_probability.python.math.generic import log_sub_exp
from tensorflow_probability.python.math.generic import reduce_logmeanexp
from tensorflow_probability.python.math.generic import reduce_weighted_logsumexp
from tensorflow_probability.python.math.generic import smootherstep
from tensorflow_probability.python.math.generic import soft_sorting_matrix
from tensorflow_probability.python.math.generic import soft_threshold
from tensorflow_probability.python.math.generic import softplus_inverse
from tensorflow_probability.python.math.gradient import value_and_gradient
from tensorflow_probability.python.math.interpolation import batch_interp_regular_1d_grid
from tensorflow_probability.python.math.interpolation import batch_interp_regular_nd_grid
from tensorflow_probability.python.math.interpolation import interp_regular_1d_grid
from tensorflow_probability.python.math.linalg import cholesky_concat
from tensorflow_probability.python.math.linalg import cholesky_update
from tensorflow_probability.python.math.linalg import fill_triangular
from tensorflow_probability.python.math.linalg import fill_triangular_inverse
from tensorflow_probability.python.math.linalg import lu_matrix_inverse
from tensorflow_probability.python.math.linalg import lu_reconstruct
from tensorflow_probability.python.math.linalg import lu_solve
from tensorflow_probability.python.math.linalg import pivoted_cholesky
from tensorflow_probability.python.math.linalg import sparse_or_dense_matmul
from tensorflow_probability.python.math.linalg import sparse_or_dense_matvecmul
from tensorflow_probability.python.math.minimize import minimize
from tensorflow_probability.python.math.minimize import MinimizeTraceableQuantities
from tensorflow_probability.python.math.numeric import clip_by_value_preserve_gradient
from tensorflow_probability.python.math.numeric import log1psquare
from tensorflow_probability.python.math.random_ops import random_rademacher
from tensorflow_probability.python.math.random_ops import random_rayleigh
from tensorflow_probability.python.math.root_search import secant_root
from tensorflow_probability.python.math.scan_associative import scan_associative
from tensorflow_probability.python.math.sparse import dense_to_sparse
from tensorflow_probability.python.math.special import lambertw
from tensorflow_probability.python.math.special import lambertw_winitzki_approx
from tensorflow_probability.python.math.special import lbeta
from tensorflow_probability.python.math.special import log_gamma_correction
from tensorflow_probability.python.math.special import log_gamma_difference
from tensorflow.python.util.all_util import remove_undocumented # pylint: disable=g-direct-tensorflow-import
_allowed_symbols = [
'batch_interp_regular_1d_grid',
'batch_interp_regular_nd_grid',
'cholesky_concat',
'cholesky_update',
'clip_by_value_preserve_gradient',
'custom_gradient',
'dense_to_sparse',
'diag_jacobian',
'fill_triangular',
'fill_triangular_inverse',
'interp_regular_1d_grid',
'lambertw',
'lambertw_winitzki_approx',
'lbeta',
'log1mexp',
'log1psquare',
'log_add_exp',
'log_combinations',
'log_cosh',
'log_cumsum_exp',
'log_gamma_correction',
'log_gamma_difference',
'log_sub_exp',
'lu_matrix_inverse',
'lu_reconstruct',
'lu_solve',
'minimize',
'MinimizeTraceableQuantities',
'ode',
'pivoted_cholesky',
'psd_kernels',
'random_rademacher',
'random_rayleigh',
'reduce_logmeanexp',
'reduce_weighted_logsumexp',
'scan_associative',
'secant_root',
'smootherstep',
'soft_sorting_matrix',
'soft_threshold',
'softplus_inverse',
'sparse_or_dense_matmul',
'sparse_or_dense_matvecmul',
'value_and_gradient',
]
remove_undocumented(__name__, _allowed_symbols)
| 45.474138 | 109 | 0.823128 |
6d20f52b0f6d0f859022bdbf0af934b251bdd00f | 517 | py | Python | tests/unit/test_backup_common.py | cuducos/alchemydumps | 3475fa0a907e2a2b4134433bdce47a5ad52802b3 | [
"MIT"
] | 86 | 2015-02-02T14:51:55.000Z | 2022-03-17T02:43:43.000Z | tests/unit/test_backup_common.py | cuducos/alchemydumps | 3475fa0a907e2a2b4134433bdce47a5ad52802b3 | [
"MIT"
] | 27 | 2015-01-20T14:12:25.000Z | 2021-09-13T16:57:56.000Z | tests/unit/test_backup_common.py | cuducos/alchemydumps | 3475fa0a907e2a2b4134433bdce47a5ad52802b3 | [
"MIT"
] | 42 | 2015-01-21T02:50:35.000Z | 2022-03-17T02:43:27.000Z | from unittest import TestCase
from flask_alchemydumps.backup import CommonTools
class TestBackup(TestCase):
def setUp(self):
self.backup = CommonTools()
def test_get_timestamp(self):
name = "BRA-19940717123000-ITA.gz"
self.assertEqual("19940717123000", self.backup.get_timestamp(name))
def test_parse_timestamp(self):
timestamp = "19940717123000"
expected = "Jul 17, 1994 at 12:30:00"
self.assertEqual(expected, self.backup.parse_timestamp(timestamp))
| 28.722222 | 75 | 0.70793 |
c88d50164688dba5a11b450c6164e29c982be180 | 1,837 | py | Python | rurusetto/users/migrations/0017_auto_20210811_1237.py | siddhantdixit/rurusetto | ed3aad56d20fbdc15e9ab7d2b77335de65009b7f | [
"MIT"
] | 19 | 2021-05-09T12:05:40.000Z | 2022-03-02T19:26:36.000Z | rurusetto/users/migrations/0017_auto_20210811_1237.py | siddhantdixit/rurusetto | ed3aad56d20fbdc15e9ab7d2b77335de65009b7f | [
"MIT"
] | 121 | 2021-05-04T19:18:13.000Z | 2022-03-21T22:11:25.000Z | rurusetto/users/migrations/0017_auto_20210811_1237.py | siddhantdixit/rurusetto | ed3aad56d20fbdc15e9ab7d2b77335de65009b7f | [
"MIT"
] | 12 | 2021-05-03T04:44:02.000Z | 2021-10-19T05:40:08.000Z | # Generated by Django 3.2.5 on 2021-08-11 12:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0016_auto_20210811_0523'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='about_me',
field=models.TextField(blank=True, default='Hello there!', max_length=120),
),
migrations.AlterField(
model_name='profile',
name='discord',
field=models.CharField(blank=True, default='', max_length=50),
),
migrations.AlterField(
model_name='profile',
name='interests',
field=models.CharField(blank=True, default='', max_length=50),
),
migrations.AlterField(
model_name='profile',
name='location',
field=models.CharField(blank=True, default='', max_length=50),
),
migrations.AlterField(
model_name='profile',
name='occupation',
field=models.CharField(blank=True, default='', max_length=50),
),
migrations.AlterField(
model_name='profile',
name='osu_id',
field=models.IntegerField(blank=True, default=0),
),
migrations.AlterField(
model_name='profile',
name='osu_username',
field=models.CharField(blank=True, default='', max_length=50),
),
migrations.AlterField(
model_name='profile',
name='twitter',
field=models.CharField(blank=True, default='', max_length=50),
),
migrations.AlterField(
model_name='profile',
name='website',
field=models.URLField(blank=True, default=''),
),
]
| 31.135593 | 87 | 0.553076 |
8a17d392b297fc83c99a90dabd4586a35628de35 | 1,165 | py | Python | python/set_global_step.py | MarkTakken/KataGo | 0d469e01c14f0a0056649276a683064ed9c5a508 | [
"MIT"
] | null | null | null | python/set_global_step.py | MarkTakken/KataGo | 0d469e01c14f0a0056649276a683064ed9c5a508 | [
"MIT"
] | 1 | 2022-03-03T00:44:18.000Z | 2022-03-03T01:31:59.000Z | python/set_global_step.py | MarkTakken/KataGoTF2MacOS | 7fdbf6b49cd0c56c18e9f2f107a14905ad1761b1 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import sys
import os
import argparse
import tensorflow as tf
import numpy as np
tf.compat.v1.disable_eager_execution()
description = """
Hacky script to set the value of global_step in a tensorflow checkpoint
"""
parser = argparse.ArgumentParser(description=description)
parser.add_argument('-checkpoint', help='Checkpoint file path without the .ckpt or the .meta', required=True)
parser.add_argument('-new-value', help='New value to set to', type=int, required=True)
parser.add_argument('-output', help='Output new checkpoint to here', required=True)
args = vars(parser.parse_args())
checkpoint_path = args["checkpoint"]
new_value = args["new_value"]
output_path = args["output"]
with tf.compat.v1.Session() as sess:
for var_name, shape in tf.train.list_variables(checkpoint_path):
var = tf.train.load_variable(checkpoint_path, var_name)
if var_name == "global_step":
var = tf.Variable(new_value, trainable=False, name=var_name, dtype=tf.int64)
else:
var = tf.Variable(var,name=var_name)
saver = tf.compat.v1.train.Saver()
sess.run(tf.compat.v1.global_variables_initializer())
saver.save(sess, output_path)
| 33.285714 | 109 | 0.75279 |
82a46134529c9dbddda762e243ccfd99e5c0ccb5 | 11,744 | py | Python | fewrel/fewshot_re_kit/sentence_encoder.py | bloomberg/emnlp21_fewrel | 894bee155196f99116e2ff12a01e2a942f861242 | [
"MIT"
] | 5 | 2021-11-04T20:55:42.000Z | 2022-02-26T08:03:23.000Z | fewrel/fewshot_re_kit/sentence_encoder.py | bloomberg/emnlp21_fewrel | 894bee155196f99116e2ff12a01e2a942f861242 | [
"MIT"
] | null | null | null | fewrel/fewshot_re_kit/sentence_encoder.py | bloomberg/emnlp21_fewrel | 894bee155196f99116e2ff12a01e2a942f861242 | [
"MIT"
] | 4 | 2021-11-09T05:13:06.000Z | 2022-02-26T13:10:37.000Z | import torch
import torch.nn as nn
import torch.nn.functional as F
import math
import numpy as np
import os
from torch import optim
from . import network
from transformers import BertTokenizer, BertModel, BertForMaskedLM, BertForSequenceClassification, RobertaModel, RobertaTokenizer, RobertaForSequenceClassification
class CNNSentenceEncoder(nn.Module):
    """CNN sentence encoder: word + position embeddings (project `network`
    modules) followed by a convolutional encoder producing a fixed-size
    sentence representation of `hidden_size`."""

    def __init__(self, word_vec_mat, word2id, max_length, word_embedding_dim=50,
                 pos_embedding_dim=5, hidden_size=230):
        # word_vec_mat: pretrained word-vector matrix fed to the embedding
        # layer; word2id: token -> index map used by tokenize().
        nn.Module.__init__(self)
        self.hidden_size = hidden_size
        self.max_length = max_length
        self.embedding = network.embedding.Embedding(word_vec_mat, max_length,
                                                     word_embedding_dim, pos_embedding_dim)
        self.encoder = network.encoder.Encoder(max_length, word_embedding_dim,
                                               pos_embedding_dim, hidden_size)
        self.word2id = word2id

    def forward(self, inputs):
        # Embed tokens+positions, then encode to a sentence vector.
        x = self.embedding(inputs)
        x = self.encoder(x)
        return x

    def tokenize(self, raw_tokens, pos_head, pos_tail):
        """Convert raw tokens to (ids, pos1, pos2, mask) numpy-style arrays,
        padded/truncated to self.max_length; pos1/pos2 encode each token's
        offset from the head/tail entity start (shifted by max_length to
        stay non-negative)."""
        # token -> index
        indexed_tokens = []
        for token in raw_tokens:
            token = token.lower()
            if token in self.word2id:
                indexed_tokens.append(self.word2id[token])
            else:
                indexed_tokens.append(self.word2id['[UNK]'])
        # padding
        while len(indexed_tokens) < self.max_length:
            indexed_tokens.append(self.word2id['[PAD]'])
        indexed_tokens = indexed_tokens[:self.max_length]
        # pos
        pos1 = np.zeros((self.max_length), dtype=np.int32)
        pos2 = np.zeros((self.max_length), dtype=np.int32)
        pos1_in_index = min(self.max_length, pos_head[0])
        pos2_in_index = min(self.max_length, pos_tail[0])
        for i in range(self.max_length):
            pos1[i] = i - pos1_in_index + self.max_length
            pos2[i] = i - pos2_in_index + self.max_length
        # mask
        # NOTE: indexed_tokens is already padded here, so the mask covers
        # the full max_length.
        mask = np.zeros((self.max_length), dtype=np.int32)
        mask[:len(indexed_tokens)] = 1
        return indexed_tokens, pos1, pos2, mask
class BERTSentenceEncoder(nn.Module):
    """BERT sentence encoder. Returns either the pooled [CLS]
    representation or, when cat_entity_rep is set, the concatenation of
    the hidden states at the two entity-marker positions."""

    def __init__(self, pretrain_path, max_length, cat_entity_rep=False, mask_entity=False):
        # mask_entity: replace entity tokens with a reserved token during
        # tokenization (entity-blinding).
        nn.Module.__init__(self)
        self.bert = BertModel.from_pretrained(pretrain_path)
        self.max_length = max_length
        self.tokenizer = BertTokenizer.from_pretrained(pretrain_path)
        self.cat_entity_rep = cat_entity_rep
        self.mask_entity = mask_entity

    def forward(self, inputs):
        if not self.cat_entity_rep:
            # Pooled output (second return value of BertModel).
            _, x = self.bert(inputs['word'], attention_mask=inputs['mask'])
            return x
        else:
            # Gather per-example hidden states at the head/tail marker
            # positions and concatenate them.
            outputs = self.bert(inputs['word'], attention_mask=inputs['mask'])
            tensor_range = torch.arange(inputs['word'].size()[0])
            h_state = outputs[0][tensor_range, inputs["pos1"]]
            t_state = outputs[0][tensor_range, inputs["pos2"]]
            state = torch.cat((h_state, t_state), -1)
            return state

    def tokenize(self, raw_tokens, pos_head, pos_tail):
        """WordPiece-tokenize with entity markers [unused0]/[unused2]
        around the head and [unused1]/[unused3] around the tail; return
        (ids, head marker index, tail marker index, mask)."""
        # token -> index
        tokens = ['[CLS]']
        cur_pos = 0
        pos1_in_index = 1
        pos2_in_index = 1
        for token in raw_tokens:
            token = token.lower()
            if cur_pos == pos_head[0]:
                tokens.append('[unused0]')
                pos1_in_index = len(tokens)
            if cur_pos == pos_tail[0]:
                tokens.append('[unused1]')
                pos2_in_index = len(tokens)
            if self.mask_entity and ((pos_head[0] <= cur_pos and cur_pos <= pos_head[-1]) or (pos_tail[0] <= cur_pos and cur_pos <= pos_tail[-1])):
                # Entity blinding: hide the entity surface form.
                tokens += ['[unused4]']
            else:
                tokens += self.tokenizer.tokenize(token)
            if cur_pos == pos_head[-1]:
                tokens.append('[unused2]')
            if cur_pos == pos_tail[-1]:
                tokens.append('[unused3]')
            cur_pos += 1
        indexed_tokens = self.tokenizer.convert_tokens_to_ids(tokens)
        # padding
        while len(indexed_tokens) < self.max_length:
            indexed_tokens.append(0)
        indexed_tokens = indexed_tokens[:self.max_length]
        # pos
        pos1 = np.zeros((self.max_length), dtype=np.int32)
        pos2 = np.zeros((self.max_length), dtype=np.int32)
        for i in range(self.max_length):
            pos1[i] = i - pos1_in_index + self.max_length
            pos2[i] = i - pos2_in_index + self.max_length
        # mask
        mask = np.zeros((self.max_length), dtype=np.int32)
        mask[:len(tokens)] = 1
        # Clamp marker indices to max_length before converting to
        # 0-based positions for the return value.
        pos1_in_index = min(self.max_length, pos1_in_index)
        pos2_in_index = min(self.max_length, pos2_in_index)
        return indexed_tokens, pos1_in_index - 1, pos2_in_index - 1, mask
class BERTPAIRSentenceEncoder(nn.Module):
    """Pairwise encoder: BERT sequence-classification head with 2 labels,
    scoring a (query, support) sentence pair given segment ids."""

    def __init__(self, pretrain_path, max_length):
        nn.Module.__init__(self)
        self.bert = BertForSequenceClassification.from_pretrained(
            pretrain_path,
            num_labels=2)
        self.max_length = max_length
        self.tokenizer = BertTokenizer.from_pretrained(pretrain_path)

    def forward(self, inputs):
        # Logits for the sentence pair; 'seg' carries the segment ids
        # separating the two sentences.
        x = self.bert(inputs['word'], token_type_ids=inputs['seg'], attention_mask=inputs['mask'])[0]
        return x

    def tokenize(self, raw_tokens, pos_head, pos_tail):
        """Tokenize one sentence with entity markers; returns only the
        token ids (no [CLS] -- pairs are assembled by the caller)."""
        # token -> index
        # tokens = ['[CLS]']
        tokens = []
        cur_pos = 0
        # NOTE(review): pos1_in_index/pos2_in_index are computed but never
        # returned or used in this method.
        pos1_in_index = 0
        pos2_in_index = 0
        for token in raw_tokens:
            token = token.lower()
            if cur_pos == pos_head[0]:
                tokens.append('[unused0]')
                pos1_in_index = len(tokens)
            if cur_pos == pos_tail[0]:
                tokens.append('[unused1]')
                pos2_in_index = len(tokens)
            tokens += self.tokenizer.tokenize(token)
            if cur_pos == pos_head[-1]:
                tokens.append('[unused2]')
            if cur_pos == pos_tail[-1]:
                tokens.append('[unused3]')
            cur_pos += 1
        indexed_tokens = self.tokenizer.convert_tokens_to_ids(tokens)
        return indexed_tokens
class RobertaSentenceEncoder(nn.Module):
    """RoBERTa sentence encoder: marks entities with reserved 'madeupword'
    tokens inside the BPE stream and returns ids, positions and mask."""

    def __init__(self, pretrain_path, max_length, cat_entity_rep=False):
        nn.Module.__init__(self)
        self.roberta = RobertaModel.from_pretrained(pretrain_path)
        self.max_length = max_length
        self.tokenizer = RobertaTokenizer.from_pretrained(pretrain_path)
        # When True, forward() returns concatenated [head; tail] marker
        # hidden states instead of the pooled sentence embedding.
        self.cat_entity_rep = cat_entity_rep

    def forward(self, inputs):
        """Encode a batch; see cat_entity_rep for the two output modes."""
        if not self.cat_entity_rep:
            _, x = self.roberta(inputs['word'], attention_mask=inputs['mask'])
            return x
        else:
            outputs = self.roberta(inputs['word'], attention_mask=inputs['mask'])
            # Gather the hidden state at each sample's entity-marker index.
            tensor_range = torch.arange(inputs['word'].size()[0])
            h_state = outputs[0][tensor_range, inputs["pos1"]]
            t_state = outputs[0][tensor_range, inputs["pos2"]]
            state = torch.cat((h_state, t_state), -1)
            return state

    def tokenize(self, raw_tokens, pos_head, pos_tail):
        """BPE-encode one sentence, inserting entity boundary markers.

        Returns (indexed_tokens, pos1, pos2, mask) like the BERT encoder.
        """
        def getIns(bped, bpeTokens, tokens, L):
            # Map a word-level boundary L to its index in the BPE stream by
            # re-tokenizing the word prefix and matching it against the full
            # BPE text.
            resL = 0
            tkL = " ".join(tokens[:L])
            bped_tkL = " ".join(self.tokenizer.tokenize(tkL))
            if bped.find(bped_tkL) == 0:
                resL = len(bped_tkL.split())
            else:
                # Retry with a trailing space: BPE merges can differ at the cut.
                tkL += " "
                bped_tkL = " ".join(self.tokenizer.tokenize(tkL))
                if bped.find(bped_tkL) == 0:
                    resL = len(bped_tkL.split())
                else:
                    raise Exception("Cannot locate the position")
            return resL

        s = " ".join(raw_tokens)
        sst = self.tokenizer.tokenize(s)
        # Word-level [start, end) bounds of the two entities.
        headL = pos_head[0]
        headR = pos_head[-1] + 1
        hiL = getIns(" ".join(sst), sst, raw_tokens, headL)
        hiR = getIns(" ".join(sst), sst, raw_tokens, headR)
        tailL = pos_tail[0]
        tailR = pos_tail[-1] + 1
        tiL = getIns(" ".join(sst), sst, raw_tokens, tailL)
        tiR = getIns(" ".join(sst), sst, raw_tokens, tailR)
        # Reserved RoBERTa vocabulary items used as entity boundary markers.
        E1b = 'madeupword0000'
        E1e = 'madeupword0001'
        E2b = 'madeupword0002'
        E2e = 'madeupword0003'
        ins = [(hiL, E1b), (hiR, E1e), (tiL, E2b), (tiR, E2e)]
        ins = sorted(ins)
        pE1 = 0
        pE2 = 0
        pE1_ = 0
        pE2_ = 0
        # Insert markers left-to-right; '+ i' compensates for earlier inserts.
        for i in range(0, 4):
            sst.insert(ins[i][0] + i, ins[i][1])
            if ins[i][1] == E1b:
                pE1 = ins[i][0] + i
            elif ins[i][1] == E2b:
                pE2 = ins[i][0] + i
            elif ins[i][1] == E1e:
                pE1_ = ins[i][0] + i
            else:
                pE2_ = ins[i][0] + i
        pos1_in_index = pE1 + 1
        pos2_in_index = pE2 + 1
        sst = ['<s>'] + sst
        indexed_tokens = self.tokenizer.convert_tokens_to_ids(sst)
        # padding
        while len(indexed_tokens) < self.max_length:
            indexed_tokens.append(1)  # 1 is RoBERTa's <pad> id
        indexed_tokens = indexed_tokens[:self.max_length]
        # pos
        # Relative positions shifted by max_length so values stay positive.
        pos1 = np.zeros((self.max_length), dtype=np.int32)
        pos2 = np.zeros((self.max_length), dtype=np.int32)
        for i in range(self.max_length):
            pos1[i] = i - pos1_in_index + self.max_length
            pos2[i] = i - pos2_in_index + self.max_length
        # mask
        mask = np.zeros((self.max_length), dtype=np.int32)
        mask[:len(sst)] = 1
        pos1_in_index = min(self.max_length, pos1_in_index)
        pos2_in_index = min(self.max_length, pos2_in_index)
        return indexed_tokens, pos1_in_index - 1, pos2_in_index - 1, mask
class RobertaPAIRSentenceEncoder(nn.Module):
    """RoBERTa encoder for the sentence-pair (PAIR) matching formulation."""

    def __init__(self, pretrain_path, max_length):
        nn.Module.__init__(self)
        # Two-way head: the pair either expresses the same relation or not.
        self.roberta = RobertaForSequenceClassification.from_pretrained(
            pretrain_path,
            num_labels=2)
        self.max_length = max_length
        self.tokenizer = RobertaTokenizer.from_pretrained(pretrain_path)

    def forward(self, inputs):
        """Return the match/no-match logits for every encoded pair."""
        x = self.roberta(inputs['word'], attention_mask=inputs['mask'])[0]
        return x

    def tokenize(self, raw_tokens, pos_head, pos_tail):
        """BPE-encode one sentence with entity markers; no padding here
        (the pair is assembled and padded later). Returns raw token ids."""
        def getIns(bped, bpeTokens, tokens, L):
            # Map a word-level boundary L to its index in the BPE stream by
            # matching the re-tokenized word prefix against the full BPE text.
            resL = 0
            tkL = " ".join(tokens[:L])
            bped_tkL = " ".join(self.tokenizer.tokenize(tkL))
            if bped.find(bped_tkL) == 0:
                resL = len(bped_tkL.split())
            else:
                # Retry with a trailing space: BPE merges can differ at the cut.
                tkL += " "
                bped_tkL = " ".join(self.tokenizer.tokenize(tkL))
                if bped.find(bped_tkL) == 0:
                    resL = len(bped_tkL.split())
                else:
                    raise Exception("Cannot locate the position")
            return resL

        s = " ".join(raw_tokens)
        sst = self.tokenizer.tokenize(s)
        # Word-level [start, end) bounds of the two entities.
        headL = pos_head[0]
        headR = pos_head[-1] + 1
        hiL = getIns(" ".join(sst), sst, raw_tokens, headL)
        hiR = getIns(" ".join(sst), sst, raw_tokens, headR)
        tailL = pos_tail[0]
        tailR = pos_tail[-1] + 1
        tiL = getIns(" ".join(sst), sst, raw_tokens, tailL)
        tiR = getIns(" ".join(sst), sst, raw_tokens, tailR)
        # Reserved RoBERTa vocabulary items used as entity boundary markers.
        E1b = 'madeupword0000'
        E1e = 'madeupword0001'
        E2b = 'madeupword0002'
        E2e = 'madeupword0003'
        ins = [(hiL, E1b), (hiR, E1e), (tiL, E2b), (tiR, E2e)]
        ins = sorted(ins)
        # Insert markers left-to-right; '+ i' compensates for earlier inserts.
        for i in range(0, 4):
            sst.insert(ins[i][0] + i, ins[i][1])
        indexed_tokens = self.tokenizer.convert_tokens_to_ids(sst)
        return indexed_tokens
| 37.164557 | 163 | 0.574166 |
76554c5f71c2e91d78c896873ae492588c884246 | 26,900 | py | Python | azure/durable_functions/models/DurableOrchestrationClient.py | sebastianburckhardt/azure-functions-durable-python | 634f70887e415f0ff9e7ee1e2fb3f58f90112772 | [
"MIT"
] | null | null | null | azure/durable_functions/models/DurableOrchestrationClient.py | sebastianburckhardt/azure-functions-durable-python | 634f70887e415f0ff9e7ee1e2fb3f58f90112772 | [
"MIT"
] | null | null | null | azure/durable_functions/models/DurableOrchestrationClient.py | sebastianburckhardt/azure-functions-durable-python | 634f70887e415f0ff9e7ee1e2fb3f58f90112772 | [
"MIT"
] | null | null | null | import json
from datetime import datetime
from typing import List, Any, Optional, Dict, Union
from time import time
from asyncio import sleep
from urllib.parse import urlparse, quote
import azure.functions as func
from .PurgeHistoryResult import PurgeHistoryResult
from .DurableOrchestrationStatus import DurableOrchestrationStatus
from .RpcManagementOptions import RpcManagementOptions
from .OrchestrationRuntimeStatus import OrchestrationRuntimeStatus
from ..models.DurableOrchestrationBindings import DurableOrchestrationBindings
from .utils.http_utils import get_async_request, post_async_request, delete_async_request
from .utils.entity_utils import EntityId
from azure.functions._durable_functions import _serialize_custom_object
class DurableOrchestrationClient:
    """Durable Orchestration Client.

    Client for starting, querying, terminating and raising events to
    orchestration instances.
    """

    def __init__(self, context: str):
        """Parse the durable-client binding payload and cache helper state.

        `context` is the JSON string the Durable Functions extension injects
        for the orchestration client binding.
        """
        # Declared without values: reserved for typing / later population.
        self.task_hub_name: str
        self._uniqueWebHookOrigins: List[str]
        # Template placeholders appearing inside webhook management URLs.
        self._event_name_placeholder: str = "{eventName}"
        self._function_name_placeholder: str = "{functionName}"
        self._instance_id_placeholder: str = "[/{instanceId}]"
        self._reason_placeholder: str = "{text}"
        # Query-string keys understood by the status/purge endpoints.
        self._created_time_from_query_key: str = "createdTimeFrom"
        self._created_time_to_query_key: str = "createdTimeTo"
        self._runtime_status_query_key: str = "runtimeStatus"
        self._show_history_query_key: str = "showHistory"
        self._show_history_output_query_key: str = "showHistoryOutput"
        self._show_input_query_key: str = "showInput"
        self._orchestration_bindings: DurableOrchestrationBindings = \
            DurableOrchestrationBindings.from_json(context)
        # Indirection over the HTTP helpers makes them easy to stub in tests.
        self._post_async_request = post_async_request
        self._get_async_request = get_async_request
        self._delete_async_request = delete_async_request
async def start_new(self,
                    orchestration_function_name: str,
                    instance_id: Optional[str] = None,
                    client_input: Optional[Any] = None) -> str:
    """Start a new instance of the specified orchestrator function.

    If an orchestration instance with the specified ID already exists, the
    existing instance will be silently replaced by this new instance.

    Parameters
    ----------
    orchestration_function_name : str
        The name of the orchestrator function to start.
    instance_id : Optional[str]
        The ID to use for the new orchestration instance. If no instance id
        is specified, the Durable Functions extension will generate a random
        GUID (recommended).
    client_input : Optional[Any]
        JSON-serializable input value for the orchestrator function.

    Returns
    -------
    str
        The ID of the new orchestration instance if successful, None if not.
    """
    request_url = self._get_start_new_url(
        instance_id=instance_id, orchestration_function_name=orchestration_function_name)
    response: List[Any] = await self._post_async_request(
        request_url, self._get_json_input(client_input))
    status_code: int = response[0]
    # Any 2xx status up to 202 with a JSON body means the instance started.
    if status_code <= 202 and response[1]:
        return response[1]["id"]
    elif status_code == 400:
        # Orchestrator not found, report clean exception
        exception_data: Dict[str, str] = response[1]
        exception_message = exception_data["ExceptionMessage"]
        raise Exception(exception_message)
    else:
        # Catch all: simply surfacing the durable-extension exception.
        # We surface the stack trace too, since this may be a more
        # involved exception.
        ex_message: Any = response[1]
        raise Exception(ex_message)
def create_check_status_response(
        self, request: func.HttpRequest, instance_id: str) -> func.HttpResponse:
    """Create a HttpResponse that contains useful information for \
    checking the status of the specified instance.

    Parameters
    ----------
    request : HttpRequest
        The HTTP request that triggered the current orchestration instance.
    instance_id : str
        The ID of the orchestration instance to check.

    Returns
    -------
    HttpResponse
        An HTTP 202 response with a Location header
        and a payload containing instance management URLs
    """
    http_management_payload = self.get_client_response_links(request, instance_id)
    response_args = {
        "status_code": 202,
        "body": json.dumps(http_management_payload),
        "headers": {
            "Content-Type": "application/json",
            # Standard async pattern: poll the status URL, retry in 10s.
            "Location": http_management_payload["statusQueryGetUri"],
            "Retry-After": "10",
        },
    }
    return func.HttpResponse(**response_args)
def create_http_management_payload(self, instance_id: str) -> Dict[str, str]:
    """Create a dictionary of orchestrator management urls.

    Parameters
    ----------
    instance_id : str
        The ID of the orchestration instance to check.

    Returns
    -------
    Dict[str, str]
        a dictionary object of orchestrator instance management urls
    """
    # No originating HTTP request: keep the configured webhook origin as-is.
    return self.get_client_response_links(None, instance_id)
def get_client_response_links(
        self,
        request: Optional[func.HttpRequest], instance_id: str) -> Dict[str, str]:
    """Create a dictionary of orchestrator management urls.

    Parameters
    ----------
    request : Optional[HttpRequest]
        The HTTP request that triggered the current orchestration instance.
    instance_id : str
        The ID of the orchestration instance to check.

    Returns
    -------
    Dict[str, str]
        a dictionary object of orchestrator instance management urls
    """
    instance_placeholder = self._orchestration_bindings.management_urls["id"]
    links = self._orchestration_bindings.management_urls.copy()
    for key in links:
        url = links[key]
        # Rewrite the URL origin to match the caller's host when available.
        if request is not None and request.url:
            url = self._replace_url_origin(request.url, url)
        # Substitute the real instance id for the placeholder id token.
        links[key] = url.replace(instance_placeholder, instance_id)
    return links
async def raise_event(
        self, instance_id: str, event_name: str, event_data: Any = None,
        task_hub_name: str = None, connection_name: str = None) -> None:
    """Send an event notification message to a waiting orchestration instance.

    In order to handle the event, the target orchestration instance must be
    waiting for an event named `eventName` using waitForExternalEvent API.

    Parameters
    ----------
    instance_id : str
        The ID of the orchestration instance that will handle the event.
    event_name : str
        The name of the event.
    event_data : Any, optional
        The JSON-serializable data associated with the event.
    task_hub_name : str, optional
        The TaskHubName of the orchestration that will handle the event.
    connection_name : str, optional
        The name of the connection string associated with `taskHubName.`

    Raises
    ------
    ValueError
        event name must be a valid string.
    Exception
        Raises an exception if the status code is 404 or 400 when raising the event.
    """
    if event_name == "":
        raise ValueError("event_name must be a non-empty string.")
    request_url = self._get_raise_event_url(
        instance_id, event_name, task_hub_name, connection_name)
    response = await self._post_async_request(request_url, json.dumps(event_data))
    # Status-code dispatch: 202 (accepted) maps to no error message.
    switch_statement = {
        202: lambda: None,
        410: lambda: f"Instance with ID {instance_id} is gone: either completed or failed",
        404: lambda: f"No instance with ID {instance_id} found.",
        400: lambda: "Only application/json request content is supported"
    }
    has_error_message = switch_statement.get(
        response[0], lambda: f"Webhook returned unrecognized status code {response[0]}")
    error_message = has_error_message()
    if error_message:
        raise Exception(error_message)
async def get_status(self, instance_id: str, show_history: bool = False,
                     show_history_output: bool = False,
                     show_input: bool = False) -> DurableOrchestrationStatus:
    """Get the status of the specified orchestration instance.

    Parameters
    ----------
    instance_id : str
        The ID of the orchestration instance to query.
    show_history: bool
        Boolean marker for including execution history in the response.
    show_history_output: bool
        Boolean marker for including output in the execution history response.
    show_input: bool
        Boolean marker for including the input in the response.

    Returns
    -------
    DurableOrchestrationStatus
        The status of the requested orchestration instance
    """
    options = RpcManagementOptions(instance_id=instance_id, show_history=show_history,
                                   show_history_output=show_history_output,
                                   show_input=show_input)
    request_url = options.to_url(self._orchestration_bindings.rpc_base_url)
    response = await self._get_async_request(request_url)
    # These statuses all carry a parsable status payload; anything else is
    # treated as a transport-level failure.
    switch_statement = {
        200: lambda: None,  # instance completed
        202: lambda: None,  # instance in progress
        400: lambda: None,  # instance failed or terminated
        404: lambda: None,  # instance not found or pending
        500: lambda: None  # instance failed with unhandled exception
    }
    has_error_message = switch_statement.get(
        response[0],
        lambda: f"The operation failed with an unexpected status code {response[0]}")
    error_message = has_error_message()
    if error_message:
        raise Exception(error_message)
    else:
        return DurableOrchestrationStatus.from_json(response[1])
async def get_status_all(self) -> List[DurableOrchestrationStatus]:
    """Get the status of all orchestration instances.

    Returns
    -------
    DurableOrchestrationStatus
        The status of the requested orchestration instances
    """
    # No filters: an options object without arguments queries everything.
    options = RpcManagementOptions()
    request_url = options.to_url(self._orchestration_bindings.rpc_base_url)
    response = await self._get_async_request(request_url)
    # Only 200 is a success for the bulk query endpoint.
    switch_statement = {
        200: lambda: None,  # instance completed
    }
    has_error_message = switch_statement.get(
        response[0],
        lambda: f"The operation failed with an unexpected status code {response[0]}")
    error_message = has_error_message()
    if error_message:
        raise Exception(error_message)
    else:
        statuses: List[Any] = response[1]
        return [DurableOrchestrationStatus.from_json(o) for o in statuses]
async def get_status_by(self, created_time_from: datetime = None,
                        created_time_to: datetime = None,
                        runtime_status: List[OrchestrationRuntimeStatus] = None) \
        -> List[DurableOrchestrationStatus]:
    """Get the status of all orchestration instances that match the specified conditions.

    Parameters
    ----------
    created_time_from : datetime
        Return orchestration instances which were created after this Date.
    created_time_to: datetime
        Return orchestration instances which were created before this Date.
    runtime_status: List[OrchestrationRuntimeStatus]
        Return orchestration instances which match any of the runtimeStatus values
        in this list.

    Returns
    -------
    DurableOrchestrationStatus
        The status of the requested orchestration instances
    """
    # TODO: do we really want folks to us this without specifying all the args?
    options = RpcManagementOptions(created_time_from=created_time_from,
                                   created_time_to=created_time_to,
                                   runtime_status=runtime_status)
    request_url = options.to_url(self._orchestration_bindings.rpc_base_url)
    response = await self._get_async_request(request_url)
    # Only 200 is a success for the filtered bulk query endpoint.
    switch_statement = {
        200: lambda: None,  # instance completed
    }
    has_error_message = switch_statement.get(
        response[0],
        lambda: f"The operation failed with an unexpected status code {response[0]}")
    error_message = has_error_message()
    if error_message:
        raise Exception(error_message)
    else:
        return [DurableOrchestrationStatus.from_json(o) for o in response[1]]
async def purge_instance_history(self, instance_id: str) -> PurgeHistoryResult:
    """Delete the history of the specified orchestration instance.

    Parameters
    ----------
    instance_id : str
        The ID of the orchestration instance to delete.

    Returns
    -------
    PurgeHistoryResult
        The results of the request to delete the orchestration instance
    """
    request_url = f"{self._orchestration_bindings.rpc_base_url}instances/{instance_id}"
    response = await self._delete_async_request(request_url)
    return self._parse_purge_instance_history_response(response)
async def purge_instance_history_by(
        self, created_time_from: Optional[datetime] = None,
        created_time_to: Optional[datetime] = None,
        runtime_status: Optional[List[OrchestrationRuntimeStatus]] = None) \
        -> PurgeHistoryResult:
    """Delete the history of all orchestration instances that match the specified conditions.

    Parameters
    ----------
    created_time_from : Optional[datetime]
        Delete orchestration history which were created after this Date.
    created_time_to: Optional[datetime]
        Delete orchestration history which were created before this Date.
    runtime_status: Optional[List[OrchestrationRuntimeStatus]]
        Delete orchestration instances which match any of the runtimeStatus values
        in this list.

    Returns
    -------
    PurgeHistoryResult
        The results of the request to purge history
    """
    options = RpcManagementOptions(created_time_from=created_time_from,
                                   created_time_to=created_time_to,
                                   runtime_status=runtime_status)
    request_url = options.to_url(self._orchestration_bindings.rpc_base_url)
    response = await self._delete_async_request(request_url)
    return self._parse_purge_instance_history_response(response)
async def terminate(self, instance_id: str, reason: str) -> None:
    """Terminate the specified orchestration instance.

    Parameters
    ----------
    instance_id : str
        The ID of the orchestration instance to terminate.
    reason: str
        The reason for terminating the instance.

    Raises
    ------
    Exception:
        When the instance was not found, or the terminate call failed with
        an unexpected status code

    Returns
    -------
    None
    """
    request_url = f"{self._orchestration_bindings.rpc_base_url}instances/{instance_id}/" \
                  f"terminate?reason={quote(reason)}"
    response = await self._post_async_request(request_url, None)
    # Status-code dispatch: 202/410 are success/no-op; anything else raises.
    switch_statement = {
        202: lambda: None,  # instance in progress
        410: lambda: None,  # instance failed or terminated
        # BUGFIX: this entry was a doubly-nested lambda, so the 404 path
        # raised Exception(<function>) instead of the message string.
        404: lambda: f"No instance with ID '{instance_id}' found.",
    }
    has_error_message = switch_statement.get(
        response[0],
        lambda: f"The operation failed with an unexpected status code {response[0]}")
    error_message = has_error_message()
    if error_message:
        raise Exception(error_message)
async def wait_for_completion_or_create_check_status_response(
        self, request, instance_id: str, timeout_in_milliseconds: int = 10000,
        retry_interval_in_milliseconds: int = 1000) -> func.HttpResponse:
    """Create an HTTP response.

    The response either contains a payload of management URLs for a
    non-completed instance or contains the payload containing the output of
    the completed orchestration.

    If the orchestration does not complete within the specified timeout,
    then the HTTP response will be identical to that of
    [[createCheckStatusResponse]].

    Parameters
    ----------
    request
        The HTTP request that triggered the current function.
    instance_id:
        The unique ID of the instance to check.
    timeout_in_milliseconds:
        Total allowed timeout for output from the durable function.
        The default value is 10 seconds.
    retry_interval_in_milliseconds:
        The timeout between checks for output from the durable function.
        The default value is 1 second.
    """
    if retry_interval_in_milliseconds > timeout_in_milliseconds:
        raise Exception(f'Total timeout {timeout_in_milliseconds} (ms) should be bigger than '
                        f'retry timeout {retry_interval_in_milliseconds} (ms)')
    # Poll loop; exits only via the return statements below.
    checking = True
    start_time = time()
    while checking:
        status = await self.get_status(instance_id)
        if status:
            # Terminal runtime states map directly to an HTTP response.
            switch_statement = {
                OrchestrationRuntimeStatus.Completed:
                    lambda: self._create_http_response(200, status.output),
                OrchestrationRuntimeStatus.Canceled:
                    lambda: self._create_http_response(200, status.to_json()),
                OrchestrationRuntimeStatus.Terminated:
                    lambda: self._create_http_response(200, status.to_json()),
                OrchestrationRuntimeStatus.Failed:
                    lambda: self._create_http_response(500, status.to_json()),
            }
            result = switch_statement.get(status.runtime_status)
            if result:
                return result()
        elapsed = time() - start_time
        elapsed_in_milliseconds = elapsed * 1000
        if elapsed_in_milliseconds < timeout_in_milliseconds:
            # Sleep for the retry interval, clipped to the time remaining.
            remaining_time = timeout_in_milliseconds - elapsed_in_milliseconds
            sleep_time = retry_interval_in_milliseconds \
                if remaining_time > retry_interval_in_milliseconds else remaining_time
            sleep_time /= 1000
            await sleep(sleep_time)
        else:
            # Timed out: fall back to the async check-status pattern.
            return self.create_check_status_response(request, instance_id)
async def signal_entity(self, entityId: EntityId, operation_name: str,
                        operation_input: Optional[Any] = None,
                        task_hub_name: Optional[str] = None,
                        connection_name: Optional[str] = None) -> None:
    """Signals an entity to perform an operation.

    Parameters
    ----------
    entityId : EntityId
        The EntityId of the targeted entity to perform operation.
    operation_name: str
        The name of the operation.
    operation_input: Optional[Any]
        The content for the operation.
    task_hub_name: Optional[str]
        The task hub name of the target entity.
    connection_name: Optional[str]
        The name of the connection string associated with [task_hub_name].

    Raises
    ------
    Exception:
        When the signal entity call failed with an unexpected status code

    Returns
    -------
    None
    """
    options = RpcManagementOptions(operation_name=operation_name,
                                   connection_name=connection_name,
                                   task_hub_name=task_hub_name,
                                   entity_Id=entityId)
    request_url = options.to_url(self._orchestration_bindings.rpc_base_url)
    response = await self._post_async_request(
        request_url,
        json.dumps(operation_input) if operation_input else None)
    # Signals are fire-and-forget: only 202 (accepted) is a success.
    switch_statement = {
        202: lambda: None  # signal accepted
    }
    has_error_message = switch_statement.get(
        response[0],
        lambda: f"The operation failed with an unexpected status code {response[0]}")
    error_message = has_error_message()
    if error_message:
        raise Exception(error_message)
@staticmethod
def _create_http_response(
        status_code: int, body: Union[str, Any]) -> func.HttpResponse:
    """Build a JSON HttpResponse with the given status code and body."""
    # Strings are passed through as-is; anything else is JSON-serialized.
    body_as_json = body if isinstance(body, str) else json.dumps(body)
    response_args = {
        "status_code": status_code,
        "body": body_as_json,
        "mimetype": "application/json",
        "headers": {
            "Content-Type": "application/json",
        }
    }
    return func.HttpResponse(**response_args)
@staticmethod
def _get_json_input(client_input: object) -> Optional[str]:
    """Serialize the orchestrator input.

    Parameters
    ----------
    client_input: object
        The client's input, which we need to serialize

    Returns
    -------
    Optional[str]
        The JSON serialization of `client_input`, or None when no input
        was provided.

    Exceptions
    ----------
    TypeError
        If the JSON serialization failed, see `serialize_custom_object`
    """
    if client_input is None:
        return None
    return json.dumps(client_input, default=_serialize_custom_object)
@staticmethod
def _replace_url_origin(request_url: str, value_url: str) -> str:
    """Swap value_url's scheme://host/ origin for request_url's origin."""
    request_parts = urlparse(request_url)
    value_parts = urlparse(value_url)
    new_origin = f'{request_parts.scheme}://{request_parts.netloc}/'
    old_origin = f'{value_parts.scheme}://{value_parts.netloc}/'
    return value_url.replace(old_origin, new_origin)
@staticmethod
def _parse_purge_instance_history_response(
        response: List[Any]) -> PurgeHistoryResult:
    """Map a purge-endpoint [status, body] pair to a PurgeHistoryResult.

    Raises Exception for any status other than 200 (purged) or 404
    (nothing matched, reported as zero deletions).
    """
    switch_statement = {
        200: lambda: PurgeHistoryResult.from_json(response[1]),  # instance completed
        404: lambda: PurgeHistoryResult(instancesDeleted=0),  # instance not found
    }
    # Unknown status codes produce an error string instead of a result.
    switch_result = switch_statement.get(
        response[0],
        lambda: f"The operation failed with an unexpected status code {response[0]}")
    result = switch_result()
    if isinstance(result, PurgeHistoryResult):
        return result
    else:
        raise Exception(result)
def _get_start_new_url(
        self, instance_id: Optional[str], orchestration_function_name: str) -> str:
    """Build the RPC URL that starts a new orchestration instance."""
    base_url = self._orchestration_bindings.rpc_base_url
    # The instance-id path segment is optional.
    suffix = '' if instance_id is None else f'/{instance_id}'
    return f'{base_url}orchestrators/{orchestration_function_name}{suffix}'
def _get_raise_event_url(
        self, instance_id: str, event_name: str,
        task_hub_name: Optional[str], connection_name: Optional[str]) -> str:
    """Build the RPC URL used to raise an external event on an instance."""
    url = (f'{self._orchestration_bindings.rpc_base_url}'
           f'instances/{instance_id}/raiseEvent/{event_name}')
    # Optional query parameters for cross-task-hub signalling.
    params: List[str] = []
    if task_hub_name:
        params.append(f'taskHub={task_hub_name}')
    if connection_name:
        params.append(f'connection={connection_name}')
    if params:
        url = f'{url}?{"&".join(params)}'
    return url
async def rewind(self,
                 instance_id: str,
                 reason: str,
                 task_hub_name: Optional[str] = None,
                 connection_name: Optional[str] = None):
    """Return / "rewind" a failed orchestration instance to a prior "healthy" state.

    Parameters
    ----------
    instance_id: str
        The ID of the orchestration instance to rewind.
    reason: str
        The reason for rewinding the orchestration instance.
    task_hub_name: Optional[str]
        The TaskHub of the orchestration to rewind
    connection_name: Optional[str]
        Name of the application setting containing the storage
        connection string to use.

    Raises
    ------
    Exception:
        In case of a failure, it reports the reason for the exception
    """
    request_url: str = ""
    if self._orchestration_bindings.rpc_base_url:
        # BUGFIX: URL-encode the reason (consistent with `terminate`);
        # spaces or '&' in the text would otherwise corrupt the query string.
        path = f"instances/{instance_id}/rewind?reason={quote(reason)}"
        query: List[str] = []
        if task_hub_name is not None:
            query.append(f"taskHub={task_hub_name}")
        if connection_name is not None:
            query.append(f"connection={connection_name}")
        if len(query) > 0:
            path += "&" + "&".join(query)
        request_url = f"{self._orchestration_bindings.rpc_base_url}" + path
    else:
        raise Exception("The Python SDK only supports RPC endpoints."
                        + "Please remove the `localRpcEnabled` setting from host.json")
    response = await self._post_async_request(request_url, None)
    status: int = response[0]
    if status == 200 or status == 202:
        return
    elif status == 404:
        ex_msg = f"No instance with ID {instance_id} found."
        raise Exception(ex_msg)
    elif status == 410:
        ex_msg = "The rewind operation is only supported on failed orchestration instances."
        raise Exception(ex_msg)
    else:
        # Surface whatever the extension reported in the response body.
        ex_msg = response[1]
        raise Exception(ex_msg)
| 41.131498 | 99 | 0.625204 |
91bc6c743565ac959f741de2f66b11d35de892ca | 10,810 | py | Python | src/lib/datasets/sample/multi_pose_8.20.py | zf020114/CHPDet | b96542aac05c974f9e025ab679cb7e75bc56e268 | [
"MIT"
] | 10 | 2021-04-10T07:03:53.000Z | 2022-02-14T07:13:34.000Z | src/lib/datasets/sample/multi_pose_8.20.py | zf020114/CHPDet | b96542aac05c974f9e025ab679cb7e75bc56e268 | [
"MIT"
] | 3 | 2021-12-02T07:49:56.000Z | 2022-03-22T07:49:11.000Z | src/lib/datasets/sample/multi_pose_8.20.py | zf020114/CHPDet | b96542aac05c974f9e025ab679cb7e75bc56e268 | [
"MIT"
] | 1 | 2021-12-12T06:26:55.000Z | 2021-12-12T06:26:55.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torch.utils.data as data
import numpy as np
import torch
import json
import cv2
import os
from utils.image import flip, color_aug
from utils.image import get_affine_transform, affine_transform
from utils.image import gaussian_radius, draw_umich_gaussian, draw_msra_gaussian
from utils.image import draw_dense_reg
from utils.debugger import Debugger
import math
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from opts import opts
opt = opts().parse()
class MultiPoseDataset(data.Dataset):
def _coco_box_to_bbox(self, box):
bbox = np.array([box[0], box[1], box[0] + box[2], box[1] + box[3]],
dtype=np.float32)
return bbox
def _get_border(self, border, size):
i = 1
while size - border // i <= border // i:
i *= 2
return border // i
def debug(self,debugger, img, output, scale=1):
    """Visualize predicted center and keypoint heatmaps over the image.

    `output` must contain 'hm' (center heatmap) and 'hm_hp' (keypoint
    heatmap); `scale` is currently unused.
    """
    pred = debugger.gen_colormap(output['hm'])
    debugger.add_blend_img(img, pred, 'pred_hm')
    pred = debugger.gen_colormap_hp(output['hm_hp'])
    debugger.add_blend_img(img, pred, 'pred_hp')
    # Blocks to display the collected debug windows.
    debugger.show_all_imgs()
def __getitem__(self, index):
img_id = self.images[index]
file_name = self.coco.loadImgs(ids=[img_id])[0]['file_name']
img_path = os.path.join(self.img_dir, file_name)
ann_ids = self.coco.getAnnIds(imgIds=[img_id])
anns = self.coco.loadAnns(ids=ann_ids)
num_objs = min(len(anns), self.max_objs)
img = cv2.imread(img_path)
height, width = img.shape[0], img.shape[1]
c = np.array([img.shape[1] / 2., img.shape[0] / 2.], dtype=np.float32)
s = max(img.shape[0], img.shape[1]) * 1.0
rot = 0
flipped = False
if self.split == 'train':
if not self.opt.not_rand_crop:
#TODO这里是更改多尺度训练的地方。
s = s#* np.random.choice(np.arange(0.8, 1.5, 0.1))#change 0.6 1.4
w_border = self._get_border(128, img.shape[1])
h_border = self._get_border(128, img.shape[0])
c[0] = np.random.randint(low=w_border, high=img.shape[1] - w_border)
c[1] = np.random.randint(low=h_border, high=img.shape[0] - h_border)
else:
sf = self.opt.scale
cf = self.opt.shift
c[0] += s * np.clip(np.random.randn()*cf, -2*cf, 2*cf)
c[1] += s * np.clip(np.random.randn()*cf, -2*cf, 2*cf)
s = s * np.clip(np.random.randn()*sf + 1, 1 - sf, 1 + sf)
if np.random.random() < self.opt.aug_rot:# roate aug
rf = self.opt.rotate
rot = np.clip(np.random.randn()*rf, -rf*2, rf*2)
if np.random.random() < self.opt.flip:
flipped = True
img = img[:, ::-1, :]
c[0] = width - c[0] - 1
#下面这段代码求旋转的角度
if self.opt.angle_norm and self.split == 'train':
angle_list=np.array(angle_list)%np.pi #首先归一化到np.pi
angle_int=(angle_list// (np.pi/9)).astype('int')
angle_b=np.bincount(angle_int)
index_rot=np.argmax(angle_b)
ind_rot=(angle_list>(index_rot)*np.pi/9) * (angle_list<=(index_rot+1)*np.pi/9)
angle_rot=np.average(angle_list[ind_rot])
#这段代码是旋转图像,和中间点特征图,关键点特征图
angle_img_rot=angle_rot*(-180)/np.pi
hm_rotate=hm.transpose(1, 2, 0)
M = cv2.getRotationMatrix2D(((output_res)/2.0,(output_res)/2.0),angle_img_rot,1)
hm_rotate = cv2.warpAffine(hm_rotate,M,(output_res,output_res))
hm = hm_rotate.transpose(2, 0, 1 )
hp_rotate=hm_hp.transpose(1, 2, 0)
hp_rotate = cv2.warpAffine(hp_rotate,M,(output_res,output_res))
hm_hp = hp_rotate[np.newaxis,:]
M = cv2.getRotationMatrix2D(((self.opt.input_res)/2.0,(self.opt.input_res)/2.0),angle_img_rot,1)
inp = inp.transpose(1, 2, 0)
inp = cv2.warpAffine(inp,M,(self.opt.input_res,self.opt.input_res))
inp = inp.transpose(2, 0, 1 )
# inp1=cv2.warpAffine(inp1,M,(self.opt.input_res,self.opt.input_res))
#结束
trans_input = get_affine_transform(
c, s, rot, [self.opt.input_res, self.opt.input_res])
# inp1 = cv2.warpAffine(img, trans_input,
# (self.opt.input_res, self.opt.input_res),
# flags=cv2.INTER_LINEAR)
inp = cv2.warpAffine(img, trans_input,
(self.opt.input_res, self.opt.input_res),
flags=cv2.INTER_LINEAR)
inp = (inp.astype(np.float32) / 255.)
if self.split == 'train' and not self.opt.no_color_aug:
color_aug(self._data_rng, inp, self._eig_val, self._eig_vec)
inp = (inp - self.mean) / self.std
inp = inp.transpose(2, 0, 1)
output_res = self.opt.output_res
num_joints = self.num_joints
trans_output_rot = get_affine_transform(c, s, rot, [output_res, output_res])
trans_output = get_affine_transform(c, s, 0, [output_res, output_res])
hm = np.zeros((self.num_classes, output_res, output_res), dtype=np.float32)
hm_hp = np.zeros((num_joints, output_res, output_res), dtype=np.float32)
dense_kps = np.zeros((num_joints, 2, output_res, output_res),
dtype=np.float32)
dense_kps_mask = np.zeros((num_joints, output_res, output_res),
dtype=np.float32)
wh = np.zeros((self.max_objs, 2), dtype=np.float32)
kps = np.zeros((self.max_objs, num_joints * 2), dtype=np.float32)
reg = np.zeros((self.max_objs, 2), dtype=np.float32)
ind = np.zeros((self.max_objs), dtype=np.int64)
reg_mask = np.zeros((self.max_objs), dtype=np.uint8)
kps_mask = np.zeros((self.max_objs, self.num_joints * 2), dtype=np.uint8)
hp_offset = np.zeros((self.max_objs * num_joints, 2), dtype=np.float32)
hp_ind = np.zeros((self.max_objs * num_joints), dtype=np.int64)
hp_mask = np.zeros((self.max_objs * num_joints), dtype=np.int64)
draw_gaussian = draw_msra_gaussian if self.opt.mse_loss else \
draw_umich_gaussian
gt_det = []
angle_list=[]
for k in range(num_objs):
ann = anns[k]
bbox = self._coco_box_to_bbox(ann['bbox'])
#TODO change wwlekeuihx
cls_id = int(ann['category_id']) - 1
pts = np.array(ann['keypoints'][0:3], np.float32).reshape(num_joints, 3)#tmjx
if flipped:
bbox[[0, 2]] = width - bbox[[2, 0]] - 1
pts[:, 0] = width - pts[:, 0] - 1
#for e in self.flip_idx:
#pts[e[0]], pts[e[1]] = pts[e[1]].copy(), pts[e[0]].copy()
bbox[:2] = affine_transform(bbox[:2], trans_output)
bbox[2:] = affine_transform(bbox[2:], trans_output)
#bbox = np.clip(bbox, 0, output_res - 1)
h, w = bbox[3] - bbox[1], bbox[2] - bbox[0]
h = np.clip(h , 0, output_res - 1)
w = np.clip(w , 0, output_res - 1)
if (h > 0 and w > 0) or (rot != 0):
radius = gaussian_radius((math.ceil(h), math.ceil(w))) *1.2
sqrt_wh = np.sqrt(np.sqrt(h*w))
radius_w = radius * np.sqrt(w) / sqrt_wh
radius_h = radius * np.sqrt(h) / sqrt_wh
radius_w = self.opt.hm_gauss if self.opt.mse_loss else max(0, np.ceil(radius_w))
radius_h = self.opt.hm_gauss if self.opt.mse_loss else max(0, np.ceil(radius_h))
# radius = self.opt.hm_gauss if self.opt.mse_loss else max(0, int(radius))
ct = np.array(
[(bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2], dtype=np.float32)
ct[0] = np.clip(ct[0], 0, output_res - 1)
ct[1] = np.clip(ct[1], 0, output_res - 1)
ct_int = ct.astype(np.int32)
wh[k] = 1. * w, 1. * h
ind[k] = ct_int[1] * output_res + ct_int[0]
reg[k] = ct - ct_int
reg_mask[k] = 1
num_kpts = pts[:, 2].sum()
if num_kpts == 0:
hm[cls_id, ct_int[1], ct_int[0]] = 0.9999
reg_mask[k] = 0
hp_radius = gaussian_radius((math.ceil(h), math.ceil(w)))
hp_radius = self.opt.hm_gauss \
if self.opt.mse_loss else max(0, int(hp_radius))
for j in range(num_joints):
if pts[j, 2] > 0:
pts[j, :2] = affine_transform(pts[j, :2], trans_output_rot)
if pts[j, 0] >= 0 and pts[j, 0] < output_res and \
pts[j, 1] >= 0 and pts[j, 1] < output_res:
kps[k, j * 2: j * 2 + 2] = pts[j, :2] - ct_int
kps_mask[k, j * 2: j * 2 + 2] = 1
pt_int = pts[j, :2].astype(np.int32)
hp_offset[k * num_joints + j] = pts[j, :2] - pt_int
hp_ind[k * num_joints + j] = pt_int[1] * output_res + pt_int[0]
hp_mask[k * num_joints + j] = 1
if self.opt.dense_hp:
# must be before draw center hm gaussian
draw_dense_reg(dense_kps[j], hm[cls_id], ct_int,
pts[j, :2] - ct_int, radius, is_offset=True)
draw_gaussian(dense_kps_mask[j], ct_int, radius)
draw_gaussian(hm_hp[j], pt_int, hp_radius)
#TODO change
angle= math.atan2((pts[0, 0] - ct[0]), (pts[0, 1] - ct[1]))
angle_list.append(angle)
draw_gaussian(hm[cls_id], ct_int, [radius_w,radius_h,angle])
# draw_gaussian(hm[cls_id], ct_int, radiusw,radius)
gt_det.append([ct[0] - w / 2, ct[1] - h / 2,
ct[0] + w / 2, ct[1] + h / 2, 1] +
pts[:, :2].reshape(num_joints * 2).tolist() + [cls_id])
if rot != 0:
hm = hm * 0 + 0.9999
reg_mask *= 0
kps_mask *= 0
ret = {'input': inp, 'hm': hm, 'reg_mask': reg_mask, 'ind': ind, 'wh': wh,
'hps': kps, 'hps_mask': kps_mask}
if self.opt.dense_hp:
dense_kps = dense_kps.reshape(num_joints * 2, output_res, output_res)
dense_kps_mask = dense_kps_mask.reshape(
num_joints, 1, output_res, output_res)
dense_kps_mask = np.concatenate([dense_kps_mask, dense_kps_mask], axis=1)
dense_kps_mask = dense_kps_mask.reshape(
num_joints * 2, output_res, output_res)
ret.update({'dense_hps': dense_kps, 'dense_hps_mask': dense_kps_mask})
del ret['hps'], ret['hps_mask']
if self.opt.reg_offset:
ret.update({'reg': reg})
if self.opt.hm_hp:
ret.update({'hm_hp': hm_hp})
if self.opt.reg_hp_offset:
ret.update({'hp_offset': hp_offset, 'hp_ind': hp_ind, 'hp_mask': hp_mask})
if self.opt.debug > 0 or not self.split == 'train':
gt_det = np.array(gt_det, dtype=np.float32) if len(gt_det) > 0 else \
np.zeros((1, 40), dtype=np.float32)
meta = {'c': c, 's': s, 'gt_det': gt_det, 'img_id': img_id}
ret['meta'] = meta
#这里是调试可视化生成的特征图的程序
# debugger = Debugger(dataset=self.opt.dataset, ipynb=(self.opt.debug==3),
# theme=self.opt.debugger_theme)
# self.debug(debugger, inp1, ret)
return ret
| 44.303279 | 106 | 0.595097 |
a604cabadc6f3cd74defbe8d6912ff832fa625ae | 228 | py | Python | jury/metrics/squad/squad.py | Tiamat-Tech/jury | 566ee2ec6b1a1a502f6e13b84f40274b51ef7d48 | [
"MIT"
] | 64 | 2021-07-16T08:37:45.000Z | 2022-02-05T10:28:27.000Z | jury/metrics/squad/squad.py | Tiamat-Tech/jury | 566ee2ec6b1a1a502f6e13b84f40274b51ef7d48 | [
"MIT"
] | 46 | 2021-07-14T22:41:56.000Z | 2022-03-21T07:51:52.000Z | jury/metrics/squad/squad.py | Tiamat-Tech/jury | 566ee2ec6b1a1a502f6e13b84f40274b51ef7d48 | [
"MIT"
] | 9 | 2021-07-15T21:19:06.000Z | 2022-03-17T21:46:15.000Z | from jury.metrics._core import MetricAlias
from jury.metrics.squad.squad_for_language_generation import SquadForLanguageGeneration
__main_class__ = "Squad"
class Squad(MetricAlias):
    """Alias metric exposing SQuAD under the short name ``Squad``.

    ``MetricAlias`` presumably resolves ``_SUBCLASS`` to the concrete
    implementation when the alias is instantiated — verify against
    ``jury.metrics._core.MetricAlias``.
    """

    # Concrete implementation backing this alias.
    _SUBCLASS = SquadForLanguageGeneration
| 25.333333 | 87 | 0.846491 |
edeab1bd168bfcd301457d02494947ec2c44c80c | 2,893 | py | Python | test/python/Pista01.py | titos-carrasco/pyplayground | 353ae797c907311b40ddd68c93576ed76b09bfb5 | [
"MIT"
] | null | null | null | test/python/Pista01.py | titos-carrasco/pyplayground | 353ae797c907311b40ddd68c93576ed76b09bfb5 | [
"MIT"
] | null | null | null | test/python/Pista01.py | titos-carrasco/pyplayground | 353ae797c907311b40ddd68c93576ed76b09bfb5 | [
"MIT"
] | null | null | null | import time
import threading
import subprocess
from pyplayground.client.RobotThymio2 import RobotThymio2
class Pista01():
def __init__( self ):
pass
def run( self ):
# levantamos el playground en otro proceso
try:
#pg = subprocess.Popen( [ "python", "-m", "pyplayground.server.Playground", "../worlds/Pista01.world" ], shell=False )
time.sleep( 1 )
except Exception as e:
print( e )
exit()
# los datos de conexion al playground
host = "127.0.0.1"
port = 44444
# creamos los robots
thymio01 = Thymio01( "Thymio-01", host, port )
thymio02 = Thymio02( "Thymio-02", host, port )
# los levantamos en hilos separados
threading.Thread( target=thymio01.run, args=(), name="Thymio-01" ).start()
threading.Thread( target=thymio02.run, args=(), name="Thymio-02" ).start()
# loop clasico
t = time.time()
while( time.time() - t < 10 ):
time.sleep( 0.0001 )
# detenemos los robots
thymio01.finish()
thymio02.finish()
# detenemos el playground
#pg.send_signal( subprocess.signal.SIGTERM )
class Thymio01(RobotThymio2):
    """Thymio robot that steers using its ground sensors from a worker thread.

    When a ground sensor reading crosses the threshold (120), one wheel is
    sped up as a steering correction (presumably setSpeed(left, right) —
    verify against RobotThymio2).
    """

    def __init__(self, name, host, port):
        super().__init__(name, host, port)
        self.running = False  # drive-loop flag, cleared by finish()
        self.me = None        # thread executing run(); set when run() starts

    def run(self):
        """Drive loop: poll sensors and steer until finish() is called."""
        self.me = threading.current_thread()
        speed = 10
        self.setSpeed(speed, speed)
        self.running = True
        while self.running:
            self.getSensors()
            l, r = self.groundSensorValues
            if l > 120:
                self.setSpeed(speed + 5, speed)
            elif r > 120:
                self.setSpeed(speed, speed + 5)
            else:
                self.setSpeed(speed, speed)
            time.sleep(0.01)
        self.setSpeed(0, 0)
        self.close()

    def finish(self):
        """Stop the drive loop and wait for the worker thread to exit."""
        self.running = False
        # Fix: self.me is None until run() starts; joining unconditionally
        # raised AttributeError when finish() was called before/without run().
        if self.me is not None:
            self.me.join()
        self.close()
class Thymio02(RobotThymio2):
    """Thymio robot that steers using its ground sensors from a worker thread.

    Behaves identically to Thymio01: when a ground sensor reading crosses the
    threshold (120), one wheel is sped up as a steering correction
    (presumably setSpeed(left, right) — verify against RobotThymio2).
    """

    def __init__(self, name, host, port):
        super().__init__(name, host, port)
        self.running = False  # drive-loop flag, cleared by finish()
        self.me = None        # thread executing run(); set when run() starts

    def run(self):
        """Drive loop: poll sensors and steer until finish() is called."""
        self.me = threading.current_thread()
        speed = 10
        self.setSpeed(speed, speed)
        self.running = True
        while self.running:
            self.getSensors()
            l, r = self.groundSensorValues
            if l > 120:
                self.setSpeed(speed + 5, speed)
            elif r > 120:
                self.setSpeed(speed, speed + 5)
            else:
                self.setSpeed(speed, speed)
            time.sleep(0.01)
        self.setSpeed(0, 0)
        self.close()

    def finish(self):
        """Stop the drive loop and wait for the worker thread to exit."""
        self.running = False
        # Fix: self.me is None until run() starts; joining unconditionally
        # raised AttributeError when finish() was called before/without run().
        if self.me is not None:
            self.me.join()
        self.close()
# show time
if __name__ == "__main__":
    # Guard the entry point so importing this module does not auto-run the demo.
    Pista01().run()
| 27.292453 | 130 | 0.533356 |
22544f6f7874acaa2812070f5f5451841909393d | 161 | py | Python | test/test_qe.py | vitduck/bmt | 3afa969958bf07ab4fe86868b2feda0a6bc676d6 | [
"BSD-3-Clause"
] | null | null | null | test/test_qe.py | vitduck/bmt | 3afa969958bf07ab4fe86868b2feda0a6bc676d6 | [
"BSD-3-Clause"
] | null | null | null | test/test_qe.py | vitduck/bmt | 3afa969958bf07ab4fe86868b2feda0a6bc676d6 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
from qe import Qe
# Run the Quantum ESPRESSO benchmark wrapper against the bundled 512-atom
# silicon input deck. Paths are relative to this test directory.
qe = Qe(
    prefix = '../run/QE',
    input = '../input/QE/Si_512.in' )

qe.info()     # print benchmark configuration
qe.build()    # prepare/compile the benchmark
qe.run()      # execute it
qe.summary()  # report results
| 12.384615 | 38 | 0.571429 |
ed00953008c3409002dc337dbd485ceff25aa966 | 8,169 | py | Python | delta/config/extensions.py | parkeraddison/delta | 4e4df1d160edc0e8597fc3450f3e13c0ef755a5f | [
"Apache-2.0"
] | 1 | 2021-07-04T17:39:52.000Z | 2021-07-04T17:39:52.000Z | delta/config/extensions.py | parkeraddison/delta | 4e4df1d160edc0e8597fc3450f3e13c0ef755a5f | [
"Apache-2.0"
] | null | null | null | delta/config/extensions.py | parkeraddison/delta | 4e4df1d160edc0e8597fc3450f3e13c0ef755a5f | [
"Apache-2.0"
] | null | null | null | # Copyright © 2020, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All rights reserved.
#
# The DELTA (Deep Earth Learning, Tools, and Analysis) platform is
# licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Manage extensions to DELTA.
To extend delta, add the name for your extension to the `extensions` field
in a DELTA config file. It will then be imported when DELTA loads.
The named python module should then call the appropriate registration
function (e.g., `register_layer` to register a custom Keras layer) and
the extensions can be used like existing DELTA options.
All extensions can take keyword arguments that can be specified in the config file.
"""
#pylint:disable=global-statement
import importlib
# Extension module names registered via `register_extension` but not yet
# imported; drained lazily by `__initialize` on first use.
__extensions_to_load = set()

# Registries mapping a user-facing name to the registered object.
__layers = {}      # custom Keras layers / layer factories
__readers = {}     # image reader classes
__writers = {}     # image writer classes
__losses = {}      # loss classes or functions
__metrics = {}     # metric classes
__callbacks = {}   # training callbacks (classes or factories)
__prep_funcs = {}  # preprocessing functions
def __initialize():
    """Import any extension modules that are still pending.

    Called lazily before each registry lookup so extension imports do not
    slow down module load. Popping inside the loop keeps this correct even
    if an imported extension registers further extensions.
    """
    while __extensions_to_load:
        importlib.import_module(__extensions_to_load.pop())
def register_extension(name: str):
    """Queue the python module *name* to be imported as a DELTA extension.

    For internal use --- users should use the config files.

    :param name: importable module name of the extension
    """
    __extensions_to_load.add(name)
def register_layer(layer_type: str, layer_constructor):
    """Make a custom layer available to DELTA under *layer_type*.

    :param layer_type: name the layer is registered under
    :param layer_constructor: a class extending tensorflow.keras.layers.Layer,
        or a function returning a function that maps tensors to tensors
    """
    __layers[layer_type] = layer_constructor
def register_image_reader(image_type: str, image_class):
    """Make a custom image reader available to DELTA under *image_type*.

    :param image_type: name the reader is registered under
    :param image_class: class extending delta.imagery.delta_image.DeltaImage
    """
    __readers[image_type] = image_class
def register_image_writer(image_type: str, writer_class):
    """Make a custom image writer available to DELTA under *image_type*.

    :param image_type: name the writer is registered under
    :param writer_class: class extending delta.imagery.delta_image.DeltaImageWriter
    """
    __writers[image_type] = writer_class
def register_loss(loss_type: str, custom_loss):
    """Make a custom loss function available to DELTA under *loss_type*.

    Registered losses can also be used as metrics.

    :param loss_type: name the loss is registered under
    :param custom_loss: a tensorflow.keras.losses.Loss subclass, or a function
        loss(y_true, y_pred) returning a loss tensor
    """
    __losses[loss_type] = custom_loss
def register_metric(metric_type: str, custom_metric):
    """Make a custom metric available to DELTA under *metric_type*.

    :param metric_type: name the metric is registered under
    :param custom_metric: class extending tensorflow.keras.metrics.Metric
    """
    __metrics[metric_type] = custom_metric
def register_callback(cb_type: str, cb):
    """Make a custom training callback available to DELTA under *cb_type*.

    :param cb_type: name the callback is registered under
    :param cb: class extending tensorflow.keras.callbacks.Callback, or a
        function returning one
    """
    __callbacks[cb_type] = cb
def register_preprocess(function_name: str, prep_function):
    """Make a preprocessing function available to DELTA under *function_name*.

    :param function_name: name the function is registered under
    :param prep_function: callable prep_function(data, rectangle, bands_list)
        where data is a numpy array, rectangle a
        delta.imagery.rectangle.Rectangle covering data, and bands_list the
        integer list of loaded bands; must return a numpy array
    """
    __prep_funcs[function_name] = prep_function
def layer(layer_type: str):
    """Look up the custom layer registered under *layer_type*.

    Returns None when no such layer has been registered.
    """
    __initialize()
    return __layers.get(layer_type)
def loss(loss_type: str):
    """Look up the custom loss function registered under *loss_type*.

    Returns None when no such loss has been registered.
    """
    __initialize()
    return __losses.get(loss_type)
def metric(metric_type: str):
    """Look up the custom metric registered under *metric_type*.

    Returns None when no such metric has been registered.
    """
    __initialize()
    return __metrics.get(metric_type)
def callback(cb_type: str):
    """Look up the custom training callback registered under *cb_type*.

    Returns None when no such callback has been registered.
    """
    __initialize()
    return __callbacks.get(cb_type)
def preprocess_function(prep_type: str):
    """Look up the preprocessing function registered under *prep_type*.

    Returns None when no such function has been registered.
    """
    __initialize()
    return __prep_funcs.get(prep_type)
def image_reader(reader_type: str):
    """Look up the image reader class registered under *reader_type*.

    Returns None when no such reader has been registered.
    """
    __initialize()
    return __readers.get(reader_type)
def image_writer(writer_type: str):
    """Look up the image writer class registered under *writer_type*.

    Returns None when no such writer has been registered.
    """
    __initialize()
    return __writers.get(writer_type)
def custom_objects():
    """Return all registered custom objects for tensorflow.

    The resulting dict (layers plus losses, with losses winning on name
    collisions, as before) is suitable as the ``custom_objects`` argument
    to ``load_model``.
    """
    __initialize()
    return {**__layers, **__losses}
| 26.522727 | 114 | 0.668013 |
81997ed3e6c2e9ee31b711f2652ed06999dab90a | 57,609 | py | Python | sdk/appplatform/azure-mgmt-appplatform/azure/mgmt/appplatform/v2020_07_01/operations/_deployments_operations.py | mohamedshabanofficial/azure-sdk-for-python | 81c585f310cd2ec23d2ad145173958914a075a58 | [
"MIT"
] | 2 | 2021-03-24T06:26:11.000Z | 2021-04-18T15:55:59.000Z | sdk/appplatform/azure-mgmt-appplatform/azure/mgmt/appplatform/v2020_07_01/operations/_deployments_operations.py | mohamedshabanofficial/azure-sdk-for-python | 81c585f310cd2ec23d2ad145173958914a075a58 | [
"MIT"
] | 2 | 2021-11-03T06:10:36.000Z | 2021-12-01T06:29:39.000Z | sdk/appplatform/azure-mgmt-appplatform/azure/mgmt/appplatform/v2020_07_01/operations/_deployments_operations.py | mohamedshabanofficial/azure-sdk-for-python | 81c585f310cd2ec23d2ad145173958914a075a58 | [
"MIT"
] | 1 | 2021-05-19T02:55:10.000Z | 2021-05-19T02:55:10.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class DeploymentsOperations(object):
"""DeploymentsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.appplatform.v2020_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer):
        # Pipeline client used to issue HTTP requests.
        self._client = client
        # msrest serializer/deserializer pair for request and response bodies.
        self._serialize = serializer
        self._deserialize = deserializer
        # Service configuration (subscription id, polling interval, ...).
        self._config = config
    # NOTE(review): AutoRest-generated operation — regenerate from the swagger
    # rather than hand-editing.
    def get(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.DeploymentResource"
        """Get a Deployment and its properties.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param service_name: The name of the Service resource.
        :type service_name: str
        :param app_name: The name of the App resource.
        :type app_name: str
        :param deployment_name: The name of the Deployment resource.
        :type deployment_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DeploymentResource, or the result of cls(response)
        :rtype: ~azure.mgmt.appplatform.v2020_07_01.models.DeploymentResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeploymentResource"]
        # Callers may extend/override how HTTP error codes map to exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-07-01"
        accept = "application/json"

        # Construct URL from the operation's metadata template.
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'appName': self._serialize.url("app_name", app_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 is the only documented success status for this operation.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('DeploymentResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'}  # type: ignore
    # Initial (non-polling) call of the create-or-update LRO; wrapped by
    # begin_create_or_update below. AutoRest-generated.
    def _create_or_update_initial(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        deployment_resource,  # type: "_models.DeploymentResource"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.DeploymentResource"
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeploymentResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-07-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'appName': self._serialize.url("app_name", app_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the request body and issue the PUT.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(deployment_resource, 'DeploymentResource')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # 200 (OK), 201 (Created) and 202 (Accepted) all carry a
        # DeploymentResource body.
        if response.status_code == 200:
            deserialized = self._deserialize('DeploymentResource', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('DeploymentResource', pipeline_response)

        if response.status_code == 202:
            deserialized = self._deserialize('DeploymentResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'}  # type: ignore
    # Long-running-operation wrapper around _create_or_update_initial.
    # AutoRest-generated.
    def begin_create_or_update(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        deployment_resource,  # type: "_models.DeploymentResource"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.DeploymentResource"]
        """Create a new Deployment or update an existing Deployment.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param service_name: The name of the Service resource.
        :type service_name: str
        :param app_name: The name of the App resource.
        :type app_name: str
        :param deployment_name: The name of the Deployment resource.
        :type deployment_name: str
        :param deployment_resource: Parameters for the create or update operation.
        :type deployment_resource: ~azure.mgmt.appplatform.v2020_07_01.models.DeploymentResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either DeploymentResource or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2020_07_01.models.DeploymentResource]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeploymentResource"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # When resuming from a continuation token the initial call is skipped;
        # raw_result stays unbound and the from_continuation_token path below
        # is taken instead.
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                service_name=service_name,
                app_name=app_name,
                deployment_name=deployment_name,
                deployment_resource=deployment_resource,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the final LRO response into the resource model.
            deserialized = self._deserialize('DeploymentResource', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'appName': self._serialize.url("app_name", app_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'}  # type: ignore
    # Initial (non-polling) call of the delete LRO; wrapped by begin_delete
    # below. AutoRest-generated.
    def _delete_initial(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-07-01"
        accept = "application/json"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'appName': self._serialize.url("app_name", app_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200/202/204 are accepted; delete returns no body to deserialize.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'}  # type: ignore
    # Long-running-operation wrapper around _delete_initial.
    # AutoRest-generated.
    def begin_delete(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Operation to delete a Deployment.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param service_name: The name of the Service resource.
        :type service_name: str
        :param app_name: The name of the App resource.
        :type app_name: str
        :param deployment_name: The name of the Deployment resource.
        :type deployment_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # When resuming from a continuation token the initial call is skipped;
        # raw_result stays unbound and the from_continuation_token path below
        # is taken instead.
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                service_name=service_name,
                app_name=app_name,
                deployment_name=deployment_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Delete produces no body; only invoke the custom response hook.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'appName': self._serialize.url("app_name", app_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'}  # type: ignore
def _update_initial(
    self,
    resource_group_name,  # type: str
    service_name,  # type: str
    app_name,  # type: str
    deployment_name,  # type: str
    deployment_resource,  # type: "_models.DeploymentResource"
    **kwargs  # type: Any
):
    # type: (...) -> "_models.DeploymentResource"
    """Initial PATCH request of the long-running update operation.

    Serializes ``deployment_resource`` as the request body and deserializes
    the immediate response. Callers normally use :meth:`begin_update` rather
    than calling this directly.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeploymentResource"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-07-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self._update_initial.metadata['url']  # type: ignore
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serviceName': self._serialize.url("service_name", service_name, 'str'),
        'appName': self._serialize.url("app_name", app_name, 'str'),
        'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(deployment_resource, 'DeploymentResource')
    body_content_kwargs['content'] = body_content
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # Both 200 (completed) and 202 (accepted) carry a DeploymentResource body.
    if response.status_code == 200:
        deserialized = self._deserialize('DeploymentResource', pipeline_response)

    if response.status_code == 202:
        deserialized = self._deserialize('DeploymentResource', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'}  # type: ignore
def begin_update(
    self,
    resource_group_name,  # type: str
    service_name,  # type: str
    app_name,  # type: str
    deployment_name,  # type: str
    deployment_resource,  # type: "_models.DeploymentResource"
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller["_models.DeploymentResource"]
    """Operation to update an exiting Deployment.

    Issues the initial PATCH request (unless resuming from a saved
    continuation token) and wraps the long-running operation in an LROPoller.

    :param resource_group_name: The name of the resource group that contains the resource. You can
     obtain this value from the Azure Resource Manager API or the portal.
    :type resource_group_name: str
    :param service_name: The name of the Service resource.
    :type service_name: str
    :param app_name: The name of the App resource.
    :type app_name: str
    :param deployment_name: The name of the Deployment resource.
    :type deployment_name: str
    :param deployment_resource: Parameters for the update operation.
    :type deployment_resource: ~azure.mgmt.appplatform.v2020_07_01.models.DeploymentResource
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either DeploymentResource or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2020_07_01.models.DeploymentResource]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeploymentResource"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fresh operation: fire the initial PATCH. The identity lambda keeps
        # the raw pipeline response so the poller can read the LRO headers.
        raw_result = self._update_initial(
            resource_group_name=resource_group_name,
            service_name=service_name,
            app_name=app_name,
            deployment_name=deployment_name,
            deployment_resource=deployment_resource,
            cls=lambda x,y,z: x,
            **kwargs
        )

    # These kwargs were only meaningful for the initial request; drop them so
    # they are not forwarded to the polling method below.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Final response carries the updated DeploymentResource.
        deserialized = self._deserialize('DeploymentResource', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # Needed by ARMPolling to resolve relative URLs returned by the service.
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serviceName': self._serialize.url("service_name", service_name, 'str'),
        'appName': self._serialize.url("app_name", app_name, 'str'),
        'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
    }
    if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously saved poller instead of starting a new operation.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'}  # type: ignore
def list(
    self,
    resource_group_name,  # type: str
    service_name,  # type: str
    app_name,  # type: str
    version=None,  # type: Optional[List[str]]
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["_models.DeploymentResourceCollection"]
    """Handles requests to list all resources in an App.

    Returns a lazy pager; each page is fetched on demand by following the
    service-provided ``next_link``.

    :param resource_group_name: The name of the resource group that contains the resource. You can
     obtain this value from the Azure Resource Manager API or the portal.
    :type resource_group_name: str
    :param service_name: The name of the Service resource.
    :type service_name: str
    :param app_name: The name of the App resource.
    :type app_name: str
    :param version: Version of the deployments to be listed.
    :type version: list[str]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either DeploymentResourceCollection or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2020_07_01.models.DeploymentResourceCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeploymentResourceCollection"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-07-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build either the first-page request (full URL + query) or a
        # follow-up request to the opaque next_link returned by the service.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.list.metadata['url']  # type: ignore
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'serviceName': self._serialize.url("service_name", service_name, 'str'),
                'appName': self._serialize.url("app_name", app_name, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            if version is not None:
                # Multi-value query parameter: serialize each element; None
                # elements become empty strings.
                query_parameters['version'] = [self._serialize.query("version", q, 'str') if q is not None else '' for q in version]
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # next_link already embeds all query parameters.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Split one page into (next_link, iterator over its items).
        deserialized = self._deserialize('DeploymentResourceCollection', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch one page, mapping HTTP errors to typed exceptions.
        request = prepare_request(next_link)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments'}  # type: ignore
def list_for_cluster(
    self,
    resource_group_name,  # type: str
    service_name,  # type: str
    version=None,  # type: Optional[List[str]]
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["_models.DeploymentResourceCollection"]
    """List deployments for a certain service.

    Cluster-wide variant of :meth:`list` (no ``app_name`` scope). Returns a
    lazy pager; each page is fetched on demand via the service ``next_link``.

    :param resource_group_name: The name of the resource group that contains the resource. You can
     obtain this value from the Azure Resource Manager API or the portal.
    :type resource_group_name: str
    :param service_name: The name of the Service resource.
    :type service_name: str
    :param version: Version of the deployments to be listed.
    :type version: list[str]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either DeploymentResourceCollection or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2020_07_01.models.DeploymentResourceCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeploymentResourceCollection"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-07-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build either the first-page request (full URL + query) or a
        # follow-up request to the opaque next_link returned by the service.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.list_for_cluster.metadata['url']  # type: ignore
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'serviceName': self._serialize.url("service_name", service_name, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            if version is not None:
                # Multi-value query parameter: serialize each element; None
                # elements become empty strings.
                query_parameters['version'] = [self._serialize.query("version", q, 'str') if q is not None else '' for q in version]
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # next_link already embeds all query parameters.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Split one page into (next_link, iterator over its items).
        deserialized = self._deserialize('DeploymentResourceCollection', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch one page, mapping HTTP errors to typed exceptions.
        request = prepare_request(next_link)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list_for_cluster.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/deployments'}  # type: ignore
def _start_initial(
    self,
    resource_group_name,  # type: str
    service_name,  # type: str
    app_name,  # type: str
    deployment_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Fire the initial POST of the start-deployment long-running operation.

    Callers normally go through :meth:`begin_start`; this only sends the
    request and validates the immediate status code (200 or 202).
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-07-01"
    accept = "application/json"

    # Expand the templated operation URL with the serialized path segments.
    request_url = self._client.format_url(
        self._start_initial.metadata['url'],  # type: ignore
        subscriptionId=self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serviceName=self._serialize.url("service_name", service_name, 'str'),
        appName=self._serialize.url("app_name", app_name, 'str'),
        deploymentName=self._serialize.url("deployment_name", deployment_name, 'str'),
    )
    query = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    headers = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    pipeline_response = self._client._pipeline.run(
        self._client.post(request_url, query, headers), stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # No response body; only a user-supplied ``cls`` callback yields a value.
    if cls:
        return cls(pipeline_response, None, {})
_start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/start'}  # type: ignore
def begin_start(
    self,
    resource_group_name,  # type: str
    service_name,  # type: str
    app_name,  # type: str
    deployment_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller[None]
    """Start the deployment.

    Issues the initial POST request (unless resuming from a saved
    continuation token) and wraps the long-running operation in an LROPoller.

    :param resource_group_name: The name of the resource group that contains the resource. You can
     obtain this value from the Azure Resource Manager API or the portal.
    :type resource_group_name: str
    :param service_name: The name of the Service resource.
    :type service_name: str
    :param app_name: The name of the App resource.
    :type app_name: str
    :param deployment_name: The name of the Deployment resource.
    :type deployment_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fresh operation: fire the initial POST. The identity lambda keeps
        # the raw pipeline response so the poller can read the LRO headers.
        raw_result = self._start_initial(
            resource_group_name=resource_group_name,
            service_name=service_name,
            app_name=app_name,
            deployment_name=deployment_name,
            cls=lambda x,y,z: x,
            **kwargs
        )

    # These kwargs were only meaningful for the initial request; drop them so
    # they are not forwarded to the polling method below.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # The operation returns no body; only a user-supplied ``cls`` callback
        # can turn the final response into a value (otherwise None).
        if cls:
            return cls(pipeline_response, None, {})

    # Needed by ARMPolling to resolve relative URLs returned by the service.
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serviceName': self._serialize.url("service_name", service_name, 'str'),
        'appName': self._serialize.url("app_name", app_name, 'str'),
        'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
    }
    if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously saved poller instead of starting a new operation.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/start'}  # type: ignore
def _stop_initial(
    self,
    resource_group_name,  # type: str
    service_name,  # type: str
    app_name,  # type: str
    deployment_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Fire the initial POST of the stop-deployment long-running operation.

    Callers normally go through :meth:`begin_stop`; this only sends the
    request and validates the immediate status code (200 or 202).
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-07-01"
    accept = "application/json"

    # Expand the templated operation URL with the serialized path segments.
    request_url = self._client.format_url(
        self._stop_initial.metadata['url'],  # type: ignore
        subscriptionId=self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serviceName=self._serialize.url("service_name", service_name, 'str'),
        appName=self._serialize.url("app_name", app_name, 'str'),
        deploymentName=self._serialize.url("deployment_name", deployment_name, 'str'),
    )
    query = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    headers = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    pipeline_response = self._client._pipeline.run(
        self._client.post(request_url, query, headers), stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # No response body; only a user-supplied ``cls`` callback yields a value.
    if cls:
        return cls(pipeline_response, None, {})
_stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/stop'}  # type: ignore
def begin_stop(
    self,
    resource_group_name,  # type: str
    service_name,  # type: str
    app_name,  # type: str
    deployment_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller[None]
    """Stop the deployment.

    Issues the initial POST request (unless resuming from a saved
    continuation token) and wraps the long-running operation in an LROPoller.

    :param resource_group_name: The name of the resource group that contains the resource. You can
     obtain this value from the Azure Resource Manager API or the portal.
    :type resource_group_name: str
    :param service_name: The name of the Service resource.
    :type service_name: str
    :param app_name: The name of the App resource.
    :type app_name: str
    :param deployment_name: The name of the Deployment resource.
    :type deployment_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fresh operation: fire the initial POST. The identity lambda keeps
        # the raw pipeline response so the poller can read the LRO headers.
        raw_result = self._stop_initial(
            resource_group_name=resource_group_name,
            service_name=service_name,
            app_name=app_name,
            deployment_name=deployment_name,
            cls=lambda x,y,z: x,
            **kwargs
        )

    # These kwargs were only meaningful for the initial request; drop them so
    # they are not forwarded to the polling method below.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # The operation returns no body; only a user-supplied ``cls`` callback
        # can turn the final response into a value (otherwise None).
        if cls:
            return cls(pipeline_response, None, {})

    # Needed by ARMPolling to resolve relative URLs returned by the service.
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serviceName': self._serialize.url("service_name", service_name, 'str'),
        'appName': self._serialize.url("app_name", app_name, 'str'),
        'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
    }
    if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously saved poller instead of starting a new operation.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/stop'}  # type: ignore
def _restart_initial(
    self,
    resource_group_name,  # type: str
    service_name,  # type: str
    app_name,  # type: str
    deployment_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Fire the initial POST of the restart-deployment long-running operation.

    Callers normally go through :meth:`begin_restart`; this only sends the
    request and validates the immediate status code (200 or 202).
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-07-01"
    accept = "application/json"

    # Expand the templated operation URL with the serialized path segments.
    request_url = self._client.format_url(
        self._restart_initial.metadata['url'],  # type: ignore
        subscriptionId=self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serviceName=self._serialize.url("service_name", service_name, 'str'),
        appName=self._serialize.url("app_name", app_name, 'str'),
        deploymentName=self._serialize.url("deployment_name", deployment_name, 'str'),
    )
    query = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    headers = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    pipeline_response = self._client._pipeline.run(
        self._client.post(request_url, query, headers), stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # No response body; only a user-supplied ``cls`` callback yields a value.
    if cls:
        return cls(pipeline_response, None, {})
_restart_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/restart'}  # type: ignore
def begin_restart(
    self,
    resource_group_name,  # type: str
    service_name,  # type: str
    app_name,  # type: str
    deployment_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller[None]
    """Restart the deployment.

    Issues the initial POST request (unless resuming from a saved
    continuation token) and wraps the long-running operation in an LROPoller.

    :param resource_group_name: The name of the resource group that contains the resource. You can
     obtain this value from the Azure Resource Manager API or the portal.
    :type resource_group_name: str
    :param service_name: The name of the Service resource.
    :type service_name: str
    :param app_name: The name of the App resource.
    :type app_name: str
    :param deployment_name: The name of the Deployment resource.
    :type deployment_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fresh operation: fire the initial POST. The identity lambda keeps
        # the raw pipeline response so the poller can read the LRO headers.
        raw_result = self._restart_initial(
            resource_group_name=resource_group_name,
            service_name=service_name,
            app_name=app_name,
            deployment_name=deployment_name,
            cls=lambda x,y,z: x,
            **kwargs
        )

    # These kwargs were only meaningful for the initial request; drop them so
    # they are not forwarded to the polling method below.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # The operation returns no body; only a user-supplied ``cls`` callback
        # can turn the final response into a value (otherwise None).
        if cls:
            return cls(pipeline_response, None, {})

    # Needed by ARMPolling to resolve relative URLs returned by the service.
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serviceName': self._serialize.url("service_name", service_name, 'str'),
        'appName': self._serialize.url("app_name", app_name, 'str'),
        'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
    }
    if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously saved poller instead of starting a new operation.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/restart'}  # type: ignore
def get_log_file_url(
    self,
    resource_group_name,  # type: str
    service_name,  # type: str
    app_name,  # type: str
    deployment_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Optional["_models.LogFileUrlResponse"]
    """Get deployment log file URL.

    :param resource_group_name: The name of the resource group that contains the resource. You can
     obtain this value from the Azure Resource Manager API or the portal.
    :type resource_group_name: str
    :param service_name: The name of the Service resource.
    :type service_name: str
    :param app_name: The name of the App resource.
    :type app_name: str
    :param deployment_name: The name of the Deployment resource.
    :type deployment_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: LogFileUrlResponse, or the result of cls(response)
    :rtype: ~azure.mgmt.appplatform.v2020_07_01.models.LogFileUrlResponse or None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.LogFileUrlResponse"]]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-07-01"
    accept = "application/json"

    # Expand the templated operation URL with the serialized path segments.
    request_url = self._client.format_url(
        self.get_log_file_url.metadata['url'],  # type: ignore
        subscriptionId=self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serviceName=self._serialize.url("service_name", service_name, 'str'),
        appName=self._serialize.url("app_name", app_name, 'str'),
        deploymentName=self._serialize.url("deployment_name", deployment_name, 'str'),
    )
    query = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    headers = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    pipeline_response = self._client._pipeline.run(
        self._client.post(request_url, query, headers), stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200, 204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # 204 means there is no log file; a body is only present on 200.
    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('LogFileUrlResponse', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_log_file_url.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/getLogFileUrl'}  # type: ignore
| 50.269634 | 236 | 0.656686 |
57d7b5a9d2fc3f23ae8a22807bfef45e49d20cd3 | 2,065 | py | Python | tools/devshell/contrib/gerrit-submit-lib/gerrit_util_test.py | EnderNightLord-ChromeBook/zircon-rpi | b09b1eb3aa7a127c65568229fe10edd251869283 | [
"BSD-2-Clause"
] | 14 | 2020-10-25T05:48:36.000Z | 2021-09-20T02:46:20.000Z | tools/devshell/contrib/gerrit-submit-lib/gerrit_util_test.py | DamieFC/fuchsia | f78a4a1326f4a4bb5834500918756173c01bab4f | [
"BSD-2-Clause"
] | null | null | null | tools/devshell/contrib/gerrit-submit-lib/gerrit_util_test.py | DamieFC/fuchsia | f78a4a1326f4a4bb5834500918756173c01bab4f | [
"BSD-2-Clause"
] | 2 | 2020-10-25T01:13:49.000Z | 2020-10-26T02:32:13.000Z | #!/usr/bin/python3
#
# Copyright 2020 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
import http.server
import threading
from unittest import mock
from typing import List, Optional
import gerrit_util
# Implements logic to bring up a simple HTTP server that responds to a single JSON
# request and shuts down again.
class JsonResponder:
    """A throwaway local HTTP server that answers every POST with a fixed JSON body.

    The server runs on a daemon thread on an ephemeral localhost port (exposed
    as ``self.port``). The path of the last request handled is recorded in
    ``self.url``.
    """

    def __init__(self, response: bytes):
        self.response: bytes = response
        self.got_request: bool = False
        self.url: Optional[str] = None

        # Bind to port 0 so the OS assigns a free ephemeral port.
        self._server = http.server.HTTPServer(('localhost', 0), self._make_handler())
        self._server_thread = threading.Thread(
            target=self._server.serve_forever, args=(), daemon=True)
        self._server_thread.start()
        self.port = self._server.server_port

    def __del__(self):
        # Stop serving and wait for the background thread to finish.
        self._server.shutdown()
        self._server_thread.join()

    def _make_handler(self):
        # The handler class needs a reference back to this responder instance;
        # close over it under the name "outer".
        outer = self

        # Build a Handler class whose instances reply to POST requests with the
        # canned response, prefixed by Gerrit's anti-XSSI JSON header line.
        class Handler(http.server.BaseHTTPRequestHandler):
            def do_POST(self) -> None:
                self.send_response(200)
                self.send_header('Content-type', 'javascript/json')
                self.end_headers()
                self.wfile.write(b")]}'\n")  # Write the JSON header.
                self.wfile.write(outer.response)
                outer.url = self.path

        return Handler
@mock.patch('gerrit_util.GERRIT_PROTOCOL', 'http')
class TestGerritUtil(unittest.TestCase):
    """End-to-end test of gerrit_util request plumbing against a local server.

    The GERRIT_PROTOCOL patch forces plain HTTP so the fake server needs no TLS.
    """

    def test_post_json(self) -> None:
        # Test plumbing through GerritUtil to a HTTP server and back again.
        # SetReview must POST to the fake server and parse the canned JSON
        # response without raising.
        responder = JsonResponder(b'{"labels": {"Commit-Queue": 2}}')
        gerrit_util.SetReview(
            'localhost:%d' % responder.port, '12345', labels={'Commit-Queue': 2}, notify=False)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 31.287879 | 91 | 0.708475 |
52feb723053e85d4cf346f6030c5816608600dc2 | 1,477 | py | Python | test/test_update_contact_by_id_parameters.py | dialmycalls/python-sdk-v2 | ab6ac61d305ea1729b618bc2530d6101136aa6ea | [
"Apache-2.0"
] | 2 | 2020-07-29T08:51:36.000Z | 2021-01-21T11:18:24.000Z | test/test_update_contact_by_id_parameters.py | dialmycalls/python-sdk-v2 | ab6ac61d305ea1729b618bc2530d6101136aa6ea | [
"Apache-2.0"
] | null | null | null | test/test_update_contact_by_id_parameters.py | dialmycalls/python-sdk-v2 | ab6ac61d305ea1729b618bc2530d6101136aa6ea | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
DialMyCalls API
The DialMyCalls API
OpenAPI spec version: 2.0.1
Contact: support@dialmycalls.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import os
import sys
import unittest
import dialmycalls_client
from dialmycalls_client.rest import ApiException
from dialmycalls_client.models.update_contact_by_id_parameters import UpdateContactByIdParameters
class TestUpdateContactByIdParameters(unittest.TestCase):
    """UpdateContactByIdParameters unit test stubs (generated by swagger-codegen)."""

    def setUp(self):
        # No fixtures needed for this generated stub.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testUpdateContactByIdParameters(self):
        """
        Test UpdateContactByIdParameters

        Smoke-test that the generated model class can be constructed with defaults.
        """
        model = dialmycalls_client.models.update_contact_by_id_parameters.UpdateContactByIdParameters()
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 27.351852 | 103 | 0.742722 |
cb195865e64372caa6d4d7118fca5051980aca4e | 44,830 | py | Python | pool/pool.py | chiabatta/pool-reference | 36e2890fed21d4c97852398d3a21a8781140c53b | [
"Apache-2.0"
] | 1 | 2021-06-22T08:29:22.000Z | 2021-06-22T08:29:22.000Z | pool/pool.py | chiabatta/pool-reference | 36e2890fed21d4c97852398d3a21a8781140c53b | [
"Apache-2.0"
] | null | null | null | pool/pool.py | chiabatta/pool-reference | 36e2890fed21d4c97852398d3a21a8781140c53b | [
"Apache-2.0"
] | null | null | null | import asyncio
import logging
import pathlib
import time
import traceback
from asyncio import Task
from math import floor
from typing import Dict, Optional, Set, List, Tuple, Callable
from blspy import AugSchemeMPL, G1Element
from chia.consensus.block_rewards import calculate_pool_reward
from chia.pools.pool_wallet_info import PoolState, PoolSingletonState
from chia.protocols.pool_protocol import (
PoolErrorCode,
PostPartialRequest,
PostPartialResponse,
PostFarmerRequest,
PostFarmerResponse,
PutFarmerRequest,
PutFarmerResponse,
POOL_PROTOCOL_VERSION,
)
from chia.rpc.wallet_rpc_client import WalletRpcClient
from chia.types.blockchain_format.coin import Coin
from chia.types.coin_record import CoinRecord
from chia.types.coin_spend import CoinSpend
from chia.util.bech32m import decode_puzzle_hash
from chia.consensus.constants import ConsensusConstants
from chia.util.ints import uint8, uint16, uint32, uint64
from chia.util.byte_types import hexstr_to_bytes
from chia.util.default_root import DEFAULT_ROOT_PATH
from chia.rpc.full_node_rpc_client import FullNodeRpcClient
from chia.full_node.signage_point import SignagePoint
from chia.types.end_of_slot_bundle import EndOfSubSlotBundle
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.consensus.pot_iterations import calculate_iterations_quality
from chia.util.lru_cache import LRUCache
from chia.util.chia_logging import initialize_logging
from chia.wallet.transaction_record import TransactionRecord
from chia.pools.pool_puzzles import (
get_most_recent_singleton_coin_from_coin_spend,
get_delayed_puz_info_from_launcher_spend,
launcher_id_to_p2_puzzle_hash,
)
from .difficulty_adjustment import get_new_difficulty
from .singleton import create_absorb_transaction, get_singleton_state, get_coin_spend, get_farmed_height
from .store.abstract import AbstractPoolStore
from .store.sqlite_store import SqlitePoolStore
from .record import FarmerRecord
from .util import error_dict, RequestMetadata
class Pool:
    def __init__(
        self,
        config: Dict,
        pool_config: Dict,
        constants: ConsensusConstants,
        pool_store: Optional[AbstractPoolStore] = None,
        difficulty_function: Callable = get_new_difficulty,
    ):
        """Wire up the pool from its configuration.

        :param config: chia-blockchain root config (hostnames, ports).
        :param pool_config: pool-specific config dictionary (pool config.yaml).
        :param constants: consensus constants for the target network.
        :param pool_store: optional store override; defaults to the backend
            selected by ``pool_config["store"]`` (Sqlite unless MariaDB is chosen).
        :param difficulty_function: pluggable per-farmer difficulty adjustment.
        """
        # One background task per launcher_id currently being resolved on-chain.
        self.follow_singleton_tasks: Dict[bytes32, asyncio.Task] = {}
        # NOTE: this is the logging *module*, not a named logger.
        self.log = logging
        # If you want to log to a file: use filename='example.log', encoding='utf-8'
        self.log.basicConfig(level=logging.INFO)
        initialize_logging("pool", pool_config["logging"], pathlib.Path(pool_config["logging"]["log_path"]))

        # Set our pool info here
        self.info_default_res = pool_config["pool_info"]["default_res"]
        self.info_name = pool_config["pool_info"]["name"]
        self.info_logo_url = pool_config["pool_info"]["logo_url"]
        self.info_description = pool_config["pool_info"]["description"]
        self.welcome_message = pool_config["welcome_message"]

        self.config = config
        self.constants = constants

        # MariaDB is imported lazily so the dependency is only required when used.
        if pool_config.get('store') == "MariadbPoolStore":
            from .store.mariadb_store import MariadbPoolStore

            self.store: AbstractPoolStore = pool_store or MariadbPoolStore()
        else:
            self.store: AbstractPoolStore = pool_store or SqlitePoolStore()

        self.pool_fee = pool_config["pool_fee"]

        # This number should be held constant and be consistent for every pool in the network. DO NOT CHANGE
        self.iters_limit = self.constants.POOL_SUB_SLOT_ITERS // 64

        # This number should not be changed, since users will put this into their singletons
        self.relative_lock_height = uint32(pool_config["relative_lock_height"])

        # TODO(pool): potentially tweak these numbers for security and performance
        # This is what the user enters into the input field. This exact value will be stored on the blockchain
        self.pool_url = pool_config["pool_url"]
        self.min_difficulty = uint64(pool_config["min_difficulty"])  # 10 difficulty is about 1 proof a day per plot
        self.default_difficulty: uint64 = uint64(pool_config["default_difficulty"])
        self.difficulty_function: Callable = difficulty_function

        # Queue of (partial, time_received, points) awaiting delayed confirmation.
        self.pending_point_partials: Optional[asyncio.Queue] = None
        self.recent_points_added: LRUCache = LRUCache(20000)

        # The time in minutes for an authentication token to be valid. See "Farmer authentication" in SPECIFICATION.md
        self.authentication_token_timeout: uint8 = pool_config["authentication_token_timeout"]

        # This is where the block rewards will get paid out to. The pool needs to support this address forever,
        # since the farmers will encode it into their singleton on the blockchain. WARNING: the default pool code
        # completely spends this wallet and distributes it to users, do don't put any additional funds in here
        # that you do not want to distribute. Even if the funds are in a different address than this one, they WILL
        # be spent by this code! So only put funds that you want to distribute to pool members here.
        # Using 2164248527
        self.default_target_puzzle_hash: bytes32 = bytes32(decode_puzzle_hash(pool_config["default_target_address"]))

        # The pool fees will be sent to this address. This MUST be on a different key than the target_puzzle_hash,
        # otherwise, the fees will be sent to the users. Using 690783650
        self.pool_fee_puzzle_hash: bytes32 = bytes32(decode_puzzle_hash(pool_config["pool_fee_address"]))

        # This is the wallet fingerprint and ID for the wallet spending the funds from `self.default_target_puzzle_hash`
        self.wallet_fingerprint = pool_config["wallet_fingerprint"]
        self.wallet_id = pool_config["wallet_id"]

        # We need to check for slow farmers. If farmers cannot submit proofs in time, they won't be able to win
        # any rewards either. This number can be tweaked to be more or less strict. More strict ensures everyone
        # gets high rewards, but it might cause some of the slower farmers to not be able to participate in the pool.
        self.partial_time_limit: int = pool_config["partial_time_limit"]

        # There is always a risk of a reorg, in which case we cannot reward farmers that submitted partials in that
        # reorg. That is why we have a time delay before changing any account points.
        self.partial_confirmation_delay: int = pool_config["partial_confirmation_delay"]

        # Only allow PUT /farmer per launcher_id every n seconds to prevent difficulty change attacks.
        self.farmer_update_blocked: set = set()
        self.farmer_update_cooldown_seconds: int = 600

        # These are the phs that we want to look for on chain, that we can claim to our pool
        self.scan_p2_singleton_puzzle_hashes: Set[bytes32] = set()

        # Don't scan anything before this height, for efficiency (for example pool start date)
        self.scan_start_height: uint32 = uint32(pool_config["scan_start_height"])

        # Interval for scanning and collecting the pool rewards
        self.collect_pool_rewards_interval = pool_config["collect_pool_rewards_interval"]

        # After this many confirmations, a transaction is considered final and irreversible
        self.confirmation_security_threshold = pool_config["confirmation_security_threshold"]

        # Interval for making payout transactions to farmers
        self.payment_interval = pool_config["payment_interval"]

        # We will not make transactions with more targets than this, to ensure our transaction gets into the blockchain
        # faster.
        self.max_additions_per_transaction = pool_config["max_additions_per_transaction"]

        # This is the list of payments that we have not sent yet, to farmers
        self.pending_payments: Optional[asyncio.Queue] = None

        # Keeps track of the latest state of our node
        self.blockchain_state = {"peak": None}

        # Whether or not the wallet is synced (required to make payments)
        self.wallet_synced = False

        # We target these many partials for this number of seconds. We adjust after receiving this many partials.
        self.number_of_partials_target: int = pool_config["number_of_partials_target"]
        self.time_target: int = pool_config["time_target"]

        # Tasks (infinite While loops) for different purposes
        self.confirm_partials_loop_task: Optional[asyncio.Task] = None
        self.collect_pool_rewards_loop_task: Optional[asyncio.Task] = None
        self.create_payment_loop_task: Optional[asyncio.Task] = None
        self.submit_payment_loop_task: Optional[asyncio.Task] = None
        self.get_peak_loop_task: Optional[asyncio.Task] = None

        self.node_rpc_client: Optional[FullNodeRpcClient] = None
        self.node_rpc_port = pool_config["node_rpc_port"]
        self.wallet_rpc_client: Optional[WalletRpcClient] = None
        self.wallet_rpc_port = pool_config["wallet_rpc_port"]
    async def start(self):
        """Connect to the store and RPC services, then launch the background loops.

        Raises ValueError if the configured wallet fingerprint cannot be
        logged in to the wallet service.
        """
        await self.store.connect()
        self.pending_point_partials = asyncio.Queue()

        self_hostname = self.config["self_hostname"]
        self.node_rpc_client = await FullNodeRpcClient.create(
            self_hostname, uint16(self.node_rpc_port), DEFAULT_ROOT_PATH, self.config
        )
        self.wallet_rpc_client = await WalletRpcClient.create(
            self.config["self_hostname"], uint16(self.wallet_rpc_port), DEFAULT_ROOT_PATH, self.config
        )

        self.blockchain_state = await self.node_rpc_client.get_blockchain_state()
        # Log into the wallet that pays out rewards; fail fast on a bad fingerprint.
        res = await self.wallet_rpc_client.log_in_and_skip(fingerprint=self.wallet_fingerprint)
        if not res["success"]:
            raise ValueError(f"Error logging in: {res['error']}. Make sure your config fingerprint is correct.")
        self.log.info(f"Logging in: {res}")
        res = await self.wallet_rpc_client.get_wallet_balance(self.wallet_id)
        self.log.info(f"Obtaining balance: {res}")

        self.scan_p2_singleton_puzzle_hashes = await self.store.get_pay_to_singleton_phs()

        # Kick off the long-running maintenance loops.
        self.confirm_partials_loop_task = asyncio.create_task(self.confirm_partials_loop())
        self.collect_pool_rewards_loop_task = asyncio.create_task(self.collect_pool_rewards_loop())
        self.create_payment_loop_task = asyncio.create_task(self.create_payment_loop())
        self.submit_payment_loop_task = asyncio.create_task(self.submit_payment_loop())
        self.get_peak_loop_task = asyncio.create_task(self.get_peak_loop())

        self.pending_payments = asyncio.Queue()
async def stop(self):
if self.confirm_partials_loop_task is not None:
self.confirm_partials_loop_task.cancel()
if self.collect_pool_rewards_loop_task is not None:
self.collect_pool_rewards_loop_task.cancel()
if self.create_payment_loop_task is not None:
self.create_payment_loop_task.cancel()
if self.submit_payment_loop_task is not None:
self.submit_payment_loop_task.cancel()
if self.get_peak_loop_task is not None:
self.get_peak_loop_task.cancel()
self.wallet_rpc_client.close()
await self.wallet_rpc_client.await_closed()
self.node_rpc_client.close()
await self.node_rpc_client.await_closed()
await self.store.connection.close()
    async def get_peak_loop(self):
        """
        Periodically contacts the full node to get the latest state of the blockchain
        (cached in ``self.blockchain_state``) and refreshes the wallet sync flag.
        """
        while True:
            try:
                self.blockchain_state = await self.node_rpc_client.get_blockchain_state()
                self.wallet_synced = await self.wallet_rpc_client.get_synced()
                await asyncio.sleep(30)
            except asyncio.CancelledError:
                self.log.info("Cancelled get_peak_loop, closing")
                return
            except Exception as e:
                # Keep polling even after a transient RPC failure.
                self.log.error(f"Unexpected error in get_peak_loop: {e}")
                await asyncio.sleep(30)
    async def collect_pool_rewards_loop(self):
        """
        Iterates through the blockchain, looking for pool rewards, and claims them, creating a transaction to the
        pool's puzzle_hash.

        Only coinbase coins buried by at least ``confirmation_security_threshold``
        blocks are claimed, to avoid losing claims to reorgs.
        """
        while True:
            try:
                # Skip the cycle while the full node is still syncing.
                if not self.blockchain_state["sync"]["synced"]:
                    await asyncio.sleep(60)
                    continue

                scan_phs: List[bytes32] = list(self.scan_p2_singleton_puzzle_hashes)
                peak_height = self.blockchain_state["peak"].height

                # Only get puzzle hashes with a certain number of confirmations or more, to avoid reorg issues
                coin_records: List[CoinRecord] = await self.node_rpc_client.get_coin_records_by_puzzle_hashes(
                    scan_phs,
                    include_spent_coins=False,
                    start_height=self.scan_start_height,
                )
                self.log.info(
                    f"Scanning for block rewards from {self.scan_start_height} to {peak_height}. "
                    f"Found: {len(coin_records)}"
                )
                # Aggregate claimable amounts and coins per p2_singleton puzzle hash.
                ph_to_amounts: Dict[bytes32, int] = {}
                ph_to_coins: Dict[bytes32, List[CoinRecord]] = {}
                not_buried_amounts = 0
                for cr in coin_records:
                    # Only coinbase (farming reward) coins can be absorbed here.
                    if not cr.coinbase:
                        self.log.info(f"Non coinbase coin: {cr.coin}, ignoring")
                        continue

                    # Coins not yet buried deep enough may still be reorged out; defer them.
                    if cr.confirmed_block_index > peak_height - self.confirmation_security_threshold:
                        not_buried_amounts += cr.coin.amount
                        continue
                    if cr.coin.puzzle_hash not in ph_to_amounts:
                        ph_to_amounts[cr.coin.puzzle_hash] = 0
                        ph_to_coins[cr.coin.puzzle_hash] = []
                    ph_to_amounts[cr.coin.puzzle_hash] += cr.coin.amount
                    ph_to_coins[cr.coin.puzzle_hash].append(cr)

                # For each p2sph, get the FarmerRecords
                farmer_records = await self.store.get_farmer_records_for_p2_singleton_phs(
                    set(ph for ph in ph_to_amounts.keys())
                )

                # For each singleton, create, submit, and save a claim transaction
                claimable_amounts = 0
                not_claimable_amounts = 0
                for rec in farmer_records:
                    if rec.is_pool_member:
                        claimable_amounts += ph_to_amounts[rec.p2_singleton_puzzle_hash]
                    else:
                        not_claimable_amounts += ph_to_amounts[rec.p2_singleton_puzzle_hash]

                if len(coin_records) > 0:
                    self.log.info(f"Claimable amount: {claimable_amounts / (10**12)}")
                    self.log.info(f"Not claimable amount: {not_claimable_amounts / (10**12)}")
                    self.log.info(f"Not buried amounts: {not_buried_amounts / (10**12)}")

                for rec in farmer_records:
                    if rec.is_pool_member:
                        singleton_tip: Optional[Coin] = get_most_recent_singleton_coin_from_coin_spend(
                            rec.singleton_tip
                        )
                        if singleton_tip is None:
                            continue

                        singleton_coin_record: Optional[
                            CoinRecord
                        ] = await self.node_rpc_client.get_coin_record_by_name(singleton_tip.name())
                        if singleton_coin_record is None:
                            continue
                        if singleton_coin_record.spent:
                            # Our stored singleton tip is stale (the singleton moved
                            # on-chain); skip until the stored state catches up.
                            self.log.warning(
                                f"Singleton coin {singleton_coin_record.coin.name()} is spent, will not "
                                f"claim rewards"
                            )
                            continue

                        spend_bundle = await create_absorb_transaction(
                            self.node_rpc_client,
                            rec,
                            self.blockchain_state["peak"].height,
                            ph_to_coins[rec.p2_singleton_puzzle_hash],
                            self.constants.GENESIS_CHALLENGE,
                        )

                        if spend_bundle is None:
                            continue

                        push_tx_response: Dict = await self.node_rpc_client.push_tx(spend_bundle)
                        if push_tx_response["status"] == "SUCCESS":
                            # TODO(pool): save transaction in records
                            self.log.info(f"Submitted transaction successfully: {spend_bundle.name().hex()}")
                        else:
                            self.log.error(f"Error submitting transaction: {push_tx_response}")
                await asyncio.sleep(self.collect_pool_rewards_interval)
            except asyncio.CancelledError:
                self.log.info("Cancelled collect_pool_rewards_loop, closing")
                return
            except Exception as e:
                error_stack = traceback.format_exc()
                self.log.error(f"Unexpected error in collect_pool_rewards_loop: {e} {error_stack}")
                await asyncio.sleep(self.collect_pool_rewards_interval)
    async def create_payment_loop(self):
        """
        Calculates the points of each farmer, and splits the total funds received into coins for each farmer.
        Saves the transactions that we should make, to `amount_to_distribute`.

        Batches are capped at ``max_additions_per_transaction`` outputs and
        pushed onto ``self.pending_payments`` for submit_payment_loop to send.
        """
        while True:
            try:
                if not self.blockchain_state["sync"]["synced"]:
                    self.log.warning("Not synced, waiting")
                    await asyncio.sleep(60)
                    continue

                # Don't compute new batches while previous ones are still unsent.
                if self.pending_payments.qsize() != 0:
                    self.log.warning(f"Pending payments ({self.pending_payments.qsize()}), waiting")
                    await asyncio.sleep(60)
                    continue

                self.log.info("Starting to create payment")

                coin_records: List[CoinRecord] = await self.node_rpc_client.get_coin_records_by_puzzle_hash(
                    self.default_target_puzzle_hash,
                    include_spent_coins=False,
                    start_height=self.scan_start_height,
                )

                if len(coin_records) == 0:
                    self.log.info("No funds to distribute.")
                    await asyncio.sleep(120)
                    continue

                total_amount_claimed = sum([c.coin.amount for c in coin_records])
                pool_coin_amount = int(total_amount_claimed * self.pool_fee)
                amount_to_distribute = total_amount_claimed - pool_coin_amount

                if total_amount_claimed < calculate_pool_reward(uint32(1)):  # 1.75 XCH
                    self.log.info(f"Do not have enough funds to distribute: {total_amount_claimed}, skipping payout")
                    await asyncio.sleep(120)
                    continue

                self.log.info(f"Total amount claimed: {total_amount_claimed / (10 ** 12)}")
                self.log.info(f"Pool coin amount (includes blockchain fee) {pool_coin_amount / (10 ** 12)}")
                self.log.info(f"Total amount to distribute: {amount_to_distribute / (10 ** 12)}")

                async with self.store.lock:
                    # Get the points of each farmer, as well as payout instructions. Here a chia address is used,
                    # but other blockchain addresses can also be used.
                    points_and_ph: List[
                        Tuple[uint64, bytes]
                    ] = await self.store.get_farmer_points_and_payout_instructions()
                    total_points = sum([pt for (pt, ph) in points_and_ph])
                    if total_points > 0:
                        mojo_per_point = floor(amount_to_distribute / total_points)
                        self.log.info(f"Paying out {mojo_per_point} mojo / point")

                        # The first output of the first batch is the pool's own fee.
                        additions_sub_list: List[Dict] = [
                            {"puzzle_hash": self.pool_fee_puzzle_hash, "amount": pool_coin_amount}
                        ]
                        for points, ph in points_and_ph:
                            if points > 0:
                                additions_sub_list.append({"puzzle_hash": ph, "amount": points * mojo_per_point})

                            # Flush a full batch so each transaction stays small.
                            if len(additions_sub_list) == self.max_additions_per_transaction:
                                await self.pending_payments.put(additions_sub_list.copy())
                                self.log.info(f"Will make payments: {additions_sub_list}")
                                additions_sub_list = []

                        if len(additions_sub_list) > 0:
                            self.log.info(f"Will make payments: {additions_sub_list}")
                            await self.pending_payments.put(additions_sub_list.copy())

                        # Subtract the points from each farmer
                        await self.store.clear_farmer_points()
                    else:
                        self.log.info(f"No points for any farmer. Waiting {self.payment_interval}")

                await asyncio.sleep(self.payment_interval)
            except asyncio.CancelledError:
                self.log.info("Cancelled create_payments_loop, closing")
                return
            except Exception as e:
                error_stack = traceback.format_exc()
                self.log.error(f"Unexpected error in create_payments_loop: {e} {error_stack}")
                await asyncio.sleep(self.payment_interval)
    async def submit_payment_loop(self):
        """Send queued payment batches through the wallet, one at a time.

        Waits for node and wallet sync, submits each batch from
        ``self.pending_payments`` as a single multi-output transaction, and
        blocks until that transaction is buried by
        ``confirmation_security_threshold`` blocks before taking the next batch.
        """
        while True:
            try:
                peak_height = self.blockchain_state["peak"].height
                await self.wallet_rpc_client.log_in_and_skip(fingerprint=self.wallet_fingerprint)
                if not self.blockchain_state["sync"]["synced"] or not self.wallet_synced:
                    self.log.warning("Waiting for wallet sync")
                    await asyncio.sleep(60)
                    continue

                payment_targets = await self.pending_payments.get()
                assert len(payment_targets) > 0

                self.log.info(f"Submitting a payment: {payment_targets}")

                # TODO(pool): make sure you have enough to pay the blockchain fee, this will be taken out of the pool
                # fee itself. Alternatively you can set it to 0 and wait longer
                # blockchain_fee = 0.00001 * (10 ** 12) * len(payment_targets)
                blockchain_fee: uint64 = uint64(0)
                try:
                    transaction: TransactionRecord = await self.wallet_rpc_client.send_transaction_multi(
                        self.wallet_id, payment_targets, fee=blockchain_fee
                    )
                except ValueError as e:
                    # Requeue the batch and retry after a short delay.
                    self.log.error(f"Error making payment: {e}")
                    await asyncio.sleep(10)
                    await self.pending_payments.put(payment_targets)
                    continue

                self.log.info(f"Transaction: {transaction}")

                # Poll until the transaction is confirmed and sufficiently buried.
                while (
                    not transaction.confirmed
                    or not (peak_height - transaction.confirmed_at_height) > self.confirmation_security_threshold
                ):
                    transaction = await self.wallet_rpc_client.get_transaction(self.wallet_id, transaction.name)
                    peak_height = self.blockchain_state["peak"].height
                    self.log.info(
                        f"Waiting for transaction to obtain {self.confirmation_security_threshold} confirmations"
                    )
                    if not transaction.confirmed:
                        self.log.info(f"Not confirmed. In mempool? {transaction.is_in_mempool()}")
                    else:
                        self.log.info(f"Confirmations: {peak_height - transaction.confirmed_at_height}")
                    await asyncio.sleep(10)

                # TODO(pool): persist in DB
                self.log.info(f"Successfully confirmed payments {payment_targets}")
            except asyncio.CancelledError:
                self.log.info("Cancelled submit_payment_loop, closing")
                return
            except Exception as e:
                # TODO(pool): retry transaction if failed
                self.log.error(f"Unexpected error in submit_payment_loop: {e}")
                await asyncio.sleep(60)
    async def confirm_partials_loop(self):
        """
        Pulls things from the queue of partials one at a time, and adjusts balances.
        """
        while True:
            try:
                # The points are based on the difficulty at the time of partial submission, not at the time of
                # confirmation
                partial, time_received, points_received = await self.pending_point_partials.get()

                # Wait a few minutes to check if partial is still valid in the blockchain (no reorgs)
                # (wakes ~5 s before the full confirmation delay elapses; the
                # spawned task below performs the actual validation)
                await asyncio.sleep((max(0, time_received + self.partial_confirmation_delay - time.time() - 5)))

                # Starts a task to check the remaining things for this partial and optionally update points
                asyncio.create_task(self.check_and_confirm_partial(partial, points_received))
            except asyncio.CancelledError:
                self.log.info("Cancelled confirm partials loop, closing")
                return
            except Exception as e:
                self.log.error(f"Unexpected error: {e}")
    async def check_and_confirm_partial(self, partial: PostPartialRequest, points_received: uint64) -> None:
        """Validate a queued partial after the confirmation delay and credit points.

        Checks that the signage point / EOS is still in the blockchain (no
        reorg), that the proof has not already been credited, and that the
        farmer's singleton is still assigned to this pool, before adding
        ``points_received`` to the farmer's balance. All failures are logged,
        never raised to the caller.
        """
        try:
            # TODO(pool): these lookups to the full node are not efficient and can be cached, especially for
            #  scaling to many users
            if partial.payload.end_of_sub_slot:
                response = await self.node_rpc_client.get_recent_signage_point_or_eos(None, partial.payload.sp_hash)
                if response is None or response["reverted"]:
                    self.log.info(f"Partial EOS reverted: {partial.payload.sp_hash}")
                    return
            else:
                response = await self.node_rpc_client.get_recent_signage_point_or_eos(partial.payload.sp_hash, None)
                if response is None or response["reverted"]:
                    self.log.info(f"Partial SP reverted: {partial.payload.sp_hash}")
                    return

            # Now we know that the partial came on time, but also that the signage point / EOS is still in the
            # blockchain. We need to check for double submissions.
            pos_hash = partial.payload.proof_of_space.get_hash()
            if self.recent_points_added.get(pos_hash):
                self.log.info(f"Double signage point submitted for proof: {partial.payload}")
                return
            self.recent_points_added.put(pos_hash, uint64(1))

            # Now we need to check to see that the singleton in the blockchain is still assigned to this pool
            singleton_state_tuple: Optional[
                Tuple[CoinSpend, PoolState, bool]
            ] = await self.get_and_validate_singleton_state(partial.payload.launcher_id)

            if singleton_state_tuple is None:
                self.log.info(f"Invalid singleton {partial.payload.launcher_id}")
                return

            _, _, is_member = singleton_state_tuple
            if not is_member:
                self.log.info(f"Singleton is not assigned to this pool")
                return

            async with self.store.lock:
                # NOTE(review): farmer_record may be None for an unknown launcher_id;
                # the attribute access in the assert below would then raise
                # AttributeError (caught and logged by the outer except) —
                # consider an explicit None check.
                farmer_record: Optional[FarmerRecord] = await self.store.get_farmer_record(partial.payload.launcher_id)

                assert (
                    partial.payload.proof_of_space.pool_contract_puzzle_hash == farmer_record.p2_singleton_puzzle_hash
                )

                if farmer_record.is_pool_member:
                    await self.store.add_partial(partial.payload.launcher_id, uint64(int(time.time())), points_received)
                    self.log.info(
                        f"Farmer {farmer_record.launcher_id} updated points to: "
                        f"{farmer_record.points + points_received}"
                    )
        except Exception as e:
            error_stack = traceback.format_exc()
            self.log.error(f"Exception in confirming partial: {e} {error_stack}")
async def add_farmer(self, request: PostFarmerRequest, metadata: RequestMetadata) -> Dict:
async with self.store.lock:
farmer_record: Optional[FarmerRecord] = await self.store.get_farmer_record(request.payload.launcher_id)
if farmer_record is not None:
return error_dict(
PoolErrorCode.FARMER_ALREADY_KNOWN,
f"Farmer with launcher_id {request.payload.launcher_id} already known.",
)
singleton_state_tuple: Optional[
Tuple[CoinSpend, PoolState, bool]
] = await self.get_and_validate_singleton_state(request.payload.launcher_id)
if singleton_state_tuple is None:
return error_dict(PoolErrorCode.INVALID_SINGLETON, f"Invalid singleton {request.payload.launcher_id}")
last_spend, last_state, is_member = singleton_state_tuple
if is_member is None:
return error_dict(PoolErrorCode.INVALID_SINGLETON, f"Singleton is not assigned to this pool")
if (
request.payload.suggested_difficulty is None
or request.payload.suggested_difficulty < self.min_difficulty
):
difficulty: uint64 = self.default_difficulty
else:
difficulty = request.payload.suggested_difficulty
if len(hexstr_to_bytes(request.payload.payout_instructions)) != 32:
return error_dict(
PoolErrorCode.INVALID_PAYOUT_INSTRUCTIONS,
f"Payout instructions must be an xch address for this pool.",
)
if not AugSchemeMPL.verify(last_state.owner_pubkey, request.payload.get_hash(), request.signature):
return error_dict(PoolErrorCode.INVALID_SIGNATURE, f"Invalid signature")
launcher_coin: Optional[CoinRecord] = await self.node_rpc_client.get_coin_record_by_name(
request.payload.launcher_id
)
assert launcher_coin is not None and launcher_coin.spent
launcher_solution: Optional[CoinSpend] = await get_coin_spend(self.node_rpc_client, launcher_coin)
delay_time, delay_puzzle_hash = get_delayed_puz_info_from_launcher_spend(launcher_solution)
if delay_time < 3600:
return error_dict(PoolErrorCode.DELAY_TIME_TOO_SHORT, f"Delay time too short, must be at least 1 hour")
p2_singleton_puzzle_hash = launcher_id_to_p2_puzzle_hash(
request.payload.launcher_id, delay_time, delay_puzzle_hash
)
farmer_record = FarmerRecord(
request.payload.launcher_id,
p2_singleton_puzzle_hash,
delay_time,
delay_puzzle_hash,
request.payload.authentication_public_key,
last_spend,
last_state,
uint64(0),
difficulty,
request.payload.payout_instructions,
True,
)
self.scan_p2_singleton_puzzle_hashes.add(p2_singleton_puzzle_hash)
await self.store.add_farmer_record(farmer_record, metadata)
return PostFarmerResponse(self.welcome_message).to_json_dict()
async def update_farmer(self, request: PutFarmerRequest, metadata: RequestMetadata) -> Dict:
launcher_id = request.payload.launcher_id
# First check if this launcher_id is currently blocked for farmer updates, if so there is no reason to validate
# all the stuff below
if launcher_id in self.farmer_update_blocked:
return error_dict(PoolErrorCode.REQUEST_FAILED, f"Cannot update farmer yet.")
farmer_record: Optional[FarmerRecord] = await self.store.get_farmer_record(launcher_id)
self.log.info(f"Get_farmer_TAG: {launcher_id}")
if farmer_record is None:
return error_dict(PoolErrorCode.FARMER_NOT_KNOWN, f"Farmer with launcher_id {launcher_id} not known.")
singleton_state_tuple: Optional[
Tuple[CoinSpend, PoolState, bool]
] = await self.get_and_validate_singleton_state(launcher_id)
if singleton_state_tuple is None:
return error_dict(PoolErrorCode.INVALID_SINGLETON, f"Invalid singleton {request.payload.launcher_id}")
last_spend, last_state, is_member = singleton_state_tuple
if is_member is None:
return error_dict(PoolErrorCode.INVALID_SINGLETON, f"Singleton is not assigned to this pool")
if not AugSchemeMPL.verify(last_state.owner_pubkey, request.payload.get_hash(), request.signature):
return error_dict(PoolErrorCode.INVALID_SIGNATURE, f"Invalid signature")
farmer_dict = farmer_record.to_json_dict()
response_dict = {}
if request.payload.authentication_public_key is not None:
is_new_value = farmer_record.authentication_public_key != request.payload.authentication_public_key
response_dict["authentication_public_key"] = is_new_value
if is_new_value:
farmer_dict["authentication_public_key"] = request.payload.authentication_public_key
if request.payload.payout_instructions is not None:
is_new_value = (
farmer_record.payout_instructions != request.payload.payout_instructions
and request.payload.payout_instructions is not None
and len(hexstr_to_bytes(request.payload.payout_instructions)) == 32
)
response_dict["payout_instructions"] = is_new_value
if is_new_value:
farmer_dict["payout_instructions"] = request.payload.payout_instructions
if request.payload.suggested_difficulty is not None:
is_new_value = (
farmer_record.difficulty != request.payload.suggested_difficulty
and request.payload.suggested_difficulty is not None
and request.payload.suggested_difficulty >= self.min_difficulty
)
response_dict["suggested_difficulty"] = is_new_value
if is_new_value:
farmer_dict["difficulty"] = request.payload.suggested_difficulty
async def update_farmer_later():
await asyncio.sleep(self.farmer_update_cooldown_seconds)
await self.store.add_farmer_record(FarmerRecord.from_json_dict(farmer_dict), metadata)
self.farmer_update_blocked.remove(launcher_id)
self.log.info(f"Updated farmer: {response_dict}")
self.log.info(f"Update_farmer_TAG: {launcher_id}")
self.farmer_update_blocked.add(launcher_id)
asyncio.create_task(update_farmer_later())
# TODO Fix chia-blockchain's Streamable implementation to support Optional in `from_json_dict`, then use
# PutFarmerResponse here and in the trace up.
return response_dict
    async def get_and_validate_singleton_state(
        self, launcher_id: bytes32
    ) -> Optional[Tuple[CoinSpend, PoolState, bool]]:
        """
        :return: the state of the singleton, if it currently exists in the blockchain, and if it is assigned to
        our pool, with the correct parameters. Otherwise, None. Note that this state must be buried (recent state
        changes are not returned)
        """
        # Reuse an in-flight follow task for this launcher, if any, so that
        # concurrent callers don't all issue the same node RPC lookups.
        singleton_task: Optional[Task] = self.follow_singleton_tasks.get(launcher_id, None)
        remove_after = False
        farmer_rec = None
        if singleton_task is None or singleton_task.done():
            farmer_rec: Optional[FarmerRecord] = await self.store.get_farmer_record(launcher_id)
            singleton_task = asyncio.create_task(
                get_singleton_state(
                    self.node_rpc_client,
                    launcher_id,
                    farmer_rec,
                    self.blockchain_state["peak"].height,
                    self.confirmation_security_threshold,
                    self.constants.GENESIS_CHALLENGE,
                )
            )
            self.follow_singleton_tasks[launcher_id] = singleton_task
            # Only the caller that created the task is responsible for removing it.
            remove_after = True
        optional_result: Optional[Tuple[CoinSpend, PoolState, PoolState]] = await singleton_task
        if remove_after and launcher_id in self.follow_singleton_tasks:
            # NOTE(review): pop() returns the (already finished) Task; awaiting a
            # completed task simply re-reads its cached result, so this is harmless
            # but the `await` looks unintentional — confirm.
            await self.follow_singleton_tasks.pop(launcher_id)
        if optional_result is None:
            return None
        buried_singleton_tip, buried_singleton_tip_state, singleton_tip_state = optional_result
        # Validate state of the singleton: the *unburied* tip state must match this
        # pool's configuration for the farmer to count as a member.
        is_pool_member = True
        if singleton_tip_state.target_puzzle_hash != self.default_target_puzzle_hash:
            self.log.info(
                f"Wrong target puzzle hash: {singleton_tip_state.target_puzzle_hash} for launcher_id {launcher_id}"
            )
            is_pool_member = False
        elif singleton_tip_state.relative_lock_height != self.relative_lock_height:
            self.log.info(
                f"Wrong relative lock height: {singleton_tip_state.relative_lock_height} for launcher_id {launcher_id}"
            )
            is_pool_member = False
        elif singleton_tip_state.version != POOL_PROTOCOL_VERSION:
            self.log.info(f"Wrong version {singleton_tip_state.version} for launcher_id {launcher_id}")
            is_pool_member = False
        elif singleton_tip_state.state == PoolSingletonState.SELF_POOLING.value:
            self.log.info(f"Invalid singleton state {singleton_tip_state.state} for launcher_id {launcher_id}")
            is_pool_member = False
        elif singleton_tip_state.state == PoolSingletonState.LEAVING_POOL.value:
            # A leaving farmer remains a member until the buried tip has been
            # confirmed for more than `relative_lock_height` blocks.
            coin_record: Optional[CoinRecord] = await self.node_rpc_client.get_coin_record_by_name(
                buried_singleton_tip.coin.name()
            )
            assert coin_record is not None
            if self.blockchain_state["peak"].height - coin_record.confirmed_block_index > self.relative_lock_height:
                self.log.info(f"launcher_id {launcher_id} got enough confirmations to leave the pool")
                is_pool_member = False
        self.log.info(f"Is {launcher_id} pool member: {is_pool_member}")
        if farmer_rec is not None and (
            farmer_rec.singleton_tip != buried_singleton_tip
            or farmer_rec.singleton_tip_state != buried_singleton_tip_state
        ):
            # This means the singleton has been changed in the blockchain (either by us or someone else). We
            # still keep track of this singleton if the farmer has changed to a different pool, in case they
            # switch back.
            self.log.info(f"Updating singleton state for {launcher_id}")
            await self.store.update_singleton(
                launcher_id, buried_singleton_tip, buried_singleton_tip_state, is_pool_member
            )
        return buried_singleton_tip, buried_singleton_tip_state, is_pool_member
    async def process_partial(
        self,
        partial: PostPartialRequest,
        farmer_record: FarmerRecord,
        time_received_partial: uint64,
    ) -> Dict:
        """
        Validate a farmer's partial proof of space and, if valid, queue it for
        point accounting and (under the store lock) recompute the farmer's
        difficulty.

        :param partial: the signed partial submitted by the farmer.
        :param farmer_record: the farmer's current record (keys, difficulty, ...).
        :param time_received_partial: pool-side receive timestamp.
        :return: a PostPartialResponse dict on success, or an error_dict.
        """
        # Validate signatures: the payload must be signed by both the plot key
        # and the farmer's registered authentication key.
        message: bytes32 = partial.payload.get_hash()
        pk1: G1Element = partial.payload.proof_of_space.plot_public_key
        pk2: G1Element = farmer_record.authentication_public_key
        valid_sig = AugSchemeMPL.aggregate_verify([pk1, pk2], [message, message], partial.aggregate_signature)
        if not valid_sig:
            return error_dict(
                PoolErrorCode.INVALID_SIGNATURE,
                f"The aggregate signature is invalid {partial.aggregate_signature}",
            )
        # The plot must be tied to this farmer's pool contract (p2 singleton).
        if partial.payload.proof_of_space.pool_contract_puzzle_hash != farmer_record.p2_singleton_puzzle_hash:
            return error_dict(
                PoolErrorCode.INVALID_P2_SINGLETON_PUZZLE_HASH,
                f"Invalid pool contract puzzle hash {partial.payload.proof_of_space.pool_contract_puzzle_hash}",
            )
        async def get_signage_point_or_eos():
            # A partial references either a signage point or an end-of-sub-slot,
            # never both; the RPC takes them as mutually exclusive arguments.
            if partial.payload.end_of_sub_slot:
                return await self.node_rpc_client.get_recent_signage_point_or_eos(None, partial.payload.sp_hash)
            else:
                return await self.node_rpc_client.get_recent_signage_point_or_eos(partial.payload.sp_hash, None)
        response = await get_signage_point_or_eos()
        if response is None:
            # Try again after 10 seconds in case we just didn't yet receive the signage point
            await asyncio.sleep(10)
            response = await get_signage_point_or_eos()
        if response is None or response["reverted"]:
            return error_dict(
                PoolErrorCode.NOT_FOUND, f"Did not find signage point or EOS {partial.payload.sp_hash}, {response}"
            )
        node_time_received_sp = response["time_received"]
        signage_point: Optional[SignagePoint] = response.get("signage_point", None)
        end_of_sub_slot: Optional[EndOfSubSlotBundle] = response.get("eos", None)
        # Reject partials that arrive too long after the node saw the signage point.
        if time_received_partial - node_time_received_sp > self.partial_time_limit:
            return error_dict(
                PoolErrorCode.TOO_LATE,
                f"Received partial in {time_received_partial - node_time_received_sp}. "
                f"Make sure your proof of space lookups are fast, and network connectivity is good."
                f"Response must happen in less than {self.partial_time_limit} seconds. NAS or network"
                f" farming can be an issue",
            )
        # Validate the proof against the challenge derived from the signage
        # point (or from the end-of-sub-slot when there is no signage point).
        if signage_point is not None:
            challenge_hash: bytes32 = signage_point.cc_vdf.challenge
        else:
            challenge_hash = end_of_sub_slot.challenge_chain.get_hash()
        quality_string: Optional[bytes32] = partial.payload.proof_of_space.verify_and_get_quality_string(
            self.constants, challenge_hash, partial.payload.sp_hash
        )
        if quality_string is None:
            return error_dict(PoolErrorCode.INVALID_PROOF, f"Invalid proof of space {partial.payload.sp_hash}")
        current_difficulty = farmer_record.difficulty
        required_iters: uint64 = calculate_iterations_quality(
            self.constants.DIFFICULTY_CONSTANT_FACTOR,
            quality_string,
            partial.payload.proof_of_space.size,
            current_difficulty,
            partial.payload.sp_hash,
        )
        # The proof must meet the pool's difficulty bar (lower iters = better).
        if required_iters >= self.iters_limit:
            return error_dict(
                PoolErrorCode.PROOF_NOT_GOOD_ENOUGH,
                f"Proof of space has required iters {required_iters}, too high for difficulty " f"{current_difficulty}",
            )
        # Queue the accepted partial for asynchronous point accounting.
        await self.pending_point_partials.put((partial, time_received_partial, current_difficulty))
        async with self.store.lock:
            # Obtains the new record in case we just updated difficulty
            farmer_record: Optional[FarmerRecord] = await self.store.get_farmer_record(partial.payload.launcher_id)
            if farmer_record is not None:
                current_difficulty = farmer_record.difficulty
                # Decide whether to update the difficulty
                recent_partials = await self.store.get_recent_partials(
                    partial.payload.launcher_id, self.number_of_partials_target
                )
                # Only update the difficulty if we meet certain conditions
                new_difficulty: uint64 = self.difficulty_function(
                    recent_partials,
                    int(self.number_of_partials_target),
                    int(self.time_target),
                    current_difficulty,
                    time_received_partial,
                    self.min_difficulty,
                )
                if current_difficulty != new_difficulty:
                    await self.store.update_difficulty(partial.payload.launcher_id, new_difficulty)
                    current_difficulty = new_difficulty
        return PostPartialResponse(current_difficulty).to_json_dict()
| 50.370787 | 120 | 0.641379 |
0d508969f250b4a752a67b2490d2c3772aca8399 | 395 | py | Python | setup.py | rvinas/nnn | 50bc78e745ef7b085b46c3138b4cd2ed9472f4e5 | [
"Apache-2.0"
] | 7 | 2017-09-11T19:25:26.000Z | 2021-12-17T15:01:25.000Z | setup.py | rvinas/python_nn | 50bc78e745ef7b085b46c3138b4cd2ed9472f4e5 | [
"Apache-2.0"
] | null | null | null | setup.py | rvinas/python_nn | 50bc78e745ef7b085b46c3138b4cd2ed9472f4e5 | [
"Apache-2.0"
] | 3 | 2018-03-26T23:55:25.000Z | 2019-04-05T10:44:03.000Z | from setuptools import setup
from setuptools import find_packages
import pip  # NOTE(review): no longer used below; kept in case other tooling imports it
# FIX: the original script called ``pip.main(['install', ...])`` after setup().
# ``pip.main`` was removed from pip's public API in pip 10 (2018), so those calls
# crash on any modern environment. Runtime dependencies are now declared via
# ``install_requires`` so pip installs them as part of the normal install.
setup(name='nnn',
      version='0.0.1',
      description='Numpy Neural Network',
      author='Ramon Viñas',
      author_email='rvinast@gmail.com',
      url='https://github.com/rvinas/nnn',
      license='Apache',
      packages=find_packages(),
      # Installed automatically instead of via the removed pip.main() calls.
      install_requires=['numpy', 'matplotlib'])
309c2a951f3fea7d7631ca7794433765aaab41dc | 3,365 | py | Python | src/poetry/publishing/publisher.py | danieleades/poetry | 9957f6faa3c311533456104bbf35aadce420c32f | [
"MIT"
] | 1 | 2020-07-31T10:42:34.000Z | 2020-07-31T10:42:34.000Z | src/poetry/publishing/publisher.py | danieleades/poetry | 9957f6faa3c311533456104bbf35aadce420c32f | [
"MIT"
] | 1 | 2021-12-31T19:44:26.000Z | 2022-03-08T20:52:13.000Z | src/poetry/publishing/publisher.py | Anselmoo/poetry | f6022eade7485a3b017ef0c8060dffed12e3cdb2 | [
"MIT"
] | 2 | 2020-12-07T04:26:21.000Z | 2021-09-25T21:46:36.000Z | import logging
from typing import TYPE_CHECKING
from typing import List
from typing import Optional
from typing import Union
from poetry.publishing.uploader import Uploader
from poetry.utils.authenticator import Authenticator
from poetry.utils.helpers import get_cert
from poetry.utils.helpers import get_client_cert
if TYPE_CHECKING:
from pathlib import Path
from cleo.io import BufferedIO
from cleo.io import ConsoleIO
from poetry.poetry import Poetry
logger = logging.getLogger(__name__)
class Publisher:
    """Registers and publishes packages to remote repositories."""

    def __init__(self, poetry: "Poetry", io: Union["BufferedIO", "ConsoleIO"]) -> None:
        self._poetry = poetry
        self._package = poetry.package
        self._io = io
        self._uploader = Uploader(poetry, io)
        self._authenticator = Authenticator(poetry.config, self._io)

    @property
    def files(self) -> List["Path"]:
        """Distribution files that the uploader will publish."""
        return self._uploader.files

    def publish(
        self,
        repository_name: Optional[str],
        username: Optional[str],
        password: Optional[str],
        cert: Optional["Path"] = None,
        client_cert: Optional["Path"] = None,
        dry_run: bool = False,
    ) -> None:
        """Upload the package's files to a repository (PyPI when none is named).

        Credentials are resolved in order: explicit arguments, a stored API
        token, stored HTTP auth, then interactive prompts (unless a client
        certificate is configured).
        """
        if repository_name:
            # A named repository must have been configured beforehand.
            url = self._poetry.config.get(f"repositories.{repository_name}.url")
            if url is None:
                raise RuntimeError(f"Repository {repository_name} is not defined")
        else:
            url = "https://upload.pypi.org/legacy/"
            repository_name = "pypi"
        if not (username and password):
            # Prefer a stored API token over HTTP basic credentials.
            api_token = self._authenticator.get_pypi_token(repository_name)
            if api_token:
                logger.debug(f"Found an API token for {repository_name}.")
                username = "__token__"
                password = api_token
            else:
                credentials = self._authenticator.get_http_auth(repository_name)
                if credentials:
                    logger.debug(
                        f"Found authentication information for {repository_name}."
                    )
                    username = credentials["username"]
                    password = credentials["password"]
        resolved_client_cert = client_cert or get_client_cert(
            self._poetry.config, repository_name
        )
        # Only prompt for missing credentials when no client certificate is set.
        if not resolved_client_cert:
            if username is None:
                username = self._io.ask("Username:")
            # Skip the password prompt when no username was given (anonymous upload).
            if username and password is None:
                password = self._io.ask_hidden("Password:")
        self._uploader.auth(username, password)
        if repository_name == "pypi":
            # Display (and pass on) the canonical capitalization.
            repository_name = "PyPI"
        self._io.write_line(
            f"Publishing <c1>{self._package.pretty_name}</c1> (<c2>{self._package.pretty_version}</c2>) "
            f"to <info>{repository_name}</info>"
        )
        self._uploader.upload(
            url,
            cert=cert or get_cert(self._poetry.config, repository_name),
            client_cert=resolved_client_cert,
            dry_run=dry_run,
        )
| 32.990196 | 105 | 0.611887 |
7e9135e736b7f39abd3440d7dd3b56a62117333d | 2,759 | py | Python | rnn.py | apaszke/odsc-2020-workshop | 226e1b49824493a35686d112c7a4fb592b80f092 | [
"BSD-3-Clause"
] | 3 | 2020-04-16T14:42:06.000Z | 2020-09-17T15:51:55.000Z | rnn.py | apaszke/odsc-2020-workshop | 226e1b49824493a35686d112c7a4fb592b80f092 | [
"BSD-3-Clause"
] | null | null | null | rnn.py | apaszke/odsc-2020-workshop | 226e1b49824493a35686d112c7a4fb592b80f092 | [
"BSD-3-Clause"
] | 2 | 2020-04-16T14:22:46.000Z | 2021-03-30T14:45:25.000Z | import time
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
from typing import Tuple
from torch import Tensor
LSTMState = Tuple[Tensor, Tensor]
class LSTMCell(nn.Module):
    """One LSTM step: maps (input, (h, c)) -> (h', c') for a single time step.

    Gate weights are stored stacked along dim 0 in the order
    [input, forget, cell, output], each of size ``hidden_size``.
    """

    def __init__(self, input_size, hidden_size):
        super().__init__()
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.weight_ih = nn.Parameter(torch.empty(4 * hidden_size, input_size))
        self.weight_hh = nn.Parameter(torch.empty(4 * hidden_size, hidden_size))
        self.bias = nn.Parameter(torch.empty(4 * hidden_size))
        self.reset_parameters()

    def reset_parameters(self):
        """Uniform init in [-1/sqrt(H), 1/sqrt(H)] for weights; zero bias."""
        bound = 1.0 / math.sqrt(self.hidden_size)
        nn.init.uniform_(self.weight_ih, -bound, bound)
        nn.init.uniform_(self.weight_hh, -bound, bound)
        nn.init.constant_(self.bias, 0)

    def forward(self, input, state: LSTMState):
        h_prev, c_prev = state
        # Pre-activations for all four gates at once: x W_ih^T + h W_hh^T + b.
        preact = torch.mm(input, self.weight_ih.t()) + torch.mm(h_prev, self.weight_hh.t()) + self.bias
        in_gate, forget_gate, cell_gate, out_gate = preact.chunk(4, 1)
        in_gate = torch.sigmoid(in_gate)
        forget_gate = torch.sigmoid(forget_gate)
        cell_gate = torch.tanh(cell_gate)
        out_gate = torch.sigmoid(out_gate)
        # Standard LSTM cell update.
        c_next = (forget_gate * c_prev) + (in_gate * cell_gate)
        h_next = out_gate * torch.tanh(c_next)
        return h_next, c_next
class LSTMLayer(nn.Module):
    """Unrolls an LSTMCell over a (seq_len, batch, features) input sequence."""

    def __init__(self, *cell_args):
        super().__init__()
        # Constructor arguments are forwarded verbatim to the underlying cell.
        self.cell = LSTMCell(*cell_args)

    def forward(self, input, state: LSTMState):
        step_outputs = []
        # Iterate over the time dimension (dim 0), threading the state through.
        for step_input in input.unbind(0):
            state = self.cell(step_input, state)
            step_outputs.append(state[0])
        # Stack the per-step hidden outputs back into a (seq_len, ...) tensor.
        return torch.stack(step_outputs), state
class LSTM(nn.Module):
    """A stacked LSTM built from LSTMLayer modules, with inter-layer dropout.

    The first layer maps input_size -> hidden_size; subsequent layers map
    hidden_size -> hidden_size. Dropout is applied to every layer's output
    except the last, matching ``torch.nn.LSTM`` semantics.
    """

    def __init__(self, input_size, hidden_size, num_layers, dropout):
        super().__init__()
        assert num_layers >= 1
        self.layers = nn.ModuleList([LSTMLayer(input_size, hidden_size)] +
                                    [LSTMLayer(hidden_size, hidden_size)
                                     for _ in range(num_layers - 1)])
        self.dropout = nn.Dropout(dropout)
        self.num_layers = num_layers

    def forward(self, input, states: LSTMState):
        """Run the stack; ``states`` holds (h, c) each of shape (num_layers, ...)."""
        # FIX: the original annotated this as ``List[LSTMState]`` but ``List`` is
        # never imported in this file, which breaks TorchScript/typing tools.
        output_states = []
        output = input
        for i, layer in enumerate(self.layers):
            output, out_state = layer(output, (states[0][i], states[1][i]))
            # Apply the dropout layer except the last layer
            if i < self.num_layers - 1:
                output = self.dropout(output)
            output_states.append(out_state)
        # Re-stack the per-layer (h, c) states into (num_layers, ...) tensors.
        return output, (torch.stack([s[0] for s in output_states]),
                        torch.stack([s[1] for s in output_states]))
| 34.4875 | 80 | 0.609641 |
aecf22e06ffb0c74dfae571860e5205199352ceb | 21,897 | py | Python | epinet_fun/func_generate_traindata.py | MFerrugem/epinet | c01151660b28683a18060222a9a5ff0e29a06f05 | [
"MIT"
] | 79 | 2018-04-05T07:19:19.000Z | 2022-03-23T06:37:23.000Z | epinet_fun/func_generate_traindata.py | MFerrugem/epinet | c01151660b28683a18060222a9a5ff0e29a06f05 | [
"MIT"
] | 17 | 2018-07-06T06:23:01.000Z | 2022-03-20T13:32:15.000Z | epinet_fun/func_generate_traindata.py | MFerrugem/epinet | c01151660b28683a18060222a9a5ff0e29a06f05 | [
"MIT"
] | 38 | 2018-04-11T02:08:41.000Z | 2022-03-30T14:09:05.000Z | # -*- coding: utf-8 -*-
"""
Created on Thu Nov 15 10:32:22 2018
@author: Shin2018
"""
import numpy as np
def generate_traindata_for_train(traindata_all,traindata_label,input_size,label_size,batch_size,Setting02_AngualrViews,boolmask_img4,boolmask_img6,boolmask_img15):
    """
    Randomly sample a batch of gray-scale EPI training stacks from the LF images.

    input: traindata_all   (16x512x512x9x9x3) uint8
           traindata_label (16x512x512x9x9) float32
           input_size 23~    int
           label_size 1~     int
           batch_size 16     int
           Setting02_AngualrViews [0,1,2,3,4,5,6,7,8] for 9x9
           boolmask_img4  (512x512) bool // reflection mask for images[4]
           boolmask_img6  (512x512) bool // reflection mask for images[6]
           boolmask_img15 (512x512) bool // reflection mask for images[15]

    Generate traindata using LF image and disparity map by randomly chosen variables.
    1. gray image: random R,G,B --> R*img_R + G*img_G + B*img_B
    2. patch-wise learning: random x,y --> LFimage[x:x+size1, y:y+size2]
    3. scale augmentation: scale 1,2,3 --> e.g. LFimage[x:x+2*size1:2, y:y+2*size2:2]

    output: traindata_batch_90d   (batch_size x input_size x input_size x views) float32
            traindata_batch_0d    (batch_size x input_size x input_size x views) float32
            traindata_batch_45d   (batch_size x input_size x input_size x views) float32
            traindata_batch_m45d  (batch_size x input_size x input_size x views) float32
            traindata_batch_label (batch_size x label_size x label_size)         float32
    """
    # Initialize the four directional image stacks and the label patch.
    traindata_batch_90d=np.zeros((batch_size,input_size,input_size,len(Setting02_AngualrViews)),dtype=np.float32)
    traindata_batch_0d=np.zeros((batch_size,input_size,input_size,len(Setting02_AngualrViews)),dtype=np.float32)
    traindata_batch_45d=np.zeros((batch_size,input_size,input_size,len(Setting02_AngualrViews)),dtype=np.float32)
    traindata_batch_m45d=np.zeros((batch_size,input_size,input_size,len(Setting02_AngualrViews)),dtype=np.float32)
    traindata_batch_label=np.zeros((batch_size,label_size,label_size))

    # Angular view range and the crop offset from input patch to label patch.
    start1=Setting02_AngualrViews[0]
    end1=Setting02_AngualrViews[-1]
    crop_half1=int(0.5*(input_size-label_size))

    # Generate image stacks.
    for ii in range(0,batch_size):
        sum_diff=0
        valid=0
        # Redraw until the patch is textured enough and avoids reflection masks.
        while( sum_diff<0.01*input_size*input_size or valid<1 ):
            # Random RGB weights for the gray conversion.
            rand_3color=0.05+np.random.rand(3)
            rand_3color=rand_3color/np.sum(rand_3color)
            R=rand_3color[0]
            G=rand_3color[1]
            B=rand_3color[2]

            # We use totally 16 LF images (0 to 15). Since images 4, 6 and 15
            # have a reflection region, we decrease their frequency of occurrence
            # (details in the epinet paper).
            aa_arr =np.array([0,1,2,3,5,7,8,9,10,11,12,13,14,
                              0,1,2,3,5,7,8,9,10,11,12,13,14,
                              0,1,2,3,5,7,8,9,10,11,12,13,14,
                              0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15])
            image_id=np.random.choice(aa_arr)

            # Shift augmentation for 7x7 viewpoints; no shift for 9x9.
            if(len(Setting02_AngualrViews)==7):
                ix_rd = np.random.randint(0,3)-1
                iy_rd = np.random.randint(0,3)-1
            if(len(Setting02_AngualrViews)==9):
                ix_rd = 0
                iy_rd = 0

            # Scale augmentation: stride 1/2/3 with probability 8/6/3 out of 17.
            kk=np.random.randint(17)
            if(kk<8):
                scale=1
            elif(kk<14):
                scale=2
            elif(kk<17):
                scale=3
            idx_start = np.random.randint(0,512-scale*input_size)
            idy_start = np.random.randint(0,512-scale*input_size)
            valid=1

            # Reject patches overlapping the reflection masks of images 4, 6, 15.
            # FIX: the original guard was ``if(image_id==4 or 6 or 15):`` which is
            # always true, so for any other image_id the mask variable ``a_tmp``
            # was undefined (NameError on the first such draw) or stale from a
            # previous iteration, producing bogus accept/reject decisions.
            if(image_id==4 or image_id==6 or image_id==15):
                if(image_id==4):
                    a_tmp=boolmask_img4
                if(image_id==6):
                    a_tmp=boolmask_img6
                if(image_id==15):
                    a_tmp=boolmask_img15
                if( np.sum(a_tmp[idx_start+scale*crop_half1: idx_start+scale*crop_half1+scale*label_size:scale,
                                 idy_start+scale*crop_half1: idy_start+scale*crop_half1+scale*label_size:scale])>0
                    or np.sum(a_tmp[idx_start: idx_start+scale*input_size:scale,
                                    idy_start: idy_start+scale*input_size:scale])>0 ):
                    valid=0

            if(valid>0):
                seq0to8=np.array(Setting02_AngualrViews)+ix_rd
                seq8to0=np.array(Setting02_AngualrViews[::-1])+iy_rd
                # Texture check on the gray center view: the patch is redrawn when
                # it is nearly uniform (small total deviation from the center pixel).
                image_center=(1/255)*np.squeeze(R*traindata_all[image_id, idx_start: idx_start+scale*input_size:scale, idy_start: idy_start+scale*input_size:scale, 4+ix_rd, 4+iy_rd,0].astype('float32')+
                                                G*traindata_all[image_id, idx_start: idx_start+scale*input_size:scale, idy_start: idy_start+scale*input_size:scale, 4+ix_rd, 4+iy_rd,1].astype('float32')+
                                                B*traindata_all[image_id, idx_start: idx_start+scale*input_size:scale, idy_start: idy_start+scale*input_size:scale, 4+ix_rd, 4+iy_rd,2].astype('float32'))
                sum_diff=np.sum(np.abs(image_center-np.squeeze(image_center[int(0.5*input_size),int(0.5*input_size)])))

                # Four image stacks are selected from the full (512x512) LF image,
                # gray-scaled, cropped and scaled:
                #   0d:  center row of views, 90d: center column (reversed),
                #   45d / m45d: the two diagonals of the 9x9 view grid.
                traindata_batch_0d[ii,:,:,:]=np.squeeze(R*traindata_all[image_id:image_id+1, idx_start: idx_start+scale*input_size:scale, idy_start: idy_start+scale*input_size:scale, 4+ix_rd, seq0to8.tolist(),0].astype('float32')+
                                                        G*traindata_all[image_id:image_id+1, idx_start: idx_start+scale*input_size:scale, idy_start: idy_start+scale*input_size:scale, 4+ix_rd, seq0to8.tolist(),1].astype('float32')+
                                                        B*traindata_all[image_id:image_id+1, idx_start: idx_start+scale*input_size:scale, idy_start: idy_start+scale*input_size:scale, 4+ix_rd, seq0to8.tolist(),2].astype('float32'))
                traindata_batch_90d[ii,:,:,:]=np.squeeze(R*traindata_all[image_id:image_id+1, idx_start: idx_start+scale*input_size:scale, idy_start: idy_start+scale*input_size:scale, seq8to0.tolist(), 4+iy_rd,0].astype('float32')+
                                                         G*traindata_all[image_id:image_id+1, idx_start: idx_start+scale*input_size:scale, idy_start: idy_start+scale*input_size:scale, seq8to0.tolist(), 4+iy_rd,1].astype('float32')+
                                                         B*traindata_all[image_id:image_id+1, idx_start: idx_start+scale*input_size:scale, idy_start: idy_start+scale*input_size:scale, seq8to0.tolist(), 4+iy_rd,2].astype('float32'))
                for kkk in range(start1,end1+1):
                    traindata_batch_45d[ii,:,:,kkk-start1]=np.squeeze(R*traindata_all[image_id:image_id+1, idx_start: idx_start+scale*input_size:scale, idy_start: idy_start+scale*input_size:scale, (8)-kkk+ix_rd, kkk+iy_rd,0].astype('float32')+
                                                                      G*traindata_all[image_id:image_id+1, idx_start: idx_start+scale*input_size:scale, idy_start: idy_start+scale*input_size:scale, (8)-kkk+ix_rd, kkk+iy_rd,1].astype('float32')+
                                                                      B*traindata_all[image_id:image_id+1, idx_start: idx_start+scale*input_size:scale, idy_start: idy_start+scale*input_size:scale, (8)-kkk+ix_rd, kkk+iy_rd,2].astype('float32'))
                    traindata_batch_m45d[ii,:,:,kkk-start1]=np.squeeze(R*traindata_all[image_id:image_id+1, idx_start: idx_start+scale*input_size:scale, idy_start: idy_start+scale*input_size:scale, kkk+ix_rd, kkk+iy_rd,0].astype('float32')+
                                                                       G*traindata_all[image_id:image_id+1, idx_start: idx_start+scale*input_size:scale, idy_start: idy_start+scale*input_size:scale, kkk+ix_rd, kkk+iy_rd,1].astype('float32')+
                                                                       B*traindata_all[image_id:image_id+1, idx_start: idx_start+scale*input_size:scale, idy_start: idy_start+scale*input_size:scale, kkk+ix_rd, kkk+iy_rd,2].astype('float32'))

                # Disparity label, scaled by 1/scale (stride-s sampling shrinks
                # apparent disparity by the same factor).
                if(len(traindata_label.shape)==5):
                    traindata_batch_label[ii,:,:]=(1.0/scale)*traindata_label[image_id, idx_start+scale*crop_half1: idx_start+scale*crop_half1+scale*label_size:scale,
                                                                              idy_start+scale*crop_half1: idy_start+scale*crop_half1+scale*label_size:scale,4+ix_rd,4+iy_rd]
                else:
                    traindata_batch_label[ii,:,:]=(1.0/scale)*traindata_label[image_id, idx_start+scale*crop_half1: idx_start+scale*crop_half1+scale*label_size:scale,
                                                                              idy_start+scale*crop_half1: idy_start+scale*crop_half1+scale*label_size:scale]

    # Normalize from uint8 range to [0, 1] float32.
    traindata_batch_90d=np.float32((1/255)*traindata_batch_90d)
    traindata_batch_0d =np.float32((1/255)*traindata_batch_0d)
    traindata_batch_45d=np.float32((1/255)*traindata_batch_45d)
    traindata_batch_m45d=np.float32((1/255)*traindata_batch_m45d)

    return traindata_batch_90d,traindata_batch_0d,traindata_batch_45d,traindata_batch_m45d, traindata_batch_label #,usage_check
def data_augmentation_for_train(traindata_batch_90d, traindata_batch_0d,
                                traindata_batch_45d,traindata_batch_m45d,
                                traindata_label_batchNxN, batch_size):
    """
        For Data augmentation
        (rotation, transpose and gamma)

        Each sample independently receives a random gamma in [0.8, 1.2) and one
        of five geometric transforms: identity (0), 90/180/270-degree rotation
        (1/2/3) or transpose (4). The four view stacks are remapped so each
        stack still corresponds to the same EPI direction after the transform.
        All arrays are modified in place and also returned.
    """
    for batch_i in range(batch_size):
        # Random gamma correction applied identically to all four stacks.
        gray_rand=0.4*np.random.rand()+0.8
        traindata_batch_90d[batch_i,:,:,:]=pow(traindata_batch_90d[batch_i,:,:,:],gray_rand)
        traindata_batch_0d[batch_i,:,:,:]=pow(traindata_batch_0d[batch_i,:,:,:],gray_rand)
        traindata_batch_45d[batch_i,:,:,:]=pow(traindata_batch_45d[batch_i,:,:,:],gray_rand)
        traindata_batch_m45d[batch_i,:,:,:]=pow(traindata_batch_m45d[batch_i,:,:,:],gray_rand)
        rotation_or_transp_rand=np.random.randint(0,5)
        if rotation_or_transp_rand==4: # transpose
            traindata_batch_90d_tmp6=np.copy(np.transpose(np.squeeze(traindata_batch_90d[batch_i,:,:,:]),(1, 0, 2)) )
            traindata_batch_0d_tmp6=np.copy(np.transpose(np.squeeze(traindata_batch_0d[batch_i,:,:,:]),(1, 0, 2)) )
            traindata_batch_45d_tmp6=np.copy(np.transpose(np.squeeze(traindata_batch_45d[batch_i,:,:,:]),(1, 0, 2)) )
            traindata_batch_m45d_tmp6=np.copy(np.transpose(np.squeeze(traindata_batch_m45d[batch_i,:,:,:]),(1, 0, 2)) )
            # Transposing swaps the horizontal and vertical EPI directions, so
            # the 0d and 90d stacks trade places (with view order reversed).
            traindata_batch_0d[batch_i,:,:,:]=np.copy(traindata_batch_90d_tmp6[:,:,::-1])
            traindata_batch_90d[batch_i,:,:,:]=np.copy(traindata_batch_0d_tmp6[:,:,::-1])
            traindata_batch_45d[batch_i,:,:,:]=np.copy(traindata_batch_45d_tmp6[:,:,::-1])
            traindata_batch_m45d[batch_i,:,:,:]=np.copy(traindata_batch_m45d_tmp6)#[:,:,::-1])
            traindata_label_batchNxN[batch_i,:,:]=np.copy(np.transpose(traindata_label_batchNxN[batch_i,:,:],(1, 0)))
        if(rotation_or_transp_rand==1): # 90 degrees
            traindata_batch_90d_tmp3=np.copy(np.rot90(traindata_batch_90d[batch_i,:,:,:],1,(0,1)))
            traindata_batch_0d_tmp3=np.copy(np.rot90(traindata_batch_0d[batch_i,:,:,:],1,(0,1)))
            traindata_batch_45d_tmp3=np.copy(np.rot90(traindata_batch_45d[batch_i,:,:,:],1,(0,1)))
            traindata_batch_m45d_tmp3=np.copy(np.rot90(traindata_batch_m45d[batch_i,:,:,:],1,(0,1)))
            # A 90-degree rotation maps 0d<->90d and 45d<->m45d (with view
            # order reversed where the EPI direction flips).
            traindata_batch_90d[batch_i,:,:,:]=traindata_batch_0d_tmp3
            traindata_batch_45d[batch_i,:,:,:]=traindata_batch_m45d_tmp3
            traindata_batch_0d[batch_i,:,:,:]=traindata_batch_90d_tmp3[:,:,::-1]
            traindata_batch_m45d[batch_i,:,:,:]=traindata_batch_45d_tmp3[:,:,::-1]
            traindata_label_batchNxN[batch_i,:,:]=np.copy(np.rot90(traindata_label_batchNxN[batch_i,:,:],1,(0,1)))
        if(rotation_or_transp_rand==2): # 180 degrees
            traindata_batch_90d_tmp4=np.copy(np.rot90(traindata_batch_90d[batch_i,:,:,:],2,(0,1)))
            traindata_batch_0d_tmp4=np.copy(np.rot90(traindata_batch_0d[batch_i,:,:,:],2,(0,1)))
            traindata_batch_45d_tmp4=np.copy(np.rot90(traindata_batch_45d[batch_i,:,:,:],2,(0,1)))
            traindata_batch_m45d_tmp4=np.copy(np.rot90(traindata_batch_m45d[batch_i,:,:,:],2,(0,1)))
            # A 180-degree rotation keeps each direction but reverses the views.
            traindata_batch_90d[batch_i,:,:,:]=traindata_batch_90d_tmp4[:,:,::-1]
            traindata_batch_0d[batch_i,:,:,:]=traindata_batch_0d_tmp4[:,:,::-1]
            traindata_batch_45d[batch_i,:,:,:]=traindata_batch_45d_tmp4[:,:,::-1]
            traindata_batch_m45d[batch_i,:,:,:]=traindata_batch_m45d_tmp4[:,:,::-1]
            traindata_label_batchNxN[batch_i,:,:]=np.copy(np.rot90(traindata_label_batchNxN[batch_i,:,:],2,(0,1)))
        if(rotation_or_transp_rand==3): # 270 degrees
            traindata_batch_90d_tmp5=np.copy(np.rot90(traindata_batch_90d[batch_i,:,:,:],3,(0,1)))
            traindata_batch_0d_tmp5=np.copy(np.rot90(traindata_batch_0d[batch_i,:,:,:],3,(0,1)))
            traindata_batch_45d_tmp5=np.copy(np.rot90(traindata_batch_45d[batch_i,:,:,:],3,(0,1)))
            traindata_batch_m45d_tmp5=np.copy(np.rot90(traindata_batch_m45d[batch_i,:,:,:],3,(0,1)))
            # A 270-degree rotation is the inverse remapping of the 90-degree case.
            traindata_batch_90d[batch_i,:,:,:]=traindata_batch_0d_tmp5[:,:,::-1]
            traindata_batch_0d[batch_i,:,:,:]=traindata_batch_90d_tmp5
            traindata_batch_45d[batch_i,:,:,:]=traindata_batch_m45d_tmp5[:,:,::-1]
            traindata_batch_m45d[batch_i,:,:,:]=traindata_batch_45d_tmp5
            traindata_label_batchNxN[batch_i,:,:]=np.copy(np.rot90(traindata_label_batchNxN[batch_i,:,:],3,(0,1)))
    return traindata_batch_90d, traindata_batch_0d,traindata_batch_45d,traindata_batch_m45d, traindata_label_batchNxN
def generate_traindata512(traindata_all,traindata_label,Setting02_AngualrViews,input_size=512,label_size=512):
    """
    Generate a validation or test set ( = full size (512x512) LF images).

    input: traindata_all   (Nx512x512x9x9x3) uint8
           traindata_label (Nx512x512), (Nx512x512x1x1) or (Nx512x512x9x9) float32
           Setting02_AngualrViews [0,1,2,3,4,5,6,7,8] for 9x9
           input_size / label_size: spatial size of the LF images (default 512).
               Previously hard-coded; exposed as backward-compatible defaults so
               differently sized light fields can be processed as well.

    output: traindata_batch_90d      (N x input_size x input_size x views) float32
            traindata_batch_0d       (N x input_size x input_size x views) float32
            traindata_batch_45d      (N x input_size x input_size x views) float32
            traindata_batch_m45d     (N x input_size x input_size x views) float32
            traindata_label_batchNxN (N x label_size x label_size)         float32
    """
    traindata_batch_90d=np.zeros((len(traindata_all),input_size,input_size,len(Setting02_AngualrViews)),dtype=np.float32)
    traindata_batch_0d=np.zeros((len(traindata_all),input_size,input_size,len(Setting02_AngualrViews)),dtype=np.float32)
    traindata_batch_45d=np.zeros((len(traindata_all),input_size,input_size,len(Setting02_AngualrViews)),dtype=np.float32)
    traindata_batch_m45d=np.zeros((len(traindata_all),input_size,input_size,len(Setting02_AngualrViews)),dtype=np.float32)
    traindata_label_batchNxN=np.zeros((len(traindata_all),label_size,label_size))

    # Crop offset from input patch to label patch (0 when the sizes match).
    crop_half1=int(0.5*(input_size-label_size))
    start1=Setting02_AngualrViews[0]
    end1=Setting02_AngualrViews[-1]

    for ii in range(0,len(traindata_all)):
        # Fixed luma weights (0.299/0.587/0.114) for the gray conversion; no
        # random color/crop/scale augmentation for validation/test data.
        R = 0.299
        G = 0.587
        B = 0.114
        image_id = ii
        ix_rd = 0
        iy_rd = 0
        idx_start = 0
        idy_start = 0
        seq0to8=np.array(Setting02_AngualrViews)+ix_rd
        seq8to0=np.array(Setting02_AngualrViews[::-1])+iy_rd
        # 0d: center row of views; 90d: center column (reversed view order).
        traindata_batch_0d[ii,:,:,:]=np.squeeze(R*traindata_all[image_id:image_id+1, idx_start: idx_start+input_size, idy_start: idy_start+input_size, 4+ix_rd, seq0to8,0].astype('float32')+
                                                G*traindata_all[image_id:image_id+1, idx_start: idx_start+input_size, idy_start: idy_start+input_size, 4+ix_rd, seq0to8,1].astype('float32')+
                                                B*traindata_all[image_id:image_id+1, idx_start: idx_start+input_size, idy_start: idy_start+input_size, 4+ix_rd, seq0to8,2].astype('float32'))
        traindata_batch_90d[ii,:,:,:]=np.squeeze(R*traindata_all[image_id:image_id+1, idx_start: idx_start+input_size,idy_start: idy_start+input_size, seq8to0, 4+iy_rd,0].astype('float32')+
                                                 G*traindata_all[image_id:image_id+1, idx_start: idx_start+input_size,idy_start: idy_start+input_size, seq8to0, 4+iy_rd,1].astype('float32')+
                                                 B*traindata_all[image_id:image_id+1, idx_start: idx_start+input_size,idy_start: idy_start+input_size, seq8to0, 4+iy_rd,2].astype('float32'))
        # 45d / m45d: the two diagonals of the 9x9 view grid.
        for kkk in range(start1,end1+1):
            traindata_batch_45d[ii,:,:,kkk-start1]=np.squeeze(R*traindata_all[image_id:image_id+1, idx_start: idx_start+input_size,idy_start: idy_start+input_size, (8)-kkk+ix_rd, kkk+iy_rd,0].astype('float32')+
                                                              G*traindata_all[image_id:image_id+1, idx_start: idx_start+input_size,idy_start: idy_start+input_size, (8)-kkk+ix_rd, kkk+iy_rd,1].astype('float32')+
                                                              B*traindata_all[image_id:image_id+1, idx_start: idx_start+input_size,idy_start: idy_start+input_size, (8)-kkk+ix_rd, kkk+iy_rd,2].astype('float32'))
            traindata_batch_m45d[ii,:,:,kkk-start1]=np.squeeze(R*traindata_all[image_id:image_id+1, idx_start: idx_start+input_size,idy_start: idy_start+input_size, kkk+ix_rd, kkk+iy_rd,0].astype('float32')+
                                                               G*traindata_all[image_id:image_id+1, idx_start: idx_start+input_size,idy_start: idy_start+input_size, kkk+ix_rd, kkk+iy_rd,1].astype('float32')+
                                                               B*traindata_all[image_id:image_id+1, idx_start: idx_start+input_size,idy_start: idy_start+input_size, kkk+ix_rd, kkk+iy_rd,2].astype('float32'))
        # Labels: pick the center view for 5D labels, otherwise copy directly.
        if(len(traindata_all)>=12 and traindata_label.shape[-1]==9):
            traindata_label_batchNxN[ii,:,:]=traindata_label[image_id,idx_start+crop_half1: idx_start+crop_half1+label_size,idy_start+crop_half1: idy_start+crop_half1+label_size, 4+ix_rd, 4+iy_rd]
        elif(len(traindata_label.shape)==5):
            traindata_label_batchNxN[ii,:,:]=traindata_label[image_id ,idx_start+crop_half1: idx_start+crop_half1+label_size,idy_start+crop_half1: idy_start+crop_half1+label_size,0,0]
        else:
            traindata_label_batchNxN[ii,:,:]=traindata_label[image_id ,idx_start+crop_half1: idx_start+crop_half1+label_size,idy_start+crop_half1: idy_start+crop_half1+label_size]

    # Normalize from uint8 range to [0, 1] float32 and clip to the valid range.
    traindata_batch_90d=np.float32((1/255)*traindata_batch_90d)
    traindata_batch_0d =np.float32((1/255)*traindata_batch_0d)
    traindata_batch_45d=np.float32((1/255)*traindata_batch_45d)
    traindata_batch_m45d=np.float32((1/255)*traindata_batch_m45d)
    traindata_batch_90d=np.minimum(np.maximum(traindata_batch_90d,0),1)
    traindata_batch_0d=np.minimum(np.maximum(traindata_batch_0d,0),1)
    traindata_batch_45d=np.minimum(np.maximum(traindata_batch_45d,0),1)
    traindata_batch_m45d=np.minimum(np.maximum(traindata_batch_m45d,0),1)
    return traindata_batch_90d,traindata_batch_0d,traindata_batch_45d,traindata_batch_m45d, traindata_label_batchNxN
73b8fd51708592f2eed3f14dcc6c7b3b62430524 | 791 | py | Python | LeetCodeSolutions/python/203_Remove_Linked_List_Elements.py | ChuanleiGuo/AlgorithmsPlayground | 90b6287b742c8bfd3797540c408d679be2821a40 | [
"MIT"
] | 1 | 2017-03-27T13:38:37.000Z | 2017-03-27T13:38:37.000Z | LeetCodeSolutions/python/203_Remove_Linked_List_Elements.py | ChuanleiGuo/AlgorithmsPlayground | 90b6287b742c8bfd3797540c408d679be2821a40 | [
"MIT"
] | null | null | null | LeetCodeSolutions/python/203_Remove_Linked_List_Elements.py | ChuanleiGuo/AlgorithmsPlayground | 90b6287b742c8bfd3797540c408d679be2821a40 | [
"MIT"
class ListNode(object):
    """A node of a singly linked list."""

    def __init__(self, x):
        """Create a node holding ``x`` with no successor."""
        self.val, self.next = x, None
class Solution(object):
    """LeetCode 203: remove all nodes holding a given value from a list."""

    def removeElements(self, head, val):
        """Unlink every node whose value equals ``val``.

        :type head: ListNode
        :type val: int
        :rtype: ListNode
        """
        # A sentinel in front of the list lets us remove the head node
        # with the same logic as any interior node.
        sentinel = ListNode(0)
        sentinel.next = head
        prev = sentinel
        while prev.next is not None:
            if prev.next.val == val:
                # Skip over the matching node; do not advance so that
                # consecutive matches are handled too.
                prev.next = prev.next.next
            else:
                prev = prev.next
        return sentinel.next
# Build the sample list 1 -> 2 -> 6 -> 3 -> 6 used for manual testing;
# `cur` is left pointing at the node holding 3, as before.
l = ListNode(1)
cur = l
for value in (2, 6, 3):
    cur.next = ListNode(value)
    cur = cur.next
cur.next = ListNode(6)
| 18.833333 | 40 | 0.499368 |
ea6df9981f4473def24c4fa6680ecdc4dd01b9d9 | 2,137 | py | Python | calendar_w_token.py | LeGmask/sbrcalendar | 664fcf50998a2a49e242f0a93522e903ba285686 | [
"MIT"
] | null | null | null | calendar_w_token.py | LeGmask/sbrcalendar | 664fcf50998a2a49e242f0a93522e903ba285686 | [
"MIT"
] | null | null | null | calendar_w_token.py | LeGmask/sbrcalendar | 664fcf50998a2a49e242f0a93522e903ba285686 | [
"MIT"
] | null | null | null | from __future__ import print_function
import datetime
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
# If modifying these scopes, delete the file token.json.
# OAuth scope requested during the authorization flow in `main`.
SCOPES = ['https://www.googleapis.com/auth/calendar.events.owned']
def main():
    """Authenticate with the Google Calendar API and print upcoming events.

    Prints the start time and summary of the next 10 events of a
    hard-coded calendar, caching OAuth credentials in ``token.json``.
    """
    credentials = None
    # token.json caches the user's access and refresh tokens; it is created
    # automatically the first time the authorization flow completes.
    if os.path.exists('token.json'):
        credentials = Credentials.from_authorized_user_file('token.json', SCOPES)
    if not credentials or not credentials.valid:
        if credentials and credentials.expired and credentials.refresh_token:
            credentials.refresh(Request())
        else:
            # No usable cached credentials: run the interactive OAuth flow.
            oauth_flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            credentials = oauth_flow.run_local_server(port=0)
        # Persist the (new or refreshed) credentials for the next run.
        with open('token.json', 'w') as token_file:
            token_file.write(credentials.to_json())

    service = build('calendar', 'v3', credentials=credentials)

    # 'Z' marks the timestamp as UTC for the Calendar API.
    now = datetime.datetime.utcnow().isoformat() + 'Z'
    print('Getting the upcoming 10 events')
    response = service.events().list(
        calendarId='jbp9sq5m1tj9f4rvoofjlgvcm8@group.calendar.google.com',
        timeMin=now,
        maxResults=10,
        singleEvents=True,
        orderBy='startTime',
    ).execute()
    events = response.get('items', [])

    if not events:
        print('No upcoming events found.')
    for event in events:
        # All-day events carry 'date' instead of 'dateTime'.
        start = event['start'].get('dateTime', event['start'].get('date'))
        print(start, event['summary'])
# Run the example only when executed as a script (not on import).
if __name__ == '__main__':
    main()
| 38.160714 | 121 | 0.662143 |
ef97e272cac3d4717f18a76e8d66c0cf87e4bc76 | 1,756 | py | Python | bin/calculations/scratch/SSIM_example.py | michaelneuder/image_quality_analysis | 4b4c934454ced945bdec165e1da99b17688e1608 | [
"MIT"
] | 11 | 2017-06-14T22:14:12.000Z | 2019-03-02T12:36:30.000Z | bin/calculations/scratch/SSIM_example.py | michaelneuder/image_quality_analysis | 4b4c934454ced945bdec165e1da99b17688e1608 | [
"MIT"
] | null | null | null | bin/calculations/scratch/SSIM_example.py | michaelneuder/image_quality_analysis | 4b4c934454ced945bdec165e1da99b17688e1608 | [
"MIT"
] | 1 | 2018-11-22T12:31:18.000Z | 2018-11-22T12:31:18.000Z | #!/usr/bin/env python3
import numpy as np
import scipy as sp
def main():
    """Compute the global SSIM between two hard-coded 11x11 image patches.

    Prints the intermediate statistics (means, variances, covariance and the
    stabilising constants) along with the SSIM score.

    Returns:
        float: the SSIM score of the two patches (1.0 for identical inputs).
    """
    orig = np.asarray(
        [[61,67,70,75,74,70,70,70,70,70,70],
        [73,69,68,65,71,72,72,72,72,72,72],
        [67,68,69,69,73,71,71,71,71,71,71],
        [68,68,70,76,81,82,82,82,82,82,82],
        [74,77,79,78,80,85,85,85,85,85,85],
        [71,77,78,78,76,80,80,80,80,80,80],
        [71,77,78,78,76,80,80,80,80,80,80],
        [71,77,78,78,76,80,80,80,80,80,80],
        [71,77,78,78,76,80,80,80,80,80,80],
        [71,77,78,78,76,80,80,80,80,80,80],
        [71,77,78,78,76,80,80,80,80,80,80]])
    recon = np.asarray(
        [[69,68,71,80,82,77,77,77,77,77,77],
        [77,71,71,79,88,79,79,79,79,79,79],
        [73,71,74,81,87,79,79,79,79,79,79],
        [73,69,76,81,89,82,82,82,82,82,82],
        [73,68,74,81,83,80,80,80,80,80,80],
        [70,72,72,79,80,79,79,79,79,79,79],
        [70,72,72,79,80,79,79,79,79,79,79],
        [70,72,72,79,80,79,79,79,79,79,79],
        [70,72,72,79,80,79,79,79,79,79,79],
        [70,72,72,79,80,79,79,79,79,79,79],
        [70,72,72,79,80,79,79,79,79,79,79]])

    orig_data = orig.flatten()
    recon_data = recon.flatten()

    # Standard SSIM stabilising constants for 8-bit images (L = dynamic range).
    k_1 = 0.01
    k_2 = 0.03
    L = 255

    mean_x = np.mean(orig_data)
    mean_y = np.mean(recon_data)
    print('means: ', mean_x, ', ', mean_y)

    var_x = np.var(orig_data)
    var_y = np.var(recon_data)
    print('vars : ', var_x, ', ', var_y)

    # ddof=0 keeps the covariance consistent with np.var (population
    # statistics). np.cov defaults to the sample estimate (ddof=1), which
    # would make SSIM(x, x) != 1 — that was a bug in the original script.
    covar = np.cov(orig_data, recon_data, ddof=0)[0][1]
    print('covar: ', covar)

    c_1 = (L*k_1)**2
    c_2 = (L*k_2)**2
    print('c1, c2 : ', c_1, ', ', c_2)

    num = (2*mean_x*mean_y+c_1)*(2*covar+c_2)
    den = (mean_x**2+mean_y**2+c_1)*(var_x+var_y+c_2)
    ssim = num / den
    print('SSIM : ', ssim)
    return ssim
# Run the example only when executed as a script (not on import).
if __name__ == '__main__':
    main()
| 33.132075 | 53 | 0.523918 |
c753f885bc4fb076c3ad0dda9c7044e55772c6df | 203 | py | Python | 000989letpy/letpy_081_loop_else_20200504.py | SafonovMikhail/python_000577 | 739f764e80f1ca354386f00b8e9db1df8c96531d | [
"Apache-2.0"
] | null | null | null | 000989letpy/letpy_081_loop_else_20200504.py | SafonovMikhail/python_000577 | 739f764e80f1ca354386f00b8e9db1df8c96531d | [
"Apache-2.0"
] | null | null | null | 000989letpy/letpy_081_loop_else_20200504.py | SafonovMikhail/python_000577 | 739f764e80f1ca354386f00b8e9db1df8c96531d | [
"Apache-2.0"
] | null | null | null | # string = input()
# Walk the string looking for an exclamation mark, printing each character
# until one is found. The `else` clause of the while loop below runs only
# when the loop finishes WITHOUT hitting `break` (i.e. no '!' was found).
string = 'Восклицательного'
i = 0
while i < len(string):
    if string[i] == '!':
        # Found the mark: stop, skipping the while-else message.
        break
    print(string[i])
    i = i + 1
else:
print('Восклицательного знака не найдено') | 18.454545 | 46 | 0.581281 |
60d76e9863fcbb0dce4070f0db1c7cc945b3ef32 | 12,306 | py | Python | try2/lib/python3.9/site-packages/kgx/cli/__init__.py | diatomsRcool/eco-kg | 4251f42ca2ab353838a39b640cb97593db76d4f4 | [
"BSD-3-Clause"
] | 1 | 2021-10-04T18:22:12.000Z | 2021-10-04T18:22:12.000Z | try2/lib/python3.9/site-packages/kgx/cli/__init__.py | diatomsRcool/eco-kg | 4251f42ca2ab353838a39b640cb97593db76d4f4 | [
"BSD-3-Clause"
] | 10 | 2021-06-16T20:48:32.000Z | 2021-10-04T18:22:02.000Z | try2/lib/python3.9/site-packages/kgx/cli/__init__.py | diatomsRcool/eco-kg | 4251f42ca2ab353838a39b640cb97593db76d4f4 | [
"BSD-3-Clause"
] | null | null | null | import kgx
import click
from typing import List, Tuple, Optional, Set
from kgx.config import get_logger, get_config
from kgx.cli.cli_utils import (
get_input_file_types,
parse_source,
apply_operations,
graph_summary,
validate,
neo4j_download,
neo4j_upload,
transform,
merge,
summary_report_types,
get_report_format_types,
)
log = get_logger()
config = get_config()
def error(msg):
log.error(msg)
quit()
@click.group()
@click.version_option(version=kgx.__version__, prog_name=kgx.__name__)
def cli():
"""
Knowledge Graph Exchange CLI entrypoint.
\f
"""
pass
@cli.command(name='graph-summary')
@click.argument('inputs', required=True, type=click.Path(exists=True), nargs=-1)
@click.option(
'--input-format',
'-i',
required=True,
help=f'The input format. Can be one of {get_input_file_types()}',
)
@click.option('--input-compression', '-c', required=False, help='The input compression type')
@click.option('--output', '-o', required=True, type=click.Path(exists=False))
@click.option(
'--report-type',
'-r',
required=False,
type=str,
help=f'The summary report type. Must be one of {tuple(summary_report_types.keys())}',
default='kgx-map',
)
@click.option(
'--report-format',
'-f',
help=f'The input format. Can be one of {get_report_format_types()}',
)
@click.option('--stream', '-s', is_flag=True, help='Parse input as a stream')
@click.option(
'--node-facet-properties',
required=False,
multiple=True,
help='A list of node properties from which to generate counts per value for those properties',
)
@click.option(
'--edge-facet-properties',
required=False,
multiple=True,
help='A list of edge properties from which to generate counts per value for those properties',
)
@click.option(
'--error-log',
'-l',
required=False,
type=click.Path(exists=False),
help='File within which to report graph data parsing errors (default: "stderr")'
)
def graph_summary_wrapper(
inputs: List[str],
input_format: str,
input_compression: str,
output: str,
report_type: str,
report_format: str,
stream: bool,
node_facet_properties: Optional[Set],
edge_facet_properties: Optional[Set],
error_log: str = ''
):
graph_summary(
inputs,
input_format,
input_compression,
output,
report_type,
report_format,
stream,
node_facet_properties=list(node_facet_properties),
edge_facet_properties=list(edge_facet_properties),
error_log=error_log
)
@cli.command(name='validate')
@click.argument('inputs', required=True, type=click.Path(exists=True), nargs=-1)
@click.option(
'--input-format',
'-i',
required=True,
help=f'The input format. Can be one of {get_input_file_types()}',
)
@click.option('--input-compression', '-c', required=False, help='The input compression type')
@click.option(
'--output',
'-o',
required=False,
type=click.Path(exists=False),
help='File to write validation reports to',
)
@click.option('--stream', '-s', is_flag=True, help='Parse input as a stream')
def validate_wrapper(
inputs: List[str], input_format: str, input_compression: str, output: str, stream: bool
):
"""
Run KGX validator on an input file to check for Biolink Model compliance.
Parameters
----------
inputs: List[str]
Input files
input_format: str
The input format
input_compression: str
The input compression type
output: str
Path to output file
stream: bool
Whether to parse input as a stream
"""
validate(inputs, input_format, input_compression, output, stream)
@cli.command(name='neo4j-download')
@click.option(
'--uri',
'-l',
required=True,
type=str,
help='Neo4j URI to download from. For example, https://localhost:7474',
)
@click.option('--username', '-u', required=True, type=str, help='Neo4j username')
@click.option('--password', '-p', required=True, type=str, help='Neo4j password')
@click.option('--output', '-o', required=True, type=click.Path(exists=False), help='Output')
@click.option(
'--output-format',
'-f',
required=True,
help=f'The output format. Can be one of {get_input_file_types()}',
)
@click.option('--output-compression', '-d', required=False, help='The output compression type')
@click.option('--stream', '-s', is_flag=True, help='Parse input as a stream')
@click.option(
'--node-filters',
'-n',
required=False,
type=click.Tuple([str, str]),
multiple=True,
help=f'Filters for filtering nodes from the input graph',
)
@click.option(
'--edge-filters',
'-e',
required=False,
type=click.Tuple([str, str]),
multiple=True,
help=f'Filters for filtering edges from the input graph',
)
def neo4j_download_wrapper(
uri: str,
username: str,
password: str,
output: str,
output_format: str,
output_compression: str,
stream: bool,
node_filters: Tuple,
edge_filters: Tuple,
):
"""
Download nodes and edges from Neo4j database.
\f
Parameters
----------
uri: str
Neo4j URI. For example, https://localhost:7474
username: str
Username for authentication
password: str
Password for authentication
output: str
Where to write the output (stdout, by default)
output_format: str
The output type (``tsv``, by default)
output_compression: str
The output compression type
stream: bool
Whether to parse input as a stream
node_filters: Tuple[str, str]
Node filters
edge_filters: Tuple[str, str]
Edge filters
"""
neo4j_download(
uri,
username,
password,
output,
output_format,
output_compression,
stream,
node_filters,
edge_filters,
)
@cli.command(name='neo4j-upload')
@click.argument('inputs', required=True, type=click.Path(exists=True), nargs=-1)
@click.option(
'--input-format',
'-i',
required=True,
help=f'The input format. Can be one of {get_input_file_types()}',
)
@click.option('--input-compression', '-c', required=False, help='The input compression type')
@click.option(
'--uri',
'-l',
required=True,
type=str,
help='Neo4j URI to upload to. For example, https://localhost:7474',
)
@click.option('--username', '-u', required=True, type=str, help='Neo4j username')
@click.option('--password', '-p', required=True, type=str, help='Neo4j password')
@click.option('--stream', '-s', is_flag=True, help='Parse input as a stream')
@click.option(
'--node-filters',
'-n',
required=False,
type=click.Tuple([str, str]),
multiple=True,
help=f'Filters for filtering nodes from the input graph',
)
@click.option(
'--edge-filters',
'-e',
required=False,
type=click.Tuple([str, str]),
multiple=True,
help=f'Filters for filtering edges from the input graph',
)
def neo4j_upload_wrapper(
inputs: List[str],
input_format: str,
input_compression: str,
uri: str,
username: str,
password: str,
stream: bool,
node_filters: Tuple[str, str],
edge_filters: Tuple[str, str],
):
"""
Upload a set of nodes/edges to a Neo4j database.
\f
Parameters
----------
inputs: List[str]
A list of files that contains nodes/edges
input_format: str
The input format
input_compression: str
The input compression type
uri: str
The full HTTP address for Neo4j database
username: str
Username for authentication
password: str
Password for authentication
stream: bool
Whether to parse input as a stream
node_filters: Tuple[str, str]
Node filters
edge_filters: Tuple[str, str]
Edge filters
"""
neo4j_upload(
inputs,
input_format,
input_compression,
uri,
username,
password,
stream,
node_filters,
edge_filters,
)
@cli.command(name='transform')
@click.argument('inputs', required=False, type=click.Path(exists=True), nargs=-1)
@click.option(
'--input-format',
'-i',
required=False,
help=f'The input format. Can be one of {get_input_file_types()}',
)
@click.option('--input-compression', '-c', required=False, help='The input compression type')
@click.option('--output', '-o', required=False, type=click.Path(exists=False), help='Output')
@click.option(
'--output-format',
'-f',
required=False,
help=f'The output format. Can be one of {get_input_file_types()}',
)
@click.option('--output-compression', '-d', required=False, help='The output compression type')
@click.option('--stream', is_flag=True, help='Parse input as a stream')
@click.option(
'--node-filters',
'-n',
required=False,
type=click.Tuple([str, str]),
multiple=True,
help=f'Filters for filtering nodes from the input graph',
)
@click.option(
'--edge-filters',
'-e',
required=False,
type=click.Tuple([str, str]),
multiple=True,
help=f'Filters for filtering edges from the input graph',
)
@click.option('--transform-config', required=False, type=str, help=f'Transform config YAML')
@click.option(
'--source', required=False, type=str, multiple=True, help='Source(s) from the YAML to process'
)
@click.option('--processes', '-p', required=False, type=int, default=1, help='Number of processes to use')
def transform_wrapper(
inputs: List[str],
input_format: str,
input_compression: str,
output: str,
output_format: str,
output_compression: str,
stream: bool,
node_filters: Tuple[str, str],
edge_filters: Tuple[str, str],
transform_config: str,
source: List,
processes: int,
):
"""
Transform a Knowledge Graph from one serialization form to another.
Parameters
----------
inputs: List[str]
A list of files that contains nodes/edges
input_format: str
The input format
input_compression: str
The input compression type
output: str
The output file
output_format: str
The output format
output_compression: str
The output compression typ
stream: bool
Wheter or not to stream
node_filters: Tuple[str, str]
Node input filters
edge_filters: Tuple[str, str]
Edge input filters
transform_config: str
Transform config YAML
source: List
A list of source(s) to load from the YAML
processes: int
Number of processes to use
"""
transform(
inputs,
input_format=input_format,
input_compression=input_compression,
output=output,
output_format=output_format,
output_compression=output_compression,
stream=stream,
node_filters=node_filters,
edge_filters=edge_filters,
transform_config=transform_config,
source=source,
processes=processes,
)
@cli.command(name='merge')
@click.option('--merge-config', required=True, type=str)
@click.option(
'--source', required=False, type=str, multiple=True, help='Source(s) from the YAML to process'
)
@click.option(
'--destination',
required=False,
type=str,
multiple=True,
help='Destination(s) from the YAML to process',
)
@click.option('--processes', '-p', required=False, type=int, default=1, help='Number of processes to use')
def merge_wrapper(merge_config: str, source: List, destination: List, processes: int):
"""
Load nodes and edges from files and KGs, as defined in a config YAML, and merge them into a single graph.
The merged graph can then be written to a local/remote Neo4j instance OR be serialized into a file.
\f
.. note::
Everything here is driven by the ``merge-config`` YAML.
Parameters
----------
merge_config: str
Merge config YAML
source: List
A list of source to load from the YAML
destination: List
A list of destination to write to, as defined in the YAML
processes: int
Number of processes to use
"""
merge(merge_config, source, destination, processes)
| 27.105727 | 109 | 0.647164 |
dc3ce564a6b799a13e0554f3b62b6a7dcfd66520 | 380 | py | Python | settings/mail.py | skylifewww/handball | 853190e44037086b7749cb8f62d9df6577b379fd | [
"MIT"
] | null | null | null | settings/mail.py | skylifewww/handball | 853190e44037086b7749cb8f62d9df6577b379fd | [
"MIT"
] | null | null | null | settings/mail.py | skylifewww/handball | 853190e44037086b7749cb8f62d9df6577b379fd | [
"MIT"
] | null | null | null | ADMINS = (
('Admin', 'admin@handball.com'),
)
# DEFAULT_FROM_EMAIL = 'robot@tvoy_style.com'
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
DEFAULT_FROM_EMAIL = 'skylifewww@gmail.com'
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = 'skylifewww@gmail.com'
EMAIL_HOST_PASSWORD = 'skywww123'
SERVER_EMAIL = DEFAULT_FROM_EMAIL
EMAIL_PORT = 587
| 27.142857 | 61 | 0.771053 |
2a14e4dd4783517773bc5b9a18402311b674bf0c | 3,020 | py | Python | sopel/__init__.py | Thermi/strongswan-sopel | 4f858b51d296a54a285a0ba044f1810871e3bb4c | [
"EFL-2.0"
] | null | null | null | sopel/__init__.py | Thermi/strongswan-sopel | 4f858b51d296a54a285a0ba044f1810871e3bb4c | [
"EFL-2.0"
] | null | null | null | sopel/__init__.py | Thermi/strongswan-sopel | 4f858b51d296a54a285a0ba044f1810871e3bb4c | [
"EFL-2.0"
] | null | null | null | # coding=utf-8
"""
__init__.py - Sopel Init Module
Copyright 2008, Sean B. Palmer, inamidst.com
Copyright 2012, Edward Powell, http://embolalia.net
Copyright © 2012, Elad Alfassa <elad@fedoraproject.org>
Licensed under the Eiffel Forum License 2.
http://sopel.chat/
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from collections import namedtuple
import os
import re
import time
import traceback
import signal
__version__ = '6.0.0b0'
def _version_info(version=__version__):
regex = re.compile(r'(\d+)\.(\d+)\.(\d+)(?:(a|b|rc)(\d+))?.*')
version_groups = regex.match(__version__).groups()
major, minor, micro = (int(piece) for piece in version_groups[0:3])
level = version_groups[3]
serial = int(version_groups[4] or 0)
if level == 'a':
level = 'alpha'
elif level == 'b':
level = 'beta'
elif level == 'rc':
level = 'candidate'
elif not level and version_groups[5] is None:
level = 'final'
else:
level = 'alpha'
version_type = namedtuple('version_info',
'major, minor, micro, releaselevel, serial')
return version_type(major, minor, micro, level, serial)
version_info = _version_info()
def run(config, pid_file, daemon=False):
    """Run the bot, restarting it on unexpected disconnects.

    Args:
        config: The bot configuration object; ``config.core`` is read for
            ca_certs, host, port and logdir.
        pid_file: Path to the PID file, removed before the process exits.
        daemon: Whether the bot is being run as a daemon.

    This function never returns normally: it terminates the whole process
    with ``os._exit`` after cleanup (0 on clean shutdown, 1 on a critical
    exception).
    """
    # Imported lazily so module import stays cheap / side-effect free.
    import sopel.bot as bot
    import sopel.web as web
    import sopel.logger
    from sopel.tools import stderr
    # Seconds to wait before reconnecting after a disconnect.
    delay = 20
    # Inject ca_certs from config to web for SSL validation of web requests
    if not config.core.ca_certs:
        stderr('Could not open CA certificates file. SSL will not '
               'work properly.')
    web.ca_certs = config.core.ca_certs

    def signal_handler(sig, frame):
        # Closes over `p`, the bot instance created in the loop below.
        if sig == signal.SIGUSR1 or sig == signal.SIGTERM:
            stderr('Got quit signal, shutting down.')
            p.quit('Closing')
    # Reconnect loop: each iteration creates a fresh bot instance.
    while True:
        try:
            p = bot.Sopel(config, daemon=daemon)
            # Not all platforms define these signals (e.g. no SIGUSR1 on Windows).
            if hasattr(signal, 'SIGUSR1'):
                signal.signal(signal.SIGUSR1, signal_handler)
            if hasattr(signal, 'SIGTERM'):
                signal.signal(signal.SIGTERM, signal_handler)
            sopel.logger.setup_logging(p)
            # Blocks until the connection is closed.
            p.run(config.core.host, int(config.core.port))
        except KeyboardInterrupt:
            break
        except Exception:
            # Critical failure: log the traceback and abort the process.
            trace = traceback.format_exc()
            try:
                stderr(trace)
            # NOTE(review): bare except silences even SystemExit around this
            # stderr call — presumably a last-ditch guard; confirm intent.
            except:
                pass
            logfile = open(os.path.join(config.core.logdir, 'exceptions.log'), 'a')
            logfile.write('Critical exception in core')
            logfile.write(trace)
            logfile.write('----------------------------------------\n\n')
            logfile.close()
            os.unlink(pid_file)
            os._exit(1)
        if not isinstance(delay, int):
            break
        if p.hasquit:
            # Clean, user-requested shutdown: do not reconnect.
            break
        stderr('Warning: Disconnected. Reconnecting in %s seconds...' % delay)
        time.sleep(delay)
    os.unlink(pid_file)
    os._exit(0)
| 31.458333 | 83 | 0.600993 |
a8de29ac8acf26f0c75f7ee30ae00d19c56f3cc2 | 25,353 | py | Python | src/mkdocstrings/handlers/base.py | in03/mkdocstrings | 5c0e4d1290e4b54d3098281405b680291fc3639e | [
"ISC"
] | null | null | null | src/mkdocstrings/handlers/base.py | in03/mkdocstrings | 5c0e4d1290e4b54d3098281405b680291fc3639e | [
"ISC"
] | null | null | null | src/mkdocstrings/handlers/base.py | in03/mkdocstrings | 5c0e4d1290e4b54d3098281405b680291fc3639e | [
"ISC"
] | null | null | null | """Base module for handlers.
This module contains the base classes for implementing collectors, renderers, and the combination of the two: handlers.
It also provides two methods:
- `get_handler`, that will cache handlers into the `HANDLERS_CACHE` dictionary.
- `teardown`, that will teardown all the cached handlers, and then clear the cache.
"""
from __future__ import annotations
import importlib
import warnings
from contextlib import suppress
from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional, Sequence
from xml.etree.ElementTree import Element, tostring
from jinja2 import Environment, FileSystemLoader
from markdown import Markdown
from markupsafe import Markup
from mkdocstrings.handlers.rendering import (
HeadingShiftingTreeprocessor,
Highlighter,
IdPrependingTreeprocessor,
MkdocstringsInnerExtension,
ParagraphStrippingTreeprocessor,
)
from mkdocstrings.inventory import Inventory
from mkdocstrings.loggers import get_template_logger
CollectorItem = Any
class CollectionError(Exception):
    """An exception raised when some collection of data failed.

    Use this error type to signal a failure while collecting data.
    """
class ThemeNotSupported(Exception):
    """An exception raised when a requested theme is not supported."""
def do_any(seq: Sequence, attribute: Optional[str] = None) -> bool:
    """Check if at least one of the items in the sequence evaluates to true.

    The `any` builtin as a filter for Jinja templates.

    Arguments:
        seq: An iterable object.
        attribute: An optional key to look up (by subscript) in each item
            of the iterable before testing its truthiness.

    Returns:
        A boolean telling if any object of the iterable evaluated to True.
    """
    if attribute is None:
        return any(seq)
    return any(item[attribute] for item in seq)
class BaseRenderer:
    """The base renderer class.

    Inherit from this class to implement a renderer.

    You will have to implement the `render` method.
    You can also override the `update_env` method, to add more filters to the Jinja environment,
    making them available in your Jinja templates.

    To define a fallback theme, add a `fallback_theme` class-variable.
    To add custom CSS, add an `extra_css` variable or create an 'style.css' file beside the templates.
    """

    # Theme used when the requested theme has no templates for this handler.
    fallback_theme: str = ""
    # Extra CSS; the theme's style.css content is appended in __init__.
    extra_css = ""

    def __init__(self, handler: str, theme: str, custom_templates: Optional[str] = None) -> None:
        """Initialize the object.

        If the given theme is not supported (it does not exist), it will look for a `fallback_theme` attribute
        in `self` to use as a fallback theme.

        Arguments:
            handler: The name of the handler.
            theme: The name of theme to use.
            custom_templates: Directory containing custom templates.
        """
        paths = []
        # TODO: remove once BaseRenderer is merged into BaseHandler
        self._handler = handler
        self._theme = theme
        self._custom_templates = custom_templates
        themes_dir = self.get_templates_dir(handler)
        paths.append(themes_dir / theme)
        if self.fallback_theme and self.fallback_theme != theme:
            paths.append(themes_dir / self.fallback_theme)
        # Pick up the style.css of the first theme (requested or fallback) that has one.
        for path in paths:
            css_path = path / "style.css"
            if css_path.is_file():
                self.extra_css += "\n" + css_path.read_text(encoding="utf-8")  # noqa: WPS601
                break
        # Custom templates take precedence over both the theme and the fallback.
        if custom_templates is not None:
            paths.insert(0, Path(custom_templates) / handler / theme)
        self.env = Environment(
            autoescape=True,
            loader=FileSystemLoader(paths),
            auto_reload=False,  # Editing a template in the middle of a build is not useful.
        )
        self.env.filters["any"] = do_any
        self.env.globals["log"] = get_template_logger()
        self._headings: List[Element] = []
        self._md: Markdown = None  # type: ignore  # To be populated in `update_env`.

    def render(self, data: CollectorItem, config: dict) -> str:
        """Render a template using provided data and configuration options.

        Arguments:
            data: The collected data to render.
            config: The rendering options.

        Returns:
            The rendered template as HTML.
        """  # noqa: DAR202 (excess return section)

    def get_templates_dir(self, handler: str) -> Path:
        """Return the path to the handler's templates directory.

        Override to customize how the templates directory is found.

        Arguments:
            handler: The name of the handler to get the templates directory of.

        Raises:
            FileNotFoundError: When the templates directory cannot be found.

        Returns:
            The templates directory path.
        """
        # Templates can be found in 2 different logical locations:
        # - in mkdocstrings_handlers/HANDLER/templates: our new migration target
        # - in mkdocstrings/templates/HANDLER: current situation, this should be avoided
        # These two other locations are forbidden:
        # - in mkdocstrings_handlers/templates/HANDLER: sub-namespace packages are too annoying to deal with
        # - in mkdocstrings/handlers/HANDLER/templates: not currently supported,
        #   and mkdocstrings will stop being a namespace
        with suppress(ModuleNotFoundError):  # TODO: catch at some point to warn about missing handlers
            import mkdocstrings_handlers

            for path in mkdocstrings_handlers.__path__:  # noqa: WPS609
                theme_path = Path(path, handler, "templates")
                if theme_path.exists():
                    return theme_path

        # TODO: remove import and loop at some point,
        # as mkdocstrings will stop being a namespace package
        import mkdocstrings

        for path in mkdocstrings.__path__:  # noqa: WPS609,WPS440
            theme_path = Path(path, "templates", handler)
            if theme_path.exists():
                if handler != "python":
                    warnings.warn(
                        "Exposing templates in the mkdocstrings.templates namespace is deprecated. "
                        "Put them in a templates folder inside your handler package instead.",
                        DeprecationWarning,
                    )
                return theme_path

        raise FileNotFoundError(f"Can't find 'templates' folder for handler '{handler}'")

    def get_anchors(self, data: CollectorItem) -> Sequence[str]:
        """Return the possible identifiers (HTML anchors) for a collected item.

        Arguments:
            data: The collected data.

        Returns:
            The HTML anchors (without '#'), or an empty tuple if this item doesn't have an anchor.
        """
        # TODO: remove this at some point
        # Falls back to the legacy single `get_anchor` method when a subclass
        # defines it; base classes without it simply report no anchors.
        try:
            return (self.get_anchor(data),)  # type: ignore
        except AttributeError:
            return ()

    def do_convert_markdown(
        self, text: str, heading_level: int, html_id: str = "", *, strip_paragraph: bool = False
    ) -> Markup:
        """Render Markdown text; for use inside templates.

        Arguments:
            text: The text to convert.
            heading_level: The base heading level to start all Markdown headings from.
            html_id: The HTML id of the element that's considered the parent of this element.
            strip_paragraph: Whether to exclude the <p> tag from around the whole output.

        Returns:
            An HTML string.
        """
        treeprocessors = self._md.treeprocessors
        treeprocessors[HeadingShiftingTreeprocessor.name].shift_by = heading_level
        treeprocessors[IdPrependingTreeprocessor.name].id_prefix = html_id and html_id + "--"
        treeprocessors[ParagraphStrippingTreeprocessor.name].strip = strip_paragraph
        try:
            return Markup(self._md.convert(text))
        finally:
            # Reset the shared treeprocessor state so later conversions start clean.
            treeprocessors[HeadingShiftingTreeprocessor.name].shift_by = 0
            treeprocessors[IdPrependingTreeprocessor.name].id_prefix = ""
            treeprocessors[ParagraphStrippingTreeprocessor.name].strip = False
            self._md.reset()

    def do_heading(
        self,
        content: str,
        heading_level: int,
        *,
        role: Optional[str] = None,
        hidden: bool = False,
        toc_label: Optional[str] = None,
        **attributes: str,
    ) -> Markup:
        """Render an HTML heading and register it for the table of contents. For use inside templates.

        Arguments:
            content: The HTML within the heading.
            heading_level: The level of heading (e.g. 3 -> `h3`).
            role: An optional role for the object bound to this heading.
            hidden: If True, only register it for the table of contents, don't render anything.
            toc_label: The title to use in the table of contents ('data-toc-label' attribute).
            **attributes: Any extra HTML attributes of the heading.

        Returns:
            An HTML string.
        """
        # First, produce the "fake" heading, for ToC only.
        el = Element(f"h{heading_level}", attributes)
        if toc_label is None:
            toc_label = content.unescape() if isinstance(el, Markup) else content  # type: ignore
        el.set("data-toc-label", toc_label)
        if role:
            el.set("data-role", role)
        self._headings.append(el)

        if hidden:
            return Markup('<a id="{0}"></a>').format(attributes["id"])

        # Now produce the actual HTML to be rendered. The goal is to wrap the HTML content into a heading.
        # Start with a heading that has just attributes (no text), and add a placeholder into it.
        el = Element(f"h{heading_level}", attributes)
        el.append(Element("mkdocstrings-placeholder"))
        # Tell the 'toc' extension to make its additions if configured so.
        toc = self._md.treeprocessors["toc"]
        if toc.use_anchors:
            toc.add_anchor(el, attributes["id"])
        if toc.use_permalinks:
            toc.add_permalink(el, attributes["id"])

        # The content we received is HTML, so it can't just be inserted into the tree. We had marked the middle
        # of the heading with a placeholder that can never occur (text can't directly contain angle brackets).
        # Now this HTML wrapper can be "filled" by replacing the placeholder.
        html_with_placeholder = tostring(el, encoding="unicode")
        assert (
            html_with_placeholder.count("<mkdocstrings-placeholder />") == 1
        ), f"Bug in mkdocstrings: failed to replace in {html_with_placeholder!r}"
        html = html_with_placeholder.replace("<mkdocstrings-placeholder />", content)
        return Markup(html)

    def get_headings(self) -> Sequence[Element]:
        """Return and clear the headings gathered so far.

        Returns:
            A list of HTML elements.
        """
        result = list(self._headings)
        self._headings.clear()
        return result

    def update_env(self, md: Markdown, config: dict) -> None:  # noqa: W0613 (unused argument 'config')
        """Update the Jinja environment.

        Arguments:
            md: The Markdown instance. Useful to add functions able to convert Markdown into the environment filters.
            config: Configuration options for `mkdocs` and `mkdocstrings`, read from `mkdocs.yml`. See the source code
                of [mkdocstrings.plugin.MkdocstringsPlugin.on_config][] to see what's in this dictionary.
        """
        self._md = md
        self.env.filters["highlight"] = Highlighter(md).highlight
        self.env.filters["convert_markdown"] = self.do_convert_markdown
        self.env.filters["heading"] = self.do_heading

    def _update_env(self, md: Markdown, config: dict):
        """Build a fresh `Markdown` instance with the inner extension and feed it to `update_env`."""
        extensions = config["mdx"] + [MkdocstringsInnerExtension(self._headings)]
        new_md = Markdown(extensions=extensions, extension_configs=config["mdx_configs"])
        # MkDocs adds its own (required) extension that's not part of the config. Propagate it.
        if "relpath" in md.treeprocessors:
            new_md.treeprocessors.register(md.treeprocessors["relpath"], "relpath", priority=0)
        self.update_env(new_md, config)
class BaseCollector:
    """The base collector class.

    Inherit from this class to implement a collector.

    You will have to implement the `collect` method.
    You can also implement the `teardown` method.
    """

    def collect(self, identifier: str, config: dict) -> CollectorItem:
        """Collect data given an identifier and selection configuration.

        In the implementation, you typically call a subprocess that returns JSON, and load that JSON again into
        a Python dictionary for example, though the implementation is completely free.

        Arguments:
            identifier: An identifier for which to collect data. For example, in Python,
                it would be 'mkdocstrings.handlers' to collect documentation about the handlers module.
                It can be anything that you can feed to the tool of your choice.
            config: Configuration options for the tool you use to collect data. Typically called "selection" because
                these options modify how the objects or documentation are "selected" in the source code.

        Returns:
            Anything you want, as long as you can feed it to the renderer's `render` method.
        """  # noqa: DAR202 (excess return section)

    def teardown(self) -> None:
        """Teardown the collector.

        This method should be implemented to, for example, terminate a subprocess
        that was started when creating the collector instance.
        """
class BaseHandler(BaseCollector, BaseRenderer):
"""The base handler class.
Inherit from this class to implement a handler.
It's usually just a combination of a collector and a renderer, but you can make it as complex as you need.
Attributes:
domain: The cross-documentation domain/language for this handler.
enable_inventory: Whether this handler is interested in enabling the creation
of the `objects.inv` Sphinx inventory file.
fallback_config: The configuration used to collect item during autorefs fallback.
"""
domain: str = "default"
enable_inventory: bool = False
fallback_config: dict = {}
# TODO: once the BaseCollector and BaseRenderer classes are removed,
# stop accepting the 'handler' parameter, and instead set a 'name' attribute on the Handler class.
# Then make the 'handler' parameter in 'get_templates_dir' optional, and use the class 'name' by default.
def __init__(self, *args: str | BaseCollector | BaseRenderer, **kwargs: str | BaseCollector | BaseRenderer) -> None:
"""Initialize the object.
Arguments:
*args: Collector and renderer, or handler name, theme and custom_templates.
**kwargs: Same thing, but with keyword arguments.
Raises:
ValueError: When the givin parameters are invalid.
"""
# The method accepts *args and **kwargs temporarily,
# to support the transition period where the BaseCollector
# and BaseRenderer are deprecated, and the BaseHandler
# can be instantiated with both instances of collector/renderer,
# or renderer parameters, as positional parameters.
# Supported:
# handler = Handler(collector, renderer)
# handler = Handler(collector=collector, renderer=renderer)
# handler = Handler("python", "material")
# handler = Handler("python", "material", "templates")
# handler = Handler(handler="python", theme="material")
# handler = Handler(handler="python", theme="material", custom_templates="templates")
# Invalid:
# handler = Handler("python", "material", collector, renderer)
# handler = Handler("python", theme="material", collector=collector)
# handler = Handler(collector, renderer, "material")
# handler = Handler(collector, renderer, theme="material")
# handler = Handler(collector)
# handler = Handler(renderer)
# etc.
collector = None
renderer = None
# parsing positional arguments
str_args = []
for arg in args:
if isinstance(arg, BaseCollector):
collector = arg
elif isinstance(arg, BaseRenderer):
renderer = arg
elif isinstance(arg, str):
str_args.append(arg)
while len(str_args) != 3:
str_args.append(None) # type: ignore[arg-type]
handler, theme, custom_templates = str_args
# fetching values from keyword arguments
if "collector" in kwargs:
collector = kwargs.pop("collector") # type: ignore[assignment]
if "renderer" in kwargs:
renderer = kwargs.pop("renderer") # type: ignore[assignment]
if "handler" in kwargs:
handler = kwargs.pop("handler") # type: ignore[assignment]
if "theme" in kwargs:
theme = kwargs.pop("theme") # type: ignore[assignment]
if "custom_templates" in kwargs:
custom_templates = kwargs.pop("custom_templates") # type: ignore[assignment]
if collector is None and renderer is not None or collector is not None and renderer is None:
raise ValueError("both 'collector' and 'renderer' must be provided")
if collector is not None:
warnings.warn(
DeprecationWarning(
"The BaseCollector class is deprecated, and passing an instance of it "
"to your handler is deprecated as well. Instead, define the `collect` and `teardown` "
"methods directly on your handler class."
)
)
self.collector = collector
self.collect = collector.collect # type: ignore[assignment]
self.teardown = collector.teardown # type: ignore[assignment]
if renderer is not None:
if {handler, theme, custom_templates} != {None}:
raise ValueError(
"'handler', 'theme' and 'custom_templates' must all be None when providing a renderer instance"
)
warnings.warn(
DeprecationWarning(
"The BaseRenderer class is deprecated, and passing an instance of it "
"to your handler is deprecated as well. Instead, define the `render` method "
"directly on your handler class (as well as other methods and attributes like "
"`get_templates_dir`, `get_anchors`, `update_env` and `fallback_theme`, `extra_css`)."
)
)
self.renderer = renderer
self.render = renderer.render # type: ignore[assignment]
self.get_templates_dir = renderer.get_templates_dir # type: ignore[assignment]
self.get_anchors = renderer.get_anchors # type: ignore[assignment]
self.do_convert_markdown = renderer.do_convert_markdown # type: ignore[assignment]
self.do_heading = renderer.do_heading # type: ignore[assignment]
self.get_headings = renderer.get_headings # type: ignore[assignment]
self.update_env = renderer.update_env # type: ignore[assignment]
self._update_env = renderer._update_env # type: ignore[assignment] # noqa: WPS437
self.fallback_theme = renderer.fallback_theme
self.extra_css = renderer.extra_css
renderer.__class__.__init__( # noqa: WPS609
self,
renderer._handler, # noqa: WPS437
renderer._theme, # noqa: WPS437
renderer._custom_templates, # noqa: WPS437
)
else:
if handler is None or theme is None:
raise ValueError("'handler' and 'theme' cannot be None")
BaseRenderer.__init__(self, handler, theme, custom_templates) # noqa: WPS609
class Handlers:
"""A collection of handlers.
Do not instantiate this directly. [The plugin][mkdocstrings.plugin.MkdocstringsPlugin] will keep one instance of
this for the purpose of caching. Use [mkdocstrings.plugin.MkdocstringsPlugin.get_handler][] for convenient access.
"""
def __init__(self, config: dict) -> None:
"""Initialize the object.
Arguments:
config: Configuration options for `mkdocs` and `mkdocstrings`, read from `mkdocs.yml`. See the source code
of [mkdocstrings.plugin.MkdocstringsPlugin.on_config][] to see what's in this dictionary.
"""
self._config = config
self._handlers: Dict[str, BaseHandler] = {}
self.inventory: Inventory = Inventory(project=self._config["site_name"])
def get_anchors(self, identifier: str) -> Sequence[str]:
"""Return the canonical HTML anchor for the identifier, if any of the seen handlers can collect it.
Arguments:
identifier: The identifier (one that [collect][mkdocstrings.handlers.base.BaseCollector.collect] can accept).
Returns:
A tuple of strings - anchors without '#', or an empty tuple if there isn't any identifier familiar with it.
"""
for handler in self._handlers.values():
fallback_config = getattr(handler, "fallback_config", {})
try:
anchors = handler.get_anchors(handler.collect(identifier, fallback_config))
except CollectionError:
continue
if anchors:
return anchors
return ()
def get_handler_name(self, config: dict) -> str:
"""Return the handler name defined in an "autodoc" instruction YAML configuration, or the global default handler.
Arguments:
config: A configuration dictionary, obtained from YAML below the "autodoc" instruction.
Returns:
The name of the handler to use.
"""
global_config = self._config["mkdocstrings"]
if "handler" in config:
return config["handler"]
return global_config["default_handler"]
def get_handler_config(self, name: str) -> dict:
"""Return the global configuration of the given handler.
Arguments:
name: The name of the handler to get the global configuration of.
Returns:
The global configuration of the given handler. It can be an empty dictionary.
"""
handlers = self._config["mkdocstrings"].get("handlers", {})
if handlers:
return handlers.get(name, {})
return {}
def get_handler(self, name: str, handler_config: Optional[dict] = None) -> BaseHandler:
"""Get a handler thanks to its name.
This function dynamically imports a module named "mkdocstrings.handlers.NAME", calls its
`get_handler` method to get an instance of a handler, and caches it in dictionary.
It means that during one run (for each reload when serving, or once when building),
a handler is instantiated only once, and reused for each "autodoc" instruction asking for it.
Arguments:
name: The name of the handler. Really, it's the name of the Python module holding it.
handler_config: Configuration passed to the handler.
Returns:
An instance of a subclass of [`BaseHandler`][mkdocstrings.handlers.base.BaseHandler],
as instantiated by the `get_handler` method of the handler's module.
"""
if name not in self._handlers:
if handler_config is None:
handler_config = self.get_handler_config(name)
try:
module = importlib.import_module(f"mkdocstrings_handlers.{name}")
except ModuleNotFoundError:
module = importlib.import_module(f"mkdocstrings.handlers.{name}")
if name != "python":
warnings.warn(
DeprecationWarning(
"Using the mkdocstrings.handlers namespace is deprecated. "
"Handlers must now use the mkdocstrings_handlers namespace."
)
)
self._handlers[name] = module.get_handler(
theme=self._config["theme_name"],
custom_templates=self._config["mkdocstrings"]["custom_templates"],
config_file_path=self._config["config_file_path"],
**handler_config,
)
return self._handlers[name]
@property
def seen_handlers(self) -> Iterable[BaseHandler]:
"""Get the handlers that were encountered so far throughout the build.
Returns:
An iterable of instances of [`BaseHandler`][mkdocstrings.handlers.base.BaseHandler]
(usable only to loop through it).
"""
return self._handlers.values()
def teardown(self) -> None:
"""Teardown all cached handlers and clear the cache."""
for handler in self.seen_handlers:
handler.teardown()
self._handlers.clear()
| 42.538591 | 121 | 0.638149 |
24e60dd3e152b424fac9c28042bcbe8cb790f125 | 13,311 | py | Python | oscarapi/serializers/fields.py | jayvdb/django-oscar-api | c77c6130d3d1946265461c78278e414b098e69fa | [
"BSD-3-Clause"
] | 311 | 2015-01-30T14:58:01.000Z | 2022-03-27T16:57:38.000Z | oscarapi/serializers/fields.py | jayvdb/django-oscar-api | c77c6130d3d1946265461c78278e414b098e69fa | [
"BSD-3-Clause"
] | 211 | 2015-01-26T23:03:55.000Z | 2022-03-26T19:33:26.000Z | oscarapi/serializers/fields.py | jayvdb/django-oscar-api | c77c6130d3d1946265461c78278e414b098e69fa | [
"BSD-3-Clause"
] | 202 | 2015-01-07T09:20:08.000Z | 2022-03-26T15:21:04.000Z | import logging
import operator
from os.path import basename, join
from urllib.parse import urlsplit, parse_qs
from urllib.request import urlretrieve
from urllib.error import HTTPError
from django.conf import settings
from django.db import IntegrityError
from django.utils.translation import ugettext as _
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.core.files import File
from django.utils.functional import cached_property
from rest_framework import serializers, relations
from rest_framework.fields import get_attribute
from oscar.core.loading import get_model, get_class
from oscarapi.utils.loading import get_api_class
from oscarapi.utils.exists import bound_unique_together_get_or_create
from .exceptions import FieldError
logger = logging.getLogger(__name__)
ProductAttribute = get_model("catalogue", "ProductAttribute")
Category = get_model("catalogue", "Category")
create_from_breadcrumbs = get_class("catalogue.categories", "create_from_breadcrumbs")
entity_internal_value = get_api_class("serializers.hooks", "entity_internal_value")
attribute_details = operator.itemgetter("code", "value")
class TaxIncludedDecimalField(serializers.DecimalField):
def __init__(self, excl_tax_field=None, excl_tax_value=None, **kwargs):
self.excl_tax_field = excl_tax_field
self.excl_tax_value = excl_tax_value
super(TaxIncludedDecimalField, self).__init__(**kwargs)
def get_attribute(self, instance):
if instance.is_tax_known:
return super(TaxIncludedDecimalField, self).get_attribute(instance)
if self.excl_tax_field:
return get_attribute(instance, (self.excl_tax_field,))
return self.excl_tax_value
class DrillDownHyperlinkedMixin:
def __init__(self, *args, **kwargs):
try:
self.extra_url_kwargs = kwargs.pop("extra_url_kwargs")
except KeyError:
msg = "DrillDownHyperlink Fields require an 'extra_url_kwargs' argument"
raise ValueError(msg)
super().__init__(*args, **kwargs)
def get_extra_url_kwargs(self, obj):
return {
key: operator.attrgetter(path)(obj)
for key, path in self.extra_url_kwargs.items()
}
def get_url(
self, obj, view_name, request, format
): # pylint: disable=redefined-builtin
"""
Given an object, return the URL that hyperlinks to the object.
May raise a `NoReverseMatch` if the `view_name` and `lookup_field`
attributes are not configured to correctly match the URL conf.
"""
if hasattr(obj, "pk") and obj.pk in (None, ""):
return None
lookup_value = getattr(obj, self.lookup_field)
kwargs = {self.lookup_url_kwarg: lookup_value}
kwargs.update(self.get_extra_url_kwargs(obj))
return self.reverse(view_name, kwargs=kwargs, request=request, format=format)
class DrillDownHyperlinkedIdentityField(
DrillDownHyperlinkedMixin, relations.HyperlinkedIdentityField
):
pass
class DrillDownHyperlinkedRelatedField(
DrillDownHyperlinkedMixin, relations.HyperlinkedRelatedField
):
def use_pk_only_optimization(self):
# we always want the full object so the mixin can filter on the attributes
# specified with get_extra_url_kwargs
return False
class AttributeValueField(serializers.Field):
"""
This field is used to handle the value of the ProductAttributeValue model
Because the value is dependant on the type of the corresponding attribute,
it is not fixed. This field solves the problem of handling the different
types.
"""
def __init__(self, **kwargs):
# this field always needs the full object
kwargs["source"] = "*"
kwargs["error_messages"] = {
"no_such_option": _("{code}: Option {value} does not exist."),
"invalid": _("Wrong type, {error}."),
"attribute_validation_error": _(
"Error assigning `{value}` to {code}, {error}."
),
"attribute_required": _("Attribute {code} is required."),
"attribute_missing": _(
"No attribute exist with code={code}, "
"please define it in the product_class first."
),
"child_without_parent": _(
"Can not find attribute if product_class is empty and "
"parent is empty as well, child without parent?"
),
}
super(AttributeValueField, self).__init__(**kwargs)
def get_value(self, dictionary):
# return all the data because this field uses everything
return dictionary
def to_internal_value(self, data): # noqa
assert "product" in data or "product_class" in data or "parent" in data
try:
code, value = attribute_details(data)
internal_value = value
if "product" in data:
# we need the attribute to determine the type of the value
attribute = ProductAttribute.objects.get(
code=code, product_class__products__id=data["product"]
)
elif "product_class" in data and data["product_class"] is not None:
attribute = ProductAttribute.objects.get(
code=code, product_class__slug=data.get("product_class")
)
elif "parent" in data:
attribute = ProductAttribute.objects.get(
code=code, product_class__products__id=data["parent"]
)
if attribute.required and value is None:
self.fail("attribute_required", code=code)
# some of these attribute types need special processing, or their
# validation will fail
if attribute.type == attribute.OPTION:
internal_value = attribute.option_group.options.get(option=value)
elif attribute.type == attribute.MULTI_OPTION:
if attribute.required and not value:
self.fail("attribute_required", code=code)
internal_value = attribute.option_group.options.filter(option__in=value)
if len(value) != internal_value.count():
non_existing = set(value) - set(
internal_value.values_list("option", flat=True)
)
non_existing_as_error = ",".join(sorted(non_existing))
self.fail("no_such_option", value=non_existing_as_error, code=code)
elif attribute.type == attribute.DATE:
date_field = serializers.DateField()
internal_value = date_field.to_internal_value(value)
elif attribute.type == attribute.DATETIME:
date_field = serializers.DateTimeField()
internal_value = date_field.to_internal_value(value)
elif attribute.type == attribute.ENTITY:
internal_value = entity_internal_value(attribute, value)
# the rest of the attribute types don't need special processing
try:
attribute.validate_value(internal_value)
except TypeError as e:
self.fail(
"attribute_validation_error",
code=code,
value=internal_value,
error=e,
)
except ValidationError as e:
self.fail(
"attribute_validation_error",
code=code,
value=internal_value,
error=",".join(e.messages),
)
return {"value": internal_value, "attribute": attribute}
except ProductAttribute.DoesNotExist:
if (
"product_class" in data
and "parent" in data
and data["product_class"] is None
and data["parent"] is None
):
self.fail("child_without_parent")
else:
self.fail("attribute_missing", **data)
except ObjectDoesNotExist:
self.fail("no_such_option", value=value, code=code)
except KeyError as e:
(field_name,) = e.args
raise FieldError(
detail={field_name: self.error_messages["required"]}, code="required"
)
def to_representation(self, value):
obj_type = value.attribute.type
if obj_type == value.attribute.OPTION:
return value.value.option
elif obj_type == value.attribute.MULTI_OPTION:
return value.value.values_list("option", flat=True)
elif obj_type == value.attribute.FILE:
return value.value.url
elif obj_type == value.attribute.IMAGE:
return value.value.url
elif obj_type == value.attribute.ENTITY:
if hasattr(value.value, "json"):
return value.value.json()
else:
return _(
"%(entity)s has no json method, can not convert to json"
% {"entity": repr(value.value)}
)
# return the value as stored on ProductAttributeValue in the correct type
return value.value
class CategoryField(serializers.RelatedField):
def __init__(self, **kwargs):
kwargs["queryset"] = Category.objects
super(CategoryField, self).__init__(**kwargs)
def to_internal_value(self, data):
return create_from_breadcrumbs(data)
def to_representation(self, value):
return value.full_name
class SingleValueSlugRelatedField(serializers.SlugRelatedField):
"""
Represents a queryset as a list of slugs, and can be used to create new
items, as long as only the slug_field is required
"""
def get_bound_queryset(self):
parent = self.parent
source_name = parent.source
if hasattr(parent, "child_relation"):
parent = parent.parent
return getattr(parent.instance, source_name, None)
def to_internal_value(self, data):
qs = self.get_bound_queryset()
if qs is not None: # first try to obtain a bound item.
try:
return bound_unique_together_get_or_create(qs, {self.slug_field: data})
except IntegrityError:
pass
# if no bound item can be found, return an unbound unsaved instance.
qs = self.get_queryset()
return {self.slug_field: data}
class LazyRemoteFile(File):
"""
This file will defer downloading untill the file data is accessed.
It will also try to parsed a sha1 hash from the url, and store it as an
attribute, so the file_hash function will use it. You can use this feature
to avoid unnescessary downloading of files. Just compute the hash on the
client side and send it along in the url like this::
http://example.com/image.jpg?sha1=751499a82438277cb3cfb5db268bd41696739b3b
It will only download if not allready available locally.
"""
def __init__(self, url, name=None, mode="rb"):
parsed_url = urlsplit(url)
self.mode = mode
self.name = name
self.size = 1
self.url = url
# compute a hash if available
sha1_hash = next(iter(parse_qs(parsed_url.query).get("sha1", [])), None)
if sha1_hash:
self.sha1 = sha1_hash
def read(self, size=-1):
try:
return self.file.read(size)
except HTTPError as e:
raise serializers.ValidationError(
"Error when downloading image %s, %s: %s" % (self.url, e.code, e.reason)
)
@cached_property
def file(self):
local_filename, _ = urlretrieve(self.url, self.name)
return open(local_filename, self.mode)
def __str__(self):
return self.url or ""
def __bool__(self):
return bool(self.url)
def open(self, mode="rb"):
if not self.closed:
self.seek(0)
return self
class ImageUrlField(serializers.ImageField):
def __init__(self, **kwargs):
super(ImageUrlField, self).__init__(**kwargs)
self.use_url = True
def to_internal_value(self, data):
http_prefix = data.startswith(("http:", "https:"))
if http_prefix:
request = self.context.get("request", None)
if request: # if there is a request, we can get the hostname from that
parsed_url = urlsplit(data)
host = request.get_host()
if (
host != parsed_url.netloc
): # we are only downloading files from a foreign server
# download only when needed
return LazyRemoteFile(data, name=basename(parsed_url.path))
else:
location = parsed_url.path
path = join(
settings.MEDIA_ROOT, location.replace(settings.MEDIA_URL, "", 1)
)
file_object = File(open(path, "rb"))
return super(ImageUrlField, self).to_internal_value(file_object)
return super(ImageUrlField, self).to_internal_value(data)
| 37.708215 | 88 | 0.620389 |
9a9cd3b5fb0dbf1f64e5aa7e3f526f253509721d | 7,703 | py | Python | test/functional/wallet_hd.py | btc20/bitcoin2.0 | e7a54b00ebd0e8d20f4d5315cc9a21c77ded25cd | [
"MIT"
] | null | null | null | test/functional/wallet_hd.py | btc20/bitcoin2.0 | e7a54b00ebd0e8d20f4d5315cc9a21c77ded25cd | [
"MIT"
] | null | null | null | test/functional/wallet_hd.py | btc20/bitcoin2.0 | e7a54b00ebd0e8d20f4d5315cc9a21c77ded25cd | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2016-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Hierarchical Deterministic wallet function."""
import os
import shutil
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes_bi,
assert_raises_rpc_error
)
from test_framework.bitcoin2config import COINBASE_MATURITY
class WalletHDTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.extra_args = [[], ['-keypool=0']]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
# Make sure we use hd, keep masterkeyid
masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
assert_equal(len(masterkeyid), 40)
# create an internal key
change_addr = self.nodes[1].getrawchangeaddress()
change_addrV= self.nodes[1].getaddressinfo(change_addr)
assert_equal(change_addrV["hdkeypath"], "m/88'/1'/0'") #first internal child key
# Import a non-HD private key in the HD wallet
non_hd_add = self.nodes[0].getnewaddress()
self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))
# This should be enough to keep the master key and the non-HD key
self.nodes[1].backupwallet(os.path.join(self.nodes[1].datadir, "hd.bak"))
#self.nodes[1].dumpwallet(os.path.join(self.nodes[1].datadir, "hd.dump"))
# Derive some HD addresses and remember the last
# Also send funds to each add
self.nodes[0].generate(COINBASE_MATURITY+1)
hd_add = None
NUM_HD_ADDS = 10
for i in range(NUM_HD_ADDS):
hd_add = self.nodes[1].getnewaddress()
hd_info = self.nodes[1].getaddressinfo(hd_add)
assert_equal(hd_info["hdkeypath"], "m/88'/0'/"+str(i)+"'")
assert_equal(hd_info["hdseedid"], masterkeyid)
self.nodes[0].sendtoaddress(hd_add, 1)
self.nodes[0].generate(1)
self.nodes[0].sendtoaddress(non_hd_add, 1)
self.nodes[0].generate(1)
# create an internal key (again)
change_addr = self.nodes[1].getrawchangeaddress()
change_addrV= self.nodes[1].getaddressinfo(change_addr)
assert_equal(change_addrV["hdkeypath"], "m/88'/1'/1'") #second internal child key
self.sync_all()
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
self.log.info("Restore backup ...")
self.stop_node(1)
# we need to delete the complete regtest directory
# otherwise node1 would auto-recover all funds in flag the keypool keys as used
shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "blocks"))
shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "chainstate"))
shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join(self.nodes[1].datadir, "regtest", "wallets", "wallet.dat"))
self.start_node(1)
# Assert that derivation is deterministic
hd_add_2 = None
for i in range(NUM_HD_ADDS):
hd_add_2 = self.nodes[1].getnewaddress()
hd_info_2 = self.nodes[1].getaddressinfo(hd_add_2)
assert_equal(hd_info_2["hdkeypath"], "m/88'/0'/"+str(i)+"'")
assert_equal(hd_info_2["hdseedid"], masterkeyid)
assert_equal(hd_add, hd_add_2)
connect_nodes_bi(self.nodes, 0, 1)
self.sync_all()
# Needs rescan
self.stop_node(1)
self.start_node(1, extra_args=self.extra_args[1] + ['-rescan'])
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
# Try a RPC based rescan
self.stop_node(1)
shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "blocks"))
shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "chainstate"))
shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join(self.nodes[1].datadir, "regtest", "wallets", "wallet.dat"))
self.start_node(1, extra_args=self.extra_args[1])
connect_nodes_bi(self.nodes, 0, 1)
self.sync_all()
# Wallet automatically scans blocks older than key on startup
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
out = self.nodes[1].rescanblockchain(0, 1)
assert_equal(out['start_height'], 0)
assert_equal(out['stop_height'], 1)
out = self.nodes[1].rescanblockchain()
assert_equal(out['start_height'], 0)
assert_equal(out['stop_height'], self.nodes[1].getblockcount())
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
# send a tx and make sure its using the internal chain for the changeoutput
txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
outs = self.nodes[1].decoderawtransaction(self.nodes[1].gettransaction(txid)['hex'])['vout']
keypath = ""
for out in outs:
if out['value'] != 1:
keypath = self.nodes[1].getaddressinfo(out['scriptPubKey']['addresses'][0])['hdkeypath']
assert_equal(keypath[0:8], "m/88'/1'")
# Generate a new HD seed on node 1 and make sure it is set
orig_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
self.nodes[1].sethdseed()
new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
assert orig_masterkeyid != new_masterkeyid
addr = self.nodes[1].getnewaddress()
assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/88\'/0\'/0\'') # Make sure the new address is the first from the keypool
self.nodes[1].keypoolrefill(1) # Fill keypool with 1 key
# Set a new HD seed on node 1 without flushing the keypool
new_seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
orig_masterkeyid = new_masterkeyid
self.nodes[1].sethdseed(False, new_seed)
new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
assert orig_masterkeyid != new_masterkeyid
addr = self.nodes[1].getnewaddress()
assert_equal(orig_masterkeyid, self.nodes[1].getaddressinfo(addr)['hdseedid'])
assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/88\'/0\'/1\'') # Make sure the new address continues previous keypool
# Check that the next address is from the new seed
self.nodes[1].keypoolrefill(1)
next_addr = self.nodes[1].getnewaddress()
assert_equal(new_masterkeyid, self.nodes[1].getaddressinfo(next_addr)['hdseedid'])
assert_equal(self.nodes[1].getaddressinfo(next_addr)['hdkeypath'], 'm/88\'/0\'/0\'') # Make sure the new address is not from previous keypool
assert next_addr != addr
# Sethdseed parameter validity
assert_raises_rpc_error(-1, 'sethdseed', self.nodes[0].sethdseed, False, new_seed, 0)
assert_raises_rpc_error(-5, "Invalid private key", self.nodes[1].sethdseed, False, "not_wif")
assert_raises_rpc_error(-1, "JSON value is not a boolean as expected", self.nodes[1].sethdseed, "Not_bool")
assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[1].sethdseed, False, True)
assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, new_seed)
assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, self.nodes[1].dumpprivkey(self.nodes[1].getnewaddress()))
if __name__ == '__main__':
WalletHDTest().main ()
| 48.753165 | 150 | 0.662339 |
6d3904027a76f058eef7771848d0d5a8b4be4530 | 751 | py | Python | 20180729/Excel_02.py | bijitchakraborty12/MyProjects01 | 503af4cd6e8fa0576add7ac64393f1b4a16456c7 | [
"MIT"
] | null | null | null | 20180729/Excel_02.py | bijitchakraborty12/MyProjects01 | 503af4cd6e8fa0576add7ac64393f1b4a16456c7 | [
"MIT"
] | null | null | null | 20180729/Excel_02.py | bijitchakraborty12/MyProjects01 | 503af4cd6e8fa0576add7ac64393f1b4a16456c7 | [
"MIT"
] | null | null | null | # In your python terminal cd into the directory having the excel file.
import xlrd
book = xlrd.open_workbook("myBook_01.xlsx") # in my case the directory contains the excel file named excel.xls
# Now to print the number of worksheets in the excel file
print("The number of worksheets are ", book.nsheets)
# Now the names of the worksheets in excel file
print("The names of worksheets are", book.sheet_names()) # returns an array of names
# Choose a specific workbook to import data
sheet = book.sheet_by_index(0)
# viola you have it
# Now lets say in my excel sheet data starts with rows = 1 to 3 , and columns =0 to 2
# PS the first row are the titles
for j in range(0,4):
for i in range(1,4):
print("%r" %sheet.cell_value(i,j)) | 37.55 | 110 | 0.727031 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.