Dataset schema (one record per source file; ranges are min to max, "nullable" marks columns that may be empty):
hexsha: string (length 40)
size: int64 (2 to 1.02M)
ext: string (10 classes)
lang: string (1 class)
max_stars_repo_path: string (length 4 to 245)
max_stars_repo_name: string (length 6 to 130)
max_stars_repo_head_hexsha: string (length 40)
max_stars_repo_licenses: list (length 1 to 10)
max_stars_count: int64 (1 to 191k, nullable)
max_stars_repo_stars_event_min_datetime: string (length 24, nullable)
max_stars_repo_stars_event_max_datetime: string (length 24, nullable)
max_issues_repo_path: string (length 4 to 245)
max_issues_repo_name: string (length 6 to 130)
max_issues_repo_head_hexsha: string (length 40)
max_issues_repo_licenses: list (length 1 to 10)
max_issues_count: int64 (1 to 67k, nullable)
max_issues_repo_issues_event_min_datetime: string (length 24, nullable)
max_issues_repo_issues_event_max_datetime: string (length 24, nullable)
max_forks_repo_path: string (length 4 to 245)
max_forks_repo_name: string (length 6 to 130)
max_forks_repo_head_hexsha: string (length 40)
max_forks_repo_licenses: list (length 1 to 10)
max_forks_count: int64 (1 to 105k, nullable)
max_forks_repo_forks_event_min_datetime: string (length 24, nullable)
max_forks_repo_forks_event_max_datetime: string (length 24, nullable)
content: string (length 2 to 1.02M)
avg_line_length: float64 (1 to 417k)
max_line_length: int64 (1 to 987k)
alphanum_fraction: float64 (0 to 1)
content_no_comment: string (length 0 to 1.01M)
is_comment_constant_removed: bool (1 class)
is_sharp_comment_removed: bool (1 class)
The records below list these fields in this order, with the content and content_no_comment fields inlined as source code.
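A minimal sketch of how records with this schema could be loaded and inspected, assuming the dump corresponds to a Hugging Face `datasets`-style corpus (the dataset name below is a placeholder, not taken from this dump):

from datasets import load_dataset

# Stream the split so the megabyte-scale "content" fields are not materialized at once.
ds = load_dataset("example-org/python-source-files", split="train", streaming=True)

for record in ds:
    # max_stars_count is nullable; skip files whose repository never received a star.
    if record["max_stars_count"] is None:
        continue
    print(record["max_stars_repo_name"], record["max_stars_repo_path"], record["max_stars_count"])
    print("size:", record["size"], "alphanum_fraction:", record["alphanum_fraction"])
    break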
hexsha: 1c41ca0f80c0c8744f19e9559fad60259042fc7e | size: 1,095 | ext: py | lang: Python
max_stars: repo_path=turbo_transformers/python/turbo_transformers/utils.py | repo_name=chengduoZH/TurboTransformers | head_hexsha=a47bbf14c8ec2637375863ebc8d2c41a8b91a7ea | licenses=["BSD-3-Clause"] | count=1 | stars_event=2020-06-07T06:24:41.000Z to 2020-06-07T06:24:41.000Z
max_issues: repo_path=turbo_transformers/python/turbo_transformers/utils.py | repo_name=chengduoZH/TurboTransformers | head_hexsha=a47bbf14c8ec2637375863ebc8d2c41a8b91a7ea | licenses=["BSD-3-Clause"] | count=null | issues_event=null
max_forks: repo_path=turbo_transformers/python/turbo_transformers/utils.py | repo_name=chengduoZH/TurboTransformers | head_hexsha=a47bbf14c8ec2637375863ebc8d2c41a8b91a7ea | licenses=["BSD-3-Clause"] | count=1 | forks_event=2021-01-04T11:10:40.000Z to 2021-01-04T11:10:40.000Z
content:
# Copyright (C) 2020 THL A29 Limited, a Tencent company.
# All rights reserved.
# Licensed under the BSD 3-Clause License (the "License"); you may
# not use this file except in compliance with the License. You may
# obtain a copy of the License at
# https://opensource.org/licenses/BSD-3-Clause
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
# See the AUTHORS file for names of contributors.
try:
# `turbo_transformers_cxxd` is the name used in debug mode
import turbo_transformers.turbo_transformers_cxxd as cxx
except ImportError:
import turbo_transformers.turbo_transformers_cxx as cxx
import contextlib
__all__ = ['gperf_guard', 'set_num_threads']
set_num_threads = cxx.set_num_threads
@contextlib.contextmanager
def gperf_guard(filename: str):
cxx.enable_gperf(filename)
yield
cxx.disable_gperf()
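# Illustrative usage of gperf_guard (not part of the original module); run_model is a
# hypothetical workload. The guard simply calls cxx.enable_gperf(filename) before the
# block and cxx.disable_gperf() after it:
#     with gperf_guard("bert_inference.gperf"):
#         run_model()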
avg_line_length: 35.322581 | max_line_length: 69 | alphanum_fraction: 0.778995
content_no_comment:
try:
import turbo_transformers.turbo_transformers_cxxd as cxx
except ImportError:
import turbo_transformers.turbo_transformers_cxx as cxx
import contextlib
__all__ = ['gperf_guard', 'set_num_threads']
set_num_threads = cxx.set_num_threads
@contextlib.contextmanager
def gperf_guard(filename: str):
cxx.enable_gperf(filename)
yield
cxx.disable_gperf()
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c41ca6089625826646913378f1acdafab3f3824 | size: 4,499 | ext: py | lang: Python
max_stars: repo_path=huaweicloud-sdk-meeting/huaweicloudsdkmeeting/v1/model/validate_token_req_dto.py | repo_name=Adek06/huaweicloud-sdk-python-v3 | head_hexsha=3d13b27d089e04a1ae567cd649b3c5509e0391d2 | licenses=["Apache-2.0"] | count=null | stars_event=null
max_issues: repo_path=huaweicloud-sdk-meeting/huaweicloudsdkmeeting/v1/model/validate_token_req_dto.py | repo_name=Adek06/huaweicloud-sdk-python-v3 | head_hexsha=3d13b27d089e04a1ae567cd649b3c5509e0391d2 | licenses=["Apache-2.0"] | count=null | issues_event=null
max_forks: repo_path=huaweicloud-sdk-meeting/huaweicloudsdkmeeting/v1/model/validate_token_req_dto.py | repo_name=Adek06/huaweicloud-sdk-python-v3 | head_hexsha=3d13b27d089e04a1ae567cd649b3c5509e0391d2 | licenses=["Apache-2.0"] | count=null | forks_event=null
content:
# coding: utf-8
import pprint
import re
import six
class ValidateTokenReqDTO:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'token': 'str',
'need_gen_new_token': 'bool',
'need_account_info': 'bool'
}
attribute_map = {
'token': 'token',
'need_gen_new_token': 'needGenNewToken',
'need_account_info': 'needAccountInfo'
}
def __init__(self, token=None, need_gen_new_token=False, need_account_info=True):
"""ValidateTokenReqDTO - a model defined in huaweicloud sdk"""
self._token = None
self._need_gen_new_token = None
self._need_account_info = None
self.discriminator = None
self.token = token
self.need_gen_new_token = need_gen_new_token
if need_account_info is not None:
self.need_account_info = need_account_info
@property
def token(self):
"""Gets the token of this ValidateTokenReqDTO.
Token string of the account used for login.
:return: The token of this ValidateTokenReqDTO.
:rtype: str
"""
return self._token
@token.setter
def token(self, token):
"""Sets the token of this ValidateTokenReqDTO.
Token string of the account used for login.
:param token: The token of this ValidateTokenReqDTO.
:type: str
"""
self._token = token
@property
def need_gen_new_token(self):
"""Gets the need_gen_new_token of this ValidateTokenReqDTO.
Whether to generate a new token (parameter for internal use). true: generate a new token value. false: do not generate a new token value.
:return: The need_gen_new_token of this ValidateTokenReqDTO.
:rtype: bool
"""
return self._need_gen_new_token
@need_gen_new_token.setter
def need_gen_new_token(self, need_gen_new_token):
"""Sets the need_gen_new_token of this ValidateTokenReqDTO.
Whether to generate a new token (parameter for internal use). true: generate a new token value. false: do not generate a new token value.
:param need_gen_new_token: The need_gen_new_token of this ValidateTokenReqDTO.
:type: bool
"""
self._need_gen_new_token = need_gen_new_token
@property
def need_account_info(self):
"""Gets the need_account_info of this ValidateTokenReqDTO.
Whether the user-visible account information (account, user name, etc.) needs to be returned.
:return: The need_account_info of this ValidateTokenReqDTO.
:rtype: bool
"""
return self._need_account_info
@need_account_info.setter
def need_account_info(self, need_account_info):
"""Sets the need_account_info of this ValidateTokenReqDTO.
Whether the user-visible account information (account, user name, etc.) needs to be returned.
:param need_account_info: The need_account_info of this ValidateTokenReqDTO.
:type: bool
"""
self._need_account_info = need_account_info
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ValidateTokenReqDTO):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
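# Illustrative usage (not part of the generated SDK file):
#     req = ValidateTokenReqDTO(token="<token string>", need_gen_new_token=False)
#     req.to_dict()  # {'token': '<token string>', 'need_gen_new_token': False, 'need_account_info': True}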
avg_line_length: 27.601227 | max_line_length: 86 | alphanum_fraction: 0.592354
content_no_comment:
import pprint
import re
import six
class ValidateTokenReqDTO:
sensitive_list = []
openapi_types = {
'token': 'str',
'need_gen_new_token': 'bool',
'need_account_info': 'bool'
}
attribute_map = {
'token': 'token',
'need_gen_new_token': 'needGenNewToken',
'need_account_info': 'needAccountInfo'
}
def __init__(self, token=None, need_gen_new_token=False, need_account_info=True):
self._token = None
self._need_gen_new_token = None
self._need_account_info = None
self.discriminator = None
self.token = token
self.need_gen_new_token = need_gen_new_token
if need_account_info is not None:
self.need_account_info = need_account_info
@property
def token(self):
return self._token
@token.setter
def token(self, token):
self._token = token
@property
def need_gen_new_token(self):
return self._need_gen_new_token
@need_gen_new_token.setter
def need_gen_new_token(self, need_gen_new_token):
self._need_gen_new_token = need_gen_new_token
@property
def need_account_info(self):
return self._need_account_info
@need_account_info.setter
def need_account_info(self, need_account_info):
self._need_account_info = need_account_info
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, ValidateTokenReqDTO):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c41cb73d4ca00348ce7eb58476501f5896b8338 | size: 2,807 | ext: py | lang: Python
max_stars: repo_path=examples/search_as_you_type.py | repo_name=miff2000/elasticsearch-dsl-py | head_hexsha=4e1260f547e3a1803b2eac154db9d1f89688990c | licenses=["Apache-2.0"] | count=1 | stars_event=2021-04-14T08:05:53.000Z to 2021-04-14T08:05:53.000Z
max_issues: repo_path=examples/search_as_you_type.py | repo_name=miff2000/elasticsearch-dsl-py | head_hexsha=4e1260f547e3a1803b2eac154db9d1f89688990c | licenses=["Apache-2.0"] | count=2 | issues_event=2021-01-23T03:04:26.000Z to 2021-05-25T04:53:18.000Z
max_forks: repo_path=examples/search_as_you_type.py | repo_name=miff2000/elasticsearch-dsl-py | head_hexsha=4e1260f547e3a1803b2eac154db9d1f89688990c | licenses=["Apache-2.0"] | count=4 | forks_event=2021-01-22T21:49:57.000Z to 2021-11-23T12:00:20.000Z
content:
# -*- coding: utf-8 -*-
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example ``Document`` with search_as_you_type field datatype and how to search it.
When a field is created with the search_as_you_type datatype, Elasticsearch creates
additional subfields to enable efficient as-you-type completion, matching terms at any
position within the input.
A custom analyzer with ASCII folding allows the search to work across different languages.
"""
from __future__ import print_function, unicode_literals
from elasticsearch_dsl import (
Document,
SearchAsYouType,
analyzer,
connections,
token_filter,
)
from elasticsearch_dsl.query import MultiMatch
# custom analyzer for names
ascii_fold = analyzer(
"ascii_fold",
# we don't want to split O'Brian or Toulouse-Lautrec
tokenizer="whitespace",
filter=["lowercase", token_filter("ascii_fold", "asciifolding")],
)
class Person(Document):
name = SearchAsYouType(max_shingle_size=3)
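# With max_shingle_size=3, Elasticsearch also indexes the subfields name._2gram,
# name._3gram and name._index_prefix; the bool_prefix query in __main__ below searches
# name together with the _2gram and _3gram subfields.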
class Index:
name = "test-search-as-you-type"
settings = {"number_of_shards": 1, "number_of_replicas": 0}
if __name__ == "__main__":
# initiate the default connection to elasticsearch
connections.create_connection()
# create the empty index
Person.init()
import pprint
pprint.pprint(Person().to_dict(), indent=2)
# index some sample data
names = [
"Andy Warhol",
"Alphonse Mucha",
"Henri de Toulouse-Lautrec",
"Jára Cimrman",
]
for id, name in enumerate(names):
Person(_id=id, name=name).save()
# refresh index manually to make changes live
Person._index.refresh()
# run some suggestions
for text in ("já", "Cimr", "toulouse", "Henri Tou", "a"):
s = Person.search()
s.query = MultiMatch(
query=text,
type="bool_prefix",
fields=["name", "name._2gram", "name._3gram"],
)
response = s.execute()
# print out all the options we got
for h in response:
print("%15s: %25s" % (text, h.name))
avg_line_length: 29.547368 | max_line_length: 87 | alphanum_fraction: 0.686498
content_no_comment:
from __future__ import print_function, unicode_literals
from elasticsearch_dsl import (
Document,
SearchAsYouType,
analyzer,
connections,
token_filter,
)
from elasticsearch_dsl.query import MultiMatch
ascii_fold = analyzer(
"ascii_fold",
tokenizer="whitespace",
filter=["lowercase", token_filter("ascii_fold", "asciifolding")],
)
class Person(Document):
name = SearchAsYouType(max_shingle_size=3)
class Index:
name = "test-search-as-you-type"
settings = {"number_of_shards": 1, "number_of_replicas": 0}
if __name__ == "__main__":
connections.create_connection()
Person.init()
import pprint
pprint.pprint(Person().to_dict(), indent=2)
names = [
"Andy Warhol",
"Alphonse Mucha",
"Henri de Toulouse-Lautrec",
"Jára Cimrman",
]
for id, name in enumerate(names):
Person(_id=id, name=name).save()
Person._index.refresh()
for text in ("já", "Cimr", "toulouse", "Henri Tou", "a"):
s = Person.search()
s.query = MultiMatch(
query=text,
type="bool_prefix",
fields=["name", "name._2gram", "name._3gram"],
)
response = s.execute()
for h in response:
print("%15s: %25s" % (text, h.name))
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c41cbf7a1c5550f9ff29fc46ff03b3b658fbc32 | size: 4,461 | ext: py | lang: Python
max_stars: repo_path=hummingbird/ml/operator_converters/_one_hot_encoder_implementations.py | repo_name=rathijit/hummingbird | head_hexsha=b634a4e5152757af2ff7a41059b3e52c8140fc04 | licenses=["MIT"] | count=2,772 | stars_event=2020-05-04T21:03:40.000Z to 2022-03-30T11:00:03.000Z
max_issues: repo_path=hummingbird/ml/operator_converters/_one_hot_encoder_implementations.py | repo_name=hsaputra/hummingbird | head_hexsha=0ebd6be58880615bc86eab3648056682b40de614 | licenses=["MIT"] | count=486 | issues_event=2020-05-05T00:45:44.000Z to 2022-03-15T01:02:31.000Z
max_forks: repo_path=hummingbird/ml/operator_converters/_one_hot_encoder_implementations.py | repo_name=hsaputra/hummingbird | head_hexsha=0ebd6be58880615bc86eab3648056682b40de614 | licenses=["MIT"] | count=232 | forks_event=2019-11-02T22:06:38.000Z to 2022-03-25T07:36:17.000Z
content:
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
Base classes for one hot encoder implementations.
"""
import numpy as np
import torch
from ._physical_operator import PhysicalOperator
from . import constants
class OneHotEncoderString(PhysicalOperator, torch.nn.Module):
"""
Class implementing OneHotEncoder operators for strings in PyTorch.
Because we are dealing with tensors, strings require additional length information for processing.
"""
def __init__(self, logical_operator, categories, device, extra_config={}):
super(OneHotEncoderString, self).__init__(logical_operator, transformer=True)
self.num_columns = len(categories)
self.max_word_length = max([max([len(c) for c in cat]) for cat in categories])
# Strings are cast to int32, therefore we need to size the tensor so its byte length is divisible by 4.
while self.max_word_length % 4 != 0:
self.max_word_length += 1
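# For example, with max_word_length = 8 the category "cat" is padded to the 8-byte
# string b"cat\x00\x00\x00\x00\x00" and later viewed as 8 // 4 = 2 int32 values.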
max_length = 0
if constants.MAX_STRING_LENGTH in extra_config:
max_length = extra_config[constants.MAX_STRING_LENGTH]
extra_config[constants.MAX_STRING_LENGTH] = max(max_length, self.max_word_length)
# We build condition tensors as a 2d tensor of integers.
# The first dimension is of size num words, the second dimension is fixed to the max word length (// 4).
condition_tensors = []
categories_idx = [0]
for arr in categories:
cats = (
np.array(arr, dtype="|S" + str(self.max_word_length)) # Encode objects into 4 byte strings.
.view("int32")
.reshape(-1, self.max_word_length // 4)
.tolist()
)
# We merge all categories for all columns into a single tensor
condition_tensors.extend(cats)
# Since all categories are merged together, we need to keep track of indexes to retrieve them at inference time.
categories_idx.append(categories_idx[-1] + len(cats))
self.condition_tensors = torch.nn.Parameter(torch.IntTensor(condition_tensors), requires_grad=False)
self.categories_idx = categories_idx
def forward(self, x):
encoded_tensors = []
for i in range(self.num_columns):
# First we fetch the condition for the particular column.
conditions = self.condition_tensors[self.categories_idx[i] : self.categories_idx[i + 1], :].view(
1, -1, self.max_word_length // 4
)
# Unlike the numeric case, where eq is enough, here we need to aggregate per object (dim = 2)
# because objects can span multiple integers. We use a product since all ints must match to get an encoding of 1.
encoded_tensors.append(torch.prod(torch.eq(x[:, i : i + 1, :], conditions), dim=2))
return torch.cat(encoded_tensors, dim=1).float()
class OneHotEncoder(PhysicalOperator, torch.nn.Module):
"""
Class implementing OneHotEncoder operators for ints in PyTorch.
"""
def __init__(self, logical_operator, categories, device):
super(OneHotEncoder, self).__init__(logical_operator, transformer=True)
self.num_columns = len(categories)
condition_tensors = []
for arr in categories:
condition_tensors.append(torch.nn.Parameter(torch.LongTensor(arr), requires_grad=False))
self.condition_tensors = torch.nn.ParameterList(condition_tensors)
def forward(self, *x):
encoded_tensors = []
if len(x) > 1:
assert len(x) == self.num_columns
for i in range(self.num_columns):
input = x[i]
if input.dtype != torch.int64:
input = input.long()
encoded_tensors.append(torch.eq(input, self.condition_tensors[i]))
else:
# This is already a tensor.
x = x[0]
if x.dtype != torch.int64:
x = x.long()
for i in range(self.num_columns):
encoded_tensors.append(torch.eq(x[:, i : i + 1], self.condition_tensors[i]))
return torch.cat(encoded_tensors, dim=1).float()
avg_line_length: 40.926606 | max_line_length: 125 | alphanum_fraction: 0.623403
content_no_comment:
import numpy as np
import torch
from ._physical_operator import PhysicalOperator
from . import constants
class OneHotEncoderString(PhysicalOperator, torch.nn.Module):
def __init__(self, logical_operator, categories, device, extra_config={}):
super(OneHotEncoderString, self).__init__(logical_operator, transformer=True)
self.num_columns = len(categories)
self.max_word_length = max([max([len(c) for c in cat]) for cat in categories])
while self.max_word_length % 4 != 0:
self.max_word_length += 1
max_length = 0
if constants.MAX_STRING_LENGTH in extra_config:
max_length = extra_config[constants.MAX_STRING_LENGTH]
extra_config[constants.MAX_STRING_LENGTH] = max(max_length, self.max_word_length)
condition_tensors = []
categories_idx = [0]
for arr in categories:
cats = (
np.array(arr, dtype="|S" + str(self.max_word_length))
.view("int32")
.reshape(-1, self.max_word_length // 4)
.tolist()
)
condition_tensors.extend(cats)
categories_idx.append(categories_idx[-1] + len(cats))
self.condition_tensors = torch.nn.Parameter(torch.IntTensor(condition_tensors), requires_grad=False)
self.categories_idx = categories_idx
def forward(self, x):
encoded_tensors = []
for i in range(self.num_columns):
conditions = self.condition_tensors[self.categories_idx[i] : self.categories_idx[i + 1], :].view(
1, -1, self.max_word_length // 4
)
encoded_tensors.append(torch.prod(torch.eq(x[:, i : i + 1, :], conditions), dim=2))
return torch.cat(encoded_tensors, dim=1).float()
class OneHotEncoder(PhysicalOperator, torch.nn.Module):
def __init__(self, logical_operator, categories, device):
super(OneHotEncoder, self).__init__(logical_operator, transformer=True)
self.num_columns = len(categories)
condition_tensors = []
for arr in categories:
condition_tensors.append(torch.nn.Parameter(torch.LongTensor(arr), requires_grad=False))
self.condition_tensors = torch.nn.ParameterList(condition_tensors)
def forward(self, *x):
encoded_tensors = []
if len(x) > 1:
assert len(x) == self.num_columns
for i in range(self.num_columns):
input = x[i]
if input.dtype != torch.int64:
input = input.long()
encoded_tensors.append(torch.eq(input, self.condition_tensors[i]))
else:
x = x[0]
if x.dtype != torch.int64:
x = x.long()
for i in range(self.num_columns):
encoded_tensors.append(torch.eq(x[:, i : i + 1], self.condition_tensors[i]))
return torch.cat(encoded_tensors, dim=1).float()
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c41cc803343fff646d710a5045a6865f79cf7d8 | size: 5,722 | ext: py | lang: Python
max_stars: repo_path=vega/algorithms/nas/sr_ea/sr_mutate.py | repo_name=wnov/vega | head_hexsha=bf51cbe389d41033c4ae4bc02e5078c3c247c845 | licenses=["MIT"] | count=6 | stars_event=2020-11-13T15:44:47.000Z to 2021-12-02T08:14:06.000Z
max_issues: repo_path=vega/algorithms/nas/sr_ea/sr_mutate.py | repo_name=JacobLee121/vega | head_hexsha=19256aca4d047bfad3b461f0a927e1c2abb9eb03 | licenses=["MIT"] | count=null | issues_event=null
max_forks: repo_path=vega/algorithms/nas/sr_ea/sr_mutate.py | repo_name=JacobLee121/vega | head_hexsha=19256aca4d047bfad3b461f0a927e1c2abb9eb03 | licenses=["MIT"] | count=2 | forks_event=2021-06-25T09:42:32.000Z to 2021-08-06T18:00:09.000Z
content:
# -*- coding:utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
"""Mutate part of SR_EA algorithm."""
import logging
import random
from copy import deepcopy
from .conf import SRConfig
from zeus.common import ClassFactory, ClassType
from zeus.report import Report
from vega.core.search_algs import SearchAlgorithm
@ClassFactory.register(ClassType.SEARCH_ALGORITHM)
class SRMutate(SearchAlgorithm):
"""Search algorithm of the mutated structures."""
config = SRConfig()
def __init__(self, search_space=None, **kwargs):
"""Construct the class SRMutate.
:param search_space: Config of the search space
"""
super(SRMutate, self).__init__(search_space, **kwargs)
self.max_sample = self.config.policy.num_sample
self.num_mutate = self.config.policy.num_mutate
self.sample_count = 0
@property
def is_completed(self):
"""Tell whether the search process is completed.
:return: True if completed, or False otherwise
"""
return self.sample_count >= self.max_sample
def search(self):
"""Search one mutated model.
:return: current number of samples, and the model
"""
desc = deepcopy(self.search_space)
search_desc = desc.custom
# TODO: merge sr ea in one pipe step.
records = Report().get_pareto_front_records(['random', 'mutate'])
codes = []
for record in records:
codes.append(record.desc['custom']['code'])
code_to_mutate = random.choice(codes)
current_mutate, code_mutated = 0, code_to_mutate
num_candidates = len(search_desc["candidates"])
while current_mutate < self.num_mutate:
code_new = self.mutate_once(code_mutated, num_candidates)
if code_new != code_mutated:
current_mutate += 1
code_mutated = code_new
logging.info("Mutate from {} to {}".format(code_to_mutate, code_mutated))
search_desc['code'] = code_mutated
search_desc['method'] = "mutate"
search_desc = self.codec.decode(search_desc)
desc['custom'] = search_desc
self.sample_count += 1
return dict(worker_id=self.sample_count, desc=desc)
def mutate_once(self, code, num_largest):
"""Do one mutate.
:param code: original code
:param num_largest: number of candidates (largest number in code)
:return: the mutated code
"""
fun = random.choice([self.flip_once, self.insert_once, self.erase, self.swap_once])
return fun(code, num_largest)
@staticmethod
def flip_once(code, num_largest):
"""Flip one block.
:param code: original code
:param num_largest: number of candidates (largest number in code)
:return: the mutated code
"""
index_to_flip = random.choice([index for index in range(len(code)) if code[index] != '+'])
flip_choices = list(map(str, range(num_largest)))
flip_choices.remove(code[index_to_flip])
ch_flipped = random.choice(flip_choices)
return code[:index_to_flip] + ch_flipped + code[index_to_flip + 1:]
@staticmethod
def insert_once(code, num_largest):
"""Insert one block.
:param code: original code
:param num_largest: number of candidates (largest number in code)
:return: the mutated code
"""
ch_insert = random.choice(list(map(str, range(num_largest))))
place_insert = random.randint(0, len(code))
return code[:place_insert] + ch_insert + code[place_insert:]
@staticmethod
def erase(code, num_largest):
"""Erase one block.
:param code: original code
:param num_largest: number of candidates (largest number in code)
:return: the mutated code
"""
place_choices, index = list(), 0
while index < len(code):
if code[index] == '+':
index += 3
else:
place_choices.append(index)
index += 1
if len(place_choices) == 0:
return code
place_chosen = random.choice(place_choices)
return code[:place_chosen] + code[place_chosen + 1:]
@staticmethod
def swap_once(code, num_largest):
"""Swap two adjacent blocks.
:param code: original code
:param num_largest: number of candidates (largest number in code)
:return: the mutated code
"""
parts, index = list(), 0
while index < len(code):
if code[index] == '+':
parts.append(code[index: index + 3])
index += 3
else:
parts.append(code[index])
index += 1
if len(parts) < 2:
return code
valid_choices = [index for index in range(len(parts) - 2) if parts[index] != parts[index + 1]]
if len(valid_choices) == 0:
return code
place_chosen = random.choice(valid_choices)
parts[place_chosen], parts[place_chosen + 1] = parts[place_chosen + 1], parts[place_chosen]
return ''.join(parts)
def update(self, record):
"""Nothing need to update."""
pass
@property
def max_samples(self):
"""Get max samples number."""
return self.max_sample
avg_line_length: 35.104294 | max_line_length: 102 | alphanum_fraction: 0.623908
content_no_comment:
import logging
import random
from copy import deepcopy
from .conf import SRConfig
from zeus.common import ClassFactory, ClassType
from zeus.report import Report
from vega.core.search_algs import SearchAlgorithm
@ClassFactory.register(ClassType.SEARCH_ALGORITHM)
class SRMutate(SearchAlgorithm):
config = SRConfig()
def __init__(self, search_space=None, **kwargs):
super(SRMutate, self).__init__(search_space, **kwargs)
self.max_sample = self.config.policy.num_sample
self.num_mutate = self.config.policy.num_mutate
self.sample_count = 0
@property
def is_completed(self):
return self.sample_count >= self.max_sample
def search(self):
desc = deepcopy(self.search_space)
search_desc = desc.custom
records = Report().get_pareto_front_records(['random', 'mutate'])
codes = []
for record in records:
codes.append(record.desc['custom']['code'])
code_to_mutate = random.choice(codes)
current_mutate, code_mutated = 0, code_to_mutate
num_candidates = len(search_desc["candidates"])
while current_mutate < self.num_mutate:
code_new = self.mutate_once(code_mutated, num_candidates)
if code_new != code_mutated:
current_mutate += 1
code_mutated = code_new
logging.info("Mutate from {} to {}".format(code_to_mutate, code_mutated))
search_desc['code'] = code_mutated
search_desc['method'] = "mutate"
search_desc = self.codec.decode(search_desc)
desc['custom'] = search_desc
self.sample_count += 1
return dict(worker_id=self.sample_count, desc=desc)
def mutate_once(self, code, num_largest):
fun = random.choice([self.flip_once, self.insert_once, self.erase, self.swap_once])
return fun(code, num_largest)
@staticmethod
def flip_once(code, num_largest):
index_to_flip = random.choice([index for index in range(len(code)) if code[index] != '+'])
flip_choices = list(map(str, range(num_largest)))
flip_choices.remove(code[index_to_flip])
ch_flipped = random.choice(flip_choices)
return code[:index_to_flip] + ch_flipped + code[index_to_flip + 1:]
@staticmethod
def insert_once(code, num_largest):
ch_insert = random.choice(list(map(str, range(num_largest))))
place_insert = random.randint(0, len(code))
return code[:place_insert] + ch_insert + code[place_insert:]
@staticmethod
def erase(code, num_largest):
place_choices, index = list(), 0
while index < len(code):
if code[index] == '+':
index += 3
else:
place_choices.append(index)
index += 1
if len(place_choices) == 0:
return code
place_chosen = random.choice(place_choices)
return code[:place_chosen] + code[place_chosen + 1:]
@staticmethod
def swap_once(code, num_largest):
parts, index = list(), 0
while index < len(code):
if code[index] == '+':
parts.append(code[index: index + 3])
index += 3
else:
parts.append(code[index])
index += 1
if len(parts) < 2:
return code
valid_choices = [index for index in range(len(parts) - 2) if parts[index] != parts[index + 1]]
if len(valid_choices) == 0:
return code
place_chosen = random.choice(valid_choices)
parts[place_chosen], parts[place_chosen + 1] = parts[place_chosen + 1], parts[place_chosen]
return ''.join(parts)
def update(self, record):
pass
@property
def max_samples(self):
return self.max_sample
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c41ccbd1adbac5ceee2a27e87170b2ab18c39b9 | size: 2,342 | ext: py | lang: Python
max_stars: repo_path=sorting/selection_sorting_string_with_special_chars.py | repo_name=salemalem/algorithms | head_hexsha=f020e7b8f5a48c9a1962f7df041ea2820228ec86 | licenses=["MIT"] | count=1 | stars_event=2021-11-23T05:52:37.000Z to 2021-11-23T05:52:37.000Z
max_issues: repo_path=sorting/selection_sorting_string_with_special_chars.py | repo_name=salemalem/algorithms | head_hexsha=f020e7b8f5a48c9a1962f7df041ea2820228ec86 | licenses=["MIT"] | count=1 | issues_event=2021-11-25T02:19:47.000Z to 2021-11-29T10:09:22.000Z
max_forks: repo_path=sorting/selection_sorting_string_with_special_chars.py | repo_name=salemalem/algorithms | head_hexsha=f020e7b8f5a48c9a1962f7df041ea2820228ec86 | licenses=["MIT"] | count=null | forks_event=null
content:
from ast import literal_eval as make_tuple
from string import punctuation
# Selection Sorting string with special characters
def swapPositions(list_argument, pos1, pos2):
list_argument[pos1], list_argument[pos2] = list_argument[pos2], list_argument[pos1]
return list_argument
string_with_special_chars = "i@mksw)84*#~\'\"<1xb"
print("Original string:", string_with_special_chars)
alphabets = []
numbers = []
special_chars = []
for char in string_with_special_chars:
if char.isalpha():
alphabets.append(char)
elif char.isdigit():
numbers.append(int(char))
elif char in punctuation:
special_chars.append(char)
print("Unsorted alphabets:", alphabets)
print("Unsorted numbers:", numbers)
print("Unsorted special characters:", special_chars)
# 1. Sorting alphabets
for outer_iterator in range(len(alphabets)):
outer_element = alphabets[outer_iterator]
minimum_element = outer_element
for inner_element in alphabets[outer_iterator:]:
# inner_iterator_inner_index = inner_iterator0 + outer_iterator
if minimum_element > inner_element:
minimum_element = inner_element
minimum_element_index = alphabets.index(minimum_element)
swapPositions(alphabets, minimum_element_index, outer_iterator)
print("Sorted alphabets:", alphabets)
# 2. Sorting numbers
for outer_iterator in range(len(numbers)):
outer_element = numbers[outer_iterator]
minimum_element = outer_element
for inner_element in numbers[outer_iterator:]:
# inner_iterator_inner_index = inner_iterator0 + outer_iterator
if minimum_element > inner_element:
minimum_element = inner_element
minimum_element_index = numbers.index(minimum_element)
swapPositions(numbers, minimum_element_index, outer_iterator)
print("Sorted numbers:", numbers)
# 3. Sorting special characters
for outer_iterator in range(len(special_chars)):
outer_element = special_chars[outer_iterator]
minimum_element = outer_element
for inner_element in special_chars[outer_iterator:]:
# inner_iterator_inner_index = inner_iterator0 + outer_iterator
if minimum_element > inner_element:
minimum_element = inner_element
minimum_element_index = special_chars.index(minimum_element)
swapPositions(special_chars, minimum_element_index, outer_iterator)
print("Sorted special characters:", special_chars)
avg_line_length: 34.955224 | max_line_length: 87 | alphanum_fraction: 0.781383
content_no_comment:
from ast import literal_eval as make_tuple
from string import punctuation
def swapPositions(list_argument, pos1, pos2):
list_argument[pos1], list_argument[pos2] = list_argument[pos2], list_argument[pos1]
return list_argument
string_with_special_chars = "i@mksw)84*#~\'\"<1xb"
print("Original string:", string_with_special_chars)
alphabets = []
numbers = []
special_chars = []
for char in string_with_special_chars:
if char.isalpha():
alphabets.append(char)
elif char.isdigit():
numbers.append(int(char))
elif char in punctuation:
special_chars.append(char)
print("Unsorted alphabets:", alphabets)
print("Unsorted numbers:", numbers)
print("Unsorted special characters:", special_chars)
# 1. Sorting alphabets
for outer_iterator in range(len(alphabets)):
outer_element = alphabets[outer_iterator]
minimum_element = outer_element
for inner_element in alphabets[outer_iterator:]:
# inner_iterator_inner_index = inner_iterator0 + outer_iterator
if minimum_element > inner_element:
minimum_element = inner_element
minimum_element_index = alphabets.index(minimum_element)
swapPositions(alphabets, minimum_element_index, outer_iterator)
print("Sorted alphabets:", alphabets)
# 2. Sorting numbers
for outer_iterator in range(len(numbers)):
outer_element = numbers[outer_iterator]
minimum_element = outer_element
for inner_element in numbers[outer_iterator:]:
# inner_iterator_inner_index = inner_iterator0 + outer_iterator
if minimum_element > inner_element:
minimum_element = inner_element
minimum_element_index = numbers.index(minimum_element)
swapPositions(numbers, minimum_element_index, outer_iterator)
print("Sorted numbers:", numbers)
# 3. Sorting special characters
for outer_iterator in range(len(special_chars)):
outer_element = special_chars[outer_iterator]
minimum_element = outer_element
for inner_element in special_chars[outer_iterator:]:
# inner_iterator_inner_index = inner_iterator0 + outer_iterator
if minimum_element > inner_element:
minimum_element = inner_element
minimum_element_index = special_chars.index(minimum_element)
swapPositions(special_chars, minimum_element_index, outer_iterator)
print("Sorted special characters:", special_chars)
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c41ccc60fb8ccff50994d49dc3596bd07ff8214 | size: 4,008 | ext: py | lang: Python
max_stars: repo_path=utils.py | repo_name=PeaKend/pychain | head_hexsha=40b5f6ce572b64abd7d5a788045732306679dd02 | licenses=["MIT"] | count=1 | stars_event=2019-08-15T20:37:46.000Z to 2019-08-15T20:37:46.000Z
max_issues: repo_path=utils.py | repo_name=PeaKend/PyChain | head_hexsha=40b5f6ce572b64abd7d5a788045732306679dd02 | licenses=["MIT"] | count=null | issues_event=null
max_forks: repo_path=utils.py | repo_name=PeaKend/PyChain | head_hexsha=40b5f6ce572b64abd7d5a788045732306679dd02 | licenses=["MIT"] | count=null | forks_event=null
content:
#!/usr/bin/env python3
import hashlib
import codecs
import time
import random
import os
# Offsets of each block-header field within the hex-encoded header (offsets are in hex characters, i.e. two per byte):
# version (4 bytes), previous block hash (32), merkle root (32), timestamp (4), bits (4), nonce (4)
version_position = { "begin" : 0, "end" : 8 }
prev_block_position = { "begin" : 8, "end" : 72 }
merkle_root_position = { "begin" : 72, "end" : 136 }
timestamp_position = { "begin" : 136, "end" : 144 }
bits_position = { "begin" : 144, "end" : 152 }
nonce_position = { "begin" : 152, "end" : 160 }
def getHash(block):
block_bin = codecs.decode(block, 'hex')
block_hash = hashlib.sha256(hashlib.sha256(block_bin).digest()).digest()
block_hash = codecs.encode(block_hash[::-1], 'hex').decode('ascii')
return block_hash
def getLittleEndian(_hash):
_hash = codecs.decode(_hash, 'hex')
return str.encode(codecs.encode(_hash[::-1], 'hex').decode('ascii'))
def getVersion(block_name):
return open('data/' + block_name, 'rb').read()[version_position["begin"]:version_position["end"]]
def getPrevBlock(block_name):
return open('data/' + block_name, 'rb').read()[prev_block_position["begin"]:prev_block_position["end"]]
def getMerkleRoot(block_name):
return open('data/' + block_name, 'rb').read()[merkle_root_position["begin"]:merkle_root_position["end"]]
def getTimestamp(block_name):
return open('data/' + block_name, 'rb').read()[timestamp_position["begin"]:timestamp_position["end"]]
def getBits(block_name):
return open('data/' + block_name, 'rb').read()[bits_position["begin"]:bits_position["end"]]
def getNonce(block_name):
return open('data/' + block_name, 'rb').read()[nonce_position["begin"]:nonce_position["end"]]
def getBlockList():
for _file in os.listdir('data/'):
print(_file)
def getTx():
tx = open('tx', 'r').read()
return tx
def setVersion(version = b'01000000'):
return version
def getLastBlock():
last_block_time = 0
last_block_name = ''
for _file in os.listdir('data/'):
block = open('data/' + _file, 'rb').read()
block = codecs.encode(block, 'hex')
block_time = block[136:144]
if block_time != b'':
block_time = int(block_time, 16).to_bytes(4, 'little')
block_time = codecs.encode(block_time, 'hex')
block_time = int(block_time, 16)
if block_time > last_block_time:
last_block_time = block_time
last_block_name = _file
return last_block_name[0:64]
def setPrevBlock(block_name):
prev_block = open('data/' + block_name, 'rb').read()
prev_block = codecs.encode(prev_block, 'hex')
prev_block = getHash(prev_block)
prev_block = codecs.encode(prev_block, 'ascii')
return prev_block
def createNewTxFile():
return open('tx', 'w').write('')
def setMerkleRoot():
tx = open('tx', 'r').read()
tx = str.encode(tx)
tx_hash = hashlib.sha256(hashlib.sha256(tx).digest()).digest()
tx_hash = codecs.encode(tx_hash[::-1], 'hex').decode('ascii')
return str.encode(tx_hash)
def setTimestamp():
timestamp = int(time.time()).to_bytes(4, 'little')
timestamp = codecs.encode(timestamp, 'hex')
return timestamp
def setBits():
return b'FFFF001D'
def setNonce():
nonce = random.randint(0, 4294967295).to_bytes(4, 'little')
nonce = codecs.encode(nonce, 'hex')
return nonce
def createBlock(block_name):
open('data/' + block_name + '.bin', 'w').write('')
def writeToBlock(block_name, version, prev_block, merkle_root, timestamp, bits, nonce):
open('data/' + block_name + '.bin', 'ab').write(codecs.decode(version, 'hex'))
open('data/' + block_name + '.bin', 'ab').write(codecs.decode(prev_block, 'hex'))
open('data/' + block_name + '.bin', 'ab').write(codecs.decode(merkle_root, 'hex'))
open('data/' + block_name + '.bin', 'ab').write(codecs.decode(timestamp, 'hex'))
open('data/' + block_name + '.bin', 'ab').write(codecs.decode(bits, 'hex'))
open('data/' + block_name + '.bin', 'ab').write(codecs.decode(nonce, 'hex'))
def writeTx(block_name):
tx = open('tx', 'r').read()
tx = codecs.encode(str.encode(tx), 'hex')
open('data/' + block_name + '.bin', 'ab').write(codecs.decode(tx, 'hex'))
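# Illustrative end-to-end flow (not part of the original module); the block name and the
# call order are assumptions based on the function signatures above:
#     createBlock("new_block")
#     writeToBlock("new_block", setVersion(), setPrevBlock(getLastBlock() + ".bin"),
#                  setMerkleRoot(), setTimestamp(), setBits(), setNonce())
#     writeTx("new_block")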
avg_line_length: 34.25641 | max_line_length: 107 | alphanum_fraction: 0.669411
content_no_comment:
import hashlib
import codecs
import time
import random
import os
version_position = { "begin" : 0, "end" : 8 }
prev_block_position = { "begin" : 8, "end" : 72 }
merkle_root_position = { "begin" : 72, "end" : 136 }
timestamp_position = { "begin" : 136, "end" : 144 }
bits_position = { "begin" : 144, "end" : 152 }
nonce_position = { "begin" : 152, "end" : 160 }
def getHash(block):
block_bin = codecs.decode(block, 'hex')
block_hash = hashlib.sha256(hashlib.sha256(block_bin).digest()).digest()
block_hash = codecs.encode(block_hash[::-1], 'hex').decode('ascii')
return block_hash
def getLittleEndian(_hash):
_hash = codecs.decode(_hash, 'hex')
return str.encode(codecs.encode(_hash[::-1], 'hex').decode('ascii'))
def getVersion(block_name):
return open('data/' + block_name, 'rb').read()[version_position["begin"]:version_position["end"]]
def getPrevBlock(block_name):
return open('data/' + block_name, 'rb').read()[prev_block_position["begin"]:prev_block_position["end"]]
def getMerkleRoot(block_name):
return open('data/' + block_name, 'rb').read()[merkle_root_position["begin"]:merkle_root_position["end"]]
def getTimestamp(block_name):
return open('data/' + block_name, 'rb').read()[timestamp_position["begin"]:timestamp_position["end"]]
def getBits(block_name):
return open('data/' + block_name, 'rb').read()[bits_position["begin"]:bits_position["end"]]
def getNonce(block_name):
return open('data/' + block_name, 'rb').read()[nonce_position["begin"]:nonce_position["end"]]
def getBlockList():
for _file in os.listdir('data/'):
print(_file)
def getTx():
tx = open('tx', 'r').read()
return tx
def setVersion(version = b'01000000'):
return version
def getLastBlock():
last_block_time = 0
last_block_name = ''
for _file in os.listdir('data/'):
block = open('data/' + _file, 'rb').read()
block = codecs.encode(block, 'hex')
block_time = block[136:144]
if block_time != b'':
block_time = int(block_time, 16).to_bytes(4, 'little')
block_time = codecs.encode(block_time, 'hex')
block_time = int(block_time, 16)
if block_time > last_block_time:
last_block_time = block_time
last_block_name = _file
return last_block_name[0:64]
def setPrevBlock(block_name):
prev_block = open('data/' + block_name, 'rb').read()
prev_block = codecs.encode(prev_block, 'hex')
prev_block = getHash(prev_block)
prev_block = codecs.encode(prev_block, 'ascii')
return prev_block
def createNewTxFile():
return open('tx', 'w').write('')
def setMerkleRoot():
tx = open('tx', 'r').read()
tx = str.encode(tx)
tx_hash = hashlib.sha256(hashlib.sha256(tx).digest()).digest()
tx_hash = codecs.encode(tx_hash[::-1], 'hex').decode('ascii')
return str.encode(tx_hash)
def setTimestamp():
timestamp = int(time.time()).to_bytes(4, 'little')
timestamp = codecs.encode(timestamp, 'hex')
return timestamp
def setBits():
return b'FFFF001D'
def setNonce():
nonce = random.randint(0, 4294967295).to_bytes(4, 'little')
nonce = codecs.encode(nonce, 'hex')
return nonce
def createBlock(block_name):
open('data/' + block_name + '.bin', 'w').write('')
def writeToBlock(block_name, version, prev_block, merkle_root, timestamp, bits, nonce):
open('data/' + block_name + '.bin', 'ab').write(codecs.decode(version, 'hex'))
open('data/' + block_name + '.bin', 'ab').write(codecs.decode(prev_block, 'hex'))
open('data/' + block_name + '.bin', 'ab').write(codecs.decode(merkle_root, 'hex'))
open('data/' + block_name + '.bin', 'ab').write(codecs.decode(timestamp, 'hex'))
open('data/' + block_name + '.bin', 'ab').write(codecs.decode(bits, 'hex'))
open('data/' + block_name + '.bin', 'ab').write(codecs.decode(nonce, 'hex'))
def writeTx(block_name):
tx = open('tx', 'r').read()
tx = codecs.encode(str.encode(tx), 'hex')
open('data/' + block_name + '.bin', 'ab').write(codecs.decode(tx, 'hex'))
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c41cea3ae5095de385ae9afc5ffbbde420efcbb | size: 2,837 | ext: py | lang: Python
max_stars: repo_path=toontown/coghq/DistributedBattleFactoryAI.py | repo_name=AnonymousDeveloper65535/open-toontown | head_hexsha=3d05c22a7d960ad843dde231140447c46973dba5 | licenses=["BSD-3-Clause"] | count=1 | stars_event=2019-11-23T21:54:23.000Z to 2019-11-23T21:54:23.000Z
max_issues: repo_path=toontown/coghq/DistributedBattleFactoryAI.py | repo_name=AnonymousDeveloper65535/open-toontown | head_hexsha=3d05c22a7d960ad843dde231140447c46973dba5 | licenses=["BSD-3-Clause"] | count=null | issues_event=null
max_forks: repo_path=toontown/coghq/DistributedBattleFactoryAI.py | repo_name=AnonymousDeveloper65535/open-toontown | head_hexsha=3d05c22a7d960ad843dde231140447c46973dba5 | licenses=["BSD-3-Clause"] | count=null | forks_event=null
content:
from toontown.coghq import DistributedLevelBattleAI
from direct.directnotify import DirectNotifyGlobal
from direct.fsm import State
from direct.fsm import ClassicFSM, State
from toontown.battle.BattleBase import *
import CogDisguiseGlobals
from direct.showbase.PythonUtil import addListsByValue
class DistributedBattleFactoryAI(DistributedLevelBattleAI.DistributedLevelBattleAI):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedBattleFactoryAI')
def __init__(self, air, battleMgr, pos, suit, toonId, zoneId, level, battleCellId, roundCallback=None, finishCallback=None, maxSuits=4):
DistributedLevelBattleAI.DistributedLevelBattleAI.__init__(self, air, battleMgr, pos, suit, toonId, zoneId, level, battleCellId, 'FactoryReward', roundCallback, finishCallback, maxSuits)
self.battleCalc.setSkillCreditMultiplier(1)
if self.bossBattle:
self.level.d_setForemanConfronted(toonId)
self.fsm.addState(State.State('FactoryReward', self.enterFactoryReward, self.exitFactoryReward, [
'Resume']))
playMovieState = self.fsm.getStateNamed('PlayMovie')
playMovieState.addTransition('FactoryReward')
def getTaskZoneId(self):
return self.level.factoryId
def handleToonsWon(self, toons):
for toon in toons:
recovered, notRecovered = self.air.questManager.recoverItems(toon, self.suitsKilled, self.getTaskZoneId())
self.toonItems[toon.doId][0].extend(recovered)
self.toonItems[toon.doId][1].extend(notRecovered)
meritArray = self.air.promotionMgr.recoverMerits(toon, self.suitsKilled, self.getTaskZoneId(), getFactoryMeritMultiplier(self.getTaskZoneId()))
if toon.doId in self.helpfulToons:
self.toonMerits[toon.doId] = addListsByValue(self.toonMerits[toon.doId], meritArray)
else:
self.notify.debug('toon %d not helpful, skipping merits' % toon.doId)
if self.bossBattle:
self.toonParts[toon.doId] = self.air.cogSuitMgr.recoverPart(toon, self.level.factoryType, self.suitTrack, self.getTaskZoneId(), toons)
self.notify.debug('toonParts = %s' % self.toonParts)
def enterFactoryReward(self):
self.joinableFsm.request('Unjoinable')
self.runableFsm.request('Unrunable')
self.resetResponses()
self.assignRewards()
self.bossDefeated = 1
self.level.setVictors(self.activeToons[:])
self.timer.startCallback(BUILDING_REWARD_TIMEOUT, self.serverRewardDone)
return None
def exitFactoryReward(self):
return None
def enterResume(self):
DistributedLevelBattleAI.DistributedLevelBattleAI.enterResume(self)
if self.bossBattle and self.bossDefeated:
self.battleMgr.level.b_setDefeated()
avg_line_length: 50.660714 | max_line_length: 194 | alphanum_fraction: 0.721889
content_no_comment:
from toontown.coghq import DistributedLevelBattleAI
from direct.directnotify import DirectNotifyGlobal
from direct.fsm import State
from direct.fsm import ClassicFSM, State
from toontown.battle.BattleBase import *
import CogDisguiseGlobals
from direct.showbase.PythonUtil import addListsByValue
class DistributedBattleFactoryAI(DistributedLevelBattleAI.DistributedLevelBattleAI):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedBattleFactoryAI')
def __init__(self, air, battleMgr, pos, suit, toonId, zoneId, level, battleCellId, roundCallback=None, finishCallback=None, maxSuits=4):
DistributedLevelBattleAI.DistributedLevelBattleAI.__init__(self, air, battleMgr, pos, suit, toonId, zoneId, level, battleCellId, 'FactoryReward', roundCallback, finishCallback, maxSuits)
self.battleCalc.setSkillCreditMultiplier(1)
if self.bossBattle:
self.level.d_setForemanConfronted(toonId)
self.fsm.addState(State.State('FactoryReward', self.enterFactoryReward, self.exitFactoryReward, [
'Resume']))
playMovieState = self.fsm.getStateNamed('PlayMovie')
playMovieState.addTransition('FactoryReward')
def getTaskZoneId(self):
return self.level.factoryId
def handleToonsWon(self, toons):
for toon in toons:
recovered, notRecovered = self.air.questManager.recoverItems(toon, self.suitsKilled, self.getTaskZoneId())
self.toonItems[toon.doId][0].extend(recovered)
self.toonItems[toon.doId][1].extend(notRecovered)
meritArray = self.air.promotionMgr.recoverMerits(toon, self.suitsKilled, self.getTaskZoneId(), getFactoryMeritMultiplier(self.getTaskZoneId()))
if toon.doId in self.helpfulToons:
self.toonMerits[toon.doId] = addListsByValue(self.toonMerits[toon.doId], meritArray)
else:
self.notify.debug('toon %d not helpful, skipping merits' % toon.doId)
if self.bossBattle:
self.toonParts[toon.doId] = self.air.cogSuitMgr.recoverPart(toon, self.level.factoryType, self.suitTrack, self.getTaskZoneId(), toons)
self.notify.debug('toonParts = %s' % self.toonParts)
def enterFactoryReward(self):
self.joinableFsm.request('Unjoinable')
self.runableFsm.request('Unrunable')
self.resetResponses()
self.assignRewards()
self.bossDefeated = 1
self.level.setVictors(self.activeToons[:])
self.timer.startCallback(BUILDING_REWARD_TIMEOUT, self.serverRewardDone)
return None
def exitFactoryReward(self):
return None
def enterResume(self):
DistributedLevelBattleAI.DistributedLevelBattleAI.enterResume(self)
if self.bossBattle and self.bossDefeated:
self.battleMgr.level.b_setDefeated()
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c41cf0961a83e6d398240f56c98e5411cb9fca6 | size: 311 | ext: py | lang: Python
max_stars: repo_path=info.py | repo_name=morrisjh/Basic-Text-Analyzer | head_hexsha=db2cdd8f974c690397b789b154da7a9fbec10ff8 | licenses=["MIT"] | count=2 | stars_event=2019-07-02T13:12:04.000Z to 2019-07-10T18:39:18.000Z
max_issues: repo_path=info.py | repo_name=morrisjh/Basic-Text-Analyzer | head_hexsha=db2cdd8f974c690397b789b154da7a9fbec10ff8 | licenses=["MIT"] | count=null | issues_event=null
max_forks: repo_path=info.py | repo_name=morrisjh/Basic-Text-Analyzer | head_hexsha=db2cdd8f974c690397b789b154da7a9fbec10ff8 | licenses=["MIT"] | count=null | forks_event=null
content:
""" BasicTextAnalyzer Information """
__author__ = "James Morris"
__maintainer__ = "James Morris"
__email__ = "morrisjamesharry@gmail.com"
__license__ = "MIT"
__version__ = "0.0.1"
__credits__ = ["Tyler Barrus and Peter Norvig (for pyspellchecker"]
__url__ = "https://github.com/morrisjh/Basic-Text-Analyzer"
avg_line_length: 28.272727 | max_line_length: 67 | alphanum_fraction: 0.752412
content_no_comment:
__author__ = "James Morris"
__maintainer__ = "James Morris"
__email__ = "morrisjamesharry@gmail.com"
__license__ = "MIT"
__version__ = "0.0.1"
__credits__ = ["Tyler Barrus and Peter Norvig (for pyspellchecker"]
__url__ = "https://github.com/morrisjh/Basic-Text-Analyzer"
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c41cf4d92f49bda11f0c1320a11c8c39ba223ba | size: 3,500 | ext: py | lang: Python
max_stars: repo_path=lib/surface/compute/instances/get_screenshot.py | repo_name=google-cloud-sdk-unofficial/google-cloud-sdk | head_hexsha=2a48a04df14be46c8745050f98768e30474a1aac | licenses=["Apache-2.0"] | count=2 | stars_event=2019-11-10T09:17:07.000Z to 2019-12-18T13:44:08.000Z
max_issues: repo_path=lib/surface/compute/instances/get_screenshot.py | repo_name=google-cloud-sdk-unofficial/google-cloud-sdk | head_hexsha=2a48a04df14be46c8745050f98768e30474a1aac | licenses=["Apache-2.0"] | count=null | issues_event=null
max_forks: repo_path=lib/surface/compute/instances/get_screenshot.py | repo_name=google-cloud-sdk-unofficial/google-cloud-sdk | head_hexsha=2a48a04df14be46c8745050f98768e30474a1aac | licenses=["Apache-2.0"] | count=1 | forks_event=2020-07-25T01:40:19.000Z to 2020-07-25T01:40:19.000Z
content:
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for retrieving a screenshot from the designated instance."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import base64
import sys
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.compute.instances import flags
from googlecloudsdk.core import log
from googlecloudsdk.core.util import files
_DETAILED_HELP = {
'DESCRIPTION':
("Capture a screenshot (JPEG image) of the virtual machine instance's "
"display."),
'EXAMPLES':
"""\
To get a screenshot from an instance named ``{0}'', run:
$ {1} {0} --destination=output.jpg
""".format('test-instance', '{command}'),
}
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.GA)
class GetScreenshot(base.Command):
"""Capture a screenshot (JPEG image) of the virtual machine instance's display."""
detailed_help = _DETAILED_HELP
_display_output = False
@staticmethod
def Args(parser):
flags.INSTANCE_ARG.AddArgument(
parser, operation_type='get a screenshot from')
parser.add_argument(
'--destination',
help=('Filename, including the path, to save the screenshot '
'(JPEG image).'))
def _GetInstanceRef(self, holder, args):
return flags.INSTANCE_ARG.ResolveAsResource(
args,
holder.resources,
scope_lister=flags.GetInstanceZoneScopeLister(holder.client))
def _GetInstance(self, holder, instance_ref):
request = holder.client.messages.ComputeInstancesGetRequest(
**instance_ref.AsDict())
return holder.client.MakeRequests([(holder.client.apitools_client.instances,
'Get', request)])[0]
def Run(self, args):
holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
instance_ref = self._GetInstanceRef(holder, args)
request = holder.client.messages.ComputeInstancesGetScreenshotRequest(
**instance_ref.AsDict())
response = holder.client.MakeRequests([
(holder.client.apitools_client.instances, 'GetScreenshot', request)
])[0]
self._display_file_output = False
if args.IsSpecified('destination'):
with files.BinaryFileWriter(args.destination) as output:
output.write(base64.b64decode(response.contents))
self._resource_name = instance_ref.instance
self._destination = args.destination
self._display_file_output = True
else:
self._response_contents = response.contents
return
def Epilog(self, resources_were_displayed=False):
if self._display_file_output:
log.status.Print("Output screenshot for [{}] to '{}'.".format(
self._resource_name, self._destination))
else:
sys.stdout.buffer.write(base64.b64decode(self._response_contents))
avg_line_length: 35.353535 | max_line_length: 84 | alphanum_fraction: 0.715143
content_no_comment:
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import base64
import sys
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.compute.instances import flags
from googlecloudsdk.core import log
from googlecloudsdk.core.util import files
_DETAILED_HELP = {
'DESCRIPTION':
("Capture a screenshot (JPEG image) of the virtual machine instance's "
"display."),
'EXAMPLES':
"""\
To get a screenshot from an instance named ``{0}'', run:
$ {1} {0} --destination=output.jpg
""".format('test-instance', '{command}'),
}
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.GA)
class GetScreenshot(base.Command):
detailed_help = _DETAILED_HELP
_display_output = False
@staticmethod
def Args(parser):
flags.INSTANCE_ARG.AddArgument(
parser, operation_type='get a screenshot from')
parser.add_argument(
'--destination',
help=('Filename, including the path, to save the screenshot '
'(JPEG image).'))
def _GetInstanceRef(self, holder, args):
return flags.INSTANCE_ARG.ResolveAsResource(
args,
holder.resources,
scope_lister=flags.GetInstanceZoneScopeLister(holder.client))
def _GetInstance(self, holder, instance_ref):
request = holder.client.messages.ComputeInstancesGetRequest(
**instance_ref.AsDict())
return holder.client.MakeRequests([(holder.client.apitools_client.instances,
'Get', request)])[0]
def Run(self, args):
holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
instance_ref = self._GetInstanceRef(holder, args)
request = holder.client.messages.ComputeInstancesGetScreenshotRequest(
**instance_ref.AsDict())
response = holder.client.MakeRequests([
(holder.client.apitools_client.instances, 'GetScreenshot', request)
])[0]
self._display_file_output = False
if args.IsSpecified('destination'):
with files.BinaryFileWriter(args.destination) as output:
output.write(base64.b64decode(response.contents))
self._resource_name = instance_ref.instance
self._destination = args.destination
self._display_file_output = True
else:
self._response_contents = response.contents
return
def Epilog(self, resources_were_displayed=False):
if self._display_file_output:
log.status.Print("Output screenshot for [{}] to '{}'.".format(
self._resource_name, self._destination))
else:
sys.stdout.buffer.write(base64.b64decode(self._response_contents))
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c41cfbfc3890bce7a83910576e4bcfc92f254e8 | size: 2,564 | ext: py | lang: Python
max_stars: repo_path=utils/kitti.py | repo_name=Sliverk/hybridAveragePrecision | head_hexsha=e0417ef71e7419a770b3c106624b5f4336ff5a8d | licenses=["MIT"] | count=null | stars_event=null
max_issues: repo_path=utils/kitti.py | repo_name=Sliverk/hybridAveragePrecision | head_hexsha=e0417ef71e7419a770b3c106624b5f4336ff5a8d | licenses=["MIT"] | count=null | issues_event=null
max_forks: repo_path=utils/kitti.py | repo_name=Sliverk/hybridAveragePrecision | head_hexsha=e0417ef71e7419a770b3c106624b5f4336ff5a8d | licenses=["MIT"] | count=null | forks_event=null
content:
import pathlib
import re
import numpy as np
def get_image_index_str(img_idx):
return "{:06d}".format(img_idx)
def get_label_anno(label_path):
annotations = {}
annotations.update({
'name': [],
'truncated': [],
'occluded': [],
'alpha': [],
'bbox': [],
'dimensions': [],
'location': [],
'rotation_y': []
})
with open(label_path, 'r') as f:
lines = f.readlines()
# if len(lines) == 0 or len(lines[0]) < 15:
# content = []
# else:
content = [line.strip().split(' ') for line in lines]
annotations['name'] = np.array([x[0] for x in content])
annotations['truncated'] = np.array([float(x[1]) for x in content])
annotations['occluded'] = np.array([int(x[2]) for x in content])
annotations['alpha'] = np.array([float(x[3]) for x in content])
annotations['bbox'] = np.array(
[[float(info) for info in x[4:8]] for x in content]).reshape(-1, 4)
# dimensions will convert hwl format to standard lhw(camera) format.
annotations['dimensions'] = np.array(
[[float(info) for info in x[8:11]] for x in content]).reshape(
-1, 3)[:, [2, 0, 1]]
annotations['location'] = np.array(
[[float(info) for info in x[11:14]] for x in content]).reshape(-1, 3)
annotations['rotation_y'] = np.array(
[float(x[14]) for x in content]).reshape(-1)
if len(content) != 0 and len(content[0]) == 16: # have score
annotations['score'] = np.array([float(x[15]) for x in content])
else:
annotations['score'] = np.zeros([len(annotations['bbox'])])
return annotations
def get_label_annos(label_folder, image_ids=None):
if image_ids is None:
filepaths = pathlib.Path(label_folder).glob('*.txt')
prog = re.compile(r'^\d{6}.txt$')
filepaths = filter(lambda f: prog.match(f.name), filepaths)
image_ids = [int(p.stem) for p in filepaths]
image_ids = sorted(image_ids)
if not isinstance(image_ids, list):
image_ids = list(range(image_ids))
annos = []
label_folder = pathlib.Path(label_folder)
for idx in image_ids:
image_idx = get_image_index_str(idx)
label_filename = label_folder / (image_idx + '.txt')
annos.append(get_label_anno(label_filename))
return annos
def load_dt_annos(root_path, filelist):
dt_annos = get_label_annos(root_path, filelist)
return dt_annos
def load_gt_annos(root_path, filelist):
gt_annos = get_label_annos(root_path, filelist)
return gt_annos
| 30.52381
| 77
| 0.608814
|
import pathlib
import re
import numpy as np
def get_image_index_str(img_idx):
return "{:06d}".format(img_idx)
def get_label_anno(label_path):
annotations = {}
annotations.update({
'name': [],
'truncated': [],
'occluded': [],
'alpha': [],
'bbox': [],
'dimensions': [],
'location': [],
'rotation_y': []
})
with open(label_path, 'r') as f:
lines = f.readlines()
content = [line.strip().split(' ') for line in lines]
annotations['name'] = np.array([x[0] for x in content])
annotations['truncated'] = np.array([float(x[1]) for x in content])
annotations['occluded'] = np.array([int(x[2]) for x in content])
annotations['alpha'] = np.array([float(x[3]) for x in content])
annotations['bbox'] = np.array(
[[float(info) for info in x[4:8]] for x in content]).reshape(-1, 4)
annotations['dimensions'] = np.array(
[[float(info) for info in x[8:11]] for x in content]).reshape(
-1, 3)[:, [2, 0, 1]]
annotations['location'] = np.array(
[[float(info) for info in x[11:14]] for x in content]).reshape(-1, 3)
annotations['rotation_y'] = np.array(
[float(x[14]) for x in content]).reshape(-1)
if len(content) != 0 and len(content[0]) == 16:
annotations['score'] = np.array([float(x[15]) for x in content])
else:
annotations['score'] = np.zeros([len(annotations['bbox'])])
return annotations
def get_label_annos(label_folder, image_ids=None):
if image_ids is None:
filepaths = pathlib.Path(label_folder).glob('*.txt')
prog = re.compile(r'^\d{6}.txt$')
filepaths = filter(lambda f: prog.match(f.name), filepaths)
image_ids = [int(p.stem) for p in filepaths]
image_ids = sorted(image_ids)
if not isinstance(image_ids, list):
image_ids = list(range(image_ids))
annos = []
label_folder = pathlib.Path(label_folder)
for idx in image_ids:
image_idx = get_image_index_str(idx)
label_filename = label_folder / (image_idx + '.txt')
annos.append(get_label_anno(label_filename))
return annos
def load_dt_annos(root_path, filelist):
dt_annos = get_label_annos(root_path, filelist)
return dt_annos
def load_gt_annos(root_path, filelist):
gt_annos = get_label_annos(root_path, filelist)
return gt_annos
| true
| true
|
1c41d05846e91ffb115828352ba38c0ccc9074be
| 444
|
py
|
Python
|
backend/src/libs/strings.py
|
codeglitchz/attendance-system
|
c82a8d75375069b15e0b827608209bfacb67cde7
|
[
"MIT"
] | 37
|
2019-12-15T17:39:38.000Z
|
2022-03-13T08:16:09.000Z
|
backend/src/libs/strings.py
|
codeglitchz/attendance-system
|
c82a8d75375069b15e0b827608209bfacb67cde7
|
[
"MIT"
] | 16
|
2020-05-05T14:17:26.000Z
|
2022-03-02T09:09:38.000Z
|
backend/src/libs/strings.py
|
codeglitchz/attendance-system
|
c82a8d75375069b15e0b827608209bfacb67cde7
|
[
"MIT"
] | 18
|
2019-12-15T17:39:43.000Z
|
2022-01-22T10:42:41.000Z
|
"""
libs.strings
By default, uses the `en-us.json` file inside the `strings` top-level folder.
If language changes, set `libs.strings.default_locale` and run `libs.strings.refresh()`.
"""
import json
default_locale = "en-us"
cached_strings = {}
def refresh():
global cached_strings
with open(f"strings/{default_locale}.json") as f:
cached_strings = json.load(f)
def gettext(name):
return cached_strings[name]
refresh()
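# Example usage (hypothetical key and locale), assuming a matching
# strings/<locale>.json file exists:
#   import libs.strings as strings
#   strings.default_locale = "en-gb"
#   strings.refresh()
#   greeting = strings.gettext("greeting")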
| 17.76
| 88
| 0.702703
|
import json
default_locale = "en-us"
cached_strings = {}
def refresh():
global cached_strings
with open(f"strings/{default_locale}.json") as f:
cached_strings = json.load(f)
def gettext(name):
return cached_strings[name]
refresh()
| true
| true
|
1c41d094b5f8dec87722e4e7bd89446113ad06cd
| 2,412
|
py
|
Python
|
leaguescrape.py
|
jmcph4/leaguescrape
|
2ab51dc09875952671857e1942a0ef8e17627cb4
|
[
"MIT"
] | 2
|
2017-10-09T11:08:44.000Z
|
2019-07-23T14:48:23.000Z
|
leaguescrape.py
|
jmcph4/leaguescrape
|
2ab51dc09875952671857e1942a0ef8e17627cb4
|
[
"MIT"
] | null | null | null |
leaguescrape.py
|
jmcph4/leaguescrape
|
2ab51dc09875952671857e1942a0ef8e17627cb4
|
[
"MIT"
] | null | null | null |
from time import gmtime, strftime, sleep
from urllib.request import urlopen
from urllib.error import HTTPError
import json
import os
import errno
# files
TEAM_FILE_PATH = "team.txt"
API_KEY_PATH = "api_key.secret"
DATA_ROOT = "data/"
# API constants
API_KEY = open(API_KEY_PATH, "r").read()
API_REGION = "OC1" # oceania
API_ROOT = "https://oce.api.pvp.net/"
API_ENDPOINT_CURRENT_GAME = API_ROOT + "observer-mode/rest/consumer/getSpectatorGameInfo/" + API_REGION + "/{0}?api_key=" + API_KEY
API_THROTTLE_MAX_REQUESTS = 10 # at most 10 requests every
API_THROTTLE_WINDOW = 60 * 5 # 5 minutes (300 seconds)
API_BACKOFF = 60 * 60 # wait an hour if told to back off
def save(summoner_id):
url = API_ENDPOINT_CURRENT_GAME.format(summoner_id)
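    # Query the Riot spectator endpoint for this summoner's current game;
    # a 404 below simply means the summoner is not in a live game right now.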
try:
response = urlopen(url)
except HTTPError as e:
if e.code == 404: # nothing there
return False
elif e.code == 403: # banned
print("Banned.")
quit()
elif e.code == 429: # back off
print("Backing off for " + str(API_BACKOFF) + " seconds...")
sleep(API_BACKOFF)
raw_data = response.read().decode("utf-8")
print(str(raw_data))
json_data = json.loads(str(raw_data))
game_id = json_data["gameId"]
current_time = strftime("%Y-%m-%dT%H_%M_%S", gmtime())
save_location = DATA_ROOT + "{0}/{1}/{2}.json".format(summoner_id, game_id, current_time)
if not os.path.exists(os.path.dirname(save_location)):
try:
os.makedirs(os.path.dirname(save_location))
except OSError as e:
if e.errno != errno.EEXIST:
raise
with open(save_location, "w") as file:
file.write(raw_data)
return True
def listen(summoner_ids):
requests = 0
while 1:
for summoner_id in summoner_ids:
if(save(summoner_id)):
print(str(requests) + " Pulled " + summoner_id + ".")
else:
print(str(requests) + " None found for " + summoner_id + ".")
requests += 1
# rate-limiting compliance
if requests == API_THROTTLE_MAX_REQUESTS:
print("Waiting for " + str(API_THROTTLE_WINDOW) + " seconds...")
requests = 0 # reset request counter
sleep(API_THROTTLE_WINDOW) # wait
print("\n")
team = [line.rstrip('\n') for line in open(TEAM_FILE_PATH)]
# main loop
listen(team)
| 29.777778
| 131
| 0.613599
|
from time import gmtime, strftime, sleep
from urllib.request import urlopen
from urllib.error import HTTPError
import json
import os
import errno
TEAM_FILE_PATH = "team.txt"
API_KEY_PATH = "api_key.secret"
DATA_ROOT = "data/"
API_KEY = open(API_KEY_PATH, "r").read()
API_REGION = "OC1"
API_ROOT = "https://oce.api.pvp.net/"
API_ENDPOINT_CURRENT_GAME = API_ROOT + "observer-mode/rest/consumer/getSpectatorGameInfo/" + API_REGION + "/{0}?api_key=" + API_KEY
API_THROTTLE_MAX_REQUESTS = 10
API_THROTTLE_WINDOW = 60 * 5
API_BACKOFF = 60 * 60
def save(summoner_id):
url = API_ENDPOINT_CURRENT_GAME.format(summoner_id)
try:
response = urlopen(url)
except HTTPError as e:
if e.code == 404:
return False
elif e.code == 403:
print("Banned.")
quit()
elif e.code == 429:
print("Backing off for " + str(API_BACKOFF) + " seconds...")
sleep(API_BACKOFF)
raw_data = response.read().decode("utf-8")
print(str(raw_data))
json_data = json.loads(str(raw_data))
game_id = json_data["gameId"]
current_time = strftime("%Y-%m-%dT%H_%M_%S", gmtime())
save_location = DATA_ROOT + "{0}/{1}/{2}.json".format(summoner_id, game_id, current_time)
if not os.path.exists(os.path.dirname(save_location)):
try:
os.makedirs(os.path.dirname(save_location))
except OSError as e:
if e.errno != errno.EEXIST:
raise
with open(save_location, "w") as file:
file.write(raw_data)
return True
def listen(summoner_ids):
requests = 0
while 1:
for summoner_id in summoner_ids:
if(save(summoner_id)):
print(str(requests) + " Pulled " + summoner_id + ".")
else:
print(str(requests) + " None found for " + summoner_id + ".")
requests += 1
if requests == API_THROTTLE_MAX_REQUESTS:
print("Waiting for " + str(API_THROTTLE_WINDOW) + " seconds...")
requests = 0
sleep(API_THROTTLE_WINDOW)
print("\n")
team = [line.rstrip('\n') for line in open(TEAM_FILE_PATH)]
listen(team)
| true
| true
|
1c41d0ad27a59617bb4ea2d7df5ed911bd2225ec
| 64
|
py
|
Python
|
tupe/__init__.py
|
jaketae/tupe
|
82fab32fc9dc3030d3fc710cdc38827aedf04d4e
|
[
"MIT"
] | 9
|
2021-12-24T15:23:24.000Z
|
2022-01-27T16:34:11.000Z
|
tupe/__init__.py
|
jaketae/tupe
|
82fab32fc9dc3030d3fc710cdc38827aedf04d4e
|
[
"MIT"
] | null | null | null |
tupe/__init__.py
|
jaketae/tupe
|
82fab32fc9dc3030d3fc710cdc38827aedf04d4e
|
[
"MIT"
] | 1
|
2022-01-25T10:31:15.000Z
|
2022-01-25T10:31:15.000Z
|
from .config import TUPEConfig
from .encoder import TUPEEncoder
| 21.333333
| 32
| 0.84375
|
from .config import TUPEConfig
from .encoder import TUPEEncoder
| true
| true
|
1c41d191153218a74bf2d262f13cd2596d54c3b4
| 687
|
py
|
Python
|
tools/leetcode.144.Binary Tree Preorder Traversal/leetcode.144.Binary Tree Preorder Traversal.submission1.py
|
tedye/leetcode
|
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
|
[
"MIT"
] | 4
|
2015-10-10T00:30:55.000Z
|
2020-07-27T19:45:54.000Z
|
tools/leetcode.144.Binary Tree Preorder Traversal/leetcode.144.Binary Tree Preorder Traversal.submission1.py
|
tedye/leetcode
|
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
|
[
"MIT"
] | null | null | null |
tools/leetcode.144.Binary Tree Preorder Traversal/leetcode.144.Binary Tree Preorder Traversal.submission1.py
|
tedye/leetcode
|
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
|
[
"MIT"
] | null | null | null |
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
# @param {TreeNode} root
# @return {integer[]}
def preorderTraversal(self, root):
res = []
if not root:
return res
path = [root]
while path:
cur = path[-1]
if cur:
res.append(cur.val)
path.append(cur.left)
else:
path.pop(-1)
if path:
cur = path.pop(-1)
path.append(cur.right)
return res
| 687
| 687
| 0.436681
| true
| true
|
|
1c41d2134b8ca72250e8d0535d1f39976a4d09df
| 485
|
py
|
Python
|
Python (3)/Desafios/desafio10_B.py
|
Gafanhoto742/Python-3
|
b0a13ec4cf60185af3ed2508fc69188e36415b80
|
[
"MIT"
] | 1
|
2021-04-08T19:54:01.000Z
|
2021-04-08T19:54:01.000Z
|
Python (3)/Desafios/desafio10_B.py
|
Gafanhoto742/Python-3
|
b0a13ec4cf60185af3ed2508fc69188e36415b80
|
[
"MIT"
] | null | null | null |
Python (3)/Desafios/desafio10_B.py
|
Gafanhoto742/Python-3
|
b0a13ec4cf60185af3ed2508fc69188e36415b80
|
[
"MIT"
] | null | null | null |
'''Escreva um programa que leia a velocidade de um carro.
Se ele ultrapassar 80Km/h, mostre uma mensagem dizendo que ele foi multado.
A multa vai custar R$ 7,00 por cada Km acima do limite. '''
veloc = float(input('Qual a velocidade do carro que estava o seu carro? '))
multa = veloc - 80
vlmulta = multa * 7.00
if veloc > 80:
print('Você foi multado! E pagará R$ {:.2f}!'.format(vlmulta))
else:
print('Parabéns, você está andando dentro do limite da via.')
| 37.307692
| 75
| 0.703093
|
veloc = float(input('Qual a velocidade do carro que estava o seu carro? '))
multa = veloc - 80
vlmulta = multa * 7.00
if veloc > 80:
print('Você foi multado! E pagará R$ {:.2f}!'.format(vlmulta))
else:
print('Parabéns, você está andando dentro do limite da via.')
| true
| true
|
1c41d21ddcd691671a9af7ab86a72df77e0ab5f7
| 6,789
|
py
|
Python
|
model.py
|
uv10000/P4
|
e9e7f0c06dd9fb32e0febae016857b113eee747a
|
[
"MIT"
] | null | null | null |
model.py
|
uv10000/P4
|
e9e7f0c06dd9fb32e0febae016857b113eee747a
|
[
"MIT"
] | null | null | null |
model.py
|
uv10000/P4
|
e9e7f0c06dd9fb32e0febae016857b113eee747a
|
[
"MIT"
] | null | null | null |
import os
import csv
import cv2
import numpy as np
import keras
from scipy import ndimage
from random import shuffle
# read in udacity data from file
lines=[]
with open('../data_provided_by_udacity/driving_log.csv') as csvfile:
reader=csv.reader(csvfile)
i_have_seen_firstline=False
for line in reader:
if i_have_seen_firstline:
lines.append(line)
else:
i_have_seen_firstline = True
import sklearn
# split them into a training and a validation set
from sklearn.model_selection import train_test_split
train_samples, validation_samples = train_test_split(lines, test_size=0.2)
#define generator
def generator(samples, batch_size=32):
num_samples = len(samples)
while 1: # Loop forever so the generator never terminates
shuffle(samples)
for offset in range(0, num_samples, batch_size):
batch_samples = samples[offset:offset+batch_size]
images = []
angles = []
for batch_sample in batch_samples:
#name = './IMG/'+batch_sample[0].split('/')[-1]
current_path = '../data_provided_by_udacity/IMG/' + batch_sample[0].split('/')[-1]
current_left_path = '../data_provided_by_udacity/IMG/' + batch_sample[1].split('/')[-1]
current_right_path = '../data_provided_by_udacity/IMG/' + batch_sample[2].split('/')[-1]
#center_image = cv2.imread(current_path)
center_image = ndimage.imread(current_path)
left_image = ndimage.imread(current_left_path)
right_image = ndimage.imread(current_right_path)
#center_image = cv2.cvtColor(ndimage.imread(current_path), cv2.COLOR_RBG2YUV)
#left_image = cv2.cvtColor(ndimage.imread(current_left_path) , cv2.COLOR_RBG2YUV)
#right_image = cv2.cvtColor(ndimage.imread(current_right_path), cv2.COLOR_RBG2YUV)
center_angle = float(batch_sample[3])
correction = 0.003 # this is a parameter to tune 0.03 was not bad
left_angle = center_angle + correction
right_angle = center_angle - correction
#left_angle = center_angle *1.15
#ight_angle = center_angle - 1.15
#optionally use left and right cameras
use_all_cameras = True
if use_all_cameras:
images.extend([center_image, left_image,right_image])
angles.extend([center_angle,left_angle,right_angle])
else:
images.append(center_image)
                    angles.append(center_angle)
#optionally augment by flipping all images right curves <> left curves
augment_by_flipping=True
if augment_by_flipping:
augmented_images, augmented_angles = [],[]
for image,angle in zip(images, angles):
augmented_images.append(image)
augmented_angles.append(angle)
#augmented_images.append(cv2.flip(image,1))
augmented_images.append(np.fliplr(image))
augmented_angles.append(angle*-1.0)
else:
augmented_images, augmented_angles =images,angles
X_train = np.array(augmented_images)
y_train = np.array(augmented_angles)
yield sklearn.utils.shuffle(X_train, y_train)
from keras.models import Sequential
from keras.layers import Flatten,Dense,Lambda,Dense, Activation, Dropout
from keras.layers.convolutional import Conv2D, Cropping2D
from keras.layers.pooling import MaxPooling2D
import matplotlib.pyplot as plt
# compile and train the model using the generator function
my_batch_size= 16 #128
train_generator = generator(train_samples, batch_size=my_batch_size)
validation_generator = generator(validation_samples, batch_size=my_batch_size)
ch, row, col = 3, 160, 320 # Trimmed image format
# optionally perform dropout in some layers, see below
dropout_prob=0.0#0.8
model=Sequential()
#model.add(Lambda(lambda x: x/255.0 -0.5, input_shape=(160,320,3)))
#normalize data
model.add(Lambda(lambda x: x/127.5 - 1., #
input_shape=(row, col,ch))) #,
#output_shape=(row, col, ch)))
#optionally apply cropping
cropping= True
if cropping:
model.add(Cropping2D(cropping=((50,20), (0,0)), input_shape=(160,320,3)))
model.add(Dropout(dropout_prob))
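# The five convolutional layers below (24/36/48/64/64 filters) and the 100/50/10/1
# dense head loosely follow the NVIDIA end-to-end self-driving ("PilotNet") architecture.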
##### 1st convolutional layer:
model.add(Conv2D(24, kernel_size=(5, 5),
strides = (2,2),
activation='relu',
padding='valid'))
#model.add(MaxPooling2D(pool_size=(2, 2)))
#model.add(Dropout(dropout_prob))
##### 2nd convolutional layer:
model.add(Conv2D(36, kernel_size=(5, 5),
strides = (2,2),
activation='relu',
padding='valid'))
#model.add(MaxPooling2D(pool_size=(2, 2)))
#model.add(Dropout(dropout_prob))
##### 3rd convolutional layer:
model.add(Conv2D(48, kernel_size=(5, 5),
strides = (2,2),
activation='relu',
padding='valid'))
#model.add(MaxPooling2D(pool_size=(2, 2)))
#model.add(Dropout(dropout_prob))
##### 4th convolutional layer:
model.add(Conv2D(64, kernel_size=(3, 3),
strides = (1,1),
activation='relu',
padding='valid'))
#model.add(MaxPooling2D(pool_size=(2, 2)))
#model.add(Dropout(dropout_prob))
##### 5th convolutional layer:
model.add(Conv2D(64, kernel_size=(3, 3),
strides = (1,1),
activation='relu',
padding='valid'))
#model.add(MaxPooling2D(pool_size=(2, 2)))
#model.add(Dropout(dropout_prob))
model.add(Flatten())
model.add(Dense(100))
model.add(Activation('relu'))
model.add(Dropout(dropout_prob))
model.add(Dense(50))
model.add(Activation('relu'))
model.add(Dropout(dropout_prob))
model.add(Dense(10))
model.add(Activation('relu'))
#model.add(Dropout(dropout_prob))
model.add(Dense(1))
#model.summary()
model.compile(loss='mse',optimizer='adam')
history_object = model.fit_generator(train_generator, steps_per_epoch= len(train_samples)/my_batch_size,
epochs=4, verbose=1,
validation_data=validation_generator, validation_steps= len(validation_samples)/my_batch_size, use_multiprocessing=True
)
# save the model
model.save('model.h5')
#plot validation and training losses over time
plt.plot(history_object.history['loss'])
plt.plot(history_object.history['val_loss'])
plt.title('model mean squared error loss')
plt.ylabel('mean squared error loss')
plt.xlabel('epoch')
plt.legend(['training set', 'validation set'], loc='upper right')
plt.show()
##############
| 35.359375
| 131
| 0.643836
|
import os
import csv
import cv2
import numpy as np
import keras
from scipy import ndimage
from random import shuffle
lines=[]
with open('../data_provided_by_udacity/driving_log.csv') as csvfile:
reader=csv.reader(csvfile)
i_have_seen_firstline=False
for line in reader:
if i_have_seen_firstline:
lines.append(line)
else:
i_have_seen_firstline = True
import sklearn
from sklearn.model_selection import train_test_split
train_samples, validation_samples = train_test_split(lines, test_size=0.2)
def generator(samples, batch_size=32):
num_samples = len(samples)
while 1:
shuffle(samples)
for offset in range(0, num_samples, batch_size):
batch_samples = samples[offset:offset+batch_size]
images = []
angles = []
for batch_sample in batch_samples:
current_path = '../data_provided_by_udacity/IMG/' + batch_sample[0].split('/')[-1]
current_left_path = '../data_provided_by_udacity/IMG/' + batch_sample[1].split('/')[-1]
current_right_path = '../data_provided_by_udacity/IMG/' + batch_sample[2].split('/')[-1]
center_image = ndimage.imread(current_path)
left_image = ndimage.imread(current_left_path)
right_image = ndimage.imread(current_right_path)
center_angle = float(batch_sample[3])
correction = 0.003
left_angle = center_angle + correction
right_angle = center_angle - correction
use_all_cameras = True
if use_all_cameras:
images.extend([center_image, left_image,right_image])
angles.extend([center_angle,left_angle,right_angle])
else:
images.append(center_image)
                    angles.append(center_angle)
augment_by_flipping=True
if augment_by_flipping:
augmented_images, augmented_angles = [],[]
for image,angle in zip(images, angles):
augmented_images.append(image)
augmented_angles.append(angle)
augmented_images.append(np.fliplr(image))
augmented_angles.append(angle*-1.0)
else:
augmented_images, augmented_angles =images,angles
X_train = np.array(augmented_images)
y_train = np.array(augmented_angles)
yield sklearn.utils.shuffle(X_train, y_train)
from keras.models import Sequential
from keras.layers import Flatten,Dense,Lambda,Dense, Activation, Dropout
from keras.layers.convolutional import Conv2D, Cropping2D
from keras.layers.pooling import MaxPooling2D
import matplotlib.pyplot as plt
my_batch_size= 16
train_generator = generator(train_samples, batch_size=my_batch_size)
validation_generator = generator(validation_samples, batch_size=my_batch_size)
ch, row, col = 3, 160, 320
dropout_prob=0.0
model=Sequential()
model.add(Lambda(lambda x: x/127.5 - 1.,
input_shape=(row, col,ch)))
cropping= True
if cropping:
model.add(Cropping2D(cropping=((50,20), (0,0)), input_shape=(160,320,3)))
model.add(Dropout(dropout_prob))
model.add(Conv2D(24, kernel_size=(5, 5),
                strides = (2,2),
                activation='relu',
                padding='valid'))
model.add(Conv2D(36, kernel_size=(5, 5),
                strides = (2,2),
                activation='relu',
                padding='valid'))
model.add(Conv2D(48, kernel_size=(5, 5),
                strides = (2,2),
                activation='relu',
                padding='valid'))
model.add(Conv2D(64, kernel_size=(3, 3),
                strides = (1,1),
                activation='relu',
                padding='valid'))
model.add(Conv2D(64, kernel_size=(3, 3),
                strides = (1,1),
                activation='relu',
                padding='valid'))
model.add(Flatten())
model.add(Dense(100))
model.add(Activation('relu'))
model.add(Dropout(dropout_prob))
model.add(Dense(50))
model.add(Activation('relu'))
model.add(Dropout(dropout_prob))
model.add(Dense(10))
model.add(Activation('relu'))
model.add(Dense(1))
model.compile(loss='mse',optimizer='adam')
history_object = model.fit_generator(train_generator, steps_per_epoch= len(train_samples)/my_batch_size,
epochs=4, verbose=1,
validation_data=validation_generator, validation_steps= len(validation_samples)/my_batch_size, use_multiprocessing=True
)
model.save('model.h5')
plt.plot(history_object.history['loss'])
plt.plot(history_object.history['val_loss'])
plt.title('model mean squared error loss')
plt.ylabel('mean squared error loss')
plt.xlabel('epoch')
plt.legend(['training set', 'validation set'], loc='upper right')
plt.show()
| true
| true
|
1c41d2517e0de178948c7261948c9170386efc52
| 1,142
|
py
|
Python
|
order.py
|
amur-host/PyAmur
|
f3a794b6b702e2e9f601d3e9451109fcb4fb2ee9
|
[
"MIT"
] | null | null | null |
order.py
|
amur-host/PyAmur
|
f3a794b6b702e2e9f601d3e9451109fcb4fb2ee9
|
[
"MIT"
] | null | null | null |
order.py
|
amur-host/PyAmur
|
f3a794b6b702e2e9f601d3e9451109fcb4fb2ee9
|
[
"MIT"
] | null | null | null |
import pyamur
class Order(object):
def __init__(self, orderId, assetPair, address = ''):
self.orderId = orderId
self.assetPair = assetPair
self.address = address
self.matcher = pyamur.MATCHER
self.matcherPublicKey = pyamur.MATCHER_PUBLICKEY
self.status()
def __str__(self):
return 'status = %s\n' \
'id = %s\n' \
'%s\n' \
'sender.address = %s\n' \
'sender.publicKey = %s\n' \
'matcher = %s' % (self.status(), self.orderId, self.assetPair, self.address.address, self.address.publicKey, self.matcherPublicKey)
def status(self):
try:
req = pyamur.wrapper('/matcher/orderbook/%s/%s/%s' % ('AMUR' if self.assetPair.asset1.assetId=='' else self.assetPair.asset1.assetId, 'AMUR' if self.assetPair.asset2.assetId=='' else self.assetPair.asset2.assetId, self.orderId), host=self.matcher)
return req['status']
except:
pass
def cancel(self):
if self.address:
self.address.cancelOrder(self.assetPair, self)
__repr__ = __str__
| 35.6875
| 259
| 0.589317
|
import pyamur
class Order(object):
def __init__(self, orderId, assetPair, address = ''):
self.orderId = orderId
self.assetPair = assetPair
self.address = address
self.matcher = pyamur.MATCHER
self.matcherPublicKey = pyamur.MATCHER_PUBLICKEY
self.status()
def __str__(self):
return 'status = %s\n' \
'id = %s\n' \
'%s\n' \
'sender.address = %s\n' \
'sender.publicKey = %s\n' \
'matcher = %s' % (self.status(), self.orderId, self.assetPair, self.address.address, self.address.publicKey, self.matcherPublicKey)
def status(self):
try:
req = pyamur.wrapper('/matcher/orderbook/%s/%s/%s' % ('AMUR' if self.assetPair.asset1.assetId=='' else self.assetPair.asset1.assetId, 'AMUR' if self.assetPair.asset2.assetId=='' else self.assetPair.asset2.assetId, self.orderId), host=self.matcher)
return req['status']
except:
pass
def cancel(self):
if self.address:
self.address.cancelOrder(self.assetPair, self)
__repr__ = __str__
| true
| true
|
1c41d2bfd427c06ca8fd000b534717b1eaed08aa
| 126
|
py
|
Python
|
tests/__init__.py
|
kurniawano/flask_tdd_workshop
|
0f6a427ae627b20ed5b0d53349c61ab0f2cd97c4
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
kurniawano/flask_tdd_workshop
|
0f6a427ae627b20ed5b0d53349c61ab0f2cd97c4
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
kurniawano/flask_tdd_workshop
|
0f6a427ae627b20ed5b0d53349c61ab0f2cd97c4
|
[
"MIT"
] | null | null | null |
import pytest
from urlshortener.app import app
@pytest.fixture
def client():
client = app.test_client()
yield client
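    # app.test_client() yields Flask's built-in test client, so tests can make
    # requests (e.g. client.get('/')) without running a real server.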
| 18
| 32
| 0.738095
|
import pytest
from urlshortener.app import app
@pytest.fixture
def client():
client = app.test_client()
yield client
| true
| true
|
1c41d43cf2688fdef8a476da3c79fd0a994bde35
| 910
|
py
|
Python
|
backend/migrations/versions/7408c872d6c0_.py
|
KSodowska/codeforpoznan.pl_v3
|
ddb079db90eda869940f4467832ef96f70591055
|
[
"MIT"
] | 8
|
2019-08-12T22:10:48.000Z
|
2020-09-13T17:46:10.000Z
|
backend/migrations/versions/7408c872d6c0_.py
|
KSodowska/codeforpoznan.pl_v3
|
ddb079db90eda869940f4467832ef96f70591055
|
[
"MIT"
] | 326
|
2018-11-07T20:29:28.000Z
|
2022-02-10T08:53:42.000Z
|
backend/migrations/versions/7408c872d6c0_.py
|
KSodowska/codeforpoznan.pl_v3
|
ddb079db90eda869940f4467832ef96f70591055
|
[
"MIT"
] | 49
|
2018-11-14T17:50:27.000Z
|
2021-04-20T22:40:29.000Z
|
"""empty message
Revision ID: 7408c872d6c0
Revises: 4d81578a84d3
Create Date: 2019-05-15 15:31:00.388640
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "7408c872d6c0"
down_revision = "4d81578a84d3"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
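    # Association (join) table for the many-to-many relation between participants and hacknights.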
op.create_table(
"participant_hacknight",
sa.Column("participant_id", sa.Integer(), nullable=True),
sa.Column("hacknight_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(["hacknight_id"], ["hacknight.id"]),
sa.ForeignKeyConstraint(["participant_id"], ["participant.id"]),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("participant_hacknight")
# ### end Alembic commands ###
| 26
| 72
| 0.679121
|
from alembic import op
import sqlalchemy as sa
revision = "7408c872d6c0"
down_revision = "4d81578a84d3"
branch_labels = None
depends_on = None
def upgrade():
    op.create_table(
        "participant_hacknight",
        sa.Column("participant_id", sa.Integer(), nullable=True),
        sa.Column("hacknight_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(["hacknight_id"], ["hacknight.id"]),
        sa.ForeignKeyConstraint(["participant_id"], ["participant.id"]),
    )
def downgrade():
    op.drop_table("participant_hacknight")
| true
| true
|
1c41d452eff0e3892f4806b861e48081142b524a
| 636
|
py
|
Python
|
manage.py
|
ralphleyga/sleeknotes
|
b92c8dd12021eb0bfddcdf5dc046b8173930b68b
|
[
"MIT"
] | null | null | null |
manage.py
|
ralphleyga/sleeknotes
|
b92c8dd12021eb0bfddcdf5dc046b8173930b68b
|
[
"MIT"
] | 4
|
2021-03-19T02:32:55.000Z
|
2021-06-10T19:03:45.000Z
|
manage.py
|
ralphleyga/sleeknotes
|
b92c8dd12021eb0bfddcdf5dc046b8173930b68b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sleeknotes.settings.local')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 28.909091
| 80
| 0.685535
|
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sleeknotes.settings.local')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| true
| true
|
1c41d4603e3f669b0578a44d5ceb60e270cf5d23
| 3,287
|
py
|
Python
|
experiments/classification/scripts/save_best_checkpt.py
|
Daulbaev/IRDM
|
4bb60191ac0072e4349ca47092675d06b39a979a
|
[
"MIT"
] | 10
|
2020-12-06T13:04:33.000Z
|
2021-02-04T23:10:43.000Z
|
experiments/classification/scripts/save_best_checkpt.py
|
Daulbaev/IRDM
|
4bb60191ac0072e4349ca47092675d06b39a979a
|
[
"MIT"
] | null | null | null |
experiments/classification/scripts/save_best_checkpt.py
|
Daulbaev/IRDM
|
4bb60191ac0072e4349ca47092675d06b39a979a
|
[
"MIT"
] | null | null | null |
from glob import glob
import os
def get_normalizations(folder_stats):
### empty normalization key corresponds to "_bn1-BN_resblock-BNReLU_odeblock-LNReLU"
normalizations = []
anode_logs = folder_stats['anode_logs']
    for dirname in glob('{}*'.format(anode_logs), recursive=False):
normalizations.append('_' + dirname.strip('{}'.format(anode_logs)))
normalizations.append("")
return normalizations
def get_configs(folder_stats):
configs_h1 = folder_stats['configs_h1']
configs_h2 = folder_stats['configs_h2']
configs_z = folder_stats['configs_z']
configs_resnet34 = []
configs_resnet18 = []
configs_resnet10 = []
configs_resnet6 = []
configs_resnet4 = []
for el in (configs_h1 +\
configs_h2 +\
configs_z):
if 'resnet10' in el:
configs_resnet10.append(el)
elif 'resnet6' in el:
configs_resnet6.append(el)
elif 'resnet18' in el:
configs_resnet18.append(el)
elif 'resnet34' in el:
configs_resnet34.append(el)
elif 'resnet4' in el:
configs_resnet4.append(el)
return configs_resnet4, configs_resnet6, configs_resnet10, configs_resnet18, configs_resnet34
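# get_best_acc() below assumes the final log line starts with "Best" and holds two
# comma-separated "key: value" pairs: the best accuracy followed by the best epoch.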
def get_best_acc(log_filename):
with open(log_filename, 'r') as f:
lines = f.readlines()
best = lines[-1]
if best.startswith('Best'):
best_acc, best_epoch = [el.split(':')[-1].strip() for el in best.strip().split(',')]
best_acc = float(best_acc)
best_epoch = int(best_epoch)
return best_acc, best_epoch
else:
return None, None
if __name__=="__main__":
anode_logs = "/gpfs/gpfs0/y.gusak/anode_workshop_logs/classification"
inplanes = 64
n = 8
f = {'anode_logs' : anode_logs,
'configs_z' : [('resnet10', 'Euler', 8, '1e-1', 512),\
('resnet10', 'Euler', 2, '1e-1', 512),\
('resnet4', 'Euler', 8, '1e-1', 512),\
('resnet4', 'Euler', 16, '1e-1', 512),\
],
'configs_h1' : [],
'configs_h2' : [],
}
normalizations = get_normalizations(f)
configs_resnet4, configs_resnet6, configs_resnet10, configs_resnet18, configs_resnet34 = get_configs(f)
for normalization in normalizations:
if len(normalization) < 1:
continue
for network in ['resnet4', 'resnet10']:
for log_filename in glob('{}/{}/Euler_n{}_lr1e-1_bs512/*/logs'.format(anode_logs + normalization,\
"{}_inplanes{}".format(network, inplanes), n),\
recursive = True):
best_acc, best_epoch = get_best_acc(log_filename)
if best_acc is not None:
print(normalization, best_acc, best_epoch)
prefix = log_filename.replace('_logs/', '/').strip('logs')
#print(prefix)
best_checkpt = '{}checkpt_{}.pth'.format(prefix, best_epoch)
save_checkpt = '{}checkpt_best.pth'.format(log_filename.strip('logs'))
os.system("cp {} {}".format(best_checkpt, save_checkpt))
| 34.6
| 107
| 0.573471
|
from glob import glob
import os
def get_normalizations(folder_stats):
    normalizations = []
    anode_logs = folder_stats['anode_logs']
    for dirname in glob('{}*'.format(anode_logs), recursive=False):
        normalizations.append('_' + dirname.strip('{}'.format(anode_logs)))
normalizations.append("")
return normalizations
def get_configs(folder_stats):
configs_h1 = folder_stats['configs_h1']
configs_h2 = folder_stats['configs_h2']
configs_z = folder_stats['configs_z']
configs_resnet34 = []
configs_resnet18 = []
configs_resnet10 = []
configs_resnet6 = []
configs_resnet4 = []
for el in (configs_h1 +\
configs_h2 +\
configs_z):
if 'resnet10' in el:
configs_resnet10.append(el)
elif 'resnet6' in el:
configs_resnet6.append(el)
elif 'resnet18' in el:
configs_resnet18.append(el)
elif 'resnet34' in el:
configs_resnet34.append(el)
elif 'resnet4' in el:
configs_resnet4.append(el)
return configs_resnet4, configs_resnet6, configs_resnet10, configs_resnet18, configs_resnet34
def get_best_acc(log_filename):
with open(log_filename, 'r') as f:
lines = f.readlines()
best = lines[-1]
if best.startswith('Best'):
best_acc, best_epoch = [el.split(':')[-1].strip() for el in best.strip().split(',')]
best_acc = float(best_acc)
best_epoch = int(best_epoch)
return best_acc, best_epoch
else:
return None, None
if __name__=="__main__":
anode_logs = "/gpfs/gpfs0/y.gusak/anode_workshop_logs/classification"
inplanes = 64
n = 8
f = {'anode_logs' : anode_logs,
'configs_z' : [('resnet10', 'Euler', 8, '1e-1', 512),\
('resnet10', 'Euler', 2, '1e-1', 512),\
('resnet4', 'Euler', 8, '1e-1', 512),\
('resnet4', 'Euler', 16, '1e-1', 512),\
],
'configs_h1' : [],
'configs_h2' : [],
}
normalizations = get_normalizations(f)
configs_resnet4, configs_resnet6, configs_resnet10, configs_resnet18, configs_resnet34 = get_configs(f)
for normalization in normalizations:
if len(normalization) < 1:
continue
for network in ['resnet4', 'resnet10']:
for log_filename in glob('{}/{}/Euler_n{}_lr1e-1_bs512/*/logs'.format(anode_logs + normalization,\
"{}_inplanes{}".format(network, inplanes), n),\
recursive = True):
best_acc, best_epoch = get_best_acc(log_filename)
if best_acc is not None:
print(normalization, best_acc, best_epoch)
prefix = log_filename.replace('_logs/', '/').strip('logs')
best_checkpt = '{}checkpt_{}.pth'.format(prefix, best_epoch)
save_checkpt = '{}checkpt_best.pth'.format(log_filename.strip('logs'))
os.system("cp {} {}".format(best_checkpt, save_checkpt))
| true
| true
|
1c41d46dae24e3af5650544cb44a4a7e917c362f
| 19,594
|
py
|
Python
|
adanet/distributed/placement_test.py
|
eustomaqua/adanet
|
9c1de82428a4e661768af8e764041afebfec2e6f
|
[
"Apache-2.0"
] | 1
|
2020-11-08T17:03:52.000Z
|
2020-11-08T17:03:52.000Z
|
adanet/distributed/placement_test.py
|
eustomaqua/adanet
|
9c1de82428a4e661768af8e764041afebfec2e6f
|
[
"Apache-2.0"
] | null | null | null |
adanet/distributed/placement_test.py
|
eustomaqua/adanet
|
9c1de82428a4e661768af8e764041afebfec2e6f
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 The AdaNet Authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Distributed placement strategy tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
from absl.testing import parameterized
from adanet.distributed.placement import ReplicationStrategy
from adanet.distributed.placement import RoundRobinStrategy
import tensorflow as tf
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python.eager import context
from tensorflow.python.framework import test_util
# pylint: enable=g-direct-tensorflow-import
class ReplicationStrategyTest(tf.test.TestCase):
@test_util.run_in_graph_and_eager_modes
def test_strategy(self):
strategy = ReplicationStrategy()
num_subnetworks = 3
subnetwork_index = 1
self.assertTrue(strategy.should_build_ensemble(num_subnetworks))
self.assertTrue(
strategy.should_build_subnetwork(num_subnetworks, subnetwork_index))
self.assertTrue(strategy.should_train_subnetworks(num_subnetworks))
class WorkerConfig(object):
def __init__(self, num_worker_replicas, global_id_in_cluster):
self.num_worker_replicas = num_worker_replicas
self.global_id_in_cluster = global_id_in_cluster
class ParameterServerConfig(object):
def __init__(self, num_ps_replicas):
self.num_ps_replicas = num_ps_replicas
def _testcase_name(name, drop_remainder):
return "{}{}".format(name, "_drop_remainder" if drop_remainder else "")
class RoundRobinStrategyTest(parameterized.TestCase, tf.test.TestCase):
# pylint: disable=g-complex-comprehension
@parameterized.named_parameters(
itertools.chain(*[[
{
"testcase_name":
_testcase_name("one_worker_one_subnetwork", drop_remainder),
"num_workers":
1,
"num_subnetworks":
1,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble": [True],
"want_should_build_subnetwork": [[True]],
"want_should_train_subnetworks": [True],
},
{
"testcase_name":
_testcase_name("three_workers_one_subnetworks", drop_remainder
),
"num_workers":
3,
"num_subnetworks":
1,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble": [True, True, True],
"want_should_build_subnetwork": [[True], [True], [True]],
"want_should_train_subnetworks": [True, True, True],
},
{
"testcase_name":
_testcase_name("two_workers_one_subnetworks", drop_remainder),
"num_workers":
2,
"num_subnetworks":
5,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble": [True, False],
"want_should_build_subnetwork": [[True, True, True, True, True],
[
True,
not drop_remainder,
not drop_remainder,
not drop_remainder,
not drop_remainder,
]],
"want_should_train_subnetworks": [False, True],
},
{
"testcase_name":
_testcase_name("one_worker_three_subnetworks", drop_remainder
),
"num_workers":
1,
"num_subnetworks":
3,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble": [True],
"want_should_build_subnetwork": [[True, True, True]],
"want_should_train_subnetworks": [True],
},
{
"testcase_name":
_testcase_name("two_workers_three_subnetworks", drop_remainder
),
"num_workers":
2,
"num_subnetworks":
3,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble": [True, False],
"want_should_build_subnetwork": [
[True, True, True],
[True, not drop_remainder, not drop_remainder],
],
"want_should_train_subnetworks": [False, True],
},
{
"testcase_name":
_testcase_name("three_workers_three_subnetworks",
drop_remainder),
"num_workers":
3,
"num_subnetworks":
3,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble": [True, False, False],
"want_should_build_subnetwork": [
[True, True, True],
[True, False, not drop_remainder],
[False, True, False],
],
"want_should_train_subnetworks": [False, True, True],
},
{
"testcase_name":
_testcase_name("four_workers_three_subnetworks",
drop_remainder),
"num_workers":
4,
"num_subnetworks":
3,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble": [True, False, False, False],
"want_should_build_subnetwork": [
[True, True, True],
[True, False, False],
[False, True, False],
[False, False, True],
],
"want_should_train_subnetworks": [False, True, True, True],
},
{
"testcase_name":
_testcase_name("five_workers_three_subnetworks",
drop_remainder),
"num_workers":
5,
"num_subnetworks":
3,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble": [True, False, False, False, True],
"want_should_build_subnetwork": [
[True, True, True],
[True, False, False],
[False, True, False],
[False, False, True],
[True, True, True],
],
"want_should_train_subnetworks": [False, True, True, True, False],
},
{
"testcase_name":
_testcase_name("six_workers_three_subnetworks", drop_remainder
),
"num_workers":
6,
"num_subnetworks":
3,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble":
[True, False, False, False, True, False],
"want_should_build_subnetwork": [
[True, True, True],
[True, False, False],
[False, True, False],
[False, False, True],
[True, True, True],
[True, not drop_remainder, not drop_remainder],
],
"want_should_train_subnetworks":
[False, True, True, True, False, True],
},
{
"testcase_name":
_testcase_name("seven_workers_three_subnetworks",
drop_remainder),
"num_workers":
7,
"num_subnetworks":
3,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble":
[True, False, False, False, True, False, False],
"want_should_build_subnetwork": [
[True, True, True],
[True, False, False],
[False, True, False],
[False, False, True],
[True, True, True],
[True, False, not drop_remainder],
[False, True, False],
],
"want_should_train_subnetworks":
[False, True, True, True, False, True, True],
},
{
"testcase_name":
_testcase_name("eight_workers_three_subnetworks",
drop_remainder),
"num_workers":
8,
"num_subnetworks":
3,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble":
[True, False, False, False, True, False, False, False],
"want_should_build_subnetwork": [
[True, True, True],
[True, False, False],
[False, True, False],
[False, False, True],
[True, True, True],
[True, False, False],
[False, True, False],
[False, False, True],
],
"want_should_train_subnetworks":
[False, True, True, True, False, True, True, True],
},
] for drop_remainder in [False, True]]))
# pylint: enable=g-complex-comprehension
@test_util.run_in_graph_and_eager_modes
def test_worker_methods(self, num_workers, num_subnetworks, drop_remainder,
want_should_build_ensemble,
want_should_build_subnetwork,
want_should_train_subnetworks):
should_build_ensemble = []
should_build_subnetwork = []
should_train_subnetworks = []
for worker_index in range(num_workers):
strategy = RoundRobinStrategy(drop_remainder)
strategy.config = WorkerConfig(num_workers, worker_index)
should_build_ensemble.append(
strategy.should_build_ensemble(num_subnetworks))
should_build_subnetwork.append([])
should_train_subnetworks.append(
strategy.should_train_subnetworks(num_subnetworks))
for subnetwork_index in range(num_subnetworks):
should_build_subnetwork[-1].append(
strategy.should_build_subnetwork(num_subnetworks, subnetwork_index))
self.assertEqual(want_should_build_ensemble, should_build_ensemble)
self.assertEqual(want_should_build_subnetwork, should_build_subnetwork)
self.assertEqual(want_should_train_subnetworks, should_train_subnetworks)
@parameterized.named_parameters(
{
"testcase_name":
"one_ps_one_subnetwork",
"num_ps":
1,
"num_subnetworks":
1,
"want_variable_devices": [[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],],
},
{
"testcase_name":
"three_ps_one_subnetwork",
"num_ps":
3,
"num_subnetworks":
1,
"want_variable_devices": [[
"/job:ps/task:1", "/job:ps/task:0", "/job:ps/task:2",
"/job:ps/task:0"
],],
},
{
"testcase_name":
"two_ps_five_subnetworks",
"num_ps":
2,
"num_subnetworks":
5,
"want_variable_devices": [
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:1", "/job:ps/task:1", "/job:ps/task:1",
"/job:ps/task:1"
],
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:1", "/job:ps/task:1", "/job:ps/task:1",
"/job:ps/task:1"
],
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
],
},
{
"testcase_name":
"one_ps_three_subnetworks",
"num_ps":
1,
"num_subnetworks":
3,
"want_variable_devices": [
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
],
},
{
"testcase_name":
"two_ps_three_subnetworks",
"num_ps":
2,
"num_subnetworks":
3,
"want_variable_devices": [
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:1", "/job:ps/task:1", "/job:ps/task:1",
"/job:ps/task:1"
],
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
],
},
{
"testcase_name":
"three_ps_three_subnetworks",
"num_ps":
3,
"num_subnetworks":
3,
"want_variable_devices": [
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:1", "/job:ps/task:1", "/job:ps/task:1",
"/job:ps/task:1"
],
[
"/job:ps/task:2", "/job:ps/task:2", "/job:ps/task:2",
"/job:ps/task:2"
],
],
},
{
"testcase_name":
"three_ps_three_subnetworks_no_dedicated_parameter_servers",
"num_ps":
3,
"num_subnetworks":
3,
"dedicate_parameter_servers":
False,
"want_variable_devices": [
["", "", "", ""],
["", "", "", ""],
["", "", "", ""],
],
},
{
"testcase_name":
"four_ps_three_subnetworks",
"num_ps":
4,
"num_subnetworks":
3,
"want_variable_devices": [
[
"/job:ps/task:1", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:2", "/job:ps/task:2", "/job:ps/task:2",
"/job:ps/task:2"
],
[
"/job:ps/task:3", "/job:ps/task:3", "/job:ps/task:3",
"/job:ps/task:3"
],
],
},
{
"testcase_name":
"five_ps_three_subnetworks",
"num_ps":
5,
"num_subnetworks":
3,
"want_variable_devices": [
[
"/job:ps/task:1", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:2", "/job:ps/task:3", "/job:ps/task:3",
"/job:ps/task:2"
],
[
"/job:ps/task:4", "/job:ps/task:4", "/job:ps/task:4",
"/job:ps/task:4"
],
],
},
{
"testcase_name":
"six_ps_three_subnetworks",
"num_ps":
6,
"num_subnetworks":
3,
"want_variable_devices": [
[
"/job:ps/task:1", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:2", "/job:ps/task:3", "/job:ps/task:3",
"/job:ps/task:2"
],
[
"/job:ps/task:5", "/job:ps/task:4", "/job:ps/task:4",
"/job:ps/task:5"
],
],
},
{
"testcase_name":
"seven_ps_three_subnetworks",
"num_ps":
7,
"num_subnetworks":
3,
"want_variable_devices": [
[
"/job:ps/task:1", "/job:ps/task:0", "/job:ps/task:2",
"/job:ps/task:0"
],
[
"/job:ps/task:3", "/job:ps/task:4", "/job:ps/task:4",
"/job:ps/task:3"
],
[
"/job:ps/task:6", "/job:ps/task:5", "/job:ps/task:5",
"/job:ps/task:6"
],
],
},
{
"testcase_name":
"eight_ps_three_subnetworks",
"num_ps":
8,
"num_subnetworks":
3,
"want_variable_devices": [
[
"/job:ps/task:1", "/job:ps/task:0", "/job:ps/task:2",
"/job:ps/task:0"
],
[
"/job:ps/task:4", "/job:ps/task:5", "/job:ps/task:5",
"/job:ps/task:4"
],
[
"/job:ps/task:7", "/job:ps/task:6", "/job:ps/task:6",
"/job:ps/task:7"
],
],
},
)
@test_util.run_in_graph_and_eager_modes
def test_device_methods(self,
num_ps,
num_subnetworks,
want_variable_devices,
dedicate_parameter_servers=True):
with context.graph_mode():
x = tf.constant([[1., 0.]])
strategy = RoundRobinStrategy(
dedicate_parameter_servers=dedicate_parameter_servers)
strategy.config = ParameterServerConfig(num_ps)
variable_devices = []
for i in range(num_subnetworks):
with strategy.subnetwork_devices(num_subnetworks, i):
subnetwork = tf.keras.Sequential()
subnetwork.add(tf.keras.layers.Dense(4))
subnetwork.add(tf.keras.layers.Dense(3))
subnetwork(x)
variable_devices.append([w.op.device for w in subnetwork.weights])
self.assertEqual(want_variable_devices, variable_devices)
if __name__ == "__main__":
tf.test.main()
| 34.315236
| 80
| 0.457181
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
from absl.testing import parameterized
from adanet.distributed.placement import ReplicationStrategy
from adanet.distributed.placement import RoundRobinStrategy
import tensorflow as tf
from tensorflow.python.eager import context
from tensorflow.python.framework import test_util
class ReplicationStrategyTest(tf.test.TestCase):
@test_util.run_in_graph_and_eager_modes
def test_strategy(self):
strategy = ReplicationStrategy()
num_subnetworks = 3
subnetwork_index = 1
self.assertTrue(strategy.should_build_ensemble(num_subnetworks))
self.assertTrue(
strategy.should_build_subnetwork(num_subnetworks, subnetwork_index))
self.assertTrue(strategy.should_train_subnetworks(num_subnetworks))
class WorkerConfig(object):
def __init__(self, num_worker_replicas, global_id_in_cluster):
self.num_worker_replicas = num_worker_replicas
self.global_id_in_cluster = global_id_in_cluster
class ParameterServerConfig(object):
def __init__(self, num_ps_replicas):
self.num_ps_replicas = num_ps_replicas
def _testcase_name(name, drop_remainder):
return "{}{}".format(name, "_drop_remainder" if drop_remainder else "")
class RoundRobinStrategyTest(parameterized.TestCase, tf.test.TestCase):
@parameterized.named_parameters(
itertools.chain(*[[
{
"testcase_name":
_testcase_name("one_worker_one_subnetwork", drop_remainder),
"num_workers":
1,
"num_subnetworks":
1,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble": [True],
"want_should_build_subnetwork": [[True]],
"want_should_train_subnetworks": [True],
},
{
"testcase_name":
_testcase_name("three_workers_one_subnetworks", drop_remainder
),
"num_workers":
3,
"num_subnetworks":
1,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble": [True, True, True],
"want_should_build_subnetwork": [[True], [True], [True]],
"want_should_train_subnetworks": [True, True, True],
},
{
"testcase_name":
_testcase_name("two_workers_one_subnetworks", drop_remainder),
"num_workers":
2,
"num_subnetworks":
5,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble": [True, False],
"want_should_build_subnetwork": [[True, True, True, True, True],
[
True,
not drop_remainder,
not drop_remainder,
not drop_remainder,
not drop_remainder,
]],
"want_should_train_subnetworks": [False, True],
},
{
"testcase_name":
_testcase_name("one_worker_three_subnetworks", drop_remainder
),
"num_workers":
1,
"num_subnetworks":
3,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble": [True],
"want_should_build_subnetwork": [[True, True, True]],
"want_should_train_subnetworks": [True],
},
{
"testcase_name":
_testcase_name("two_workers_three_subnetworks", drop_remainder
),
"num_workers":
2,
"num_subnetworks":
3,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble": [True, False],
"want_should_build_subnetwork": [
[True, True, True],
[True, not drop_remainder, not drop_remainder],
],
"want_should_train_subnetworks": [False, True],
},
{
"testcase_name":
_testcase_name("three_workers_three_subnetworks",
drop_remainder),
"num_workers":
3,
"num_subnetworks":
3,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble": [True, False, False],
"want_should_build_subnetwork": [
[True, True, True],
[True, False, not drop_remainder],
[False, True, False],
],
"want_should_train_subnetworks": [False, True, True],
},
{
"testcase_name":
_testcase_name("four_workers_three_subnetworks",
drop_remainder),
"num_workers":
4,
"num_subnetworks":
3,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble": [True, False, False, False],
"want_should_build_subnetwork": [
[True, True, True],
[True, False, False],
[False, True, False],
[False, False, True],
],
"want_should_train_subnetworks": [False, True, True, True],
},
{
"testcase_name":
_testcase_name("five_workers_three_subnetworks",
drop_remainder),
"num_workers":
5,
"num_subnetworks":
3,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble": [True, False, False, False, True],
"want_should_build_subnetwork": [
[True, True, True],
[True, False, False],
[False, True, False],
[False, False, True],
[True, True, True],
],
"want_should_train_subnetworks": [False, True, True, True, False],
},
{
"testcase_name":
_testcase_name("six_workers_three_subnetworks", drop_remainder
),
"num_workers":
6,
"num_subnetworks":
3,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble":
[True, False, False, False, True, False],
"want_should_build_subnetwork": [
[True, True, True],
[True, False, False],
[False, True, False],
[False, False, True],
[True, True, True],
[True, not drop_remainder, not drop_remainder],
],
"want_should_train_subnetworks":
[False, True, True, True, False, True],
},
{
"testcase_name":
_testcase_name("seven_workers_three_subnetworks",
drop_remainder),
"num_workers":
7,
"num_subnetworks":
3,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble":
[True, False, False, False, True, False, False],
"want_should_build_subnetwork": [
[True, True, True],
[True, False, False],
[False, True, False],
[False, False, True],
[True, True, True],
[True, False, not drop_remainder],
[False, True, False],
],
"want_should_train_subnetworks":
[False, True, True, True, False, True, True],
},
{
"testcase_name":
_testcase_name("eight_workers_three_subnetworks",
drop_remainder),
"num_workers":
8,
"num_subnetworks":
3,
"drop_remainder":
drop_remainder,
"want_should_build_ensemble":
[True, False, False, False, True, False, False, False],
"want_should_build_subnetwork": [
[True, True, True],
[True, False, False],
[False, True, False],
[False, False, True],
[True, True, True],
[True, False, False],
[False, True, False],
[False, False, True],
],
"want_should_train_subnetworks":
[False, True, True, True, False, True, True, True],
},
] for drop_remainder in [False, True]]))
@test_util.run_in_graph_and_eager_modes
def test_worker_methods(self, num_workers, num_subnetworks, drop_remainder,
want_should_build_ensemble,
want_should_build_subnetwork,
want_should_train_subnetworks):
should_build_ensemble = []
should_build_subnetwork = []
should_train_subnetworks = []
for worker_index in range(num_workers):
strategy = RoundRobinStrategy(drop_remainder)
strategy.config = WorkerConfig(num_workers, worker_index)
should_build_ensemble.append(
strategy.should_build_ensemble(num_subnetworks))
should_build_subnetwork.append([])
should_train_subnetworks.append(
strategy.should_train_subnetworks(num_subnetworks))
for subnetwork_index in range(num_subnetworks):
should_build_subnetwork[-1].append(
strategy.should_build_subnetwork(num_subnetworks, subnetwork_index))
self.assertEqual(want_should_build_ensemble, should_build_ensemble)
self.assertEqual(want_should_build_subnetwork, should_build_subnetwork)
self.assertEqual(want_should_train_subnetworks, should_train_subnetworks)
@parameterized.named_parameters(
{
"testcase_name":
"one_ps_one_subnetwork",
"num_ps":
1,
"num_subnetworks":
1,
"want_variable_devices": [[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],],
},
{
"testcase_name":
"three_ps_one_subnetwork",
"num_ps":
3,
"num_subnetworks":
1,
"want_variable_devices": [[
"/job:ps/task:1", "/job:ps/task:0", "/job:ps/task:2",
"/job:ps/task:0"
],],
},
{
"testcase_name":
"two_ps_five_subnetworks",
"num_ps":
2,
"num_subnetworks":
5,
"want_variable_devices": [
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:1", "/job:ps/task:1", "/job:ps/task:1",
"/job:ps/task:1"
],
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:1", "/job:ps/task:1", "/job:ps/task:1",
"/job:ps/task:1"
],
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
],
},
{
"testcase_name":
"one_ps_three_subnetworks",
"num_ps":
1,
"num_subnetworks":
3,
"want_variable_devices": [
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
],
},
{
"testcase_name":
"two_ps_three_subnetworks",
"num_ps":
2,
"num_subnetworks":
3,
"want_variable_devices": [
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:1", "/job:ps/task:1", "/job:ps/task:1",
"/job:ps/task:1"
],
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
],
},
{
"testcase_name":
"three_ps_three_subnetworks",
"num_ps":
3,
"num_subnetworks":
3,
"want_variable_devices": [
[
"/job:ps/task:0", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:1", "/job:ps/task:1", "/job:ps/task:1",
"/job:ps/task:1"
],
[
"/job:ps/task:2", "/job:ps/task:2", "/job:ps/task:2",
"/job:ps/task:2"
],
],
},
{
"testcase_name":
"three_ps_three_subnetworks_no_dedicated_parameter_servers",
"num_ps":
3,
"num_subnetworks":
3,
"dedicate_parameter_servers":
False,
"want_variable_devices": [
["", "", "", ""],
["", "", "", ""],
["", "", "", ""],
],
},
{
"testcase_name":
"four_ps_three_subnetworks",
"num_ps":
4,
"num_subnetworks":
3,
"want_variable_devices": [
[
"/job:ps/task:1", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:2", "/job:ps/task:2", "/job:ps/task:2",
"/job:ps/task:2"
],
[
"/job:ps/task:3", "/job:ps/task:3", "/job:ps/task:3",
"/job:ps/task:3"
],
],
},
{
"testcase_name":
"five_ps_three_subnetworks",
"num_ps":
5,
"num_subnetworks":
3,
"want_variable_devices": [
[
"/job:ps/task:1", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:2", "/job:ps/task:3", "/job:ps/task:3",
"/job:ps/task:2"
],
[
"/job:ps/task:4", "/job:ps/task:4", "/job:ps/task:4",
"/job:ps/task:4"
],
],
},
{
"testcase_name":
"six_ps_three_subnetworks",
"num_ps":
6,
"num_subnetworks":
3,
"want_variable_devices": [
[
"/job:ps/task:1", "/job:ps/task:0", "/job:ps/task:0",
"/job:ps/task:0"
],
[
"/job:ps/task:2", "/job:ps/task:3", "/job:ps/task:3",
"/job:ps/task:2"
],
[
"/job:ps/task:5", "/job:ps/task:4", "/job:ps/task:4",
"/job:ps/task:5"
],
],
},
{
"testcase_name":
"seven_ps_three_subnetworks",
"num_ps":
7,
"num_subnetworks":
3,
"want_variable_devices": [
[
"/job:ps/task:1", "/job:ps/task:0", "/job:ps/task:2",
"/job:ps/task:0"
],
[
"/job:ps/task:3", "/job:ps/task:4", "/job:ps/task:4",
"/job:ps/task:3"
],
[
"/job:ps/task:6", "/job:ps/task:5", "/job:ps/task:5",
"/job:ps/task:6"
],
],
},
{
"testcase_name":
"eight_ps_three_subnetworks",
"num_ps":
8,
"num_subnetworks":
3,
"want_variable_devices": [
[
"/job:ps/task:1", "/job:ps/task:0", "/job:ps/task:2",
"/job:ps/task:0"
],
[
"/job:ps/task:4", "/job:ps/task:5", "/job:ps/task:5",
"/job:ps/task:4"
],
[
"/job:ps/task:7", "/job:ps/task:6", "/job:ps/task:6",
"/job:ps/task:7"
],
],
},
)
@test_util.run_in_graph_and_eager_modes
def test_device_methods(self,
num_ps,
num_subnetworks,
want_variable_devices,
dedicate_parameter_servers=True):
with context.graph_mode():
x = tf.constant([[1., 0.]])
strategy = RoundRobinStrategy(
dedicate_parameter_servers=dedicate_parameter_servers)
strategy.config = ParameterServerConfig(num_ps)
variable_devices = []
for i in range(num_subnetworks):
with strategy.subnetwork_devices(num_subnetworks, i):
subnetwork = tf.keras.Sequential()
subnetwork.add(tf.keras.layers.Dense(4))
subnetwork.add(tf.keras.layers.Dense(3))
subnetwork(x)
variable_devices.append([w.op.device for w in subnetwork.weights])
self.assertEqual(want_variable_devices, variable_devices)
if __name__ == "__main__":
tf.test.main()
| true
| true
|
1c41d4b11c7e8e19e9053f4daa2146566d6d84b4
| 4,338
|
py
|
Python
|
accounts/forms.py
|
HydroLearn/HydroLearn
|
fbf14aab7a33d71a336afa3878666378e1a75358
|
[
"BSD-3-Clause"
] | null | null | null |
accounts/forms.py
|
HydroLearn/HydroLearn
|
fbf14aab7a33d71a336afa3878666378e1a75358
|
[
"BSD-3-Clause"
] | 23
|
2018-08-09T18:46:20.000Z
|
2021-06-10T20:21:26.000Z
|
accounts/forms.py
|
HydroLearn/HydroLearn
|
fbf14aab7a33d71a336afa3878666378e1a75358
|
[
"BSD-3-Clause"
] | 1
|
2019-01-28T15:42:39.000Z
|
2019-01-28T15:42:39.000Z
|
from django import forms
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from accounts.models import User, Profile
import six
class RegisterForm(forms.ModelForm):
password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
password2 = forms.CharField(label='Confirm password', widget=forms.PasswordInput)
class Meta:
model = User
fields = ('email',)
# def clean_email(self):
# email = self.cleaned_data.get('email')
# qs = User.objects.filter(email=email)
# if qs.exists():
# raise forms.ValidationError("email is taken")
# return email
def clean_password2(self):
# Check that the two password entries match
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError("Passwords don't match")
return password2
def save(self, commit=True):
user = super(RegisterForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
# TODO: need to get this integrated (EMAIL CONFIRMATION BEFORE SETTING ACTIVE)
# user.active = False # send a confirmation email before setting to active
#user.username = self.cleaned_data["email"]
if commit:
user.save()
return user
class UserAdminCreationForm(forms.ModelForm):
"""A form for creating new users. Includes all the required
fields, plus a repeated password."""
password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput)
class Meta:
model = User
fields = ('email',)
def clean_password2(self):
# Check that the two password entries match
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError("Passwords don't match")
return password2
def save(self, commit=True):
# Save the provided password in hashed format
user = super(UserAdminCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class UserAdminChangeForm(forms.ModelForm):
"""A form for updating users. Includes all the fields on
the user, but replaces the password field with admin's
password hash display field.
"""
password = ReadOnlyPasswordHashField()
class Meta:
model = User
fields = ('email', 'password', 'is_active', 'is_superuser')
def clean_password(self):
# Regardless of what the user provides, return the initial value.
# This is done here, rather than on the field, because the
# field does not have access to the initial value
return self.initial["password"]
class LoginForm(forms.Form):
email = forms.EmailField(label='Email')
password = forms.CharField(widget=forms.PasswordInput)
class UserForm(forms.ModelForm):
class Meta:
model = User
fields = ['email']
def clean_email(self):
data = self.cleaned_data['email']
if not isinstance(data, six.string_types) or len(data.strip()) == 0:
raise forms.ValidationError("Email is a required field.")
return data
class UserProfileForm(forms.ModelForm):
# def __init__(self, *args, **kwargs):
# super(UserProfileForm, self).__init__(*args, **kwargs)
# self.fields['identifiers'].required = False
class Meta:
model = Profile
exclude = ['user', 'email_confirmed']
def clean_first_name(self):
data = self.cleaned_data['first_name']
if not isinstance(data, six.string_types) or len(data.strip()) == 0:
raise forms.ValidationError("First name is a required field.")
return data
def clean_last_name(self):
data = self.cleaned_data['last_name']
if not isinstance(data, six.string_types) or len(data.strip()) == 0:
raise forms.ValidationError("Last name is a required field.")
return data
| 33.890625
| 90
| 0.660673
|
from django import forms
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from accounts.models import User, Profile
import six
class RegisterForm(forms.ModelForm):
password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
password2 = forms.CharField(label='Confirm password', widget=forms.PasswordInput)
class Meta:
model = User
fields = ('email',)
def clean_password2(self):
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError("Passwords don't match")
return password2
def save(self, commit=True):
user = super(RegisterForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
# TODO: need to get this integrated (EMAIL CONFIRMATION BEFORE SETTING ACTIVE)
# user.active = False # send a confirmation email before setting to active
#user.username = self.cleaned_data["email"]
if commit:
user.save()
return user
class UserAdminCreationForm(forms.ModelForm):
password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput)
class Meta:
model = User
fields = ('email',)
def clean_password2(self):
# Check that the two password entries match
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError("Passwords don't match")
return password2
def save(self, commit=True):
user = super(UserAdminCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class UserAdminChangeForm(forms.ModelForm):
password = ReadOnlyPasswordHashField()
class Meta:
model = User
fields = ('email', 'password', 'is_active', 'is_superuser')
def clean_password(self):
return self.initial["password"]
class LoginForm(forms.Form):
email = forms.EmailField(label='Email')
password = forms.CharField(widget=forms.PasswordInput)
class UserForm(forms.ModelForm):
class Meta:
model = User
fields = ['email']
def clean_email(self):
data = self.cleaned_data['email']
if not isinstance(data, six.string_types) or len(data.strip()) == 0:
raise forms.ValidationError("Email is a required field.")
return data
class UserProfileForm(forms.ModelForm):
class Meta:
model = Profile
exclude = ['user', 'email_confirmed']
def clean_first_name(self):
data = self.cleaned_data['first_name']
if not isinstance(data, six.string_types) or len(data.strip()) == 0:
raise forms.ValidationError("First name is a required field.")
return data
def clean_last_name(self):
data = self.cleaned_data['last_name']
if not isinstance(data, six.string_types) or len(data.strip()) == 0:
raise forms.ValidationError("Last name is a required field.")
return data
| true
| true
|
1c41d4f75774a0d772741eee7c4e5ae752cbd09c
| 743
|
py
|
Python
|
manage.py
|
MichaelNjoroge254/Personal-Blog
|
3a27f0667ec3e86d6d27b1c0b026a92fd2d4c64a
|
[
"MIT"
] | null | null | null |
manage.py
|
MichaelNjoroge254/Personal-Blog
|
3a27f0667ec3e86d6d27b1c0b026a92fd2d4c64a
|
[
"MIT"
] | null | null | null |
manage.py
|
MichaelNjoroge254/Personal-Blog
|
3a27f0667ec3e86d6d27b1c0b026a92fd2d4c64a
|
[
"MIT"
] | null | null | null |
from app import create_app,db
from flask_script import Manager,Server
from app.models import User,Blog
from flask_migrate import Migrate, MigrateCommand
# from run import create_app
# Creating app instance
app = create_app('production')
# manager = Manager(create_app())
manager = Manager(app)
migrate = Migrate(app,db)
manager.add_command('db',MigrateCommand)
manager.add_command('server',Server)
@manager.shell
def make_shell_context():
return dict(app = app,db = db,User = User,Blog = Blog)
@manager.command
def test():
'''
Run the unit tests
'''
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=5).run(tests)
if __name__ == '__main__':
manager.run()
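# Typical Flask-Script invocations enabled by the registrations above:
#   python manage.py server        # start the development server
#   python manage.py db migrate    # Flask-Migrate migration commands
#   python manage.py test          # run the unit tests defined above
#   python manage.py shell         # shell preloaded with app, db, User and Blog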
| 22.515152
| 58
| 0.730821
|
from app import create_app,db
from flask_script import Manager,Server
from app.models import User,Blog
from flask_migrate import Migrate, MigrateCommand
app = create_app('production')
manager = Manager(app)
migrate = Migrate(app,db)
manager.add_command('db',MigrateCommand)
manager.add_command('server',Server)
@manager.shell
def make_shell_context():
return dict(app = app,db = db,User = User,Blog = Blog)
@manager.command
def test():
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=5).run(tests)
if __name__ == '__main__':
manager.run()
| true
| true
|
1c41d52271a713cd58bdbe1ba302ed5ef72ab728
| 6,527
|
py
|
Python
|
coremltools/converters/mil/frontend/torch/torchir_passes.py
|
dpkg9/coremltools
|
096cc25f00d6cd0b6d9e9a76f128caf08aab9f40
|
[
"BSD-3-Clause"
] | null | null | null |
coremltools/converters/mil/frontend/torch/torchir_passes.py
|
dpkg9/coremltools
|
096cc25f00d6cd0b6d9e9a76f128caf08aab9f40
|
[
"BSD-3-Clause"
] | null | null | null |
coremltools/converters/mil/frontend/torch/torchir_passes.py
|
dpkg9/coremltools
|
096cc25f00d6cd0b6d9e9a76f128caf08aab9f40
|
[
"BSD-3-Clause"
] | null | null | null |
from collections import OrderedDict
import logging as _logging
from .internal_graph import *
def transform_inplace_ops(graph, name_remap_dict=None):
# As we modify ops, we'll need to remap symbols.
if name_remap_dict is None:
name_remap_dict = {}
for node in graph.nodes:
for k, v in name_remap_dict.items():
node.replace_name(k, v)
if node.kind == "append":
if isinstance(node.parent, InternalTorchIRGraph):
# If append appears in a graph (outer block), replace
# subsequent uses of its input symbol with its output symbol.
name_remap_dict[node.inputs[0]] = node.outputs[0]
elif node.parent.parent.kind == "loop":
# If append appears in a loop block, add its inputs to the block
# inputs and loop inputs, and its outputs to the block outputs
# and loop outputs.
# This is the global input to append. We need to add it to the
# loop's input list, and replace any uses after the node with
# @global_output below.
global_input = node.inputs[0]
# This will be the name of the input to append within the
# block. We need to add it to the block inputs.
local_input = node.parent.parent.name + ".0"
# This is the output of append. We need to add it to the list
# of block outputs.
local_output = node.outputs[0]
# This is the name of the new output from the loop. It should
# replace any uses of @global_input after the loop op.
global_output = local_output + ".out"
name_remap_dict[global_input] = global_output
node.parent.parent.inputs.append(global_input)
node.parent.inputs.append(local_input)
node.replace_name(global_input, local_input)
node.parent.outputs.append(local_output)
node.parent.parent.outputs.append(global_output)
node.parent.parent.name = node.parent.parent.outputs[0]
elif node.parent.parent.kind == "if":
# If append appears in an if/else block, add its outputs to the
# block outputs and loop outputs.
# Note that we can't assume the append appears in both blocks.
raise NotImplementedError(
"inplace_ops pass doesn't yet support append op inside conditional"
)
for block in node.blocks:
transform_inplace_ops(block, name_remap_dict)
# Replace names in graph outputs
for k, v in name_remap_dict.items():
try:
idx = graph.outputs.index(k)
except ValueError:
pass
else:
graph.outputs[idx] = v
def flatten_graph_input_values(graph):
""" CoreML can't handle nested iterables of tensors, so we flatten the
inputs of any graph that expects them.
"""
new_graph_inputs = graph.inputs
all_new_nodes = []
changed = True
notified = False
while changed:
old_graph_inputs = new_graph_inputs
new_graph_inputs = OrderedDict()
new_nodes = []
changed = False
for _input_name, _input_val in old_graph_inputs.items():
if isinstance(_input_val, (tuple, list)):
changed = True
if not notified:
notified = True
_logging.warning(
"Tuple detected at graph input. This will be flattened in the converted model."
)
# If this input to the graph is a tuple, we want to replace it
# with a flattened version and add an op to construct the tuple.
node_inputs = []
for idx, item in enumerate(_input_val):
name = _input_name + "_{}".format(idx)
new_graph_inputs[name] = item
node_inputs.append(name)
new_nodes.append(
InternalTorchIRNode(
inputs=node_inputs,
outputs=[_input_name],
kind="tupleconstruct",
)
)
else:
# This input isn't a tuple, keep it as is.
new_graph_inputs[_input_name] = _input_val
all_new_nodes = new_nodes + all_new_nodes
graph.inputs = new_graph_inputs
graph.nodes = all_new_nodes + graph.nodes
def flatten_graph_output_values(graph):
""" CoreML can't handle nested iterables of tensors, so we flatten the
outputs of any graph that produces them.
"""
node_names = [node.name for node in graph.nodes]
new_graph_outputs = graph.outputs
changed = True
notified = False
while changed:
old_graph_outputs = new_graph_outputs
new_graph_outputs = []
changed = False
for outp in old_graph_outputs:
# Find the node that generates this output var.
# It is possible to not find the output var in the list of node
# names since nodes are named after their first output. In that
# case, it means the output var comes from a node that returns
# multiple outputs, which means that node cannot be a construct op.
try:
node_idx = node_names.index(outp)
            except ValueError:
# @outp doesn't come from a construct op
new_graph_outputs.append(outp)
continue
if graph.nodes[node_idx].kind in [
"tupleconstruct",
"listconstruct",
]:
# Since this output came from a construct op, we can replace it
# with the inputs to the op.
new_graph_outputs.extend(graph.nodes[node_idx].inputs)
changed = True
if not notified:
notified = True
_logging.warning(
"Tuple detected at graph output. This will be flattened in the converted model."
)
else:
new_graph_outputs.append(outp)
# Note: if we flattened outputs, there are likely to be construct ops
# that are no longer needed. These will be removed in a later DCE pass.
graph.outputs = new_graph_outputs
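# Illustrative example (hypothetical graph, not taken from the converter tests):
# if a graph returns a single symbol "out" produced by a tupleconstruct over
# ["a", "b"], this pass rewrites graph.outputs from ["out"] to ["a", "b"] and
# leaves the now-unused construct op for the later dead-code-elimination pass.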
| 41.839744
| 104
| 0.573004
|
from collections import OrderedDict
import logging as _logging
from .internal_graph import *
def transform_inplace_ops(graph, name_remap_dict=None):
if name_remap_dict is None:
name_remap_dict = {}
for node in graph.nodes:
for k, v in name_remap_dict.items():
node.replace_name(k, v)
if node.kind == "append":
if isinstance(node.parent, InternalTorchIRGraph):
# If append appears in a graph (outer block), replace
# subsequent uses of its input symbol with its output symbol.
name_remap_dict[node.inputs[0]] = node.outputs[0]
elif node.parent.parent.kind == "loop":
# If append appears in a loop block, add its inputs to the block
# inputs and loop inputs, and its outputs to the block outputs
# and loop outputs.
# This is the global input to append. We need to add it to the
# loop's input list, and replace any uses after the node with
global_input = node.inputs[0]
local_input = node.parent.parent.name + ".0"
local_output = node.outputs[0]
global_output = local_output + ".out"
name_remap_dict[global_input] = global_output
node.parent.parent.inputs.append(global_input)
node.parent.inputs.append(local_input)
node.replace_name(global_input, local_input)
node.parent.outputs.append(local_output)
node.parent.parent.outputs.append(global_output)
node.parent.parent.name = node.parent.parent.outputs[0]
elif node.parent.parent.kind == "if":
raise NotImplementedError(
"inplace_ops pass doesn't yet support append op inside conditional"
)
for block in node.blocks:
transform_inplace_ops(block, name_remap_dict)
for k, v in name_remap_dict.items():
try:
idx = graph.outputs.index(k)
except ValueError:
pass
else:
graph.outputs[idx] = v
def flatten_graph_input_values(graph):
new_graph_inputs = graph.inputs
all_new_nodes = []
changed = True
notified = False
while changed:
old_graph_inputs = new_graph_inputs
new_graph_inputs = OrderedDict()
new_nodes = []
changed = False
for _input_name, _input_val in old_graph_inputs.items():
if isinstance(_input_val, (tuple, list)):
changed = True
if not notified:
notified = True
_logging.warning(
"Tuple detected at graph input. This will be flattened in the converted model."
)
node_inputs = []
for idx, item in enumerate(_input_val):
name = _input_name + "_{}".format(idx)
new_graph_inputs[name] = item
node_inputs.append(name)
new_nodes.append(
InternalTorchIRNode(
inputs=node_inputs,
outputs=[_input_name],
kind="tupleconstruct",
)
)
else:
new_graph_inputs[_input_name] = _input_val
all_new_nodes = new_nodes + all_new_nodes
graph.inputs = new_graph_inputs
graph.nodes = all_new_nodes + graph.nodes
def flatten_graph_output_values(graph):
node_names = [node.name for node in graph.nodes]
new_graph_outputs = graph.outputs
changed = True
notified = False
while changed:
old_graph_outputs = new_graph_outputs
new_graph_outputs = []
changed = False
for outp in old_graph_outputs:
# Find the node that generates this output var.
# It is possible to not find the output var in the list of node
# names since nodes are named after their first output. In that
# case, it means the output var comes from a node that returns
# multiple outputs, which means that node cannot be a construct op.
try:
node_idx = node_names.index(outp)
            except ValueError:
# @outp doesn't come from a construct op
new_graph_outputs.append(outp)
continue
if graph.nodes[node_idx].kind in [
"tupleconstruct",
"listconstruct",
]:
new_graph_outputs.extend(graph.nodes[node_idx].inputs)
changed = True
if not notified:
notified = True
_logging.warning(
"Tuple detected at graph output. This will be flattened in the converted model."
)
else:
new_graph_outputs.append(outp)
graph.outputs = new_graph_outputs
| true
| true
|
1c41d5e143b2ee34d6d06f4c6e5c7363dc3ec3c5
| 113
|
py
|
Python
|
setup.py
|
johnzech/geotiffer
|
1ba49445f3006f12134ab2b9a8d8141e3c0dc8f0
|
[
"Qhull"
] | 3
|
2016-04-08T09:26:04.000Z
|
2019-10-13T21:27:53.000Z
|
setup.py
|
johnzech/geotiffer
|
1ba49445f3006f12134ab2b9a8d8141e3c0dc8f0
|
[
"Qhull"
] | null | null | null |
setup.py
|
johnzech/geotiffer
|
1ba49445f3006f12134ab2b9a8d8141e3c0dc8f0
|
[
"Qhull"
] | null | null | null |
from distutils.core import setup
import py2exe
setup(console=['print_layers.py'])
setup(console=['geotiffer.py'])
| 28.25
| 34
| 0.787611
|
from distutils.core import setup
import py2exe
setup(console=['print_layers.py'])
setup(console=['geotiffer.py'])
| true
| true
|
1c41d71a0abdfdedec2985c9614b4a7f28e8ed49
| 6,184
|
py
|
Python
|
test-suite/exonum-py-tests/suite/common.py
|
Kollego/exonum
|
1374af9fef04c7afe1cfdf1245e0ce6c45109676
|
[
"Apache-2.0"
] | 1,226
|
2017-07-17T05:09:05.000Z
|
2022-03-17T07:37:15.000Z
|
test-suite/exonum-py-tests/suite/common.py
|
Kollego/exonum
|
1374af9fef04c7afe1cfdf1245e0ce6c45109676
|
[
"Apache-2.0"
] | 1,309
|
2017-07-17T10:51:09.000Z
|
2022-03-15T16:17:07.000Z
|
test-suite/exonum-py-tests/suite/common.py
|
Kollego/exonum
|
1374af9fef04c7afe1cfdf1245e0ce6c45109676
|
[
"Apache-2.0"
] | 312
|
2017-07-17T06:21:05.000Z
|
2022-03-28T19:08:05.000Z
|
"""Module containing common scenarios that can be used
for writing tests with less boiler-plate."""
from typing import List, Dict, Any
import unittest
import time
from exonum_client import ExonumClient
from suite import ExonumNetwork, ProcessOutput, ProcessExitResult
from requests.exceptions import ConnectionError
RETRIES_AMOUNT = 30
ARTIFACT_NAME = "exonum-cryptocurrency"
ARTIFACT_VERSION = "0.2.0"
MIN_PEER_PORT = 6331
MIN_API_PORT = 8080
# Range of ports to use. Each test needs 4 peer ports and 8 API ports, so
# without a cap on the range the suite could easily burn through hundreds of ports.
PORT_RANGE = 32
def run_dev_node(application: str) -> ExonumNetwork:
"""Starts a single node in the run-dev mode and returns
`ExonumNetwork` object with the running node.
Example:
>>> network = run_dev_node("exonum-cryptocurrency-advanced")"""
network = ExonumNetwork(application)
network.run_dev()
return network
available_peer_port = MIN_PEER_PORT
available_api_port = MIN_API_PORT
def run_n_nodes(application: str, nodes_amount: int) -> ExonumNetwork:
"""Creates and runs a network with N validators and return an
`ExonumNetwork` object with it."""
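    # Peer and API ports come from the shared module-level counters below, so
    # networks created back to back do not collide; each counter wraps around
    # once it leaves its MIN_*_PORT + PORT_RANGE window.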
global available_peer_port, available_api_port
address = "127.0.0.1:{}"
network = ExonumNetwork(application)
network.generate_template(nodes_amount)
for i in range(nodes_amount):
network.generate_config(i, address.format(available_peer_port))
available_peer_port += 1
if available_peer_port > MIN_PEER_PORT + PORT_RANGE:
available_peer_port = MIN_PEER_PORT
for i in range(nodes_amount):
public_api_address = address.format(available_api_port)
private_api_address = address.format(available_api_port + 1)
network.finalize(i, public_api_address, private_api_address)
available_api_port += 2
if available_api_port > MIN_API_PORT + PORT_RANGE:
available_api_port = MIN_API_PORT
for i in range(nodes_amount):
network.run_node(i)
return network
def run_4_nodes(application: str) -> ExonumNetwork:
"""Creates and runs a network with 4 validators and return an
`ExonumNetwork` object with it.
Example:
>>> network = run_4_nodes("exonum-cryptocurrency-advanced")
    >>> for i in range(network.validators_count()):
... print(network.api_address(i))
...
'127.0.0.1', 8080, 8081
'127.0.0.1', 8082, 8083
'127.0.0.1', 8084, 8085
'127.0.0.1', 8086, 8087
"""
return run_n_nodes(application, 4)
def assert_processes_exited_successfully(test: unittest.TestCase, outputs: List[ProcessOutput]) -> None:
"""Asserts that all the processes exited successfully."""
for output in outputs:
test.assertEqual(output.exit_result, ProcessExitResult.Ok)
test.assertEqual(output.exit_code, 0, f"Process exited with non-zero code: {output.stderr}")
def launcher_networks(network: ExonumNetwork) -> List[Dict[str, Any]]:
"""Builds a network configuration for `exonum-launcher` from the
`ExonumNetwork` object."""
networks = []
for validator_id in range(network.validators_count()):
host, public_port, private_port = network.api_address(validator_id)
node_network = {"host": host, "ssl": False, "public-api-port": public_port, "private-api-port": private_port}
networks.append(node_network)
# Temporary workaround: supervisor works in simple mode and we need only one node.
return networks[:1]
def wait_network_to_start(network: ExonumNetwork) -> None:
"""Wait for network starting"""
wait_api_to_start(network)
wait_for_block(network, 1)
def wait_for_block(network: ExonumNetwork, height: int = 1) -> None:
"""Wait for block at specific height"""
for validator_id in range(network.validators_count()):
host, public_port, private_port = network.api_address(validator_id)
client = ExonumClient(host, public_port, private_port)
for _ in range(RETRIES_AMOUNT):
try:
block = client.public_api.get_block(height)
if block.status_code == 200:
break
except ConnectionError:
pass
time.sleep(0.5)
else:
raise Exception(f"Waiting for block {height} failed for validator {validator_id}")
def wait_api_to_start(network: ExonumNetwork) -> None:
"""Wait for api starting"""
for validator_id in range(network.validators_count()):
host, public_port, private_port = network.api_address(validator_id)
client = ExonumClient(host, public_port, private_port)
for _ in range(RETRIES_AMOUNT):
try:
client.private_api.get_info()
break
except ConnectionError:
time.sleep(0.5)
else:
raise Exception(f"Waiting for start failed for validator {validator_id}")
def generate_config(
network: ExonumNetwork,
deadline_height: int = 10000,
consensus: dict = None,
instances: dict = None,
artifact_name: str = ARTIFACT_NAME,
artifact_version: str = ARTIFACT_VERSION,
artifact_action: str = "deploy",
) -> dict:
config_dict = {
"networks": launcher_networks(network),
"deadline_height": deadline_height,
"consensus": consensus,
"artifacts": {
"cryptocurrency": {
"runtime": "rust",
"name": artifact_name,
"version": artifact_version,
"action": artifact_action,
}
},
"instances": instances if instances else {},
}
return config_dict
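# Minimal usage sketch for generate_config (values are illustrative only):
#   config = generate_config(network, deadline_height=500, instances={})
#   # `config` then carries the networks, deadline_height, consensus, artifacts
#   # and instances sections that the exonum-launcher based tests consume.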
def generate_migration_config(network: ExonumNetwork, migrations: dict, deadline_height: int = 10000) -> dict:
config_dict = {"networks": launcher_networks(network), "deadline_height": deadline_height, "migrations": migrations}
return config_dict
def find_service_status(available_service, service_name):
for service in available_service["services"]:
if service["spec"]["name"] == service_name:
return service["status"]["type"]
raise RuntimeError
| 32.547368
| 120
| 0.682245
|
from typing import List, Dict, Any
import unittest
import time
from exonum_client import ExonumClient
from suite import ExonumNetwork, ProcessOutput, ProcessExitResult
from requests.exceptions import ConnectionError
RETRIES_AMOUNT = 30
ARTIFACT_NAME = "exonum-cryptocurrency"
ARTIFACT_VERSION = "0.2.0"
MIN_PEER_PORT = 6331
MIN_API_PORT = 8080
PORT_RANGE = 32
def run_dev_node(application: str) -> ExonumNetwork:
network = ExonumNetwork(application)
network.run_dev()
return network
available_peer_port = MIN_PEER_PORT
available_api_port = MIN_API_PORT
def run_n_nodes(application: str, nodes_amount: int) -> ExonumNetwork:
global available_peer_port, available_api_port
address = "127.0.0.1:{}"
network = ExonumNetwork(application)
network.generate_template(nodes_amount)
for i in range(nodes_amount):
network.generate_config(i, address.format(available_peer_port))
available_peer_port += 1
if available_peer_port > MIN_PEER_PORT + PORT_RANGE:
available_peer_port = MIN_PEER_PORT
for i in range(nodes_amount):
public_api_address = address.format(available_api_port)
private_api_address = address.format(available_api_port + 1)
network.finalize(i, public_api_address, private_api_address)
available_api_port += 2
if available_api_port > MIN_API_PORT + PORT_RANGE:
available_api_port = MIN_API_PORT
for i in range(nodes_amount):
network.run_node(i)
return network
def run_4_nodes(application: str) -> ExonumNetwork:
return run_n_nodes(application, 4)
def assert_processes_exited_successfully(test: unittest.TestCase, outputs: List[ProcessOutput]) -> None:
for output in outputs:
test.assertEqual(output.exit_result, ProcessExitResult.Ok)
test.assertEqual(output.exit_code, 0, f"Process exited with non-zero code: {output.stderr}")
def launcher_networks(network: ExonumNetwork) -> List[Dict[str, Any]]:
networks = []
for validator_id in range(network.validators_count()):
host, public_port, private_port = network.api_address(validator_id)
node_network = {"host": host, "ssl": False, "public-api-port": public_port, "private-api-port": private_port}
networks.append(node_network)
return networks[:1]
def wait_network_to_start(network: ExonumNetwork) -> None:
wait_api_to_start(network)
wait_for_block(network, 1)
def wait_for_block(network: ExonumNetwork, height: int = 1) -> None:
for validator_id in range(network.validators_count()):
host, public_port, private_port = network.api_address(validator_id)
client = ExonumClient(host, public_port, private_port)
for _ in range(RETRIES_AMOUNT):
try:
block = client.public_api.get_block(height)
if block.status_code == 200:
break
except ConnectionError:
pass
time.sleep(0.5)
else:
raise Exception(f"Waiting for block {height} failed for validator {validator_id}")
def wait_api_to_start(network: ExonumNetwork) -> None:
for validator_id in range(network.validators_count()):
host, public_port, private_port = network.api_address(validator_id)
client = ExonumClient(host, public_port, private_port)
for _ in range(RETRIES_AMOUNT):
try:
client.private_api.get_info()
break
except ConnectionError:
time.sleep(0.5)
else:
raise Exception(f"Waiting for start failed for validator {validator_id}")
def generate_config(
network: ExonumNetwork,
deadline_height: int = 10000,
consensus: dict = None,
instances: dict = None,
artifact_name: str = ARTIFACT_NAME,
artifact_version: str = ARTIFACT_VERSION,
artifact_action: str = "deploy",
) -> dict:
config_dict = {
"networks": launcher_networks(network),
"deadline_height": deadline_height,
"consensus": consensus,
"artifacts": {
"cryptocurrency": {
"runtime": "rust",
"name": artifact_name,
"version": artifact_version,
"action": artifact_action,
}
},
"instances": instances if instances else {},
}
return config_dict
def generate_migration_config(network: ExonumNetwork, migrations: dict, deadline_height: int = 10000) -> dict:
config_dict = {"networks": launcher_networks(network), "deadline_height": deadline_height, "migrations": migrations}
return config_dict
def find_service_status(available_service, service_name):
for service in available_service["services"]:
if service["spec"]["name"] == service_name:
return service["status"]["type"]
raise RuntimeError
| true
| true
|
1c41da76853ab1945ffb0df0f5fb6d193a2733ff
| 476
|
py
|
Python
|
Aula 10/Desafios/032.py
|
mateuschaves/curso-python
|
53b2f3b4bf083ae2ce7ea19dd358f49a36becd9d
|
[
"MIT"
] | 1
|
2018-07-23T04:03:35.000Z
|
2018-07-23T04:03:35.000Z
|
Aula 10/Desafios/032.py
|
mateuschaves/curso-python
|
53b2f3b4bf083ae2ce7ea19dd358f49a36becd9d
|
[
"MIT"
] | null | null | null |
Aula 10/Desafios/032.py
|
mateuschaves/curso-python
|
53b2f3b4bf083ae2ce7ea19dd358f49a36becd9d
|
[
"MIT"
] | null | null | null |
"""
Write a program that reads
any year and shows
whether it is a leap year.
Linda
Do jeito que é
Da cabeça ao pé
Do jeitinho que for
É, e só de pensar
Sei que já vou estar
Morrendo de amor
De amor
Coisa Linda - Tiago Iorc ♪♫
"""
ano = int(input('Informe o ano: '))
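# Note: 'and' binds tighter than 'or', so the condition below reads as
# (ano % 400 == 0) or (ano % 4 == 0 and ano % 100 != 0), the usual leap-year rule.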
if ano % 400 == 0 or ano % 4 == 0 and ano % 100 != 0:
print('O ano {} é bissexto !'.format(ano))
else:
print('O ano {} não é bissexto !'.format(ano))
| 17.62963
| 53
| 0.577731
|
ano = int(input('Informe o ano: '))
if ano % 400 == 0 or ano % 4 == 0 and ano % 100 != 0:
print('O ano {} é bissexto !'.format(ano))
else:
print('O ano {} não é bissexto !'.format(ano))
| true
| true
|
1c41dbed0453e2bd0f4943371b18e52147c24390
| 2,385
|
py
|
Python
|
Tutorials/12text.py
|
zza-jeff/MCB185-2022
|
562dc9de4be58cbb7783b66ee7781e5b93ff4f1d
|
[
"MIT"
] | null | null | null |
Tutorials/12text.py
|
zza-jeff/MCB185-2022
|
562dc9de4be58cbb7783b66ee7781e5b93ff4f1d
|
[
"MIT"
] | null | null | null |
Tutorials/12text.py
|
zza-jeff/MCB185-2022
|
562dc9de4be58cbb7783b66ee7781e5b93ff4f1d
|
[
"MIT"
] | 22
|
2022-01-04T17:03:32.000Z
|
2022-01-10T22:47:48.000Z
|
#!/usr/bin/env python3
# 12text.py
# Move the triple quotes downward to uncover each segment of code
"""
# Variables with text are called strings
s = 'ACGT' # a string
# Square brackets let you access sub-strings as 'slices'
# Follow closely below, because the endpoints can be confusing
# The first number before : is the starting index
# The second number, after the :, is one larger than the last index
print(s, s[0], s[1])
print(s[2], s[2:3], s[2:4], s[2:5])
# You can also do the following shortcuts
print(s[:2]) # the 0 is implicit on the left
print(s[2:]) # the end of the string is implicit on the right
# The + operator concatenates strings
s = s + 'N'
s += 'n' # note that += is a shorthand for s = s +, just like in math
print(s)
# The * operator repeats strings
s *= 3
print(s)
# The len() function returns the length of a string
# Some functions, like len(), return values; others, like print(), do not
print(len(s))
# There are several ways to format strings
txt = 'Ian'
num = 3/11
# Previously, we have used the print() function with commas
print(txt, num)
# What if we want to control the way the text looks?
# For example, what if we want exactly 3 decimal places?
# There are 3 distinct ways to format strings in python
# Method 1: printf-style formatting
# printf() is the name of an old C function, but not Python
# The syntax is well-known among old-school programmers
print('%s %.3f' % (txt, num)) # %s string, %f float
print('%s %.3f %d %e' % (txt, num, 2.1, .1)) # %d integer, %e scientific
# Method 2: str.format()
# Strings are objects with built-in functions (which are called methods)
# upper() and lower() are some simple examples of string methods
# When using object syntax, the function comes after the variable
print(txt.upper(), txt.lower())
# The format() method is a powerful way to control string formatting
print('{} {}'.format(txt, num))
print('{} {:.3f}'.format(txt, num))
# Method 3: f-strings
# f-strings are the newest and best way to format strings
# f-strings interpolate variables and other expressions inside curly brackets
print(f'{txt} {num}')
print(f'{txt} {num:.3f}')
# You can even interpolate python code
print(f'{2+2} {1/7:.5f} {len(txt)}')
# The examples here are but the tip of a very large iceberg
# Each formatting method has many more options
# Check documentation online for more information
"""
| 26.797753
| 76
| 0.697694
| true
| true
|
|
1c41dc89cac944997ef491c2a9ccedf34ed86b14
| 38
|
py
|
Python
|
expman/plotters/__init__.py
|
vzhong/expman
|
e73603a4466771170e85b19afb1583c5794f342b
|
[
"MIT"
] | null | null | null |
expman/plotters/__init__.py
|
vzhong/expman
|
e73603a4466771170e85b19afb1583c5794f342b
|
[
"MIT"
] | null | null | null |
expman/plotters/__init__.py
|
vzhong/expman
|
e73603a4466771170e85b19afb1583c5794f342b
|
[
"MIT"
] | null | null | null |
from .line_plotter import LinePlotter
| 19
| 37
| 0.868421
|
from .line_plotter import LinePlotter
| true
| true
|
1c41dcc10234348cbb222fd4d8fa9f100a3854b6
| 6,643
|
py
|
Python
|
rank.py
|
chariothy/proxy-mon
|
aa51220874d8a08553d4a2a26783533feb4a4949
|
[
"MIT"
] | null | null | null |
rank.py
|
chariothy/proxy-mon
|
aa51220874d8a08553d4a2a26783533feb4a4949
|
[
"MIT"
] | null | null | null |
rank.py
|
chariothy/proxy-mon
|
aa51220874d8a08553d4a2a26783533feb4a4949
|
[
"MIT"
] | null | null | null |
from pandas import DataFrame
from utils import ut, tmp_env
from pybeans import utils as pu
import pandas as pd
from datetime import datetime, timedelta
#from model import Proxy, Delay, Rank, query_delay, query_proxy
from premailer import transform
from pybeans import today
from notify import notify_by_ding_talk
import arrow
import os
import re
REG_DATE = re.compile(r'(\d{8})_\d{6}.json')
RANK_CONDITIONS = dict(
avg = dict(asc=True, weight=3),
std = dict(asc=True, weight=1),
lost = dict(asc=True, weight=3)
)
def clear_old_data(days:int=3):
ut.D(f'############ 清除{days}天前的CURL数据 ############')
ut.session.query(Delay).where(Delay.when < (datetime.now()-timedelta(days = days))).delete()
ut.D(f'############ 清除{days*10}天前的排名数据 ############')
ut.session.query(Rank).where(Rank.when < (datetime.now()-timedelta(days = days*10))).delete()
ut.session.commit()
def report(data):
template = tmp_env.get_template('rank.html')
html = template.render(dict(
rank_conditions = RANK_CONDITIONS,
data = data
))
#su.D(html)
html = transform(html)
#print(html)
result = ut.send_email(f'代理服务器统计报告', html_body=html)
ut.D('发送邮件:', f'失败:{result}' if result else '成功')
notify_by_ding_talk(data)
def rank_v1():
import re
reg_proxy_multi = re.compile(r'\|(\d\.?\d?)x(?:\||$)')
id_proxy = {}
multi_proxies = {}
proxies = query_proxy(ut.session).all()
for p in proxies:
#ut.D(p)
id_proxy[p.id] = p
match = reg_proxy_multi.search(p.remark)
if match:
multi = float(match.groups()[0])
else:
multi = 1.0
q = query_delay(ut.session, p.id)
df = pd.read_sql(q.statement, q.session.bind, parse_dates=["when"])
p01 = df.value.quantile(0.01)
p99 = df.value.quantile(0.95)
vdf = df[(df.value >= p01) & (df.value <= p99)]
if vdf.proxy_id.count() < 100:
continue
if multi not in multi_proxies:
multi_proxies[multi] = {}
#ut.D(f'{p.remark},倍率{multi}')
multi_proxies[multi][p.id] = [
p.id,
vdf.value.mean(),
vdf.value.median(),
df[df.value.isnull()].proxy_id.count() / df.value.count() * 100,
vdf.value.std(),
p.remark,
p.type,
vdf.value.count(),
df.value.count(),
0,
0 if p.avg_rank is None else round(p.avg_rank),
0,
            0 # must be the last element
]
#ut.D(multi_proxies)
columns = {
'id': None,
'vmean': True, # valid mean
'vmed': True, # valid med
'outper': True, # timeout percentage
'std': True,
'remark': None,
'type': None,
'vcount': None,
'count': None,
'drank': None, # delta rank
'arank': None, # avg rank
'nrank': None, # new rank
'score': None
}
column_name = {k:v for k,v in enumerate(columns)}
for multi in multi_proxies:
dfm = DataFrame(multi_proxies[multi]).T
dfm.rename(columns=column_name,inplace=True)
#ut.D(f'倍率{multi}组排序\n', dfm)
for col in columns:
if columns[col] is not None:
sorted_dfm = dfm.sort_values(by=[col], ignore_index=True, ascending=columns[col])
for i, p in sorted_dfm.iterrows():
                    # add up the per-column rank positions
multi_proxies[multi][p.id][-1] += i
data = {}
top = 2
today_str = today()
for multi in multi_proxies:
ut.D(f'倍率{multi}组TOP3')
dfr = DataFrame(multi_proxies[multi]).T
dfr.rename(columns=column_name,inplace=True)
#print(dfr)
sorted_dfr = dfr.sort_values(by=['score'], ignore_index=True)
print(sorted_dfr.head(5))
for i, sp in sorted_dfr.iterrows():
new_rank = i + 1
sp.nrank = new_rank
old_rank = multi_proxies[multi][sp.id][-3]
if old_rank is not None:
sp.drank = new_rank - old_rank
rank_mod = ut.session \
.query(Rank) \
.where(Rank.proxy_id == sp.id) \
.where(Rank.when == today_str) \
.one_or_none()
if not rank_mod:
rank_mod = Rank()
rank_mod.proxy_id = sp.id
rank_mod.when = today_str
rank_mod.rank = new_rank
ut.session.add(rank_mod)
data[multi] = sorted_dfr.T.to_dict().values()
if data:
#TODO: Report all NONE proxy
ut.session.commit()
report(data)
def df_from_json(data_path:str):
result = pu.load_json(data_path)
df = pd.json_normalize(
result,
record_path=['raw'],
meta=['alias', 'id']
).rename(columns={0: 'curl'})
#print(df)
return df
def history(df_agg):
today_int = int(pu.today('%Y%m%d'))
df_agg['date'] = today_int
df_agg['pos'] = df_agg.index
today_cnt = 0
history_path = ut['history_path']
if os.path.exists(history_path):
dfh = pd.read_csv(history_path, index_col=0, parse_dates=['date'])
        # Drop nodes that disappeared after a subscription update. Remember to back up history.csv before switching providers, otherwise entries are removed automatically.
dfh = dfh[dfh.alias.isin(df_agg.alias)]
if dfh.pos.count() == 0:
raise RuntimeError('History中不存在任何新节点,请先备份history.csv')
        # Keep only the last month of records
dfh = dfh[dfh.date>arrow.now().shift(months=-1).format('YYYY-MM-DD')]
today_cnt = dfh[dfh.date==today_int].pos.count()
if today_cnt == 0:
all_frame = pd.concat([df_agg, dfh])
all_frame.to_csv(history_path)
else:
df_agg.to_csv(history_path)
    ut.run(f'scp {history_path} {ut["scp_data_dir"]}') # copy to the web server
ut.run(ut['after_scp_data'])
def rank(df:DataFrame):
df_agg=df.groupby(['alias', 'id']).agg(avg=('curl','mean'),std=('curl','std'),valid=('curl','count'),total=('curl','size'))
df_agg['lost'] = df_agg['total'] - df_agg['valid']
df_agg.reset_index(inplace=True)
df_agg['curl_rank'] = 0
#print(df_agg)
for col in RANK_CONDITIONS:
condition = RANK_CONDITIONS[col]
percentile = f'{col}_pct'
df_agg[percentile] = df_agg[col].rank(method='min', ascending=condition['asc'])
df_agg['curl_rank'] += df_agg[percentile] * condition['weight']
return df_agg.sort_values(by=['curl_rank']).reset_index()
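# Sketch of how the weighted ranking above plays out on made-up numbers
# (illustrative only, not part of the real pipeline):
#   demo = DataFrame({'alias': ['a', 'b'], 'id': [1, 2],
#                     'avg': [120.0, 90.0], 'std': [10.0, 30.0], 'lost': [0, 2]})
#   demo['curl_rank'] = 0
#   for col, cond in RANK_CONDITIONS.items():
#       demo['curl_rank'] += demo[col].rank(method='min', ascending=cond['asc']) * cond['weight']
#   # 'b' wins on avg (weight 3) but loses on std (weight 1) and lost (weight 3),
#   # so 'a' ends up with the lower, i.e. better, curl_rank.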
if __name__ == '__main__':
df_agg = rank(df_from_json('./data/20211111_092739.json'))
history(df_agg)
#report(df_agg.head(4))
| 31.633333
| 127
| 0.564353
|
from pandas import DataFrame
from utils import ut, tmp_env
from pybeans import utils as pu
import pandas as pd
from datetime import datetime, timedelta
from premailer import transform
from pybeans import today
from notify import notify_by_ding_talk
import arrow
import os
import re
REG_DATE = re.compile(r'(\d{8})_\d{6}.json')
RANK_CONDITIONS = dict(
avg = dict(asc=True, weight=3),
std = dict(asc=True, weight=1),
lost = dict(asc=True, weight=3)
)
def clear_old_data(days:int=3):
ut.D(f'############ 清除{days}天前的CURL数据 ############')
ut.session.query(Delay).where(Delay.when < (datetime.now()-timedelta(days = days))).delete()
ut.D(f'############ 清除{days*10}天前的排名数据 ############')
ut.session.query(Rank).where(Rank.when < (datetime.now()-timedelta(days = days*10))).delete()
ut.session.commit()
def report(data):
template = tmp_env.get_template('rank.html')
html = template.render(dict(
rank_conditions = RANK_CONDITIONS,
data = data
))
html = transform(html)
result = ut.send_email(f'代理服务器统计报告', html_body=html)
ut.D('发送邮件:', f'失败:{result}' if result else '成功')
notify_by_ding_talk(data)
def rank_v1():
import re
reg_proxy_multi = re.compile(r'\|(\d\.?\d?)x(?:\||$)')
id_proxy = {}
multi_proxies = {}
proxies = query_proxy(ut.session).all()
for p in proxies:
id_proxy[p.id] = p
match = reg_proxy_multi.search(p.remark)
if match:
multi = float(match.groups()[0])
else:
multi = 1.0
q = query_delay(ut.session, p.id)
df = pd.read_sql(q.statement, q.session.bind, parse_dates=["when"])
p01 = df.value.quantile(0.01)
p99 = df.value.quantile(0.95)
vdf = df[(df.value >= p01) & (df.value <= p99)]
if vdf.proxy_id.count() < 100:
continue
if multi not in multi_proxies:
multi_proxies[multi] = {}
multi_proxies[multi][p.id] = [
p.id,
vdf.value.mean(),
vdf.value.median(),
df[df.value.isnull()].proxy_id.count() / df.value.count() * 100,
vdf.value.std(),
p.remark,
p.type,
vdf.value.count(),
df.value.count(),
0,
0 if p.avg_rank is None else round(p.avg_rank),
0,
0
]
columns = {
'id': None,
'vmean': True,
'vmed': True,
'outper': True,
'std': True,
'remark': None,
'type': None,
'vcount': None,
'count': None,
'drank': None,
'arank': None,
'nrank': None,
'score': None
}
column_name = {k:v for k,v in enumerate(columns)}
for multi in multi_proxies:
dfm = DataFrame(multi_proxies[multi]).T
dfm.rename(columns=column_name,inplace=True)
for col in columns:
if columns[col] is not None:
sorted_dfm = dfm.sort_values(by=[col], ignore_index=True, ascending=columns[col])
for i, p in sorted_dfm.iterrows():
multi_proxies[multi][p.id][-1] += i
data = {}
top = 2
today_str = today()
for multi in multi_proxies:
ut.D(f'倍率{multi}组TOP3')
dfr = DataFrame(multi_proxies[multi]).T
dfr.rename(columns=column_name,inplace=True)
sorted_dfr = dfr.sort_values(by=['score'], ignore_index=True)
print(sorted_dfr.head(5))
for i, sp in sorted_dfr.iterrows():
new_rank = i + 1
sp.nrank = new_rank
old_rank = multi_proxies[multi][sp.id][-3]
if old_rank is not None:
sp.drank = new_rank - old_rank
rank_mod = ut.session \
.query(Rank) \
.where(Rank.proxy_id == sp.id) \
.where(Rank.when == today_str) \
.one_or_none()
if not rank_mod:
rank_mod = Rank()
rank_mod.proxy_id = sp.id
rank_mod.when = today_str
rank_mod.rank = new_rank
ut.session.add(rank_mod)
data[multi] = sorted_dfr.T.to_dict().values()
if data:
ut.session.commit()
report(data)
def df_from_json(data_path:str):
result = pu.load_json(data_path)
df = pd.json_normalize(
result,
record_path=['raw'],
meta=['alias', 'id']
).rename(columns={0: 'curl'})
return df
def history(df_agg):
today_int = int(pu.today('%Y%m%d'))
df_agg['date'] = today_int
df_agg['pos'] = df_agg.index
today_cnt = 0
history_path = ut['history_path']
if os.path.exists(history_path):
dfh = pd.read_csv(history_path, index_col=0, parse_dates=['date'])
dfh = dfh[dfh.alias.isin(df_agg.alias)]
if dfh.pos.count() == 0:
raise RuntimeError('History中不存在任何新节点,请先备份history.csv')
dfh = dfh[dfh.date>arrow.now().shift(months=-1).format('YYYY-MM-DD')]
today_cnt = dfh[dfh.date==today_int].pos.count()
if today_cnt == 0:
all_frame = pd.concat([df_agg, dfh])
all_frame.to_csv(history_path)
else:
df_agg.to_csv(history_path)
ut.run(f'scp {history_path} {ut["scp_data_dir"]}')
ut.run(ut['after_scp_data'])
def rank(df:DataFrame):
df_agg=df.groupby(['alias', 'id']).agg(avg=('curl','mean'),std=('curl','std'),valid=('curl','count'),total=('curl','size'))
df_agg['lost'] = df_agg['total'] - df_agg['valid']
df_agg.reset_index(inplace=True)
df_agg['curl_rank'] = 0
for col in RANK_CONDITIONS:
condition = RANK_CONDITIONS[col]
percentile = f'{col}_pct'
df_agg[percentile] = df_agg[col].rank(method='min', ascending=condition['asc'])
df_agg['curl_rank'] += df_agg[percentile] * condition['weight']
return df_agg.sort_values(by=['curl_rank']).reset_index()
if __name__ == '__main__':
df_agg = rank(df_from_json('./data/20211111_092739.json'))
history(df_agg)
| true
| true
|
1c41deb2a0b654817fe9c0c9323a847028796100
| 25,589
|
py
|
Python
|
xfields/fieldmaps/interpolated.py
|
kparasch/xfields
|
fd288588ee269bf0d18da24ef83f5f925e6c5e4f
|
[
"MIT"
] | 1
|
2021-09-07T14:34:22.000Z
|
2021-09-07T14:34:22.000Z
|
xfields/fieldmaps/interpolated.py
|
kparasch/xfields
|
fd288588ee269bf0d18da24ef83f5f925e6c5e4f
|
[
"MIT"
] | 16
|
2021-03-12T11:28:57.000Z
|
2021-08-17T19:52:40.000Z
|
xfields/fieldmaps/interpolated.py
|
kparasch/xfields
|
fd288588ee269bf0d18da24ef83f5f925e6c5e4f
|
[
"MIT"
] | 5
|
2021-06-16T12:45:22.000Z
|
2022-01-07T15:24:09.000Z
|
import numpy as np
import xobjects as xo
import xpart as xp
import xtrack as xt
from ..solvers.fftsolvers import FFTSolver3D, FFTSolver2p5D
from ..general import _pkg_root
class TriLinearInterpolatedFieldMapData(xo.Struct):
x_min = xo.Float64
y_min = xo.Float64
z_min = xo.Float64
nx = xo.Int64
ny = xo.Int64
nz = xo.Int64
dx = xo.Float64
dy = xo.Float64
dz = xo.Float64
rho = xo.Float64[:]
phi = xo.Float64[:]
dphi_dx = xo.Float64[:]
dphi_dy = xo.Float64[:]
dphi_dz = xo.Float64[:]
TriLinearInterpolatedFieldMapData.extra_sources = [
_pkg_root.joinpath('headers/constants.h'),
_pkg_root.joinpath('fieldmaps/interpolated_src/central_diff.h'),
_pkg_root.joinpath('fieldmaps/interpolated_src/linear_interpolators.h'),
_pkg_root.joinpath('fieldmaps/interpolated_src/charge_deposition.h'),
]
TriLinearInterpolatedFieldMapData.custom_kernels = {
'central_diff': xo.Kernel(
args=[
xo.Arg(xo.Int32, pointer=False, name='nelem'),
xo.Arg(xo.Int32, pointer=False, name='row_size'),
xo.Arg(xo.Int32, pointer=False, name='stride_in_dbl'),
xo.Arg(xo.Float64, pointer=False, name='factor'),
xo.Arg(xo.Int8, pointer=True, name='matrix_buffer'),
xo.Arg(xo.Int64, pointer=False, name='matrix_offset'),
xo.Arg(xo.Int8, pointer=True, name='res_buffer'),
xo.Arg(xo.Int64, pointer=False, name='res_offset'),
],
n_threads='nelem'
),
'p2m_rectmesh3d_xparticles': xo.Kernel(
args=[
xo.Arg(xo.Int32, pointer=False, name='nparticles'),
xo.Arg(xp.Particles.XoStruct, pointer=False, name='particles'),
xo.Arg(xo.Float64, pointer=False, name='x0'),
xo.Arg(xo.Float64, pointer=False, name='y0'),
xo.Arg(xo.Float64, pointer=False, name='z0'),
xo.Arg(xo.Float64, pointer=False, name='dx'),
xo.Arg(xo.Float64, pointer=False, name='dy'),
xo.Arg(xo.Float64, pointer=False, name='dz'),
xo.Arg(xo.Int32, pointer=False, name='nx'),
xo.Arg(xo.Int32, pointer=False, name='ny'),
xo.Arg(xo.Int32, pointer=False, name='nz'),
xo.Arg(xo.Int8, pointer=True, name='grid1d_buffer'),
xo.Arg(xo.Int64, pointer=False, name='grid1d_offset'),
],
n_threads='nparticles'
),
'p2m_rectmesh3d': xo.Kernel(
args=[
xo.Arg(xo.Int32, pointer=False, name='nparticles'),
xo.Arg(xo.Float64, pointer=True, name='x'),
xo.Arg(xo.Float64, pointer=True, name='y'),
xo.Arg(xo.Float64, pointer=True, name='z'),
xo.Arg(xo.Float64, pointer=True, name='part_weights'),
xo.Arg(xo.Int64, pointer=True, name='part_state'),
xo.Arg(xo.Float64, pointer=False, name='x0'),
xo.Arg(xo.Float64, pointer=False, name='y0'),
xo.Arg(xo.Float64, pointer=False, name='z0'),
xo.Arg(xo.Float64, pointer=False, name='dx'),
xo.Arg(xo.Float64, pointer=False, name='dy'),
xo.Arg(xo.Float64, pointer=False, name='dz'),
xo.Arg(xo.Int32, pointer=False, name='nx'),
xo.Arg(xo.Int32, pointer=False, name='ny'),
xo.Arg(xo.Int32, pointer=False, name='nz'),
xo.Arg(xo.Int8, pointer=True, name='grid1d_buffer'),
xo.Arg(xo.Int64, pointer=False, name='grid1d_offset'),
],
n_threads='nparticles'
),
'TriLinearInterpolatedFieldMap_interpolate_3d_map_vector': xo.Kernel(
args=[
xo.Arg(TriLinearInterpolatedFieldMapData, pointer=False, name='fmap'),
xo.Arg(xo.Int64, pointer=False, name='n_points'),
xo.Arg(xo.Float64, pointer=True, name='x'),
xo.Arg(xo.Float64, pointer=True, name='y'),
xo.Arg(xo.Float64, pointer=True, name='z'),
xo.Arg(xo.Int64, pointer=False, name='n_quantities'),
xo.Arg(xo.Int8, pointer=True, name='buffer_mesh_quantities'),
xo.Arg(xo.Int64, pointer=True, name='offsets_mesh_quantities'),
xo.Arg(xo.Float64, pointer=True, name='particles_quantities'),
],
n_threads='n_points'
),
}
# I add underscores in front of the names so that I can define custom properties
rename_trilinear = {ff.name:'_'+ff.name for ff
in TriLinearInterpolatedFieldMapData._fields}
class TriLinearInterpolatedFieldMap(xo.dress(TriLinearInterpolatedFieldMapData,
rename=rename_trilinear)):
"""
Builds a linear interpolator for a 3D field map. The map can be updated
    using the Particle In Cell method.
Args:
context (xobjects context): identifies the :doc:`context <contexts>`
on which the computation is executed.
x_range (tuple): Horizontal extent (in meters) of the
computing grid.
y_range (tuple): Vertical extent (in meters) of the
computing grid.
        z_range (tuple): Longitudinal extent (in meters) of
            the computing grid.
nx (int): Number of cells in the horizontal direction.
ny (int): Number of cells in the vertical direction.
        nz (int): Number of cells in the longitudinal direction.
dx (float): Horizontal cell size in meters. It can be
provided alternatively to ``nx``.
dy (float): Vertical cell size in meters. It can be
provided alternatively to ``ny``.
        dz (float): Longitudinal cell size in meters. It can be
            provided alternatively to ``nz``.
x_grid (np.ndarray): Equispaced array with the horizontal grid points
(cell centers).
It can be provided alternatively to ``x_range``, ``dx``/``nx``.
        y_grid (np.ndarray): Equispaced array with the vertical grid points
(cell centers).
It can be provided alternatively to ``y_range``, ``dy``/``ny``.
        z_grid (np.ndarray): Equispaced array with the longitudinal grid points
(cell centers).
It can be provided alternatively to ``z_range``, ``dz``/``nz``.
rho (np.ndarray): initial charge density at the grid points in
Coulomb/m^3.
phi (np.ndarray): initial electric potential at the grid points in
Volts. If not provided the ``phi`` is calculated from ``rho``
using the Poisson solver (if available).
solver (str or solver object): Defines the Poisson solver to be used
to compute phi from rho. Accepted values are ``FFTSolver3D`` and
``FFTSolver2p5D``. A Xfields solver object can also be provided.
            In case ``update_on_track`` is ``False`` and ``phi`` is provided
by the user, this argument can be omitted.
scale_coordinates_in_solver (tuple): Three coefficients used to rescale
the grid coordinates in the definition of the solver. The default is
(1.,1.,1.).
updatable (bool): If ``True`` the field map can be updated after
creation. Default is ``True``.
Returns:
(TriLinearInterpolatedFieldMap): Interpolator object.
"""
def __init__(self,
_context=None,
_buffer=None,
_offset=None,
x_range=None, y_range=None, z_range=None,
nx=None, ny=None, nz=None,
dx=None, dy=None, dz=None,
x_grid=None, y_grid=None, z_grid=None,
rho=None, phi=None,
solver=None,
scale_coordinates_in_solver=(1.,1.,1.),
updatable=True,
fftplan=None
):
self.updatable = updatable
self.scale_coordinates_in_solver = scale_coordinates_in_solver
self._x_grid = _configure_grid('x', x_grid, dx, x_range, nx)
self._y_grid = _configure_grid('y', y_grid, dy, y_range, ny)
self._z_grid = _configure_grid('z', z_grid, dz, z_range, nz)
nelem = self.nx*self.ny*self.nz
self.xoinitialize(
_context=_context,
_buffer=_buffer,
_offset=_offset,
x_min = self._x_grid[0],
y_min = self._y_grid[0],
z_min = self._z_grid[0],
nx = self.nx,
ny = self.ny,
nz = self.nz,
dx = self.dx,
dy = self.dy,
dz = self.dz,
rho = nelem,
phi = nelem,
dphi_dx = nelem,
dphi_dy = nelem,
dphi_dz = nelem)
self.compile_custom_kernels(only_if_needed=True)
if isinstance(solver, str):
self.solver = self.generate_solver(solver, fftplan)
else:
#TODO: consistency check to be added
self.solver = solver
# Set rho
if rho is not None:
self.update_rho(rho, force=True)
# Set phi
if phi is not None:
self.update_phi(phi, force=True)
else:
if solver is not None and rho is not None:
self.update_phi_from_rho()
def _assert_updatable(self):
assert self.updatable, 'This FieldMap is not updatable!'
#@profile
def get_values_at_points(self,
x, y, z,
return_rho=True,
return_phi=True,
return_dphi_dx=True,
return_dphi_dy=True,
return_dphi_dz=True):
"""
Returns the charge density, the field potential and its derivatives
at the points specified by x, y, z. The output can be customized (see below).
Zeros are returned for points outside the grid.
Args:
x (float64 array): Horizontal coordinates at which the field is evaluated.
y (float64 array): Vertical coordinates at which the field is evaluated.
z (float64 array): Longitudinal coordinates at which the field is evaluated.
return_rho (bool): If ``True``, the charge density at the given points is
returned.
return_phi (bool): If ``True``, the potential at the given points is returned.
return_dphi_dx (bool): If ``True``, the horizontal derivative of the potential
at the given points is returned.
            return_dphi_dy (bool): If ``True``, the vertical derivative of the potential
at the given points is returned.
            return_dphi_dz (bool): If ``True``, the longitudinal derivative of the potential
at the given points is returned.
Returns:
(tuple of float64 array): The required quantities at the provided points.
"""
assert len(x) == len(y) == len(z)
pos_in_buffer_of_maps_to_interp = []
if return_rho:
pos_in_buffer_of_maps_to_interp.append(
self._xobject.rho._offset + self._xobject.rho._data_offset)
if return_phi:
pos_in_buffer_of_maps_to_interp.append(
self._xobject.phi._offset + self._xobject.phi._data_offset)
if return_dphi_dx:
pos_in_buffer_of_maps_to_interp.append(
self._xobject.dphi_dx._offset + self._xobject.dphi_dx._data_offset)
if return_dphi_dy:
pos_in_buffer_of_maps_to_interp.append(
self._xobject.dphi_dy._offset + self._xobject.dphi_dy._data_offset)
if return_dphi_dz:
pos_in_buffer_of_maps_to_interp.append(
self._xobject.dphi_dz._offset + self._xobject.dphi_dz._data_offset)
context = self._buffer.context
pos_in_buffer_of_maps_to_interp = context.nparray_to_context_array(
np.array(pos_in_buffer_of_maps_to_interp, dtype=np.int64))
nmaps_to_interp = len(pos_in_buffer_of_maps_to_interp)
buffer_out = context.zeros(
shape=(nmaps_to_interp * len(x),), dtype=np.float64)
if nmaps_to_interp > 0:
context.kernels.TriLinearInterpolatedFieldMap_interpolate_3d_map_vector(
fmap=self._xobject,
n_points=len(x),
x=x, y=y, z=z,
n_quantities=nmaps_to_interp,
buffer_mesh_quantities=self._buffer.buffer,
offsets_mesh_quantities=pos_in_buffer_of_maps_to_interp,
particles_quantities=buffer_out)
# Split buffer
particles_quantities = [buffer_out[ii*len(x):(ii+1)*len(x)]
for ii in range(nmaps_to_interp)]
return particles_quantities
#@profile
def update_from_particles(self,
particles=None,
x_p=None, y_p=None, z_p=None,
ncharges_p=None, state_p=None, q0_coulomb=None,
reset=True, update_phi=True, solver=None, force=False):
"""
Updates the charge density at the grid using a given set of particles,
which can be provided by a particles object or by individual arrays.
The potential can be optionally updated accordingly.
Args:
particles (xtrack.Particles): xtrack particle object.
x_p (float64 array): Horizontal coordinates of the macroparticles.
y_p (float64 array): Vertical coordinates of the macroparticles.
z_p (float64 array): Longitudinal coordinates of the macroparticles.
ncharges_p (float64 array): Number of reference charges in the
macroparticles.
            state_p (int64 array): particle state (>0 active, lost otherwise)
q0_coulomb (float64): Reference charge in Coulomb.
reset (bool): If ``True`` the stored charge density is overwritten
with the provided one. If ``False`` the provided charge density
is added to the stored one. The default is ``True``.
update_phi (bool): If ``True`` the stored potential is recalculated
from the stored charge density.
solver (Solver object): solver object to be used to solve Poisson's
equation (compute phi from rho). If ``None`` is provided the solver
attached to the fieldmap is used (if any). The default is ``None``.
force (bool): If ``True`` the potential is updated even if the
map is declared as not updateable. The default is ``False``.
"""
if not force:
self._assert_updatable()
if reset:
self.rho[:,:,:] = 0.
context = self._buffer.context
if particles is None:
assert (len(x_p) == len(y_p) == len(z_p) == len(ncharges_p))
if state_p is None:
state_p = context.zeros(shape=x_p.shape, dtype=np.int64) + 1
else:
assert len(state_p) == len(x_p)
context.kernels.p2m_rectmesh3d(
nparticles=len(x_p),
x=x_p, y=y_p, z=z_p,
part_weights=q0_coulomb*ncharges_p,
part_state=state_p,
x0=self.x_grid[0], y0=self.y_grid[0], z0=self.z_grid[0],
dx=self.dx, dy=self.dy, dz=self.dz,
nx=self.nx, ny=self.ny, nz=self.nz,
grid1d_buffer=self._xobject.rho._buffer.buffer,
grid1d_offset=self._xobject.rho._offset
+self._xobject.rho._data_offset)
else:
assert (x_p is None and y_p is None and z_p is None
and ncharges_p is None and state_p is None)
context.kernels.p2m_rectmesh3d_xparticles(
nparticles=particles._capacity,
particles=particles,
x0=self.x_grid[0], y0=self.y_grid[0], z0=self.z_grid[0],
dx=self.dx, dy=self.dy, dz=self.dz,
nx=self.nx, ny=self.ny, nz=self.nz,
grid1d_buffer=self._xobject.rho._buffer.buffer,
grid1d_offset=self._xobject.rho._offset
+self._xobject.rho._data_offset)
if update_phi:
self.update_phi_from_rho(solver=solver)
def update_rho(self, rho, reset=True, force=False):
"""
Updates the charge density on the grid.
Args:
rho (float64 array): Charge density at the grid points in C/m^3.
reset (bool): If ``True`` the stored charge density is overwritten
with the provided one. If ``False`` the provided charge density
is added to the stored one. The default is ``True``.
force (bool): If ``True`` the charge density is updated even if the
map is declared as not updateable. The default is ``False``.
"""
if not force:
self._assert_updatable()
if reset:
self.rho[:,:,:] = rho
else:
raise ValueError('Not implemented!')
#@profile
def update_phi(self, phi, reset=True, force=False):
"""
Updates the potential on the grid. The stored derivatives are also
updated.
Args:
            phi (float64 array): Potential at the grid points.
reset (bool): If ``True`` the stored potential is overwritten
with the provided one. If ``False`` the provided potential
is added to the stored one. The default is ``True``.
force (bool): If ``True`` the potential is updated even if the
map is declared as not updateable. The default is ``False``.
"""
if not force:
self._assert_updatable()
if reset:
self.phi.T[:,:,:] = phi.T
else:
raise ValueError('Not implemented!')
context = self._buffer.context
# Compute gradient
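        # Central differences on the uniform grid, e.g. along x:
        #   dphi_dx[ix, iy, iz] ~ (phi[ix+1, iy, iz] - phi[ix-1, iy, iz]) / (2*dx),
        # which is what the factor 1/(2*dx) passed to the kernel implements.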
context.kernels.central_diff(
nelem = self.phi.size,
row_size = self.nx,
stride_in_dbl = self.phi.strides[0]/8,
factor = 1/(2*self.dx),
matrix_buffer = self._xobject.phi._buffer.buffer,
matrix_offset = (self._xobject.phi._offset
+ self._xobject.phi._data_offset),
res_buffer = self._xobject.dphi_dx._buffer.buffer,
res_offset = (self._xobject.dphi_dx._offset
+ self._xobject.dphi_dx._data_offset))
context.kernels.central_diff(
nelem = self.phi.size,
row_size = self.ny,
stride_in_dbl = self.phi.strides[1]/8,
factor = 1/(2*self.dy),
matrix_buffer = self._xobject.phi._buffer.buffer,
matrix_offset = (self._xobject.phi._offset
+ self._xobject.phi._data_offset),
res_buffer = self._xobject.dphi_dy._buffer.buffer,
res_offset = (self._xobject.dphi_dy._offset
+ self._xobject.dphi_dy._data_offset))
context.kernels.central_diff(
nelem = self.phi.size,
row_size = self.nz,
stride_in_dbl = self.phi.strides[2]/8,
factor = 1/(2*self.dz),
matrix_buffer = self._xobject.phi._buffer.buffer,
matrix_offset = (self._xobject.phi._offset
+ self._xobject.phi._data_offset),
res_buffer = self._xobject.dphi_dz._buffer.buffer,
res_offset = (self._xobject.dphi_dz._offset
+ self._xobject.dphi_dz._data_offset))
#@profile
def update_phi_from_rho(self, solver=None):
"""
Updates the potential on the grid (phi) from the charge density on the
        grid (rho). It requires a :doc:`Poisson solver object <solvers>`. If
none is provided the one attached to the fieldmap is used (if any).
Args:
solver (Solver object): solver object to be used to solve Poisson's
equation. If ``None`` is provided the solver attached to the fieldmap
is used (if any). The default is ``None``.
"""
self._assert_updatable()
if solver is None:
if hasattr(self, 'solver'):
solver = self.solver
else:
raise ValueError('I have no solver to compute phi!')
new_phi = solver.solve(self.rho)
self.update_phi(new_phi)
def generate_solver(self, solver, fftplan):
"""
Generates a Poisson solver associated to the defined grid.
Args:
solver (str): Defines the Poisson solver to be used
to compute phi from rho. Accepted values are ``FFTSolver3D`` and
              ``FFTSolver2p5D``.
            fftplan: FFT plan object that is passed on to the solver
              constructor (optional).
Returns:
(Solver): Solver object associated to the defined grid.
"""
scale_dx, scale_dy, scale_dz = self.scale_coordinates_in_solver
if solver == 'FFTSolver3D':
solver = FFTSolver3D(
dx=self.dx*scale_dx,
dy=self.dy*scale_dy,
dz=self.dz*scale_dz,
nx=self.nx, ny=self.ny, nz=self.nz,
context=self._buffer.context,
fftplan=fftplan)
elif solver == 'FFTSolver2p5D':
solver = FFTSolver2p5D(
dx=self.dx*scale_dx,
dy=self.dy*scale_dy,
dz=self.dz*scale_dz,
nx=self.nx, ny=self.ny, nz=self.nz,
context=self._buffer.context,
fftplan=fftplan)
else:
raise ValueError(f'solver name {solver} not recognized')
return solver
@property
def x_grid(self):
"""
Array with the horizontal grid points (cell centers).
"""
return self._x_grid
@property
def y_grid(self):
"""
Array with the vertical grid points (cell centers).
"""
return self._y_grid
@property
def z_grid(self):
"""
Array with the longitudinal grid points (cell centers).
"""
return self._z_grid
@property
def nx(self):
"""
Number of cells in the horizontal direction.
"""
return len(self.x_grid)
@property
def ny(self):
"""
Number of cells in the vertical direction.
"""
return len(self.y_grid)
@property
def nz(self):
"""
Number of cells in the longitudinal direction.
"""
return len(self.z_grid)
@property
def dx(self):
"""
Horizontal cell size in meters.
"""
return self.x_grid[1] - self.x_grid[0]
@property
def dy(self):
"""
Vertical cell size in meters.
"""
return self.y_grid[1] - self.y_grid[0]
@property
def dz(self):
"""
Longitudinal cell size in meters.
"""
return self.z_grid[1] - self.z_grid[0]
# TODO: these reshapes can be avoided by allocating 3d arrays directly in the xobject
@property
def rho(self):
return self._rho.reshape(
(self.nx, self.ny, self.nz), order='F')
@property
def phi(self):
"""
Electric potential at the grid points in Volts.
"""
return self._phi.reshape(
(self.nx, self.ny, self.nz), order='F')
@property
def dphi_dx(self):
return self._dphi_dx.reshape(
(self.nx, self.ny, self.nz), order='F')
@property
def dphi_dy(self):
return self._dphi_dy.reshape(
(self.nx, self.ny, self.nz), order='F')
@property
def dphi_dz(self):
return self._dphi_dz.reshape(
(self.nx, self.ny, self.nz), order='F')
def _configure_grid(vname, v_grid, dv, v_range, nv):
# Check input consistency
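    # The grid can be specified in exactly one of three ways:
    #   1) an explicit, uniformly spaced v_grid array,
    #   2) a (v_min, v_max) range together with a cell size dv,
    #   3) a (v_min, v_max) range together with a number of points nv.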
if v_grid is not None:
assert dv is None, (f'd{vname} cannot be given '
f'if {vname}_grid is provided ')
assert nv is None, (f'n{vname} cannot be given '
f'if {vname}_grid is provided ')
assert v_range is None, (f'{vname}_range cannot be given '
f'if {vname}_grid is provided')
ddd = np.diff(v_grid)
assert np.allclose(ddd,ddd[0]), (f'{vname}_grid must be '
                                         'uniformly spaced')
else:
assert v_range is not None, (f'{vname}_grid or {vname}_range '
f'must be provided')
assert len(v_range)==2, (f'{vname}_range must be in the form '
f'({vname}_min, {vname}_max)')
if dv is not None:
assert nv is None, (f'n{vname} cannot be given '
f'if d{vname} is provided ')
v_grid = np.arange(v_range[0], v_range[1]+0.1*dv, dv)
else:
assert nv is not None, (f'n{vname} must be given '
f'if d{vname} is not provided ')
v_grid = np.linspace(v_range[0], v_range[1], nv)
return v_grid
| 40.10815
| 90
| 0.563992
|
import numpy as np
import xobjects as xo
import xpart as xp
import xtrack as xt
from ..solvers.fftsolvers import FFTSolver3D, FFTSolver2p5D
from ..general import _pkg_root
class TriLinearInterpolatedFieldMapData(xo.Struct):
x_min = xo.Float64
y_min = xo.Float64
z_min = xo.Float64
nx = xo.Int64
ny = xo.Int64
nz = xo.Int64
dx = xo.Float64
dy = xo.Float64
dz = xo.Float64
rho = xo.Float64[:]
phi = xo.Float64[:]
dphi_dx = xo.Float64[:]
dphi_dy = xo.Float64[:]
dphi_dz = xo.Float64[:]
TriLinearInterpolatedFieldMapData.extra_sources = [
_pkg_root.joinpath('headers/constants.h'),
_pkg_root.joinpath('fieldmaps/interpolated_src/central_diff.h'),
_pkg_root.joinpath('fieldmaps/interpolated_src/linear_interpolators.h'),
_pkg_root.joinpath('fieldmaps/interpolated_src/charge_deposition.h'),
]
TriLinearInterpolatedFieldMapData.custom_kernels = {
'central_diff': xo.Kernel(
args=[
xo.Arg(xo.Int32, pointer=False, name='nelem'),
xo.Arg(xo.Int32, pointer=False, name='row_size'),
xo.Arg(xo.Int32, pointer=False, name='stride_in_dbl'),
xo.Arg(xo.Float64, pointer=False, name='factor'),
xo.Arg(xo.Int8, pointer=True, name='matrix_buffer'),
xo.Arg(xo.Int64, pointer=False, name='matrix_offset'),
xo.Arg(xo.Int8, pointer=True, name='res_buffer'),
xo.Arg(xo.Int64, pointer=False, name='res_offset'),
],
n_threads='nelem'
),
'p2m_rectmesh3d_xparticles': xo.Kernel(
args=[
xo.Arg(xo.Int32, pointer=False, name='nparticles'),
xo.Arg(xp.Particles.XoStruct, pointer=False, name='particles'),
xo.Arg(xo.Float64, pointer=False, name='x0'),
xo.Arg(xo.Float64, pointer=False, name='y0'),
xo.Arg(xo.Float64, pointer=False, name='z0'),
xo.Arg(xo.Float64, pointer=False, name='dx'),
xo.Arg(xo.Float64, pointer=False, name='dy'),
xo.Arg(xo.Float64, pointer=False, name='dz'),
xo.Arg(xo.Int32, pointer=False, name='nx'),
xo.Arg(xo.Int32, pointer=False, name='ny'),
xo.Arg(xo.Int32, pointer=False, name='nz'),
xo.Arg(xo.Int8, pointer=True, name='grid1d_buffer'),
xo.Arg(xo.Int64, pointer=False, name='grid1d_offset'),
],
n_threads='nparticles'
),
'p2m_rectmesh3d': xo.Kernel(
args=[
xo.Arg(xo.Int32, pointer=False, name='nparticles'),
xo.Arg(xo.Float64, pointer=True, name='x'),
xo.Arg(xo.Float64, pointer=True, name='y'),
xo.Arg(xo.Float64, pointer=True, name='z'),
xo.Arg(xo.Float64, pointer=True, name='part_weights'),
xo.Arg(xo.Int64, pointer=True, name='part_state'),
xo.Arg(xo.Float64, pointer=False, name='x0'),
xo.Arg(xo.Float64, pointer=False, name='y0'),
xo.Arg(xo.Float64, pointer=False, name='z0'),
xo.Arg(xo.Float64, pointer=False, name='dx'),
xo.Arg(xo.Float64, pointer=False, name='dy'),
xo.Arg(xo.Float64, pointer=False, name='dz'),
xo.Arg(xo.Int32, pointer=False, name='nx'),
xo.Arg(xo.Int32, pointer=False, name='ny'),
xo.Arg(xo.Int32, pointer=False, name='nz'),
xo.Arg(xo.Int8, pointer=True, name='grid1d_buffer'),
xo.Arg(xo.Int64, pointer=False, name='grid1d_offset'),
],
n_threads='nparticles'
),
'TriLinearInterpolatedFieldMap_interpolate_3d_map_vector': xo.Kernel(
args=[
xo.Arg(TriLinearInterpolatedFieldMapData, pointer=False, name='fmap'),
xo.Arg(xo.Int64, pointer=False, name='n_points'),
xo.Arg(xo.Float64, pointer=True, name='x'),
xo.Arg(xo.Float64, pointer=True, name='y'),
xo.Arg(xo.Float64, pointer=True, name='z'),
xo.Arg(xo.Int64, pointer=False, name='n_quantities'),
xo.Arg(xo.Int8, pointer=True, name='buffer_mesh_quantities'),
xo.Arg(xo.Int64, pointer=True, name='offsets_mesh_quantities'),
xo.Arg(xo.Float64, pointer=True, name='particles_quantities'),
],
n_threads='n_points'
),
}
rename_trilinear = {ff.name:'_'+ff.name for ff
in TriLinearInterpolatedFieldMapData._fields}
class TriLinearInterpolatedFieldMap(xo.dress(TriLinearInterpolatedFieldMapData,
rename=rename_trilinear)):
def __init__(self,
_context=None,
_buffer=None,
_offset=None,
x_range=None, y_range=None, z_range=None,
nx=None, ny=None, nz=None,
dx=None, dy=None, dz=None,
x_grid=None, y_grid=None, z_grid=None,
rho=None, phi=None,
solver=None,
scale_coordinates_in_solver=(1.,1.,1.),
updatable=True,
fftplan=None
):
self.updatable = updatable
self.scale_coordinates_in_solver = scale_coordinates_in_solver
self._x_grid = _configure_grid('x', x_grid, dx, x_range, nx)
self._y_grid = _configure_grid('y', y_grid, dy, y_range, ny)
self._z_grid = _configure_grid('z', z_grid, dz, z_range, nz)
nelem = self.nx*self.ny*self.nz
self.xoinitialize(
_context=_context,
_buffer=_buffer,
_offset=_offset,
x_min = self._x_grid[0],
y_min = self._y_grid[0],
z_min = self._z_grid[0],
nx = self.nx,
ny = self.ny,
nz = self.nz,
dx = self.dx,
dy = self.dy,
dz = self.dz,
rho = nelem,
phi = nelem,
dphi_dx = nelem,
dphi_dy = nelem,
dphi_dz = nelem)
self.compile_custom_kernels(only_if_needed=True)
if isinstance(solver, str):
self.solver = self.generate_solver(solver, fftplan)
else:
self.solver = solver
if rho is not None:
self.update_rho(rho, force=True)
if phi is not None:
self.update_phi(phi, force=True)
else:
if solver is not None and rho is not None:
self.update_phi_from_rho()
def _assert_updatable(self):
assert self.updatable, 'This FieldMap is not updatable!'
def get_values_at_points(self,
x, y, z,
return_rho=True,
return_phi=True,
return_dphi_dx=True,
return_dphi_dy=True,
return_dphi_dz=True):
assert len(x) == len(y) == len(z)
pos_in_buffer_of_maps_to_interp = []
if return_rho:
pos_in_buffer_of_maps_to_interp.append(
self._xobject.rho._offset + self._xobject.rho._data_offset)
if return_phi:
pos_in_buffer_of_maps_to_interp.append(
self._xobject.phi._offset + self._xobject.phi._data_offset)
if return_dphi_dx:
pos_in_buffer_of_maps_to_interp.append(
self._xobject.dphi_dx._offset + self._xobject.dphi_dx._data_offset)
if return_dphi_dy:
pos_in_buffer_of_maps_to_interp.append(
self._xobject.dphi_dy._offset + self._xobject.dphi_dy._data_offset)
if return_dphi_dz:
pos_in_buffer_of_maps_to_interp.append(
self._xobject.dphi_dz._offset + self._xobject.dphi_dz._data_offset)
context = self._buffer.context
pos_in_buffer_of_maps_to_interp = context.nparray_to_context_array(
np.array(pos_in_buffer_of_maps_to_interp, dtype=np.int64))
nmaps_to_interp = len(pos_in_buffer_of_maps_to_interp)
buffer_out = context.zeros(
shape=(nmaps_to_interp * len(x),), dtype=np.float64)
if nmaps_to_interp > 0:
context.kernels.TriLinearInterpolatedFieldMap_interpolate_3d_map_vector(
fmap=self._xobject,
n_points=len(x),
x=x, y=y, z=z,
n_quantities=nmaps_to_interp,
buffer_mesh_quantities=self._buffer.buffer,
offsets_mesh_quantities=pos_in_buffer_of_maps_to_interp,
particles_quantities=buffer_out)
particles_quantities = [buffer_out[ii*len(x):(ii+1)*len(x)]
for ii in range(nmaps_to_interp)]
return particles_quantities
def update_from_particles(self,
particles=None,
x_p=None, y_p=None, z_p=None,
ncharges_p=None, state_p=None, q0_coulomb=None,
reset=True, update_phi=True, solver=None, force=False):
if not force:
self._assert_updatable()
if reset:
self.rho[:,:,:] = 0.
context = self._buffer.context
if particles is None:
assert (len(x_p) == len(y_p) == len(z_p) == len(ncharges_p))
if state_p is None:
state_p = context.zeros(shape=x_p.shape, dtype=np.int64) + 1
else:
assert len(state_p) == len(x_p)
context.kernels.p2m_rectmesh3d(
nparticles=len(x_p),
x=x_p, y=y_p, z=z_p,
part_weights=q0_coulomb*ncharges_p,
part_state=state_p,
x0=self.x_grid[0], y0=self.y_grid[0], z0=self.z_grid[0],
dx=self.dx, dy=self.dy, dz=self.dz,
nx=self.nx, ny=self.ny, nz=self.nz,
grid1d_buffer=self._xobject.rho._buffer.buffer,
grid1d_offset=self._xobject.rho._offset
+self._xobject.rho._data_offset)
else:
assert (x_p is None and y_p is None and z_p is None
and ncharges_p is None and state_p is None)
context.kernels.p2m_rectmesh3d_xparticles(
nparticles=particles._capacity,
particles=particles,
x0=self.x_grid[0], y0=self.y_grid[0], z0=self.z_grid[0],
dx=self.dx, dy=self.dy, dz=self.dz,
nx=self.nx, ny=self.ny, nz=self.nz,
grid1d_buffer=self._xobject.rho._buffer.buffer,
grid1d_offset=self._xobject.rho._offset
+self._xobject.rho._data_offset)
if update_phi:
self.update_phi_from_rho(solver=solver)
def update_rho(self, rho, reset=True, force=False):
if not force:
self._assert_updatable()
if reset:
self.rho[:,:,:] = rho
else:
raise ValueError('Not implemented!')
def update_phi(self, phi, reset=True, force=False):
if not force:
self._assert_updatable()
if reset:
self.phi.T[:,:,:] = phi.T
else:
raise ValueError('Not implemented!')
context = self._buffer.context
context.kernels.central_diff(
nelem = self.phi.size,
row_size = self.nx,
stride_in_dbl = self.phi.strides[0]/8,
factor = 1/(2*self.dx),
matrix_buffer = self._xobject.phi._buffer.buffer,
matrix_offset = (self._xobject.phi._offset
+ self._xobject.phi._data_offset),
res_buffer = self._xobject.dphi_dx._buffer.buffer,
res_offset = (self._xobject.dphi_dx._offset
+ self._xobject.dphi_dx._data_offset))
context.kernels.central_diff(
nelem = self.phi.size,
row_size = self.ny,
stride_in_dbl = self.phi.strides[1]/8,
factor = 1/(2*self.dy),
matrix_buffer = self._xobject.phi._buffer.buffer,
matrix_offset = (self._xobject.phi._offset
+ self._xobject.phi._data_offset),
res_buffer = self._xobject.dphi_dy._buffer.buffer,
res_offset = (self._xobject.dphi_dy._offset
+ self._xobject.dphi_dy._data_offset))
context.kernels.central_diff(
nelem = self.phi.size,
row_size = self.nz,
stride_in_dbl = self.phi.strides[2]/8,
factor = 1/(2*self.dz),
matrix_buffer = self._xobject.phi._buffer.buffer,
matrix_offset = (self._xobject.phi._offset
+ self._xobject.phi._data_offset),
res_buffer = self._xobject.dphi_dz._buffer.buffer,
res_offset = (self._xobject.dphi_dz._offset
+ self._xobject.dphi_dz._data_offset))
def update_phi_from_rho(self, solver=None):
self._assert_updatable()
if solver is None:
if hasattr(self, 'solver'):
solver = self.solver
else:
raise ValueError('I have no solver to compute phi!')
new_phi = solver.solve(self.rho)
self.update_phi(new_phi)
def generate_solver(self, solver, fftplan):
scale_dx, scale_dy, scale_dz = self.scale_coordinates_in_solver
if solver == 'FFTSolver3D':
solver = FFTSolver3D(
dx=self.dx*scale_dx,
dy=self.dy*scale_dy,
dz=self.dz*scale_dz,
nx=self.nx, ny=self.ny, nz=self.nz,
context=self._buffer.context,
fftplan=fftplan)
elif solver == 'FFTSolver2p5D':
solver = FFTSolver2p5D(
dx=self.dx*scale_dx,
dy=self.dy*scale_dy,
dz=self.dz*scale_dz,
nx=self.nx, ny=self.ny, nz=self.nz,
context=self._buffer.context,
fftplan=fftplan)
else:
raise ValueError(f'solver name {solver} not recognized')
return solver
@property
def x_grid(self):
return self._x_grid
@property
def y_grid(self):
return self._y_grid
@property
def z_grid(self):
return self._z_grid
@property
def nx(self):
return len(self.x_grid)
@property
def ny(self):
return len(self.y_grid)
@property
def nz(self):
return len(self.z_grid)
@property
def dx(self):
return self.x_grid[1] - self.x_grid[0]
@property
def dy(self):
return self.y_grid[1] - self.y_grid[0]
@property
def dz(self):
return self.z_grid[1] - self.z_grid[0]
@property
def rho(self):
return self._rho.reshape(
(self.nx, self.ny, self.nz), order='F')
@property
def phi(self):
return self._phi.reshape(
(self.nx, self.ny, self.nz), order='F')
@property
def dphi_dx(self):
return self._dphi_dx.reshape(
(self.nx, self.ny, self.nz), order='F')
@property
def dphi_dy(self):
return self._dphi_dy.reshape(
(self.nx, self.ny, self.nz), order='F')
@property
def dphi_dz(self):
return self._dphi_dz.reshape(
(self.nx, self.ny, self.nz), order='F')
def _configure_grid(vname, v_grid, dv, v_range, nv):
if v_grid is not None:
assert dv is None, (f'd{vname} cannot be given '
f'if {vname}_grid is provided ')
assert nv is None, (f'n{vname} cannot be given '
f'if {vname}_grid is provided ')
assert v_range is None, (f'{vname}_range cannot be given '
f'if {vname}_grid is provided')
ddd = np.diff(v_grid)
assert np.allclose(ddd,ddd[0]), (f'{vname}_grid must be '
                                         'uniformly spaced')
else:
assert v_range is not None, (f'{vname}_grid or {vname}_range '
f'must be provided')
assert len(v_range)==2, (f'{vname}_range must be in the form '
f'({vname}_min, {vname}_max)')
if dv is not None:
assert nv is None, (f'n{vname} cannot be given '
f'if d{vname} is provided ')
v_grid = np.arange(v_range[0], v_range[1]+0.1*dv, dv)
else:
assert nv is not None, (f'n{vname} must be given '
f'if d{vname} is not provided ')
v_grid = np.linspace(v_range[0], v_range[1], nv)
return v_grid
| true
| true
|
1c41df5dba5fe449010704d4dfae91c58717b001
| 1,466
|
py
|
Python
|
luz_asm_sim/lib/asmlib/objectfile.py
|
eliben/luz-cpu
|
680917f975f69e7d3cd32edbcbf79a650cbbead9
|
[
"Unlicense"
] | 135
|
2015-01-19T07:18:00.000Z
|
2022-01-10T15:49:44.000Z
|
luz_asm_sim/lib/asmlib/objectfile.py
|
AhmedMounir/luz-cpu
|
680917f975f69e7d3cd32edbcbf79a650cbbead9
|
[
"Unlicense"
] | 2
|
2017-04-02T18:37:34.000Z
|
2018-08-15T13:25:40.000Z
|
luz_asm_sim/lib/asmlib/objectfile.py
|
AhmedMounir/luz-cpu
|
680917f975f69e7d3cd32edbcbf79a650cbbead9
|
[
"Unlicense"
] | 24
|
2015-02-18T02:30:32.000Z
|
2021-08-22T23:48:01.000Z
|
# Represents the object file recognized by the Luz architecture.
# An object file is relocatable. It is created by the assembler,
# and later combined with other object files by the linker into
# an executable.
#
# Luz micro-controller assembler
# Eli Bendersky (C) 2008-2010
#
class ObjectFile(object):
""" Use one of the factory methods to create ObjectFile
        instances: from_assembler, from_file.
The name of the object can be accessed via the .name
attribute.
"""
def __init__(self):
self.seg_data = {}
self.export_table = []
self.import_table = []
self.reloc_table = []
self.name = None
@classmethod
def from_assembler( cls,
seg_data,
export_table,
import_table,
reloc_table):
""" Create a new ObjectFile from assembler-generated data
structures.
"""
obj = cls()
assert isinstance(seg_data, dict)
for table in (export_table, import_table, reloc_table):
assert isinstance(table, list)
obj.seg_data = seg_data
obj.export_table = export_table
obj.import_table = import_table
obj.reloc_table = reloc_table
return obj
@classmethod
def from_file(cls, file):
""" 'file' is either a filename (a String), or a readable
IO object.
"""
pass
| 28.745098
| 65
| 0.592087
|
class ObjectFile(object):
def __init__(self):
self.seg_data = {}
self.export_table = []
self.import_table = []
self.reloc_table = []
self.name = None
@classmethod
def from_assembler( cls,
seg_data,
export_table,
import_table,
reloc_table):
obj = cls()
assert isinstance(seg_data, dict)
for table in (export_table, import_table, reloc_table):
assert isinstance(table, list)
obj.seg_data = seg_data
obj.export_table = export_table
obj.import_table = import_table
obj.reloc_table = reloc_table
return obj
@classmethod
def from_file(cls, file):
pass
| true
| true
|
1c41df68474777b29655bb8f57d627df3da18ac8
| 3,415
|
py
|
Python
|
src/elki_interface/Hics.py
|
eliavw/elki_interface
|
b60dd746a8deb08272f5c39cb7822dd3408188c2
|
[
"MIT"
] | null | null | null |
src/elki_interface/Hics.py
|
eliavw/elki_interface
|
b60dd746a8deb08272f5c39cb7822dd3408188c2
|
[
"MIT"
] | null | null | null |
src/elki_interface/Hics.py
|
eliavw/elki_interface
|
b60dd746a8deb08272f5c39cb7822dd3408188c2
|
[
"MIT"
] | null | null | null |
import io
import warnings
import pandas as pd
from prefect import Flow
from prefect.tasks.shell import ShellTask
from .cte import ELKI_FILEPATH
from .Elki import Elki
class Hics(Elki):
def __init__(
self, verbose=False, elki=ELKI_FILEPATH, contamination=0.1, k=5, **kwargs
):
super().__init__(verbose=verbose, elki=elki, contamination=contamination)
self.k = k
self.fit_data_fp = None
return
def fit(self, X):
super().fit(X)
self.status = self.fit_flow.run()
raw = self.status.result[self.fit_shell_task].result
res = self._filter_raw(raw, X.shape[0])
# Process
df = self._to_dataframe(res)
# Set scores and labels
# TODO: Make this more consistent.
self._scores = self._to_scores(df)
self._set_labels()
# Sometimes it struggles to properly kill the process.
del raw
del res
self.fit_shell_task = None
return
# --------------
# Parameter - Properties
# --------------
@property
def k(self):
# One more setter-call to ensure the value is correct!
self.k = self._k
return self._k
@k.setter
def k(self, value):
self._k = self._compatible_k_n_instances(value)
return
def _compatible_k_n_instances(self, k):
if self.n_instances is None:
return k
else:
res = min(k, self.n_instances-1)
if res < k:
msg = """
k was set to {}, but n_instances is {}.
ELKI won't have this. Changed k to {}
""".format(k, self.n_instances, res)
warnings.warn(msg)
return res
# --------------
# Internal Methods
# --------------
@staticmethod
def _filter_raw(raw, n_instances):
if len(raw) > n_instances:
"""
ELKI returned some additional things.
These are not outputs. Luckily, the last n_instances _will_ be outputs,
therefore, we just look at those.
This is hacky, but a lot easier than writing a custom parser, and for our purposes, it works fine.
"""
return raw[-n_instances:]
else:
return raw
@staticmethod
def _to_dataframe(res):
return pd.read_csv(
io.StringIO("\n".join(res)), delim_whitespace=True, header=None, index_col=0
)
@staticmethod
def _to_scores(dataframe):
return dataframe.sort_index().values.squeeze()
# --------------
# CLI Properties
# --------------
@property
def fit_flow(self):
shelltask = ShellTask(return_all=True, log_stderr=True)
with Flow("fit") as f:
self.fit_shell_task = shelltask(command=self.fit_command)
return f
@property
def fit_command(self):
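        # Assemble the full ELKI command line from snippets; kdd_command,
        # eval_command and o_command are presumably inherited from the Elki
        # base class (they are not defined in this file).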
fit_cmd = "{} {} {} {} {}".format(
self.kdd_command,
self.i_command,
self.hics_command,
self.eval_command,
self.o_command,
)
return fit_cmd
@property
def hics_command(self):
return "-algorithm outlier.meta.HiCS -lof.k {}".format(self.k)
@property
def i_command(self):
# Command snippet for input data
return "-db HashmapDatabase -dbc.in {}".format(self.data_filepath.name)
| 26.068702
| 110
| 0.562518
|
import io
import warnings
import pandas as pd
from prefect import Flow
from prefect.tasks.shell import ShellTask
from .cte import ELKI_FILEPATH
from .Elki import Elki
class Hics(Elki):
def __init__(
self, verbose=False, elki=ELKI_FILEPATH, contamination=0.1, k=5, **kwargs
):
super().__init__(verbose=verbose, elki=elki, contamination=contamination)
self.k = k
self.fit_data_fp = None
return
def fit(self, X):
super().fit(X)
self.status = self.fit_flow.run()
raw = self.status.result[self.fit_shell_task].result
res = self._filter_raw(raw, X.shape[0])
df = self._to_dataframe(res)
self._scores = self._to_scores(df)
self._set_labels()
del raw
del res
self.fit_shell_task = None
return
@property
def k(self):
self.k = self._k
return self._k
@k.setter
def k(self, value):
self._k = self._compatible_k_n_instances(value)
return
def _compatible_k_n_instances(self, k):
if self.n_instances is None:
return k
else:
res = min(k, self.n_instances-1)
if res < k:
msg = """
k was set to {}, but n_instances is {}.
ELKI won't have this. Changed k to {}
""".format(k, self.n_instances, res)
warnings.warn(msg)
return res
# --------------
# Internal Methods
# --------------
@staticmethod
def _filter_raw(raw, n_instances):
if len(raw) > n_instances:
return raw[-n_instances:]
else:
return raw
@staticmethod
def _to_dataframe(res):
return pd.read_csv(
io.StringIO("\n".join(res)), delim_whitespace=True, header=None, index_col=0
)
@staticmethod
def _to_scores(dataframe):
return dataframe.sort_index().values.squeeze()
# --------------
# CLI Properties
# --------------
@property
def fit_flow(self):
shelltask = ShellTask(return_all=True, log_stderr=True)
with Flow("fit") as f:
self.fit_shell_task = shelltask(command=self.fit_command)
return f
@property
def fit_command(self):
fit_cmd = "{} {} {} {} {}".format(
self.kdd_command,
self.i_command,
self.hics_command,
self.eval_command,
self.o_command,
)
return fit_cmd
@property
def hics_command(self):
return "-algorithm outlier.meta.HiCS -lof.k {}".format(self.k)
@property
def i_command(self):
# Command snippet for input data
return "-db HashmapDatabase -dbc.in {}".format(self.data_filepath.name)
| true
| true
|
1c41dffec20d92c8af4de2b7464ed7d62eef7243
| 23,885
|
py
|
Python
|
reader/blocksDB_diagramming.py
|
nairobi222/chainhammer
|
94ab5269a9a9c751d355b41f90ac244026ccf46b
|
[
"MIT"
] | null | null | null |
reader/blocksDB_diagramming.py
|
nairobi222/chainhammer
|
94ab5269a9a9c751d355b41f90ac244026ccf46b
|
[
"MIT"
] | null | null | null |
reader/blocksDB_diagramming.py
|
nairobi222/chainhammer
|
94ab5269a9a9c751d355b41f90ac244026ccf46b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""
@summary: for the jupyter notebooks: tools, column creators, diagramming routines, etc.
@version: v40 (29/November/2018)
@since: 26/June/2018
@organization:
@author: https://github.com/drandreaskrueger
@see: https://github.com/drandreaskrueger/chainhammer for updates
@TODO: this needs usage comments; not every function has a docstring yet
"""
#global DBFILE, NAME_PREFIX
#DBFILE = "temp.db"
#NAME_PREFIX = "TEMP"
################
## Dependencies:
# standard library
import sys, os, json, time
import sqlite3
from pprint import pprint
# pypi:
import pandas
import numpy
import matplotlib
import matplotlib.pyplot as plt
# chainhammer
# extend sys.path for imports:
if __name__ == '__main__' and __package__ is None:
from os import sys, path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
from hammer.config import RPCaddress, EMPTY_BLOCKS_AT_END
################
def DB_query(SQL, conn):
"""
any SQL query, with many answers
"""
cur = conn.cursor()
cur.execute(SQL)
result = cur.fetchall()
return result
def DB_tableSize(tablename, conn):
"""
prints number of rows
"""
count = DB_query("SELECT COUNT(*) FROM %s" % tablename, conn)
print ("TABLE %s has %d rows" % (tablename, count[0][0]))
return count[0][0]
def maxBlockNumber(conn):
"""
what is the first & last block we have?
"""
result = DB_query("SELECT MIN(blocknumber), MAX(blocknumber) FROM blocks", conn)
print ("MIN(blocknumber), MAX(blocknumber) = %s " % (result) )
return result
def check_whether_complete(blocknumbers):
"""
do we have consecutive blocks, none missing?
"""
start = min(blocknumbers)[0]
last = max(blocknumbers)[0]
old = start-1
total=0
for bn in blocknumbers:
bn = bn[0]
missing=bn-old-1
if missing>0:
print ("from ", old+1, "to", bn - 1, "there are ", missing, " missing")
total+=missing
old = bn
print()
complete = (not total)
print ("complete" if complete else "some %d blocks missing" % total, end=" ")
print ("between blocks %d and %d." %(min(blocknumbers)[0], max(blocknumbers)[0]))
return complete
##################
## add columns
def add_blocktime(df):
"""
blocktime = timestamp[n] - timestamp[n-1]
"""
df['blocktime'] = df['timestamp'] - df['timestamp'].shift()
df.loc[1, "blocktime"] = numpy.nan
def add_TPS(df, numBlocks):
"""
transactions per second
with differently sized (rectangular) windows
"""
name = 'TPS_%dblks'%numBlocks if numBlocks>1 else 'TPS_%dblk'%numBlocks
df[name]=df['txcount'].rolling(numBlocks).sum() / df['blocktime'].rolling(numBlocks).sum()
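    # Example: with numBlocks=5, row i of the new column is
    #   sum(txcount[i-4 : i+1]) / sum(blocktime[i-4 : i+1]),
    # i.e. transactions per second averaged over a 5-block rolling window.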
def add_GUPS(df, numBlocks):
"""
gasUsed per second
"""
name = 'GUPS_%dblks'%numBlocks if numBlocks>1 else 'GUPS_%dblk'%numBlocks
df[name]=df['gasUsed'].rolling(numBlocks).sum() / df['blocktime'].rolling(numBlocks).sum()
def add_GLPS(df, numBlocks):
"""
gasLimit per second
"""
name = 'GLPS_%dblks'%numBlocks if numBlocks>1 else 'GLPS_%dblk'%numBlocks
df[name]=df['gasLimit'].rolling(numBlocks).sum() / df['blocktime'].rolling(numBlocks).sum()
##################################################
## diagramming stand-alone
## does the same as the jupyter notebook
## but more convenient for cloud server
## ... on the command line
##
##################################################
## TODOs:
## * also get the simpler single diagrams ?
## from the original blocksDB_analyze.ipynb
## * doc strings for the following routines:
##################################################
def load_dependencies():
import sqlite3; print("sqlite3 version", sqlite3.version)
import pandas; print("pandas version", pandas.__version__)
import numpy; print("numpy version", numpy.__version__)
import matplotlib; print("matplotlib version", matplotlib.__version__)
from matplotlib import pyplot as plt
backend=matplotlib.get_backend()
print("matplotlib backend", backend)
# get_ipython().run_line_magic('matplotlib', 'inline')
# https://github.com/matplotlib/matplotlib/issues/5907#issuecomment-179001811
matplotlib.rcParams['agg.path.chunksize'] = 10000
# my own routines are now all in separate .py file:
# from blocksDB_diagramming import DB_query, DB_tableSize, maxBlockNumber, check_whether_complete
# from blocksDB_diagramming import add_blocktime, add_TPS, add_GUPS, add_GLPS
def load_db_and_check_complete(DBFILE):
print ("\nReading blocks table from", DBFILE)
# open database connection
conn = sqlite3.connect(DBFILE)
print ("DB table names: ",
DB_query("SELECT name FROM sqlite_master WHERE type='table';", conn)[0])
# number of rows?
_=DB_tableSize("blocks", conn)
# what is the first & last block we have?
minblock, maxblock = maxBlockNumber(conn)[0]
blocknumbers = DB_query("SELECT blocknumber FROM blocks ORDER BY blocknumber", conn)
print ("len(blocknumbers)=", len(blocknumbers))
# do we have consecutive blocks, none missing?
check_whether_complete(blocknumbers)
print ()
return conn, blocknumbers
def simple_stats(conn):
# simple statistics
txcount_sum = DB_query("SELECT SUM(txcount) FROM blocks", conn); print ("txcount_sum", txcount_sum[0][0])
size_max = DB_query("SELECT MAX(size) FROM blocks", conn); print ("blocksize_max", size_max[0][0])
txcount_max = DB_query("SELECT MAX(txcount) FROM blocks", conn); print ("txcount_max", txcount_max[0][0])
txcount_av = DB_query("SELECT AVG(txcount) FROM blocks", conn); print ("txcount average per block", txcount_av[0][0])
blocks_nonempty_count = DB_query("SELECT COUNT(blocknumber) FROM blocks WHERE txcount != 0", conn); print ("blocks_nonempty_count", blocks_nonempty_count[0][0])
print ("txcount average per NONEMPTY blocks = ", txcount_sum[0][0] / blocks_nonempty_count[0][0] )
print ()
def read_whole_table_into_dataframe(conn):
# SQL="SELECT * FROM blocks WHERE 48500<blocknumber and blocknumber<49000 ORDER BY blocknumber"
SQL="SELECT * FROM blocks ORDER BY blocknumber"
df = pandas.read_sql(SQL, conn)
return df
def check_timestamp_format(df):
"""
    some clients report the absolute block timestamp as epoch time in seconds,
    some in nanoseconds.
    That should have been handled already, in the timestampToSeconds() function,
    but if it hasn't, the problem shows up here.
"""
# print ("example- first 4 rows:")
# print (df[0:4])
# better come up with an automated test, not just visual inspection:
# print (" is timestamp in seconds?")
# ### `geth` based clients have a nanosecond timestamp
# not anymore?
# transform nanoseconds to seconds
# df["timestamp"]=df["timestamp"]/1000000000
problematic = []
for ts in df["timestamp"]:
# year 2001 year 2255 testrpc-py issue https://github.com/pipermerriam/eth-testrpc/issues/117
if not ((1000000000 < ts < 9000000000) or (6000000 < ts < 8000000)):
problematic.append(ts)
if problematic:
print ("%d problematic timestamps = probably not in unit of seconds" % len(problematic))
try:# try, for the case that the list is short
problematic = problematic[:3] + problematic[-3:]
problematic = sorted(list(set(problematic))) # remove duplicates
except:
pass
print ("examples:", problematic)
# hello year 2255, you might have a Y2286 problem
# when epochtime goes from 9999999999 to 10000000000
# someone warned you 30 years earlier. Hahaha :-)
return not problematic
def add_columns(df):
# blocktime = timestamp[n] - timestamp[n-1]
add_blocktime(df)
#df["TPS_1"]=df['txcount']/df['blocktime']
#df
# transactions per second
# with differently sized (rectangular) windows
add_TPS(df, numBlocks=1)
add_TPS(df, numBlocks=3)
add_TPS(df, numBlocks=5)
add_TPS(df, numBlocks=10)
# gasUsed and gasLimit per second
add_GUPS(df, numBlocks=1)
add_GUPS(df, numBlocks=3)
add_GUPS(df, numBlocks=5)
add_GLPS(df, numBlocks=1)
add_GLPS(df, numBlocks=3)
add_GLPS(df, numBlocks=5)
print ("\nColumns added. Now: ", df.columns.tolist() )
print ()
def show_peak_TPS(df):
columns = ['blocknumber',
'TPS_1blk', 'TPS_3blks', 'TPS_5blks', 'TPS_10blks',
'txcount', 'size', 'gasUsed', 'gasLimit', 'timestamp', 'blocktime']
print ("peak TPS single block:")
df1 = df.sort_values(by=['TPS_1blk'], ascending=False)[0:10]
max1 = max(df1['TPS_1blk'])
pprint (df1[columns])
print ("\npeak TPS over ten blocks:")
df10 = df.sort_values(by=['TPS_10blks'], ascending=False)[0:10]
max10 = max(df10['TPS_10blks'])
pprint (df10[columns])
print ("\nSingle block, vs averaged over 10 blocks:")
print ("peak( TPS_1blk) = %.2f \npeak(TPS_10blk) = %.2f" % (max1,max10))
return max1, max10
def diagrams_oldversion(df, blockFrom, blockTo, prefix="", gas_logy=True, bt_logy=True, imgpath="img"):
"""
OBSOLETE NOW!
"""
from matplotlib import pyplot as plt
# https://github.com/matplotlib/matplotlib/issues/5907#issuecomment-179001811
matplotlib.rcParams['agg.path.chunksize'] = 10000
###################################################
# prepare 2x2 subplots
# plt = matplotlib.pyplot
fig, axes = plt.subplots(nrows=2, ncols=2,figsize=(15,10))
plt.tight_layout(pad=6.0, w_pad=6.0, h_pad=7.5)
title = prefix + " blocks %d to %d" % (blockFrom, blockTo)
plt.suptitle(title, fontsize=16)
####################################
# TPS
# TPS averages --> legend
cols=['TPS_1blk', 'TPS_3blks', 'TPS_5blks', 'TPS_10blks']
averages=df[cols][blockFrom:blockTo].mean()
legend = [col + " (av %.1f)" % averages[col] for col in cols]
# print (legend)
# TPS diagram
cols = ['blocknumber'] + cols
ax=df[cols][blockFrom:blockTo].plot(x='blocknumber', rot=90, ax=axes[0,0])
ax.set_title("transactions per second")
ax.get_xaxis().get_major_formatter().set_useOffset(False)
ax.legend(legend);
###########################################
# bar charts or line charts
# bar charts are too expensive when too many blocks
numBlocks = blockTo - blockFrom
kind = 'bar' if numBlocks<2000 else 'line'
#############################################
# BT
ax=df[['blocknumber', 'blocktime']][blockFrom:blockTo].plot(x='blocknumber', kind=kind, ax=axes[0,1],
logy=bt_logy)
ax.set_title("blocktime since last block")
ax.locator_params(nbins=1, axis='x') # TODO: matplotlib's ticks - how to autoselect few? Any idea welcome
#############################################
# blocksize
ax=df[['blocknumber', 'size']][blockFrom:blockTo].plot(x='blocknumber', rot=90, kind=kind, ax=axes[1,0])
# ax.get_xaxis().get_major_formatter().set_useOffset(False)
ax.get_yaxis().get_major_formatter().set_scientific(False)
ax.set_title("blocksize in bytes")
ax.locator_params(nbins=1, axis='x') # TODO: matplotlib's ticks - how to autoselect few? Any idea welcome
####################################
# gas
ax=df[['blocknumber', 'GLPS_1blk', 'GUPS_1blk']][blockFrom:blockTo].plot(x='blocknumber',
rot=90, ax=axes[1,1],
logy=gas_logy)
ax.get_xaxis().get_major_formatter().set_useOffset(False)
if not gas_logy:
ax.get_yaxis().get_major_formatter().set_scientific(False)
ax.set_title("gasUsed and gasLimit per second")
##############################################
# save diagram to PNG file
filename = "%s_tps-bt-bs-gas_blks%d-%d.png" % (prefix,blockFrom,blockTo)
filepath = os.path.join(imgpath, filename)
fig.savefig(filepath)
return filepath
################################################################################
# new diagrams
# completely overhauled, mostly written new actually
################################################################################
def experiment_slice(df, FROM_BLOCK, TO_BLOCK, emptyBlocks):
"""
cut out the dataframe from FROM_BLOCK to TO_BLOCK+emptyBlocks (incl that last one)
can handle that df starts not at block 0
can handle that limits are smaller or larger than available blocknumbers
"""
assert FROM_BLOCK <= TO_BLOCK
index_from = min( df[df['blocknumber'] >= FROM_BLOCK].index.tolist() )
# print (slice_from)
index_to = max( df[df['blocknumber'] <= TO_BLOCK+emptyBlocks].index.tolist() )
# print(slice_to)
dfs = df[index_from:index_to + 1]
return dfs, index_from, index_to
def averageTps_wholeExperiment(dfs, FROM_BLOCK, TO_BLOCK):
"""
works on already sliced dataframe,
where first experiment block is index 0
and last experiment(!) block is index [TO_BLOCK - FROM_BLOCK],
(so the 10 empty blocks at the end are NOT part of this!)
N.B.:
we cannot rely on the blocktime of very first block
so we simply leave the transactions out of the summation, and
the duration is from when that first block WAS MINED = its timestamp.
"""
blocks = TO_BLOCK - FROM_BLOCK + 1
ts1 = dfs.iloc[0]['timestamp'] # stop clock starts WHEN block 0 is in already!
bn1 = dfs.iloc[0]['blocknumber']
ts2 = dfs.iloc[blocks-1]['timestamp'] # and clock ends at last filled block
bn2 = dfs.iloc[blocks-1]['blocknumber']
duration = ts2-ts1
txs=sum(dfs['txcount'][1:blocks]) # N.B.: start summing at block 1 not 0 !
tps=(txs/duration)
print ("second to last experiment block, averaging:")
txt="blocks %d-%d, timestamps %d-%d, duration %d seconds, txcount %d, tps %.1f"
print (txt % (bn1, bn2, ts1, ts2, duration, txs, tps))
print()
return tps, "%.1f" % tps
def averager(dfs, col, emptyBlocks, fmt="%.1f"):
"""
We want the real average of that 'col', taken only over the non-empty blocks.
N.B.: this assumes that there are actually enough emptyBlocks at the end!
"""
filledSlice = dfs[col] [:len(dfs)-emptyBlocks-1]
av = avCopy = filledSlice .mean()
if fmt=="%d":
avCopy = int(round(av))
avTxt = fmt % avCopy
return av, avTxt
def avgLine(ax, dfs, emptyBlocks, avg, avgTxt):
"""
horizontal line plus text on white background
"""
lastFilledBlock_index = len(dfs)-emptyBlocks-1
blMin, blMax = min(dfs["blocknumber"])+1, max(dfs["blocknumber"][:lastFilledBlock_index])
ax.plot([blMin, blMax], [avg, avg], "k-")
ax.text(blMin + (blMax-blMin + emptyBlocks)*0.95, avg, avgTxt,
bbox=dict(facecolor='white', edgecolor='white'))
def axes_simplifier(ax, logYscale=False):
"""
otherwise matplotlib automatically switches on notations on the ticks
that might be confusing to non-technical people
"""
ax.get_xaxis().get_major_formatter().set_useOffset(False)
ax.get_xaxis().get_major_formatter().set_scientific(False)
if not logYscale:
ax.get_yaxis().get_major_formatter().set_useOffset(False)
ax.get_yaxis().get_major_formatter().set_scientific(False)
def tps_plotter(ax, dfs, FROM_BLOCK, TO_BLOCK, emptyBlocks):
"""
TPS average calculated only over non-empty blocks!
average calculated for TPS (not for smoothed 3, 5, 10 blocks averages)
N.B.: this assumes that in dfs there are actually enough emptyBlocks at the end!
"""
cols=['TPS_1blk', 'TPS_3blks', 'TPS_5blks', 'TPS_10blks']
for col in cols:
ax.plot(dfs['blocknumber'], dfs[col])
axes_simplifier(ax)
#avg1, avg1Txt = averager(dfs, cols[0], emptyBlocks, "%.1f")
#legend = [cols[0] + " (avg1 %s)"%avg1Txt ] + cols[1:]
ax.legend(cols);
avg, avgTxt = averageTps_wholeExperiment(dfs, FROM_BLOCK, TO_BLOCK)
avgLine(ax, dfs, emptyBlocks, avg, avgTxt)
print ("averaged over whole experiment: %s TPS" %avgTxt)
ax.set_title("avg TPS %s = #TX whole experiment / blocktimes diff" % avgTxt)
return avg
def blocktimes_plotter(ax, dfs):
"plot the blocktimes"
ax.set_title("blocktime seconds since last block")
ax.scatter(x=dfs['blocknumber'], y=dfs['blocktime'], c="b", marker="x")
axes_simplifier(ax)
def blocksizes_plotter(ax, dfs, emptyBlocks):
"""
blocksizes
plus average line
"""
ax.scatter(dfs['blocknumber'], dfs['size'], c="g", marker="o")
ax.plot( dfs['blocknumber'], dfs['size'], "g-")
avg, avgTxt = averager(dfs, 'size', emptyBlocks, "%d")
avgLine(ax, dfs, emptyBlocks, avg, avgTxt)
print ('averaged ( " ) blocksize: %s bytes' % avgTxt)
ax.set_title("blocksizes in bytes")
axes_simplifier(ax)
def gas_plotter(ax, dfs):
"""
plot gasUsed and gasLimit per second
"""
ax.set_title("gasUsed and gasLimit per second")
ax.plot( dfs['blocknumber'], dfs['GLPS_1blk']) # , "g-")
ax.plot( dfs['blocknumber'], dfs['GUPS_1blk']) #
ax.set_yscale('log')
axes_simplifier(ax, logYscale=True)
ax.legend (["gasLimit/sec", "gasUsed/sec"] )
def diagrams(prefix, df, blockFrom, blockTo, emptyBlocks):
"""
new version
more precise & consistent
* slice of whole experiment (from/to), plus some emptyBlocks at the end
* averages are calc'ed over the experiment blocks only!
* average lines & number for tps & block size
* title shows more infos about experiment
* x-axis ticks issues solved
"""
# offset=min(df["blocknumber"])
# just the slice of the experiment + 10 extra blocks:
# dfs = df[FROM_BLOCK-offset:TO_BLOCK-offset+emptyBlocks+1]
dfs, index_from, index_to = experiment_slice(df, blockFrom, blockTo, emptyBlocks)
# https://github.com/matplotlib/matplotlib/issues/5907#issuecomment-179001811
import matplotlib
matplotlib.rcParams['agg.path.chunksize'] = 10000
fig, axes = plt.subplots(2, 2, figsize=(16,9)) #, sharex=True)
fig.subplots_adjust(hspace=0.25, wspace=0.20)
tpsAv = tps_plotter(axes[0,0], dfs, blockFrom, blockTo, emptyBlocks)
blocktimes_plotter(axes[0,1], dfs)
blocksizes_plotter(axes[1,0], dfs, emptyBlocks)
gas_plotter(axes[1,1], dfs)
txs=sum(dfs['txcount'][0:-emptyBlocks+1])
title = prefix + " blocks %d-%d with %d txs ~ %d txs/block"
title = title % (blockFrom, blockTo, txs, round(txs/(blockTo-blockFrom+1)))
fig.suptitle(title, fontsize=16)
return fig, axes, dfs, txs, tpsAv
def read_experiment_infofile(fn):
"""
now the experiments are all writing out basic information.
read this in here, to know the range of blocks.
"""
with open(fn, "r") as f:
info = json.load(f)
return info
def timestamp_humanreadable(epoch):
return time.strftime("%Y%m%d-%H%M", time.localtime(epoch))
def savePlot(fig, prefix, blockFrom, blockTo, imgpath, INFOFILE=None):
if INFOFILE:
info = read_experiment_infofile(INFOFILE)
ts = timestamp_humanreadable(info['tps']['start_epochtime'])
prefix = prefix + "-" +ts
filename = "%s_blks%d-%d.png" % (prefix,blockFrom,blockTo)
filepath = os.path.join(imgpath, filename)
fig.savefig(filepath)
return filepath
def add_to_infofile(INFOFILE, img_fn, tpsAv, prefix):
info = read_experiment_infofile(fn=INFOFILE)
info['diagrams']={}
info['diagrams']['filename'] = img_fn
info['diagrams']['blocktimestampsTpsAv'] = tpsAv
info['diagrams']['prefix'] = prefix
with open(INFOFILE, "w") as f:
json.dump(info, f)
################################################################################
def load_prepare_plot_save(DBFILE, NAME_PREFIX,
FROM_BLOCK, TO_BLOCK, EMPTY_BLOCKS,
INFOFILE, imgpath="img"):
load_dependencies()
conn, blocknumbers = load_db_and_check_complete(DBFILE)
simple_stats(conn)
df = read_whole_table_into_dataframe(conn)
conn.close()
assert check_timestamp_format(df)
add_columns(df)
show_peak_TPS(df)
if FROM_BLOCK==-1: FROM_BLOCK = min(blocknumbers)[0]
if TO_BLOCK==-1: TO_BLOCK = max(blocknumbers)[0]
# print (FROM_BLOCK, TO_BLOCK); exit()
print()
# fn = diagrams_oldversion(df, FROM_BLOCK, TO_BLOCK, NAME_PREFIX, gas_logy=True, bt_logy=True, imgpath=imgpath)
fig, axes, dfs, txs, tpsAv = diagrams(NAME_PREFIX, df, FROM_BLOCK, TO_BLOCK,
emptyBlocks=EMPTY_BLOCKS)
fn = savePlot(fig, NAME_PREFIX, FROM_BLOCK, TO_BLOCK, imgpath, INFOFILE)
print ("\ndiagrams saved to: ", fn)
if INFOFILE:
add_to_infofile(INFOFILE, fn, tpsAv, NAME_PREFIX)
return fn
###############################################################################
def sanify(mystring):
"""
from given string, make something that can be used as filename
"""
keepcharacters = ('-','.','_')
sane = "".join(c for c in mystring if c.isalnum() or c in keepcharacters)
sane = sane.rstrip()
return sane
def CLI_params():
if len(sys.argv) not in (3, 4, 5):
print ("Please give\n"
"THREE arguments DBFILE PREFIX INFOFILE\n\n"
"Or give FOUR arguments, \n"
"the filename DBFILE ___.db, \n"
"a PREFIX for characterising the diagram output files; \n"
"and FROM_BLOCK and TO_BLOCK for where to zoom,\n"
"or\n"
"give only the first TWO arguments, for the whole chain\n\n"
"examples:\n"
"%s temp.db TEMP ../hammer/last-experiment.json\n"
"%s temp.db TEMP 115 230\n"
"%s temp.db TEMP\n" % (sys.argv[0], sys.argv[0], sys.argv[0]))
exit(1)
DBFILE=sys.argv[1]
NAME_PREFIX = sanify( sys.argv[2] )
print ("using DBFILE=%s NAME_PREFIX=%s" % (DBFILE, NAME_PREFIX))
if len(sys.argv)==3:
FROM_BLOCK=-1
TO_BLOCK=-1
print ("for the whole chain, first to last block")
INFOFILE=None
EMPTY_BLOCKS = EMPTY_BLOCKS_AT_END
if len(sys.argv) == 4:
INFOFILE=sys.argv[3]
print ("reading blocks range from", INFOFILE)
info = read_experiment_infofile(fn=INFOFILE)
# pprint(info); exit()
FROM_BLOCK = info['send']['block_first']
TO_BLOCK = info['send']['block_last']
EMPTY_BLOCKS = info['send']['empty_blocks']
txt = "from block %d to block %d, with %d empty blocks afterwards"
print (txt % (FROM_BLOCK, TO_BLOCK, EMPTY_BLOCKS) )
if len(sys.argv)==5:
FROM_BLOCK=int(sys.argv[3])
TO_BLOCK =int(sys.argv[4])
print ("from block %d to block %d" % (FROM_BLOCK, TO_BLOCK) )
print ()
return DBFILE, NAME_PREFIX, FROM_BLOCK, TO_BLOCK, EMPTY_BLOCKS, INFOFILE
if __name__ == '__main__':
# ./blocksDB_diagramming.py temp1.db TEMP 54 124
params = CLI_params();
# params = ("temp1.db", "TEMP", 54, 124)
# params = ("temp1.db", "TEMP", 0, 233)
# params = ("temp2.db", "TEMP", 0, 5000)
load_prepare_plot_save(*params)
print ("Done.")
| 33.312413
| 164
| 0.614402
|
import matplotlib.pyplot as plt
if __name__ == '__main__' and __package__ is None:
from os import sys, path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
from hammer.config import RPCaddress, EMPTY_BLOCKS_AT_END
def DB_tableSize(tablename, conn):
count = DB_query("SELECT COUNT(*) FROM %s" % tablename, conn)
print ("TABLE %s has %d rows" % (tablename, count[0][0]))
return count[0][0]
def maxBlockNumber(conn):
result = DB_query("SELECT MIN(blocknumber), MAX(blocknumber) FROM blocks", conn)
print ("MIN(blocknumber), MAX(blocknumber) = %s " % (result) )
return result
def check_whether_complete(blocknumbers):
start = min(blocknumbers)[0]
last = max(blocknumbers)[0]
old = start-1
total=0
for bn in blocknumbers:
bn = bn[0]
missing=bn-old-1
if missing>0:
print ("from ", old+1, "to", bn - 1, "there are ", missing, " missing")
total+=missing
old = bn
print()
complete = (not total)
print ("complete" if complete else "some %d blocks missing" % total, end=" ")
print ("between blocks %d and %d." %(min(blocknumbers)[0], max(blocknumbers)[0]))
return complete
    name = 'TPS_%dblks'%numBlocks if numBlocks>1 else 'TPS_%dblk'%numBlocks
df[name]=df['txcount'].rolling(numBlocks).sum() / df['blocktime'].rolling(numBlocks).sum()
def add_GUPS(df, numBlocks):
name = 'GUPS_%dblks'%numBlocks if numBlocks>1 else 'GUPS_%dblk'%numBlocks
df[name]=df['gasUsed'].rolling(numBlocks).sum() / df['blocktime'].rolling(numBlocks).sum()
def add_GLPS(df, numBlocks):
name = 'GLPS_%dblks'%numBlocks if numBlocks>1 else 'GLPS_%dblk'%numBlocks
df[name]=df['gasLimit'].rolling(numBlocks).sum() / df['blocktime'].rolling(numBlocks).sum()
| true
| true
|
1c41e0d798c916b889d6eadadf47ec3f3f6577e2
| 4,372
|
py
|
Python
|
util/text_to_json.py
|
naamancampbell/rasc
|
81856c8d3ae06030655097fa3ccdf7fe739f39cf
|
[
"MIT"
] | null | null | null |
util/text_to_json.py
|
naamancampbell/rasc
|
81856c8d3ae06030655097fa3ccdf7fe739f39cf
|
[
"MIT"
] | null | null | null |
util/text_to_json.py
|
naamancampbell/rasc
|
81856c8d3ae06030655097fa3ccdf7fe739f39cf
|
[
"MIT"
] | null | null | null |
import json
import os
import re
import sys
util_dir = os.path.dirname(os.path.abspath(__file__))
root_dir = os.path.dirname(util_dir)
rasc_path = os.path.join(root_dir, 'data', 'rasc.txt')
rasc_file = open(rasc_path, 'r', encoding='utf-8')
rasc_dict = { 'seasons': [] }
season_index = -1
episode_index = -1
track_index = -1
previous_blank = False
catalogues = False
for line in rasc_file:
line = line.strip()
if line == '':
previous_blank = True
catalogues = False
continue
if line == 'Catalogues:':
catalogues = True
rasc_dict['catalogue_sources'] = []
continue
thumbnails_match = re.search(r'Thumbnails: (.*)', line)
if thumbnails_match:
rasc_dict['thumbnail_source'] = thumbnails_match.group(1)
continue
if catalogues:
catalogue_match = re.search(r'(.*) \| (.*)', line)
catalogue = {
'title': catalogue_match.group(1),
'url': catalogue_match.group(2)
}
rasc_dict['catalogue_sources'].append(catalogue)
continue
season_match = re.search(r'Season (\d+)', line)
if season_match:
season_index += 1
episode_index = -1
season = { 'season': int(season_match.group(1)) }
season['episodes'] = []
rasc_dict['seasons'].append(season)
continue
if previous_blank:
episode_index += 1
track_index = -1
episode = { 'episode': episode_index + 1 }
episode['episode_title'] = line
episode['tracks'] = []
rasc_dict['seasons'][season_index]['episodes'].append(episode)
previous_blank = False
continue
no_music_match = re.search(r'No.*music.', line)
if no_music_match:
rasc_dict['seasons'][season_index]['episodes'][episode_index]\
['tracks'].append({'no_music': line})
continue
track_match = re.search(r'^(\w.*) by (.*)\. (\([\d\w\W]*\))', line)
if track_match:
# tracks with movements
track_index += 1
track_dict = {
'track': track_index + 1,
'title': track_match.group(1),
'movement': track_match.group(3),
'artist': track_match.group(2)
}
rasc_dict['seasons'][season_index]['episodes'][episode_index]\
['tracks'].append(track_dict)
continue
track_match = re.search(r'^(\w.*) by (.*)\.', line)
if track_match:
track_index += 1
title = track_match.group(1)
artist = track_match.group(2)
notes_match = re.search(r'^(.*) (\(.*\))', title)
if notes_match:
track_dict = {
'track': track_index + 1,
'title': notes_match.group(1),
'notes': notes_match.group(2),
'artist': artist
}
else:
track_dict = {
'track': track_index + 1,
'title': title,
'artist': artist
}
rasc_dict['seasons'][season_index]['episodes'][episode_index]\
['tracks'].append(track_dict)
continue
scene_match = re.search(
r'^(?:\[(.*)\] )?\((\d+:\d+)\) \[(\d+:\d+)\] (?:\((t\d+c\d+)\) )?(.*)',
line)
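    # Capture groups: 1 = optional segment label, 2 = DVD time, 3 = VLC time,
    # 4 = optional VLC location tag (tXcY), 5 = scene description.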
if scene_match:
if scene_match.group(1):
rasc_dict['seasons'][season_index]['episodes'][episode_index]\
['tracks'][track_index]['segment'] = scene_match.group(1)
rasc_dict['seasons'][season_index]['episodes'][episode_index]\
['tracks'][track_index]['dvd_time'] = scene_match.group(2)
rasc_dict['seasons'][season_index]['episodes'][episode_index]\
['tracks'][track_index]['vlc_time'] = scene_match.group(3)
if scene_match.group(4):
rasc_dict['seasons'][season_index]['episodes'][episode_index]\
['tracks'][track_index]['vlc_location'] = scene_match.group(4)
rasc_dict['seasons'][season_index]['episodes'][episode_index]\
['tracks'][track_index]['scene'] = scene_match.group(5)
continue
# line does not match patterns
print(
'Line did not match expected format: {}'.format(line), file=sys.stderr)
sys.exit(1)
rasc_json = os.path.join(root_dir, 'data', 'rasc.json')
with open(rasc_json, 'w') as json_file:
json.dump(rasc_dict, json_file, indent=2)
rasc_file.close()
| 35.544715
| 79
| 0.569533
|
import json
import os
import re
import sys
util_dir = os.path.dirname(os.path.abspath(__file__))
root_dir = os.path.dirname(util_dir)
rasc_path = os.path.join(root_dir, 'data', 'rasc.txt')
rasc_file = open(rasc_path, 'r', encoding='utf-8')
rasc_dict = { 'seasons': [] }
season_index = -1
episode_index = -1
track_index = -1
previous_blank = False
catalogues = False
for line in rasc_file:
line = line.strip()
if line == '':
previous_blank = True
catalogues = False
continue
if line == 'Catalogues:':
catalogues = True
rasc_dict['catalogue_sources'] = []
continue
thumbnails_match = re.search(r'Thumbnails: (.*)', line)
if thumbnails_match:
rasc_dict['thumbnail_source'] = thumbnails_match.group(1)
continue
if catalogues:
catalogue_match = re.search(r'(.*) \| (.*)', line)
catalogue = {
'title': catalogue_match.group(1),
'url': catalogue_match.group(2)
}
rasc_dict['catalogue_sources'].append(catalogue)
continue
season_match = re.search(r'Season (\d+)', line)
if season_match:
season_index += 1
episode_index = -1
season = { 'season': int(season_match.group(1)) }
season['episodes'] = []
rasc_dict['seasons'].append(season)
continue
if previous_blank:
episode_index += 1
track_index = -1
episode = { 'episode': episode_index + 1 }
episode['episode_title'] = line
episode['tracks'] = []
rasc_dict['seasons'][season_index]['episodes'].append(episode)
previous_blank = False
continue
no_music_match = re.search(r'No.*music.', line)
if no_music_match:
rasc_dict['seasons'][season_index]['episodes'][episode_index]\
['tracks'].append({'no_music': line})
continue
track_match = re.search(r'^(\w.*) by (.*)\. (\([\d\w\W]*\))', line)
if track_match:
track_index += 1
track_dict = {
'track': track_index + 1,
'title': track_match.group(1),
'movement': track_match.group(3),
'artist': track_match.group(2)
}
rasc_dict['seasons'][season_index]['episodes'][episode_index]\
['tracks'].append(track_dict)
continue
track_match = re.search(r'^(\w.*) by (.*)\.', line)
if track_match:
track_index += 1
title = track_match.group(1)
artist = track_match.group(2)
notes_match = re.search(r'^(.*) (\(.*\))', title)
if notes_match:
track_dict = {
'track': track_index + 1,
'title': notes_match.group(1),
'notes': notes_match.group(2),
'artist': artist
}
else:
track_dict = {
'track': track_index + 1,
'title': title,
'artist': artist
}
rasc_dict['seasons'][season_index]['episodes'][episode_index]\
['tracks'].append(track_dict)
continue
scene_match = re.search(
r'^(?:\[(.*)\] )?\((\d+:\d+)\) \[(\d+:\d+)\] (?:\((t\d+c\d+)\) )?(.*)',
line)
if scene_match:
if scene_match.group(1):
rasc_dict['seasons'][season_index]['episodes'][episode_index]\
['tracks'][track_index]['segment'] = scene_match.group(1)
rasc_dict['seasons'][season_index]['episodes'][episode_index]\
['tracks'][track_index]['dvd_time'] = scene_match.group(2)
rasc_dict['seasons'][season_index]['episodes'][episode_index]\
['tracks'][track_index]['vlc_time'] = scene_match.group(3)
if scene_match.group(4):
rasc_dict['seasons'][season_index]['episodes'][episode_index]\
['tracks'][track_index]['vlc_location'] = scene_match.group(4)
rasc_dict['seasons'][season_index]['episodes'][episode_index]\
['tracks'][track_index]['scene'] = scene_match.group(5)
continue
print(
'Line did not match expected format: {}'.format(line), file=sys.stderr)
sys.exit(1)
rasc_json = os.path.join(root_dir, 'data', 'rasc.json')
with open(rasc_json, 'w') as json_file:
json.dump(rasc_dict, json_file, indent=2)
rasc_file.close()
| true
| true
|
1c41e1238670b013dfa4b916d6f4f4cf6f9751df
| 707
|
py
|
Python
|
sandbox/src1/TCSE3-3rd-examples/src/setup.py
|
sniemi/SamPy
|
e048756feca67197cf5f995afd7d75d8286e017b
|
[
"BSD-2-Clause"
] | 5
|
2016-05-28T14:12:28.000Z
|
2021-04-22T10:23:12.000Z
|
sandbox/src1/TCSE3-3rd-examples/src/setup.py
|
sniemi/SamPy
|
e048756feca67197cf5f995afd7d75d8286e017b
|
[
"BSD-2-Clause"
] | null | null | null |
sandbox/src1/TCSE3-3rd-examples/src/setup.py
|
sniemi/SamPy
|
e048756feca67197cf5f995afd7d75d8286e017b
|
[
"BSD-2-Clause"
] | 2
|
2015-07-13T10:04:10.000Z
|
2021-04-22T10:23:23.000Z
|
#!/usr/bin/env python
from distutils.core import setup, Extension
import os, glob
os.chdir('tools')
# all scripts in src/tools are executables (subst.py is also a module)
scripts = os.listdir(os.curdir)
for del_dir in 'scitools', 'CVS', '.svn', 'build':
try:
del scripts[scripts.index(del_dir)]
    except ValueError:
pass
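# Equivalent sketch (added illustration, not in the original script): the same
# filtering expressed as a comprehension over the directory listing.
scripts_filtered = [s for s in os.listdir(os.curdir)
                    if s not in ('scitools', 'CVS', '.svn', 'build')]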
packages = ['scitools']
modules = ['subst']
setup(
name='scitools',
version='2.0', # for 2nd edition of the book below
description='Software for the book "Python Scripting for Computational Science" by H. P. Langtangen',
author="H. P. Langtangen",
author_email='hpl@simula.no',
packages=packages,
scripts=scripts,
py_modules=modules,
)
| 27.192308
| 105
| 0.674682
|
from distutils.core import setup, Extension
import os, glob
os.chdir('tools')
scripts = os.listdir(os.curdir)
for del_dir in 'scitools', 'CVS', '.svn', 'build':
try:
del scripts[scripts.index(del_dir)]
    except ValueError:
pass
packages = ['scitools']
modules = ['subst']
setup(
name='scitools',
version='2.0',
description='Software for the book "Python Scripting for Computational Science" by H. P. Langtangen',
author="H. P. Langtangen",
author_email='hpl@simula.no',
packages=packages,
scripts=scripts,
py_modules=modules,
)
| true
| true
|
1c41e2855ec54676929a52df85329c818a7ea578
| 4,105
|
py
|
Python
|
setup.py
|
MacHu-GWU/flask-restless-api-client-project
|
c93ac4a00c7562e6eb50d5dc9a92beeda7798c68
|
[
"MIT"
] | null | null | null |
setup.py
|
MacHu-GWU/flask-restless-api-client-project
|
c93ac4a00c7562e6eb50d5dc9a92beeda7798c68
|
[
"MIT"
] | null | null | null |
setup.py
|
MacHu-GWU/flask-restless-api-client-project
|
c93ac4a00c7562e6eb50d5dc9a92beeda7798c68
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Frequently used classifiers = [
"Development Status :: 1 - Planning",
"Development Status :: 2 - Pre-Alpha",
"Development Status :: 3 - Alpha",
"Development Status :: 4 - Beta",
"Development Status :: 5 - Production/Stable",
"Development Status :: 6 - Mature",
"Development Status :: 7 - Inactive",
"Intended Audience :: Customer Service",
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Financial and Insurance Industry",
"Intended Audience :: Healthcare Industry",
"Intended Audience :: Information Technology",
"Intended Audience :: Legal Industry",
"Intended Audience :: Manufacturing",
"Intended Audience :: Other Audience",
"Intended Audience :: Religion",
"Intended Audience :: Science/Research",
"Intended Audience :: System Administrators",
"Intended Audience :: Telecommunications Industry",
"License :: OSI Approved :: BSD License",
"License :: OSI Approved :: MIT License",
"License :: OSI Approved :: Apache Software License",
"License :: OSI Approved :: GNU General Public License (GPL)",
"License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
"Natural Language :: English",
"Natural Language :: Chinese (Simplified)",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS",
"Operating System :: Unix",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 2 :: Only",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3 :: Only",
]
"""
from setuptools import setup, find_packages
from datetime import datetime
import os
GITHUB_ACCOUNT = "MacHu-GWU" # your GitHub account name
RELEASE_TAG = "2016-01-20" # the GitHub release tag
NAME = "flaskrestlessapiclient" # name your package
VERSION = __import__(NAME).__version__
PACKAGES = [NAME] + ["%s.%s" % (NAME, i) for i in find_packages(NAME)]
PACKAGE_DATA = {
}
SHORT_DESCRIPTION = __import__(NAME).__short_description__ # GitHub Short Description
AUTHOR = "Sanhe Hu"
AUTHOR_EMAIL = "husanhe@gmail.com"
MAINTAINER = AUTHOR
MAINTAINER_EMAIL = AUTHOR_EMAIL
PROJECT_NAME = os.path.basename(os.getcwd()) # the project dir is the project name
URL = "https://github.com/{0}/{1}".format(GITHUB_ACCOUNT, PROJECT_NAME)
DOWNLOAD_URL = "https://github.com/{0}/{1}/tarball/{2}".format(
GITHUB_ACCOUNT, PROJECT_NAME, RELEASE_TAG)
with open("readme.rst", "rb") as f:
LONG_DESCRIPTION = f.read().decode("utf-8")
LICENSE = "MIT"
PLATFORMS = ["Windows", "MacOS", "Unix"]
CLASSIFIERS = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS",
"Operating System :: Unix",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
]
with open("requirements.txt", "rb") as f:
REQUIRES = [i.strip() for i in f.read().decode("utf-8").split("\n")]
setup(
name = NAME,
packages = PACKAGES,
include_package_data = True,
package_data = PACKAGE_DATA,
version = VERSION,
author = AUTHOR,
author_email = AUTHOR_EMAIL,
maintainer = MAINTAINER,
maintainer_email = MAINTAINER_EMAIL,
url = URL,
description = SHORT_DESCRIPTION,
long_description = LONG_DESCRIPTION,
download_url = DOWNLOAD_URL,
classifiers = CLASSIFIERS,
platforms = PLATFORMS,
license = LICENSE,
install_requires = REQUIRES,
)
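# Note (added sketch, assumption only): the __import__(NAME) lookups above require the
# package's __init__.py to define both dunder attributes, roughly like
#
#     __version__ = "0.0.1"
#     __short_description__ = "client for Flask-Restless generated APIs"
#
# so that VERSION and SHORT_DESCRIPTION resolve without installing the package first.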
| 34.208333
| 85
| 0.665773
|
from setuptools import setup, find_packages
from datetime import datetime
import os
GITHUB_ACCOUNT = "MacHu-GWU"
RELEASE_TAG = "2016-01-20"
NAME = "flaskrestlessapiclient"
VERSION = __import__(NAME).__version__
PACKAGES = [NAME] + ["%s.%s" % (NAME, i) for i in find_packages(NAME)]
PACKAGE_DATA = {
}
SHORT_DESCRIPTION = __import__(NAME).__short_description__
AUTHOR = "Sanhe Hu"
AUTHOR_EMAIL = "husanhe@gmail.com"
MAINTAINER = AUTHOR
MAINTAINER_EMAIL = AUTHOR_EMAIL
PROJECT_NAME = os.path.basename(os.getcwd())
URL = "https://github.com/{0}/{1}".format(GITHUB_ACCOUNT, PROJECT_NAME)
DOWNLOAD_URL = "https://github.com/{0}/{1}/tarball/{2}".format(
GITHUB_ACCOUNT, PROJECT_NAME, RELEASE_TAG)
with open("readme.rst", "rb") as f:
LONG_DESCRIPTION = f.read().decode("utf-8")
LICENSE = "MIT"
PLATFORMS = ["Windows", "MacOS", "Unix"]
CLASSIFIERS = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS",
"Operating System :: Unix",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
]
with open("requirements.txt", "rb") as f:
REQUIRES = [i.strip() for i in f.read().decode("utf-8").split("\n")]
setup(
name = NAME,
packages = PACKAGES,
include_package_data = True,
package_data = PACKAGE_DATA,
version = VERSION,
author = AUTHOR,
author_email = AUTHOR_EMAIL,
maintainer = MAINTAINER,
maintainer_email = MAINTAINER_EMAIL,
url = URL,
description = SHORT_DESCRIPTION,
long_description = LONG_DESCRIPTION,
download_url = DOWNLOAD_URL,
classifiers = CLASSIFIERS,
platforms = PLATFORMS,
license = LICENSE,
install_requires = REQUIRES,
)
| true
| true
|
1c41e2f216d760da5ee9d73785b2a83179b2fa3d
| 1,005
|
py
|
Python
|
stubs/micropython-v1_11-esp8266/uctypes.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
stubs/micropython-v1_11-esp8266/uctypes.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
stubs/micropython-v1_11-esp8266/uctypes.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
"""
Module: 'uctypes' on esp8266 v1.11
"""
# MCU: (sysname='esp8266', nodename='esp8266', release='2.2.0-dev(9422289)', version='v1.11-8-g48dcbbe60 on 2019-05-29', machine='ESP module with ESP8266')
# Stubber: 1.1.0 - updated
from typing import Any
ARRAY = -1073741824
BFINT16 = -671088640
BFINT32 = -402653184
BFINT8 = -939524096
BFUINT16 = -805306368
BFUINT32 = -536870912
BFUINT8 = -1073741824
BF_LEN = 22
BF_POS = 17
BIG_ENDIAN = 1
FLOAT32 = -268435456
FLOAT64 = -134217728
INT = 671088640
INT16 = 402653184
INT32 = 671088640
INT64 = 939524096
INT8 = 134217728
LITTLE_ENDIAN = 0
LONG = 671088640
LONGLONG = 939524096
NATIVE = 2
PTR = 536870912
SHORT = 402653184
UINT = 536870912
UINT16 = 268435456
UINT32 = 536870912
UINT64 = 805306368
UINT8 = 0
ULONG = 536870912
ULONGLONG = 805306368
USHORT = 268435456
VOID = 0
def addressof(*args) -> Any:
pass
def bytearray_at(*args) -> Any:
pass
def bytes_at(*args) -> Any:
pass
def sizeof(*args) -> Any:
pass
class struct:
""""""
| 16.75
| 155
| 0.703483
|
from typing import Any
ARRAY = -1073741824
BFINT16 = -671088640
BFINT32 = -402653184
BFINT8 = -939524096
BFUINT16 = -805306368
BFUINT32 = -536870912
BFUINT8 = -1073741824
BF_LEN = 22
BF_POS = 17
BIG_ENDIAN = 1
FLOAT32 = -268435456
FLOAT64 = -134217728
INT = 671088640
INT16 = 402653184
INT32 = 671088640
INT64 = 939524096
INT8 = 134217728
LITTLE_ENDIAN = 0
LONG = 671088640
LONGLONG = 939524096
NATIVE = 2
PTR = 536870912
SHORT = 402653184
UINT = 536870912
UINT16 = 268435456
UINT32 = 536870912
UINT64 = 805306368
UINT8 = 0
ULONG = 536870912
ULONGLONG = 805306368
USHORT = 268435456
VOID = 0
def addressof(*args) -> Any:
pass
def bytearray_at(*args) -> Any:
pass
def bytes_at(*args) -> Any:
pass
def sizeof(*args) -> Any:
pass
class struct:
| true
| true
|
1c41e4b19073a92f2f99f0428fb1948fedeba3ee
| 2,144
|
py
|
Python
|
1_Lecun_Network/LeNet_keras.py
|
Cynthia-QX-Li/Convolutional-neural-network
|
76335a96ce3a1779f512e4f7f3ed8b394f83b29a
|
[
"MIT"
] | 5
|
2017-09-26T02:14:43.000Z
|
2021-09-09T15:50:31.000Z
|
1_Lecun_Network/LeNet_keras.py
|
Ken-Leo/BIGBALLONcifar-10-cnn
|
76335a96ce3a1779f512e4f7f3ed8b394f83b29a
|
[
"MIT"
] | null | null | null |
1_Lecun_Network/LeNet_keras.py
|
Ken-Leo/BIGBALLONcifar-10-cnn
|
76335a96ce3a1779f512e4f7f3ed8b394f83b29a
|
[
"MIT"
] | null | null | null |
import keras
from keras import optimizers
from keras.datasets import cifar10
from keras.models import Sequential
from keras.layers import Conv2D, Dense, Flatten, MaxPooling2D
from keras.callbacks import LearningRateScheduler, TensorBoard
batch_size = 128
epochs = 180
iterations = 391
num_classes = 10
log_filepath = './lenet'
def build_model():
model = Sequential()
model.add(Conv2D(6, (5, 5), padding='valid', activation = 'relu', kernel_initializer='he_normal', input_shape=(32,32,3)))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(Conv2D(16, (5, 5), padding='valid', activation = 'relu', kernel_initializer='he_normal'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(Flatten())
model.add(Dense(120, activation = 'relu', kernel_initializer='he_normal'))
model.add(Dense(84, activation = 'relu', kernel_initializer='he_normal'))
model.add(Dense(10, activation = 'softmax', kernel_initializer='he_normal'))
sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
return model
def scheduler(epoch):
learning_rate_init = 0.02
if epoch >= 80:
learning_rate_init = 0.01
if epoch >= 150:
learning_rate_init = 0.004
return learning_rate_init
if __name__ == '__main__':
# load data
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
# build network
model = build_model()
print(model.summary())
# set callback
tb_cb = TensorBoard(log_dir=log_filepath, histogram_freq=0)
change_lr = LearningRateScheduler(scheduler)
cbks = [change_lr,tb_cb]
    # start training
model.fit(x_train, y_train,batch_size=batch_size,epochs=epochs,callbacks=cbks,
validation_data=(x_test, y_test), shuffle=True)
# save model
model.save('lenet.h5')
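    # Evaluation sketch (added illustration, not in the original script): reload the
    # saved model and score it on the held-out test set with standard Keras calls.
    from keras.models import load_model
    restored = load_model('lenet.h5')
    test_loss, test_acc = restored.evaluate(x_test, y_test, batch_size=batch_size)
    print('test loss: %.4f, test accuracy: %.4f' % (test_loss, test_acc))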
| 34.031746
| 125
| 0.693097
|
import keras
from keras import optimizers
from keras.datasets import cifar10
from keras.models import Sequential
from keras.layers import Conv2D, Dense, Flatten, MaxPooling2D
from keras.callbacks import LearningRateScheduler, TensorBoard
batch_size = 128
epochs = 180
iterations = 391
num_classes = 10
log_filepath = './lenet'
def build_model():
model = Sequential()
model.add(Conv2D(6, (5, 5), padding='valid', activation = 'relu', kernel_initializer='he_normal', input_shape=(32,32,3)))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(Conv2D(16, (5, 5), padding='valid', activation = 'relu', kernel_initializer='he_normal'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(Flatten())
model.add(Dense(120, activation = 'relu', kernel_initializer='he_normal'))
model.add(Dense(84, activation = 'relu', kernel_initializer='he_normal'))
model.add(Dense(10, activation = 'softmax', kernel_initializer='he_normal'))
sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
return model
def scheduler(epoch):
learning_rate_init = 0.02
if epoch >= 80:
learning_rate_init = 0.01
if epoch >= 150:
learning_rate_init = 0.004
return learning_rate_init
if __name__ == '__main__':
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
model = build_model()
print(model.summary())
tb_cb = TensorBoard(log_dir=log_filepath, histogram_freq=0)
change_lr = LearningRateScheduler(scheduler)
cbks = [change_lr,tb_cb]
model.fit(x_train, y_train,batch_size=batch_size,epochs=epochs,callbacks=cbks,
validation_data=(x_test, y_test), shuffle=True)
model.save('lenet.h5')
| true
| true
|
1c41e4e4bef2f5b29177e4a6aa096761b12e2917
| 1,330
|
py
|
Python
|
tests_requre/openshift_integration/base.py
|
csomh/packit-service
|
d12836284f5e34f54dc6de20279175493b5f5d31
|
[
"MIT"
] | null | null | null |
tests_requre/openshift_integration/base.py
|
csomh/packit-service
|
d12836284f5e34f54dc6de20279175493b5f5d31
|
[
"MIT"
] | null | null | null |
tests_requre/openshift_integration/base.py
|
csomh/packit-service
|
d12836284f5e34f54dc6de20279175493b5f5d31
|
[
"MIT"
] | null | null | null |
import os
from shutil import copy
from pathlib import Path
import unittest
from requre.constants import RELATIVE_TEST_DATA_DIRECTORY
from requre.cassette import StorageMode
from packit.config import RunCommandType
from packit_service.worker.jobs import SteveJobs
from glob import glob
PROJECT_DIR = Path(__file__).parent.parent.parent
DATA_DIR = PROJECT_DIR / "tests" / "data"
class PackitServiceTestCase(unittest.TestCase):
def setUp(self):
self._steve = None
@property
def steve(self):
if not self._steve:
self._steve = SteveJobs()
self._steve.service_config.command_handler = RunCommandType.local
self._steve.service_config.command_handler_work_dir = "/tmp/hello-world"
return self._steve
def cassette_teardown(self, cassette):
        # copy files to the destination where the persistent volume is mounted
cassette.dump()
if cassette.mode == StorageMode.write:
destdir = (
Path("/tmp")
/ Path(RELATIVE_TEST_DATA_DIRECTORY)
/ Path(cassette.storage_file).parent.name
)
os.makedirs(destdir, exist_ok=True)
storage_file = Path(cassette.storage_file)
for filename in glob(f"{storage_file}*"):
copy(filename, destdir)
| 33.25
| 84
| 0.67218
|
import os
from shutil import copy
from pathlib import Path
import unittest
from requre.constants import RELATIVE_TEST_DATA_DIRECTORY
from requre.cassette import StorageMode
from packit.config import RunCommandType
from packit_service.worker.jobs import SteveJobs
from glob import glob
PROJECT_DIR = Path(__file__).parent.parent.parent
DATA_DIR = PROJECT_DIR / "tests" / "data"
class PackitServiceTestCase(unittest.TestCase):
def setUp(self):
self._steve = None
@property
def steve(self):
if not self._steve:
self._steve = SteveJobs()
self._steve.service_config.command_handler = RunCommandType.local
self._steve.service_config.command_handler_work_dir = "/tmp/hello-world"
return self._steve
def cassette_teardown(self, cassette):
cassette.dump()
if cassette.mode == StorageMode.write:
destdir = (
Path("/tmp")
/ Path(RELATIVE_TEST_DATA_DIRECTORY)
/ Path(cassette.storage_file).parent.name
)
os.makedirs(destdir, exist_ok=True)
storage_file = Path(cassette.storage_file)
for filename in glob(f"{storage_file}*"):
copy(filename, destdir)
| true
| true
|
1c41e5e92a822a7c6f95fd7730d5429ca21cac9f
| 11,886
|
py
|
Python
|
packages/api-server/api_server/app.py
|
cnboonhan/rmf-web
|
4389c53ef87e95c9a4bf1a6a72ed11f6f441a33f
|
[
"Apache-2.0"
] | null | null | null |
packages/api-server/api_server/app.py
|
cnboonhan/rmf-web
|
4389c53ef87e95c9a4bf1a6a72ed11f6f441a33f
|
[
"Apache-2.0"
] | null | null | null |
packages/api-server/api_server/app.py
|
cnboonhan/rmf-web
|
4389c53ef87e95c9a4bf1a6a72ed11f6f441a33f
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
import logging
import os
import signal
import sys
import threading
from typing import Any, Callable, Coroutine, List, Optional, Union
import rclpy
import rclpy.executors
from fastapi import HTTPException
from fastapi.logger import logger
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from tortoise import Tortoise
from . import routes
from .app_config import AppConfig, load_config
from .authenticator import AuthenticationError, JwtAuthenticator, StubAuthenticator
from .base_app import BaseApp
from .dependencies import rmf_repo as rmf_repo_dep
from .fast_io import FastIO
from .gateway import RmfGateway
from .models import (
DispenserHealth,
DispenserState,
DoorHealth,
DoorState,
FleetState,
IngestorHealth,
IngestorState,
LiftHealth,
LiftState,
RobotHealth,
)
from .models import tortoise_models as ttm
from .repositories import StaticFilesRepository
from .rmf_io import HealthWatchdog, RmfBookKeeper, RmfEvents
from .types import is_coroutine
class App(FastIO, BaseApp):
def __init__(
self,
*,
app_config: AppConfig = None,
rmf_gateway_fc: Callable[
[RmfEvents, StaticFilesRepository], RmfGateway
] = RmfGateway,
):
super().__init__(title="RMF API Server")
self.app_config = app_config or load_config(
os.environ.get(
"RMF_API_SERVER_CONFIG",
f"{os.path.dirname(__file__)}/default_config.py",
)
)
self.loop: asyncio.AbstractEventLoop
self.logger = logger
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
self.logger.addHandler(handler)
self.logger.setLevel(self.app_config.log_level)
if self.app_config.jwt_public_key:
if self.app_config.iss is None:
raise ValueError("iss is required")
self.authenticator = JwtAuthenticator(
self.app_config.jwt_public_key,
self.app_config.aud,
self.app_config.iss,
oidc_url=self.app_config.oidc_url or "",
)
else:
self.authenticator = StubAuthenticator()
self.logger.warning("authentication is disabled")
self.user_dep = self.authenticator.fastapi_dep()
async def on_connect(sid: str, _environ: dict, auth: Optional[dict] = None):
session = await self.sio.get_session(sid)
token = None
if auth:
token = auth["token"]
try:
user = await self.authenticator.verify_token(token)
session["user"] = user
return True
except AuthenticationError as e:
self.logger.info(f"authentication failed: {e}")
return False
self.sio.on("connect", on_connect)
self.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=False,
allow_methods=["*"],
allow_headers=["*"],
)
os.makedirs(self.app_config.static_directory, exist_ok=True)
self.mount(
"/static",
StaticFiles(directory=self.app_config.static_directory),
name="static",
)
# will be called in reverse order on app shutdown
shutdown_cbs: List[Union[Coroutine[Any, Any, Any], Callable[[], None]]] = []
self._rmf_events = RmfEvents()
self.rmf_repo = rmf_repo_dep(self.user_dep)
self.static_files_repo = StaticFilesRepository(
f"{self.app_config.public_url.geturl()}/static",
self.app_config.static_directory,
self.logger.getChild("static_files"),
)
self._rmf_gateway: RmfGateway
self._rmf_bookkeeper = RmfBookKeeper(
self._rmf_events, logger=self.logger.getChild("BookKeeper")
)
self.include_router(routes.main_router(self))
self.include_router(routes.BuildingMapRouter(self), prefix="/building_map")
self.include_router(routes.DoorsRouter(self), prefix="/doors")
self.include_router(routes.LiftsRouter(self), prefix="/lifts")
self.include_router(routes.TasksRouter(self), prefix="/tasks")
self.include_router(routes.DispensersRouter(self), prefix="/dispensers")
self.include_router(routes.IngestorsRouter(self), prefix="/ingestors")
self.include_router(routes.FleetsRouter(self), prefix="/fleets")
self.include_router(routes.admin_router(self), prefix="/admin")
@self.on_event("startup")
async def on_startup():
self.loop = asyncio.get_event_loop()
            # shutdown event is not called when the app crashes, which can cause the app to be
            # "locked up", as some dependencies like tortoise do not allow python to exit until
# it is closed "gracefully".
def on_signal(sig, frame):
task = self.loop.create_task(on_shutdown())
if not self.loop.is_running():
self.loop.run_until_complete(task)
if sig == signal.SIGINT and callable(prev_sigint):
prev_sigint(sig, frame)
elif sig == signal.SIGTERM and callable(prev_sigterm):
prev_sigterm(sig, frame)
            if threading.current_thread() is threading.main_thread():
prev_sigint = signal.signal(signal.SIGINT, on_signal)
prev_sigterm = signal.signal(signal.SIGTERM, on_signal)
await Tortoise.init(
db_url=self.app_config.db_url,
modules={"models": ["api_server.models.tortoise_models"]},
)
await Tortoise.generate_schemas()
shutdown_cbs.append(Tortoise.close_connections())
await ttm.User.update_or_create(
{"is_admin": True}, username=self.app_config.builtin_admin
)
use_sim_time_env = os.environ.get("RMF_SERVER_USE_SIM_TIME", None)
if use_sim_time_env:
                use_sim_time = use_sim_time_env.lower() not in ["0", "false"]
else:
use_sim_time = False
if use_sim_time:
rclpy.init(args=["--ros-args", "-p", "use_sim_time:=true"])
else:
rclpy.init()
shutdown_cbs.append(rclpy.shutdown)
self._rmf_gateway = rmf_gateway_fc(self._rmf_events, self.static_files_repo)
self._rmf_gateway.spin_background()
shutdown_cbs.append(self._rmf_gateway.stop_spinning)
# Order is important here
            # 1. load states from db, this populates the sio/fast_io rooms with the latest data
await self._load_states()
# 2. start the services after loading states so that the loaded states are not
            # used. Failing to do so will cause, for example, the book keeper to save the loaded states
# back into the db and mess up health watchdog's heartbeat system.
await self._rmf_bookkeeper.start()
shutdown_cbs.append(self._rmf_bookkeeper.stop())
health_watchdog = HealthWatchdog(
self._rmf_events,
logger=self.logger.getChild("HealthWatchdog"),
)
await health_watchdog.start()
self._rmf_gateway.subscribe_all()
shutdown_cbs.append(self._rmf_gateway.unsubscribe_all)
self.logger.info("started app")
@self.on_event("shutdown")
async def on_shutdown():
while shutdown_cbs:
cb = shutdown_cbs.pop()
if is_coroutine(cb):
await cb
elif callable(cb):
cb()
self.logger.info("shutdown app")
async def _load_states(self):
self.logger.info("loading states from database...")
door_states = [DoorState.from_tortoise(x) for x in await ttm.DoorState.all()]
for state in door_states:
self._rmf_events.door_states.on_next(state)
self.logger.info(f"loaded {len(door_states)} door states")
door_health = [
await DoorHealth.from_tortoise(x) for x in await ttm.DoorHealth.all()
]
for health in door_health:
self._rmf_events.door_health.on_next(health)
self.logger.info(f"loaded {len(door_health)} door health")
lift_states = [LiftState.from_tortoise(x) for x in await ttm.LiftState.all()]
for state in lift_states:
self._rmf_events.lift_states.on_next(state)
self.logger.info(f"loaded {len(lift_states)} lift states")
lift_health = [
await LiftHealth.from_tortoise(x) for x in await ttm.LiftHealth.all()
]
for health in lift_health:
self._rmf_events.lift_health.on_next(health)
self.logger.info(f"loaded {len(lift_health)} lift health")
dispenser_states = [
DispenserState.from_tortoise(x) for x in await ttm.DispenserState.all()
]
for state in dispenser_states:
self._rmf_events.dispenser_states.on_next(state)
self.logger.info(f"loaded {len(dispenser_states)} dispenser states")
dispenser_health = [
await DispenserHealth.from_tortoise(x)
for x in await ttm.DispenserHealth.all()
]
for health in dispenser_health:
self._rmf_events.dispenser_health.on_next(health)
self.logger.info(f"loaded {len(dispenser_health)} dispenser health")
ingestor_states = [
IngestorState.from_tortoise(x) for x in await ttm.IngestorState.all()
]
for state in ingestor_states:
self._rmf_events.ingestor_states.on_next(state)
self.logger.info(f"loaded {len(ingestor_states)} ingestor states")
ingestor_health = [
await IngestorHealth.from_tortoise(x)
for x in await ttm.IngestorHealth.all()
]
for health in ingestor_health:
self._rmf_events.ingestor_health.on_next(health)
self.logger.info(f"loaded {len(ingestor_health)} ingestor health")
fleet_states = [FleetState.from_tortoise(x) for x in await ttm.FleetState.all()]
for state in fleet_states:
self._rmf_events.fleet_states.on_next(state)
self.logger.info(f"loaded {len(fleet_states)} fleet states")
robot_health = [
await RobotHealth.from_tortoise(x) for x in await ttm.RobotHealth.all()
]
for health in robot_health:
self._rmf_events.robot_health.on_next(health)
self.logger.info(f"loaded {len(robot_health)} robot health")
self.logger.info("updating tasks from RMF")
try:
# Sometimes the node has not finished discovery so we need to call
# `wait_for_service` here.
# As of rclpy 3.0, `wait_for_service` uses a blocking sleep in a loop so
            # using it is not recommended after the app has finished startup.
ready = self._rmf_gateway.get_tasks_srv.wait_for_service(1)
if not ready:
raise HTTPException(503, "ros service not ready")
tasks = await self._rmf_gateway.get_tasks()
for t in tasks:
await t.save()
except HTTPException as e:
self.logger.error(f"failed to update tasks from RMF ({e.detail})")
self.logger.info("successfully loaded all states")
def rmf_events(self) -> RmfEvents:
return self._rmf_events
def rmf_gateway(self) -> RmfGateway:
return self._rmf_gateway
def rmf_bookkeeper(self) -> RmfBookKeeper:
return self._rmf_bookkeeper
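# Launch sketch (assumption, not part of this module): App builds on an ASGI
# application, so a deployment entry point could look roughly like
#
#     import uvicorn
#     from api_server.app import App
#
#     uvicorn.run(App(), host="127.0.0.1", port=8000)
#
# where host and port are arbitrary example values.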
| 38.341935
| 98
| 0.627966
|
import asyncio
import logging
import os
import signal
import sys
import threading
from typing import Any, Callable, Coroutine, List, Optional, Union
import rclpy
import rclpy.executors
from fastapi import HTTPException
from fastapi.logger import logger
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from tortoise import Tortoise
from . import routes
from .app_config import AppConfig, load_config
from .authenticator import AuthenticationError, JwtAuthenticator, StubAuthenticator
from .base_app import BaseApp
from .dependencies import rmf_repo as rmf_repo_dep
from .fast_io import FastIO
from .gateway import RmfGateway
from .models import (
DispenserHealth,
DispenserState,
DoorHealth,
DoorState,
FleetState,
IngestorHealth,
IngestorState,
LiftHealth,
LiftState,
RobotHealth,
)
from .models import tortoise_models as ttm
from .repositories import StaticFilesRepository
from .rmf_io import HealthWatchdog, RmfBookKeeper, RmfEvents
from .types import is_coroutine
class App(FastIO, BaseApp):
def __init__(
self,
*,
app_config: AppConfig = None,
rmf_gateway_fc: Callable[
[RmfEvents, StaticFilesRepository], RmfGateway
] = RmfGateway,
):
super().__init__(title="RMF API Server")
self.app_config = app_config or load_config(
os.environ.get(
"RMF_API_SERVER_CONFIG",
f"{os.path.dirname(__file__)}/default_config.py",
)
)
self.loop: asyncio.AbstractEventLoop
self.logger = logger
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
self.logger.addHandler(handler)
self.logger.setLevel(self.app_config.log_level)
if self.app_config.jwt_public_key:
if self.app_config.iss is None:
raise ValueError("iss is required")
self.authenticator = JwtAuthenticator(
self.app_config.jwt_public_key,
self.app_config.aud,
self.app_config.iss,
oidc_url=self.app_config.oidc_url or "",
)
else:
self.authenticator = StubAuthenticator()
self.logger.warning("authentication is disabled")
self.user_dep = self.authenticator.fastapi_dep()
async def on_connect(sid: str, _environ: dict, auth: Optional[dict] = None):
session = await self.sio.get_session(sid)
token = None
if auth:
token = auth["token"]
try:
user = await self.authenticator.verify_token(token)
session["user"] = user
return True
except AuthenticationError as e:
self.logger.info(f"authentication failed: {e}")
return False
self.sio.on("connect", on_connect)
self.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=False,
allow_methods=["*"],
allow_headers=["*"],
)
os.makedirs(self.app_config.static_directory, exist_ok=True)
self.mount(
"/static",
StaticFiles(directory=self.app_config.static_directory),
name="static",
)
shutdown_cbs: List[Union[Coroutine[Any, Any, Any], Callable[[], None]]] = []
self._rmf_events = RmfEvents()
self.rmf_repo = rmf_repo_dep(self.user_dep)
self.static_files_repo = StaticFilesRepository(
f"{self.app_config.public_url.geturl()}/static",
self.app_config.static_directory,
self.logger.getChild("static_files"),
)
self._rmf_gateway: RmfGateway
self._rmf_bookkeeper = RmfBookKeeper(
self._rmf_events, logger=self.logger.getChild("BookKeeper")
)
self.include_router(routes.main_router(self))
self.include_router(routes.BuildingMapRouter(self), prefix="/building_map")
self.include_router(routes.DoorsRouter(self), prefix="/doors")
self.include_router(routes.LiftsRouter(self), prefix="/lifts")
self.include_router(routes.TasksRouter(self), prefix="/tasks")
self.include_router(routes.DispensersRouter(self), prefix="/dispensers")
self.include_router(routes.IngestorsRouter(self), prefix="/ingestors")
self.include_router(routes.FleetsRouter(self), prefix="/fleets")
self.include_router(routes.admin_router(self), prefix="/admin")
@self.on_event("startup")
async def on_startup():
self.loop = asyncio.get_event_loop()
def on_signal(sig, frame):
task = self.loop.create_task(on_shutdown())
if not self.loop.is_running():
self.loop.run_until_complete(task)
if sig == signal.SIGINT and callable(prev_sigint):
prev_sigint(sig, frame)
elif sig == signal.SIGTERM and callable(prev_sigterm):
prev_sigterm(sig, frame)
            if threading.current_thread() is threading.main_thread():
prev_sigint = signal.signal(signal.SIGINT, on_signal)
prev_sigterm = signal.signal(signal.SIGTERM, on_signal)
await Tortoise.init(
db_url=self.app_config.db_url,
modules={"models": ["api_server.models.tortoise_models"]},
)
await Tortoise.generate_schemas()
shutdown_cbs.append(Tortoise.close_connections())
await ttm.User.update_or_create(
{"is_admin": True}, username=self.app_config.builtin_admin
)
use_sim_time_env = os.environ.get("RMF_SERVER_USE_SIM_TIME", None)
if use_sim_time_env:
                use_sim_time = use_sim_time_env.lower() not in ["0", "false"]
else:
use_sim_time = False
if use_sim_time:
rclpy.init(args=["--ros-args", "-p", "use_sim_time:=true"])
else:
rclpy.init()
shutdown_cbs.append(rclpy.shutdown)
self._rmf_gateway = rmf_gateway_fc(self._rmf_events, self.static_files_repo)
self._rmf_gateway.spin_background()
shutdown_cbs.append(self._rmf_gateway.stop_spinning)
await self._load_states()
await self._rmf_bookkeeper.start()
shutdown_cbs.append(self._rmf_bookkeeper.stop())
health_watchdog = HealthWatchdog(
self._rmf_events,
logger=self.logger.getChild("HealthWatchdog"),
)
await health_watchdog.start()
self._rmf_gateway.subscribe_all()
shutdown_cbs.append(self._rmf_gateway.unsubscribe_all)
self.logger.info("started app")
@self.on_event("shutdown")
async def on_shutdown():
while shutdown_cbs:
cb = shutdown_cbs.pop()
if is_coroutine(cb):
await cb
elif callable(cb):
cb()
self.logger.info("shutdown app")
async def _load_states(self):
self.logger.info("loading states from database...")
door_states = [DoorState.from_tortoise(x) for x in await ttm.DoorState.all()]
for state in door_states:
self._rmf_events.door_states.on_next(state)
self.logger.info(f"loaded {len(door_states)} door states")
door_health = [
await DoorHealth.from_tortoise(x) for x in await ttm.DoorHealth.all()
]
for health in door_health:
self._rmf_events.door_health.on_next(health)
self.logger.info(f"loaded {len(door_health)} door health")
lift_states = [LiftState.from_tortoise(x) for x in await ttm.LiftState.all()]
for state in lift_states:
self._rmf_events.lift_states.on_next(state)
self.logger.info(f"loaded {len(lift_states)} lift states")
lift_health = [
await LiftHealth.from_tortoise(x) for x in await ttm.LiftHealth.all()
]
for health in lift_health:
self._rmf_events.lift_health.on_next(health)
self.logger.info(f"loaded {len(lift_health)} lift health")
dispenser_states = [
DispenserState.from_tortoise(x) for x in await ttm.DispenserState.all()
]
for state in dispenser_states:
self._rmf_events.dispenser_states.on_next(state)
self.logger.info(f"loaded {len(dispenser_states)} dispenser states")
dispenser_health = [
await DispenserHealth.from_tortoise(x)
for x in await ttm.DispenserHealth.all()
]
for health in dispenser_health:
self._rmf_events.dispenser_health.on_next(health)
self.logger.info(f"loaded {len(dispenser_health)} dispenser health")
ingestor_states = [
IngestorState.from_tortoise(x) for x in await ttm.IngestorState.all()
]
for state in ingestor_states:
self._rmf_events.ingestor_states.on_next(state)
self.logger.info(f"loaded {len(ingestor_states)} ingestor states")
ingestor_health = [
await IngestorHealth.from_tortoise(x)
for x in await ttm.IngestorHealth.all()
]
for health in ingestor_health:
self._rmf_events.ingestor_health.on_next(health)
self.logger.info(f"loaded {len(ingestor_health)} ingestor health")
fleet_states = [FleetState.from_tortoise(x) for x in await ttm.FleetState.all()]
for state in fleet_states:
self._rmf_events.fleet_states.on_next(state)
self.logger.info(f"loaded {len(fleet_states)} fleet states")
robot_health = [
await RobotHealth.from_tortoise(x) for x in await ttm.RobotHealth.all()
]
for health in robot_health:
self._rmf_events.robot_health.on_next(health)
self.logger.info(f"loaded {len(robot_health)} robot health")
self.logger.info("updating tasks from RMF")
try:
# Sometimes the node has not finished discovery so we need to call
# `wait_for_service` here.
# As of rclpy 3.0, `wait_for_service` uses a blocking sleep in a loop so
            # using it is not recommended after the app has finished startup.
ready = self._rmf_gateway.get_tasks_srv.wait_for_service(1)
if not ready:
raise HTTPException(503, "ros service not ready")
tasks = await self._rmf_gateway.get_tasks()
for t in tasks:
await t.save()
except HTTPException as e:
self.logger.error(f"failed to update tasks from RMF ({e.detail})")
self.logger.info("successfully loaded all states")
def rmf_events(self) -> RmfEvents:
return self._rmf_events
def rmf_gateway(self) -> RmfGateway:
return self._rmf_gateway
def rmf_bookkeeper(self) -> RmfBookKeeper:
return self._rmf_bookkeeper
| true
| true
|
1c41e622e4f8812e44c048ade2fd2b6dbe66de8d
| 20
|
py
|
Python
|
gui/PyUi/__init__.py
|
YuriShporhun/TheCarMonitor
|
fd9bd436413ef1b70e9ded119bb5acd672f7316f
|
[
"Apache-2.0"
] | 19
|
2021-04-17T03:25:41.000Z
|
2022-02-10T03:53:01.000Z
|
gui/PyUi/__init__.py
|
YuriShporhun/TheCarMonitor
|
fd9bd436413ef1b70e9ded119bb5acd672f7316f
|
[
"Apache-2.0"
] | null | null | null |
gui/PyUi/__init__.py
|
YuriShporhun/TheCarMonitor
|
fd9bd436413ef1b70e9ded119bb5acd672f7316f
|
[
"Apache-2.0"
] | 12
|
2021-04-18T08:25:43.000Z
|
2022-02-17T06:30:13.000Z
|
__author__ = 'yuri'
| 10
| 19
| 0.7
|
__author__ = 'yuri'
| true
| true
|
1c41e633019e962f66cc3d82f3ae6054e6a020f7
| 1,828
|
py
|
Python
|
requests_pytest_plugin.py
|
ColdHeat/Advocate
|
6d699aed899784dfae5fac28e29567936bed81a3
|
[
"Apache-2.0"
] | null | null | null |
requests_pytest_plugin.py
|
ColdHeat/Advocate
|
6d699aed899784dfae5fac28e29567936bed81a3
|
[
"Apache-2.0"
] | null | null | null |
requests_pytest_plugin.py
|
ColdHeat/Advocate
|
6d699aed899784dfae5fac28e29567936bed81a3
|
[
"Apache-2.0"
] | null | null | null |
import socket
import doctest
import pytest
import requests
import advocate
import advocate.api
from advocate.exceptions import MountDisabledException, ProxyDisabledException
from advocate.packages import ipaddress
from test.monkeypatching import CheckedSocket
SKIP_EXCEPTIONS = (MountDisabledException, ProxyDisabledException)
def pytest_runtestloop():
validator = advocate.AddrValidator(
ip_whitelist={
# requests needs to be able to hit these for its tests!
ipaddress.ip_network("127.0.0.1"),
ipaddress.ip_network("127.0.1.1"),
ipaddress.ip_network("10.255.255.1"),
},
        # the `httpbin` fixture uses a random port, so we need to allow all ports
        port_whitelist=set(range(0, 65536)),
)
# this will yell at us if we failed to patch something
socket.socket = CheckedSocket
# requests' tests rely on being able to pickle a `Session`
advocate.api.RequestsAPIWrapper.SUPPORT_WRAPPER_PICKLING = True
wrapper = advocate.api.RequestsAPIWrapper(validator)
for attr in advocate.api.__all__:
setattr(requests, attr, getattr(wrapper, attr))
def pytest_runtest_makereport(item, call):
# This is necessary because we pull in requests' test suite,
# which sometimes tests `session.mount()`. We disable that
# method, so we need to ignore tests that use it.
from _pytest.runner import pytest_runtest_makereport as mr
report = mr(item, call)
if call.excinfo is not None:
exc = call.excinfo.value
if isinstance(exc, doctest.UnexpectedException):
exc = call.excinfo.value.exc_info[1]
if isinstance(exc, SKIP_EXCEPTIONS):
report.outcome = 'skipped'
report.wasxfail = "reason: Advocate is not meant to support this"
return report
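# Usage sketch (illustration, not part of the plugin): the same wrapper can be used
# directly outside this test shim; it exposes the names in advocate.api.__all__ (the
# ones patched onto `requests` above), e.g.
#
#     wrapper = advocate.api.RequestsAPIWrapper(validator)
#     resp = wrapper.get("http://10.255.255.1:80/")   # hypothetical whitelisted address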
| 30.983051
| 78
| 0.702407
|
import socket
import doctest
import pytest
import requests
import advocate
import advocate.api
from advocate.exceptions import MountDisabledException, ProxyDisabledException
from advocate.packages import ipaddress
from test.monkeypatching import CheckedSocket
SKIP_EXCEPTIONS = (MountDisabledException, ProxyDisabledException)
def pytest_runtestloop():
validator = advocate.AddrValidator(
ip_whitelist={
ipaddress.ip_network("127.0.0.1"),
ipaddress.ip_network("127.0.1.1"),
ipaddress.ip_network("10.255.255.1"),
},
        port_whitelist=set(range(0, 65536)),
)
socket.socket = CheckedSocket
advocate.api.RequestsAPIWrapper.SUPPORT_WRAPPER_PICKLING = True
wrapper = advocate.api.RequestsAPIWrapper(validator)
for attr in advocate.api.__all__:
setattr(requests, attr, getattr(wrapper, attr))
def pytest_runtest_makereport(item, call):
# This is necessary because we pull in requests' test suite,
from _pytest.runner import pytest_runtest_makereport as mr
report = mr(item, call)
if call.excinfo is not None:
exc = call.excinfo.value
if isinstance(exc, doctest.UnexpectedException):
exc = call.excinfo.value.exc_info[1]
if isinstance(exc, SKIP_EXCEPTIONS):
report.outcome = 'skipped'
report.wasxfail = "reason: Advocate is not meant to support this"
return report
| true
| true
|
1c41e705037d64a717a007fbace06ce645b19c6b
| 2,805
|
py
|
Python
|
analyze.py
|
kongyanye/paper_search
|
a5e8ab6210dd73988a5b0912bcbfc814b8a09f5e
|
[
"MIT"
] | null | null | null |
analyze.py
|
kongyanye/paper_search
|
a5e8ab6210dd73988a5b0912bcbfc814b8a09f5e
|
[
"MIT"
] | null | null | null |
analyze.py
|
kongyanye/paper_search
|
a5e8ab6210dd73988a5b0912bcbfc814b8a09f5e
|
[
"MIT"
] | null | null | null |
"""
Reads the stored summary of every paper in the database and computes tfidf vectors for all papers.
Dumps results to file tfidf.p
"""
from random import shuffle, seed
import numpy as np
from sklearn.feature_extraction.text import TfidfVectorizer
from utils import Config, safe_pickle_dump, load_db
seed(1337)
# max number of tfidf training documents (chosen randomly), for memory efficiency
max_train = 50000
max_features = 50000
# reading papers
db = load_db()
# compute tfidf vectors with scikits
v = TfidfVectorizer(input='content',
encoding='utf-8', decode_error='replace', strip_accents='unicode',
lowercase=True, analyzer='word', stop_words='english',
token_pattern=r'(?u)\b[a-zA-Z_][a-zA-Z0-9_]+\b',
ngram_range=(1, 2), max_features=max_features,
norm='l2', use_idf=True, smooth_idf=True, sublinear_tf=True,
max_df=1.0, min_df=1)
# create an iterator object to conserve memory
def make_corpus(paths):
for p in paths:
yield db[p].get('summary', '')
txt_paths = list(db.keys())
pids = []
for pid, j in db.items():
if '_rawid' in j:
idvv = '%sv%d' % (j['_rawid'], j['_version'])
else:
idvv = pid
pids.append(idvv)
# train
train_txt_paths = list(txt_paths) # duplicate
shuffle(train_txt_paths) # shuffle
train_txt_paths = train_txt_paths[:min(
len(train_txt_paths), max_train)] # crop
print("training on %d documents..." % (len(train_txt_paths), ))
train_corpus = make_corpus(train_txt_paths)
v.fit(train_corpus)
# transform
print("transforming %d documents..." % (len(txt_paths), ))
corpus = make_corpus(txt_paths)
X = v.transform(corpus)
# print(v.vocabulary_)
print(X.shape)
# write full matrix out
out = {}
out['X'] = X # this one is heavy!
print("writing", Config.tfidf_path)
safe_pickle_dump(out, Config.tfidf_path)
# writing lighter metadata information into a separate (smaller) file
out = {}
out['vocab'] = v.vocabulary_
out['idf'] = v._tfidf.idf_
out['pids'] = pids # a full idvv string (id and version number)
out['ptoi'] = {x: i for i, x in enumerate(pids)} # pid to ix in X mapping
print("writing", Config.meta_path)
safe_pickle_dump(out, Config.meta_path)
'''
print("precomputing nearest neighbor queries in batches...")
X = X.todense() # originally it's a sparse matrix
sim_dict = {}
batch_size = 1000
for i in range(0, len(pids), batch_size):
i1 = min(len(pids), i+batch_size)
xquery = X[i:i1] # BxD
ds = -np.asarray(np.dot(X, xquery.T)) # NxD * DxB => NxB
IX = np.argsort(ds, axis=0) # NxB
for j in range(i1-i):
sim_dict[pids[i+j]] = [pids[q] for q in list(IX[:50, j])]
print('%d/%d...' % (i, len(pids)))
print("writing", Config.sim_path)
safe_pickle_dump(sim_dict, Config.sim_path)
'''
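# Query sketch (added illustration): load the files written above and rank the nearest
# neighbours of one paper by dot product (rows are already L2-normalised by the tfidf).
import pickle
with open(Config.tfidf_path, 'rb') as f:
    X_loaded = pickle.load(f)['X']
with open(Config.meta_path, 'rb') as f:
    meta = pickle.load(f)
query_ix = meta['ptoi'][meta['pids'][0]]  # pick an arbitrary paper
scores = np.asarray(X_loaded.dot(X_loaded[query_ix].T).todense()).ravel()
nearest = [meta['pids'][i] for i in np.argsort(-scores)[:10]]
print('nearest to %s: %s' % (meta['pids'][0], nearest))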
| 30.16129
| 86
| 0.665241
|
from random import shuffle, seed
import numpy as np
from sklearn.feature_extraction.text import TfidfVectorizer
from utils import Config, safe_pickle_dump, load_db
seed(1337)
max_train = 50000
max_features = 50000
db = load_db()
v = TfidfVectorizer(input='content',
encoding='utf-8', decode_error='replace', strip_accents='unicode',
lowercase=True, analyzer='word', stop_words='english',
token_pattern=r'(?u)\b[a-zA-Z_][a-zA-Z0-9_]+\b',
ngram_range=(1, 2), max_features=max_features,
norm='l2', use_idf=True, smooth_idf=True, sublinear_tf=True,
max_df=1.0, min_df=1)
def make_corpus(paths):
for p in paths:
yield db[p].get('summary', '')
txt_paths = list(db.keys())
pids = []
for pid, j in db.items():
if '_rawid' in j:
idvv = '%sv%d' % (j['_rawid'], j['_version'])
else:
idvv = pid
pids.append(idvv)
train_txt_paths = list(txt_paths)
shuffle(train_txt_paths)
train_txt_paths = train_txt_paths[:min(
len(train_txt_paths), max_train)]
print("training on %d documents..." % (len(train_txt_paths), ))
train_corpus = make_corpus(train_txt_paths)
v.fit(train_corpus)
print("transforming %d documents..." % (len(txt_paths), ))
corpus = make_corpus(txt_paths)
X = v.transform(corpus)
print(X.shape)
out = {}
out['X'] = X
print("writing", Config.tfidf_path)
safe_pickle_dump(out, Config.tfidf_path)
out = {}
out['vocab'] = v.vocabulary_
out['idf'] = v._tfidf.idf_
out['pids'] = pids
out['ptoi'] = {x: i for i, x in enumerate(pids)}
print("writing", Config.meta_path)
safe_pickle_dump(out, Config.meta_path)
| true
| true
|
1c41e79428ccc78350c14e36be5eb7e42d55120c
| 1,332
|
py
|
Python
|
onnx/backend/test/case/node/sub.py
|
MISC-FORKS-cqc/onnx
|
c50f329dcde038aa364082e0942764d36fcd1448
|
[
"MIT"
] | null | null | null |
onnx/backend/test/case/node/sub.py
|
MISC-FORKS-cqc/onnx
|
c50f329dcde038aa364082e0942764d36fcd1448
|
[
"MIT"
] | null | null | null |
onnx/backend/test/case/node/sub.py
|
MISC-FORKS-cqc/onnx
|
c50f329dcde038aa364082e0942764d36fcd1448
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np # type: ignore
import onnx
from ..base import Base
from . import expect
class Sub(Base):
@staticmethod
def export(): # type: () -> None
node = onnx.helper.make_node(
'Sub',
inputs=['x', 'y'],
outputs=['z'],
)
x = np.array([1, 2, 3]).astype(np.float32)
y = np.array([3, 2, 1]).astype(np.float32)
z = x - y # expected output [-2., 0., 2.]
expect(node, inputs=[x, y], outputs=[z],
name='test_sub_example')
x = np.random.randn(3, 4, 5).astype(np.float32)
y = np.random.randn(3, 4, 5).astype(np.float32)
z = x - y
expect(node, inputs=[x, y], outputs=[z],
name='test_sub')
@staticmethod
def export_sub_broadcast(): # type: () -> None
node = onnx.helper.make_node(
'Sub',
inputs=['x', 'y'],
outputs=['z'],
broadcast=1,
)
x = np.random.randn(3, 4, 5).astype(np.float32)
y = np.random.randn(5).astype(np.float32)
z = x - y
expect(node, inputs=[x, y], outputs=[z],
name='test_sub_bcast')
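# Shape sketch (added illustration, not an ONNX test case): the broadcast variant
# above relies on standard trailing-axis broadcasting, i.e. (3, 4, 5) - (5,) keeps
# the (3, 4, 5) shape.
_bx = np.random.randn(3, 4, 5).astype(np.float32)
_by = np.random.randn(5).astype(np.float32)
assert (_bx - _by).shape == (3, 4, 5)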
| 27.183673
| 55
| 0.534535
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import onnx
from ..base import Base
from . import expect
class Sub(Base):
@staticmethod
def export():
node = onnx.helper.make_node(
'Sub',
inputs=['x', 'y'],
outputs=['z'],
)
x = np.array([1, 2, 3]).astype(np.float32)
y = np.array([3, 2, 1]).astype(np.float32)
z = x - y
expect(node, inputs=[x, y], outputs=[z],
name='test_sub_example')
x = np.random.randn(3, 4, 5).astype(np.float32)
y = np.random.randn(3, 4, 5).astype(np.float32)
z = x - y
expect(node, inputs=[x, y], outputs=[z],
name='test_sub')
@staticmethod
def export_sub_broadcast():
node = onnx.helper.make_node(
'Sub',
inputs=['x', 'y'],
outputs=['z'],
broadcast=1,
)
x = np.random.randn(3, 4, 5).astype(np.float32)
y = np.random.randn(5).astype(np.float32)
z = x - y
expect(node, inputs=[x, y], outputs=[z],
name='test_sub_bcast')
| true
| true
|
1c41e7a1c7e0d3146cfddb69b0578fd1dab77acf
| 4,296
|
py
|
Python
|
var/spack/repos/builtin/packages/parsec/package.py
|
player1537-forks/spack
|
822b7632222ec5a91dc7b7cda5fc0e08715bd47c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
var/spack/repos/builtin/packages/parsec/package.py
|
player1537-forks/spack
|
822b7632222ec5a91dc7b7cda5fc0e08715bd47c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 6
|
2022-01-08T08:41:11.000Z
|
2022-03-14T19:28:07.000Z
|
var/spack/repos/builtin/packages/parsec/package.py
|
player1537-forks/spack
|
822b7632222ec5a91dc7b7cda5fc0e08715bd47c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
#
import llnl.util.tty as tty
from spack import *
class Parsec(CMakePackage, CudaPackage):
"""PaRSEC: the Parallel Runtime Scheduler and Execution Controller
    PaRSEC is a runtime and a programming toolbox that supports the design and
parallel execution of micro-tasks on distributed, heterogeneous systems.
"""
homepage = "https://icl.utk.edu/dte"
git = "https://bitbucket.org/icldistcomp/parsec.git"
url = "https://bitbucket.org/icldistcomp/parsec/get/parsec-3.0.2012.tar.bz2"
list_url = "https://bitbucket.org/icldistcomp/parsec/downloads/?tab=tags"
maintainers = ['abouteiller', 'bosilca', 'herault']
tags = ['e4s']
test_requires_compiler = True
version('master', branch='master')
version('3.0.2012', sha256='f565bcfffe106be8237b6aea3e83a5770607b7236606414b6f270244fa6ec3bc')
version('1.1.0', sha256='d2928033c121000ae0a554f1e7f757c1f22274a8b74457ecd52744ae1f70b95a', url='https://bitbucket.org/icldistcomp/parsec/get/v1.1.0.tar.bz2')
variant('build_type', default='RelWithDebInfo', description='CMake build type', values=('Debug', 'Release', 'RelWithDebInfo'))
variant('shared', default=True, description='Build a shared library')
variant('cuda', default=True, description='Build with CUDA')
variant('profile', default=False, description='Generate profiling data')
variant('debug_verbose', default=False, description='Debug version with verbose and paranoid (incurs performance overhead!)')
conflicts('+debug_verbose build_type=Release', msg='You need to set build_type=Debug for +debug_verbose')
conflicts('+debug_verbose build_type=RelWithDebInfo', msg='You need to set build_type=Debug for +debug_verbose')
# TODO: Spack does not handle cross-compilation atm
# variant('xcompile', default=False, description='Cross compile')
depends_on('cmake@3.16:', type='build')
depends_on('python', type='build')
depends_on('flex', type='build')
depends_on('bison', type='build')
depends_on('hwloc')
depends_on('mpi')
depends_on('papi', when='+profile')
depends_on('python', type=('build', 'run'), when='+profile')
depends_on('py-cython', type=('build', 'run'), when='+profile')
depends_on('py-pandas', type=('build', 'run'), when='+profile')
depends_on('py-matplotlib', type=('build', 'run'), when='+profile')
depends_on('py-tables', type=('build', 'run'), when='+profile')
def cmake_args(self):
args = [
self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
self.define_from_variant('PARSEC_GPU_WITH_CUDA', 'cuda'),
self.define_from_variant('PARSEC_PROF_TRACE', 'profile'),
self.define_from_variant('PARSEC_DEBUG_HISTORY', 'debug_verbose'),
self.define_from_variant('PARSEC_DEBUG_PARANOID', 'debug_verbose'),
]
return args
@run_after('install')
@on_package_attributes(run_tests=True)
def check(self):
"""Run ctest after building binary."""
with working_dir(self.build_directory):
try:
ctest('--output-on-failure', '-j1')
except ProcessError:
warn = 'ctest tests failed.\n'
warn += 'Please report this failure to:\n'
warn += 'https://bitbucket.org/icldistcomp/parsec/issues'
tty.msg(warn)
def test(self):
"""Compile and run a user program with the installed library"""
with working_dir(join_path(self.install_test_root,
'contrib/build_with_parsec')):
self.run_test('cmake',
options=['.'],
purpose='Check if CMake can find PaRSEC and its targets')
self.run_test('make',
purpose='Check if tests can compile')
self.run_test('./dtd_test_allreduce')
self.run_test('./write_check')
@run_after('install')
def cache_test_sources(self):
srcs = ['contrib/build_with_parsec']
self.cache_extra_test_sources(srcs)
| 46.193548
| 162
| 0.657356
|
import llnl.util.tty as tty
from spack import *
class Parsec(CMakePackage, CudaPackage):
homepage = "https://icl.utk.edu/dte"
git = "https://bitbucket.org/icldistcomp/parsec.git"
url = "https://bitbucket.org/icldistcomp/parsec/get/parsec-3.0.2012.tar.bz2"
list_url = "https://bitbucket.org/icldistcomp/parsec/downloads/?tab=tags"
maintainers = ['abouteiller', 'bosilca', 'herault']
tags = ['e4s']
test_requires_compiler = True
version('master', branch='master')
version('3.0.2012', sha256='f565bcfffe106be8237b6aea3e83a5770607b7236606414b6f270244fa6ec3bc')
version('1.1.0', sha256='d2928033c121000ae0a554f1e7f757c1f22274a8b74457ecd52744ae1f70b95a', url='https://bitbucket.org/icldistcomp/parsec/get/v1.1.0.tar.bz2')
variant('build_type', default='RelWithDebInfo', description='CMake build type', values=('Debug', 'Release', 'RelWithDebInfo'))
variant('shared', default=True, description='Build a shared library')
variant('cuda', default=True, description='Build with CUDA')
variant('profile', default=False, description='Generate profiling data')
variant('debug_verbose', default=False, description='Debug version with verbose and paranoid (incurs performance overhead!)')
conflicts('+debug_verbose build_type=Release', msg='You need to set build_type=Debug for +debug_verbose')
conflicts('+debug_verbose build_type=RelWithDebInfo', msg='You need to set build_type=Debug for +debug_verbose')
depends_on('cmake@3.16:', type='build')
depends_on('python', type='build')
depends_on('flex', type='build')
depends_on('bison', type='build')
depends_on('hwloc')
depends_on('mpi')
depends_on('papi', when='+profile')
depends_on('python', type=('build', 'run'), when='+profile')
depends_on('py-cython', type=('build', 'run'), when='+profile')
depends_on('py-pandas', type=('build', 'run'), when='+profile')
depends_on('py-matplotlib', type=('build', 'run'), when='+profile')
depends_on('py-tables', type=('build', 'run'), when='+profile')
def cmake_args(self):
args = [
self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
self.define_from_variant('PARSEC_GPU_WITH_CUDA', 'cuda'),
self.define_from_variant('PARSEC_PROF_TRACE', 'profile'),
self.define_from_variant('PARSEC_DEBUG_HISTORY', 'debug_verbose'),
self.define_from_variant('PARSEC_DEBUG_PARANOID', 'debug_verbose'),
]
return args
@run_after('install')
@on_package_attributes(run_tests=True)
def check(self):
with working_dir(self.build_directory):
try:
ctest('--output-on-failure', '-j1')
except ProcessError:
warn = 'ctest tests failed.\n'
warn += 'Please report this failure to:\n'
warn += 'https://bitbucket.org/icldistcomp/parsec/issues'
tty.msg(warn)
def test(self):
with working_dir(join_path(self.install_test_root,
'contrib/build_with_parsec')):
self.run_test('cmake',
options=['.'],
purpose='Check if CMake can find PaRSEC and its targets')
self.run_test('make',
purpose='Check if tests can compile')
self.run_test('./dtd_test_allreduce')
self.run_test('./write_check')
@run_after('install')
def cache_test_sources(self):
srcs = ['contrib/build_with_parsec']
self.cache_extra_test_sources(srcs)
| true
| true
|
1c41e7ca0948d717b2ec25a4757732e776cf92a4
| 27,997
|
py
|
Python
|
heat/tests/test_instance_group_update_policy.py
|
NeCTAR-RC/heat
|
b152817f192a7b46514793633ddc968c1fe1ebf8
|
[
"Apache-2.0"
] | 1
|
2015-02-26T03:23:23.000Z
|
2015-02-26T03:23:23.000Z
|
heat/tests/test_instance_group_update_policy.py
|
NeCTAR-RC/heat
|
b152817f192a7b46514793633ddc968c1fe1ebf8
|
[
"Apache-2.0"
] | null | null | null |
heat/tests/test_instance_group_update_policy.py
|
NeCTAR-RC/heat
|
b152817f192a7b46514793633ddc968c1fe1ebf8
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import json
import mox
from testtools.matchers import MatchesRegex
from heat.common import exception
from heat.common import template_format
from heat.engine import parser
from heat.engine.resources import image
from heat.engine.resources import instance
from heat.engine.resources import nova_keypair
from heat.tests.common import HeatTestCase
from heat.tests import utils
from heat.tests.v1_1 import fakes
ig_tmpl_without_updt_policy = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to create multiple instances.",
"Parameters" : {},
"Resources" : {
"JobServerGroup" : {
"Type" : "OS::Heat::InstanceGroup",
"Properties" : {
"LaunchConfigurationName" : { "Ref" : "JobServerConfig" },
"Size" : "10",
"AvailabilityZones" : ["nova"]
}
},
"JobServerConfig" : {
"Type" : "AWS::AutoScaling::LaunchConfiguration",
"Properties": {
"ImageId" : "foo",
"InstanceType" : "m1.medium",
"KeyName" : "test",
"SecurityGroups" : [ "sg-1" ],
"UserData" : "jsconfig data"
}
}
}
}
'''
ig_tmpl_with_bad_updt_policy = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to create multiple instances.",
"Parameters" : {},
"Resources" : {
"JobServerGroup" : {
"UpdatePolicy" : {
"RollingUpdate": "foo"
},
"Type" : "OS::Heat::InstanceGroup",
"Properties" : {
"LaunchConfigurationName" : { "Ref" : "JobServerConfig" },
"Size" : "10",
"AvailabilityZones" : ["nova"]
}
},
"JobServerConfig" : {
"Type" : "AWS::AutoScaling::LaunchConfiguration",
"Properties": {
"ImageId" : "foo",
"InstanceType" : "m1.medium",
"KeyName" : "test",
"SecurityGroups" : [ "sg-1" ],
"UserData" : "jsconfig data"
}
}
}
}
'''
ig_tmpl_with_default_updt_policy = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to create multiple instances.",
"Parameters" : {},
"Resources" : {
"JobServerGroup" : {
"UpdatePolicy" : {
"RollingUpdate" : {
}
},
"Type" : "OS::Heat::InstanceGroup",
"Properties" : {
"LaunchConfigurationName" : { "Ref" : "JobServerConfig" },
"Size" : "10",
"AvailabilityZones" : ["nova"]
}
},
"JobServerConfig" : {
"Type" : "AWS::AutoScaling::LaunchConfiguration",
"Properties": {
"ImageId" : "foo",
"InstanceType" : "m1.medium",
"KeyName" : "test",
"SecurityGroups" : [ "sg-1" ],
"UserData" : "jsconfig data"
}
}
}
}
'''
ig_tmpl_with_updt_policy = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to create multiple instances.",
"Parameters" : {},
"Resources" : {
"JobServerGroup" : {
"UpdatePolicy" : {
"RollingUpdate" : {
"MinInstancesInService" : "1",
"MaxBatchSize" : "2",
"PauseTime" : "PT1S"
}
},
"Type" : "OS::Heat::InstanceGroup",
"Properties" : {
"LaunchConfigurationName" : { "Ref" : "JobServerConfig" },
"Size" : "10",
"AvailabilityZones" : ["nova"]
}
},
"JobServerConfig" : {
"Type" : "AWS::AutoScaling::LaunchConfiguration",
"Properties": {
"ImageId" : "foo",
"InstanceType" : "m1.medium",
"KeyName" : "test",
"SecurityGroups" : [ "sg-1" ],
"UserData" : "jsconfig data"
}
}
}
}
'''
class InstanceGroupTest(HeatTestCase):
def setUp(self):
super(InstanceGroupTest, self).setUp()
self.fc = fakes.FakeClient()
utils.setup_dummy_db()
def _stub_validate(self):
self.m.StubOutWithMock(parser.Stack, 'validate')
parser.Stack.validate().MultipleTimes()
self.m.StubOutWithMock(nova_keypair.KeypairConstraint, 'validate')
nova_keypair.KeypairConstraint.validate(
mox.IgnoreArg(), mox.IgnoreArg()).MultipleTimes().AndReturn(True)
self.m.StubOutWithMock(image.ImageConstraint, 'validate')
image.ImageConstraint.validate(
mox.IgnoreArg(), mox.IgnoreArg()).MultipleTimes().AndReturn(True)
def _stub_grp_create(self, capacity):
"""
Expect creation of instances to capacity
"""
self._stub_validate()
self.m.StubOutWithMock(instance.Instance, 'handle_create')
self.m.StubOutWithMock(instance.Instance, 'check_create_complete')
cookie = object()
for x in range(capacity):
instance.Instance.handle_create().AndReturn(cookie)
instance.Instance.check_create_complete(cookie).AndReturn(True)
def _stub_grp_replace(self,
num_creates_expected_on_updt=0,
num_deletes_expected_on_updt=0):
"""
Expect update replacement of the instances
"""
self._stub_validate()
self.m.StubOutWithMock(instance.Instance, 'handle_create')
self.m.StubOutWithMock(instance.Instance, 'check_create_complete')
self.m.StubOutWithMock(instance.Instance, 'destroy')
cookie = object()
for i in range(num_creates_expected_on_updt):
instance.Instance.handle_create().AndReturn(cookie)
instance.Instance.check_create_complete(cookie).AndReturn(True)
for i in range(num_deletes_expected_on_updt):
instance.Instance.destroy().AndReturn(None)
def _stub_grp_update(self,
num_creates_expected_on_updt=0,
num_deletes_expected_on_updt=0):
"""
Expect update of the instances
"""
self.m.StubOutWithMock(instance.Instance, 'nova')
instance.Instance.nova().MultipleTimes().AndReturn(self.fc)
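        # Fake a server stuck in VERIFY_RESIZE so the stubbed resize and
        # confirmResize calls below succeed for every in-place instance update.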
def activate_status(server):
server.status = 'VERIFY_RESIZE'
return_server = self.fc.servers.list()[1]
return_server.id = 1234
return_server.get = activate_status.__get__(return_server)
self.m.StubOutWithMock(self.fc.servers, 'get')
self.m.StubOutWithMock(self.fc.client, 'post_servers_1234_action')
self.fc.servers.get(mox.IgnoreArg()).\
MultipleTimes().AndReturn(return_server)
self.fc.client.post_servers_1234_action(
body={'resize': {'flavorRef': 3}}).\
MultipleTimes().AndReturn((202, None))
self.fc.client.post_servers_1234_action(
body={'confirmResize': None}).\
MultipleTimes().AndReturn((202, None))
self._stub_grp_replace(num_creates_expected_on_updt,
num_deletes_expected_on_updt)
def get_launch_conf_name(self, stack, ig_name):
return stack[ig_name].properties['LaunchConfigurationName']
def test_parse_without_update_policy(self):
tmpl = template_format.parse(ig_tmpl_without_updt_policy)
stack = utils.parse_stack(tmpl)
stack.validate()
grp = stack['JobServerGroup']
self.assertFalse(grp.update_policy['RollingUpdate'])
def test_parse_with_update_policy(self):
tmpl = template_format.parse(ig_tmpl_with_updt_policy)
stack = utils.parse_stack(tmpl)
stack.validate()
grp = stack['JobServerGroup']
self.assertTrue(grp.update_policy)
self.assertEqual(1, len(grp.update_policy))
self.assertIn('RollingUpdate', grp.update_policy)
policy = grp.update_policy['RollingUpdate']
self.assertTrue(policy and len(policy) > 0)
self.assertEqual(1, int(policy['MinInstancesInService']))
self.assertEqual(2, int(policy['MaxBatchSize']))
self.assertEqual('PT1S', policy['PauseTime'])
def test_parse_with_default_update_policy(self):
tmpl = template_format.parse(ig_tmpl_with_default_updt_policy)
stack = utils.parse_stack(tmpl)
stack.validate()
grp = stack['JobServerGroup']
self.assertTrue(grp.update_policy)
self.assertEqual(1, len(grp.update_policy))
self.assertIn('RollingUpdate', grp.update_policy)
policy = grp.update_policy['RollingUpdate']
self.assertTrue(policy and len(policy) > 0)
self.assertEqual(0, int(policy['MinInstancesInService']))
self.assertEqual(1, int(policy['MaxBatchSize']))
self.assertEqual('PT0S', policy['PauseTime'])
def test_parse_with_bad_update_policy(self):
tmpl = template_format.parse(ig_tmpl_with_bad_updt_policy)
stack = utils.parse_stack(tmpl)
self.assertRaises(exception.StackValidationFailed, stack.validate)
def test_parse_with_bad_pausetime_in_update_policy(self):
tmpl = template_format.parse(ig_tmpl_with_updt_policy)
group = tmpl['Resources']['JobServerGroup']
policy = group['UpdatePolicy']['RollingUpdate']
# test against some random string
policy['PauseTime'] = 'ABCD1234'
stack = utils.parse_stack(tmpl)
self.assertRaises(exception.StackValidationFailed, stack.validate)
# test unsupported designator
policy['PauseTime'] = 'P1YT1H'
stack = utils.parse_stack(tmpl)
self.assertRaises(exception.StackValidationFailed, stack.validate)
def validate_update_policy_diff(self, current, updated):
# load current stack
current_tmpl = template_format.parse(current)
current_stack = utils.parse_stack(current_tmpl)
# get the json snippet for the current InstanceGroup resource
current_grp = current_stack['JobServerGroup']
current_snippets = dict((n, r.parsed_template())
for n, r in current_stack.items())
current_grp_json = current_snippets[current_grp.name]
# load the updated stack
updated_tmpl = template_format.parse(updated)
updated_stack = utils.parse_stack(updated_tmpl)
# get the updated json snippet for the InstanceGroup resource in the
# context of the current stack
updated_grp = updated_stack['JobServerGroup']
updated_grp_json = current_stack.resolve_runtime_data(updated_grp.t)
# identify the template difference
tmpl_diff = updated_grp.update_template_diff(
updated_grp_json, current_grp_json)
updated_policy = (updated_grp.t['UpdatePolicy']
if 'UpdatePolicy' in updated_grp.t else None)
expected = {u'UpdatePolicy': updated_policy}
self.assertEqual(expected, tmpl_diff)
def test_update_policy_added(self):
self.validate_update_policy_diff(ig_tmpl_without_updt_policy,
ig_tmpl_with_updt_policy)
def test_update_policy_updated(self):
updt_template = json.loads(ig_tmpl_with_updt_policy)
grp = updt_template['Resources']['JobServerGroup']
policy = grp['UpdatePolicy']['RollingUpdate']
policy['MinInstancesInService'] = '2'
policy['MaxBatchSize'] = '4'
policy['PauseTime'] = 'PT1M30S'
self.validate_update_policy_diff(ig_tmpl_with_updt_policy,
json.dumps(updt_template))
def test_update_policy_removed(self):
self.validate_update_policy_diff(ig_tmpl_with_updt_policy,
ig_tmpl_without_updt_policy)
def update_instance_group(self, init_template, updt_template,
num_updates_expected_on_updt,
num_creates_expected_on_updt,
num_deletes_expected_on_updt,
update_replace):
# setup stack from the initial template
tmpl = template_format.parse(init_template)
stack = utils.parse_stack(tmpl)
stack.validate()
# test stack create
size = int(stack['JobServerGroup'].properties['Size'])
self._stub_grp_create(size)
self.m.ReplayAll()
stack.create()
self.m.VerifyAll()
self.assertEqual(('CREATE', 'COMPLETE'), stack.state)
# test that update policy is loaded
current_grp = stack['JobServerGroup']
self.assertIn('RollingUpdate', current_grp.update_policy)
current_policy = current_grp.update_policy['RollingUpdate']
self.assertTrue(current_policy)
self.assertTrue(len(current_policy) > 0)
init_grp_tmpl = tmpl['Resources']['JobServerGroup']
init_roll_updt = init_grp_tmpl['UpdatePolicy']['RollingUpdate']
init_batch_sz = int(init_roll_updt['MaxBatchSize'])
self.assertEqual(init_batch_sz, int(current_policy['MaxBatchSize']))
# test that physical resource name of launch configuration is used
conf = stack['JobServerConfig']
conf_name_pattern = '%s-JobServerConfig-[a-zA-Z0-9]+$' % stack.name
self.assertThat(conf.FnGetRefId(), MatchesRegex(conf_name_pattern))
# get launch conf name here to compare result after update
conf_name = self.get_launch_conf_name(stack, 'JobServerGroup')
# test the number of instances created
nested = stack['JobServerGroup'].nested()
self.assertEqual(size, len(nested.resources))
# clean up for next test
self.m.UnsetStubs()
# saves info from initial list of instances for comparison later
init_instances = current_grp.get_instances()
init_names = current_grp.get_instance_names()
init_images = [(i.name, i.t['Properties']['ImageId'])
for i in init_instances]
init_flavors = [(i.name, i.t['Properties']['InstanceType'])
for i in init_instances]
# test stack update
updated_tmpl = template_format.parse(updt_template)
updated_stack = utils.parse_stack(updated_tmpl)
new_grp_tmpl = updated_tmpl['Resources']['JobServerGroup']
new_roll_updt = new_grp_tmpl['UpdatePolicy']['RollingUpdate']
new_batch_sz = int(new_roll_updt['MaxBatchSize'])
self.assertNotEqual(new_batch_sz, init_batch_sz)
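        # The replace path recreates every instance (used when the ImageId
        # changes); the non-replace path resizes the existing servers in place
        # (used when only the InstanceType changes).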
if update_replace:
self._stub_grp_replace(size, size)
else:
self._stub_grp_update(num_creates_expected_on_updt,
num_deletes_expected_on_updt)
self.stub_wallclock()
self.m.ReplayAll()
stack.update(updated_stack)
self.m.VerifyAll()
self.assertEqual(('UPDATE', 'COMPLETE'), stack.state)
# test that the update policy is updated
updated_grp = stack['JobServerGroup']
self.assertIn('RollingUpdate', updated_grp.update_policy)
updated_policy = updated_grp.update_policy['RollingUpdate']
self.assertTrue(updated_policy)
self.assertTrue(len(updated_policy) > 0)
self.assertEqual(new_batch_sz, int(updated_policy['MaxBatchSize']))
# test that the launch configuration is replaced
updated_conf_name = self.get_launch_conf_name(stack, 'JobServerGroup')
self.assertNotEqual(conf_name, updated_conf_name)
# test that the group size are the same
updt_instances = updated_grp.get_instances()
updt_names = updated_grp.get_instance_names()
self.assertEqual(len(init_names), len(updt_names))
# test that the appropriate number of instance names are the same
matched_names = set(updt_names) & set(init_names)
self.assertEqual(num_updates_expected_on_updt, len(matched_names))
# test that the appropriate number of new instances are created
self.assertEqual(num_creates_expected_on_updt,
len(set(updt_names) - set(init_names)))
# test that the appropriate number of instances are deleted
self.assertEqual(num_deletes_expected_on_updt,
len(set(init_names) - set(updt_names)))
# test that the older instances are the ones being deleted
if num_deletes_expected_on_updt > 0:
deletes_expected = init_names[:num_deletes_expected_on_updt]
self.assertNotIn(deletes_expected, updt_names)
# test if instances are updated
if update_replace:
# test that the image id is changed for all instances
updt_images = [(i.name, i.t['Properties']['ImageId'])
for i in updt_instances]
self.assertEqual(0, len(set(updt_images) & set(init_images)))
else:
# test that instance type is changed for all instances
updt_flavors = [(i.name, i.t['Properties']['InstanceType'])
for i in updt_instances]
self.assertEqual(0, len(set(updt_flavors) & set(init_flavors)))
def test_instance_group_update_replace(self):
"""
Test simple update replace with no conflict in batch size and
minimum instances in service.
"""
updt_template = json.loads(ig_tmpl_with_updt_policy)
grp = updt_template['Resources']['JobServerGroup']
policy = grp['UpdatePolicy']['RollingUpdate']
policy['MinInstancesInService'] = '1'
policy['MaxBatchSize'] = '3'
config = updt_template['Resources']['JobServerConfig']
config['Properties']['ImageId'] = 'bar'
self.update_instance_group(ig_tmpl_with_updt_policy,
json.dumps(updt_template),
num_updates_expected_on_updt=10,
num_creates_expected_on_updt=0,
num_deletes_expected_on_updt=0,
update_replace=True)
def test_instance_group_update_replace_with_adjusted_capacity(self):
"""
Test update replace with capacity adjustment due to conflict in
batch size and minimum instances in service.
"""
updt_template = json.loads(ig_tmpl_with_updt_policy)
grp = updt_template['Resources']['JobServerGroup']
policy = grp['UpdatePolicy']['RollingUpdate']
policy['MinInstancesInService'] = '8'
policy['MaxBatchSize'] = '4'
config = updt_template['Resources']['JobServerConfig']
config['Properties']['ImageId'] = 'bar'
self.update_instance_group(ig_tmpl_with_updt_policy,
json.dumps(updt_template),
num_updates_expected_on_updt=8,
num_creates_expected_on_updt=2,
num_deletes_expected_on_updt=2,
update_replace=True)
def test_instance_group_update_replace_huge_batch_size(self):
"""
Test update replace with a huge batch size.
"""
updt_template = json.loads(ig_tmpl_with_updt_policy)
group = updt_template['Resources']['JobServerGroup']
policy = group['UpdatePolicy']['RollingUpdate']
policy['MinInstancesInService'] = '0'
policy['MaxBatchSize'] = '20'
config = updt_template['Resources']['JobServerConfig']
config['Properties']['ImageId'] = 'bar'
self.update_instance_group(ig_tmpl_with_updt_policy,
json.dumps(updt_template),
num_updates_expected_on_updt=10,
num_creates_expected_on_updt=0,
num_deletes_expected_on_updt=0,
update_replace=True)
def test_instance_group_update_replace_huge_min_in_service(self):
"""
Test update replace with a huge number of minimum instances in service.
"""
updt_template = json.loads(ig_tmpl_with_updt_policy)
group = updt_template['Resources']['JobServerGroup']
policy = group['UpdatePolicy']['RollingUpdate']
policy['MinInstancesInService'] = '20'
policy['MaxBatchSize'] = '1'
policy['PauseTime'] = 'PT0S'
config = updt_template['Resources']['JobServerConfig']
config['Properties']['ImageId'] = 'bar'
self.update_instance_group(ig_tmpl_with_updt_policy,
json.dumps(updt_template),
num_updates_expected_on_updt=9,
num_creates_expected_on_updt=1,
num_deletes_expected_on_updt=1,
update_replace=True)
def test_instance_group_update_no_replace(self):
"""
Test simple update only and no replace (i.e. updated instance flavor
in Launch Configuration) with no conflict in batch size and
minimum instances in service.
"""
updt_template = json.loads(copy.deepcopy(ig_tmpl_with_updt_policy))
group = updt_template['Resources']['JobServerGroup']
policy = group['UpdatePolicy']['RollingUpdate']
policy['MinInstancesInService'] = '1'
policy['MaxBatchSize'] = '3'
policy['PauseTime'] = 'PT0S'
config = updt_template['Resources']['JobServerConfig']
config['Properties']['InstanceType'] = 'm1.large'
self.update_instance_group(ig_tmpl_with_updt_policy,
json.dumps(updt_template),
num_updates_expected_on_updt=10,
num_creates_expected_on_updt=0,
num_deletes_expected_on_updt=0,
update_replace=False)
def test_instance_group_update_no_replace_with_adjusted_capacity(self):
"""
Test update only and no replace (i.e. updated instance flavor in
Launch Configuration) with capacity adjustment due to conflict in
batch size and minimum instances in service.
"""
updt_template = json.loads(copy.deepcopy(ig_tmpl_with_updt_policy))
group = updt_template['Resources']['JobServerGroup']
policy = group['UpdatePolicy']['RollingUpdate']
policy['MinInstancesInService'] = '8'
policy['MaxBatchSize'] = '4'
policy['PauseTime'] = 'PT0S'
config = updt_template['Resources']['JobServerConfig']
config['Properties']['InstanceType'] = 'm1.large'
self.update_instance_group(ig_tmpl_with_updt_policy,
json.dumps(updt_template),
num_updates_expected_on_updt=8,
num_creates_expected_on_updt=2,
num_deletes_expected_on_updt=2,
update_replace=False)
def test_instance_group_update_policy_removed(self):
# setup stack from the initial template
tmpl = template_format.parse(ig_tmpl_with_updt_policy)
stack = utils.parse_stack(tmpl)
# test stack create
size = int(stack['JobServerGroup'].properties['Size'])
self._stub_grp_create(size)
self.m.ReplayAll()
stack.create()
self.m.VerifyAll()
self.assertEqual(('CREATE', 'COMPLETE'), stack.state)
# test that update policy is loaded
current_grp = stack['JobServerGroup']
self.assertIn('RollingUpdate', current_grp.update_policy)
current_policy = current_grp.update_policy['RollingUpdate']
self.assertTrue(current_policy)
self.assertTrue(len(current_policy) > 0)
init_grp_tmpl = tmpl['Resources']['JobServerGroup']
init_roll_updt = init_grp_tmpl['UpdatePolicy']['RollingUpdate']
init_batch_sz = int(init_roll_updt['MaxBatchSize'])
self.assertEqual(init_batch_sz, int(current_policy['MaxBatchSize']))
# test that physical resource name of launch configuration is used
conf = stack['JobServerConfig']
conf_name_pattern = '%s-JobServerConfig-[a-zA-Z0-9]+$' % stack.name
self.assertThat(conf.FnGetRefId(), MatchesRegex(conf_name_pattern))
# test the number of instances created
nested = stack['JobServerGroup'].nested()
self.assertEqual(size, len(nested.resources))
# test stack update
updated_tmpl = template_format.parse(ig_tmpl_without_updt_policy)
updated_stack = utils.parse_stack(updated_tmpl)
stack.update(updated_stack)
self.assertEqual(('UPDATE', 'COMPLETE'), stack.state)
# test that update policy is removed
updated_grp = stack['JobServerGroup']
self.assertFalse(updated_grp.update_policy['RollingUpdate'])
def test_instance_group_update_policy_check_timeout(self):
# setup stack from the initial template
tmpl = template_format.parse(ig_tmpl_with_updt_policy)
stack = utils.parse_stack(tmpl)
# test stack create
size = int(stack['JobServerGroup'].properties['Size'])
self._stub_grp_create(size)
self.m.ReplayAll()
stack.create()
self.m.VerifyAll()
self.assertEqual(('CREATE', 'COMPLETE'), stack.state)
# test that update policy is loaded
current_grp = stack['JobServerGroup']
self.assertIn('RollingUpdate', current_grp.update_policy)
current_policy = current_grp.update_policy['RollingUpdate']
self.assertTrue(current_policy)
self.assertTrue(len(current_policy) > 0)
init_grp_tmpl = tmpl['Resources']['JobServerGroup']
init_roll_updt = init_grp_tmpl['UpdatePolicy']['RollingUpdate']
init_batch_sz = int(init_roll_updt['MaxBatchSize'])
self.assertEqual(init_batch_sz, int(current_policy['MaxBatchSize']))
# test the number of instances created
nested = stack['JobServerGroup'].nested()
self.assertEqual(size, len(nested.resources))
# clean up for next test
self.m.UnsetStubs()
# modify the pause time and test for error
new_pause_time = 'PT30M'
updt_template = json.loads(copy.deepcopy(ig_tmpl_with_updt_policy))
group = updt_template['Resources']['JobServerGroup']
policy = group['UpdatePolicy']['RollingUpdate']
policy['PauseTime'] = new_pause_time
config = updt_template['Resources']['JobServerConfig']
config['Properties']['ImageId'] = 'bar'
updated_tmpl = template_format.parse(json.dumps(updt_template))
updated_stack = utils.parse_stack(updated_tmpl)
stack.update(updated_stack)
self.assertEqual(('UPDATE', 'FAILED'), stack.state)
# test that the update policy is updated
updated_grp = stack['JobServerGroup']
self.assertIn('RollingUpdate', updated_grp.update_policy)
updated_policy = updated_grp.update_policy['RollingUpdate']
self.assertTrue(updated_policy)
self.assertTrue(len(updated_policy) > 0)
self.assertEqual(new_pause_time, updated_policy['PauseTime'])
# test that error message match
expected_error_message = ('The current UpdatePolicy will result '
'in stack update timeout.')
self.assertIn(expected_error_message, stack.status_reason)
| 40.871533
| 79
| 0.628353
|
import copy
import json
import mox
from testtools.matchers import MatchesRegex
from heat.common import exception
from heat.common import template_format
from heat.engine import parser
from heat.engine.resources import image
from heat.engine.resources import instance
from heat.engine.resources import nova_keypair
from heat.tests.common import HeatTestCase
from heat.tests import utils
from heat.tests.v1_1 import fakes
ig_tmpl_without_updt_policy = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to create multiple instances.",
"Parameters" : {},
"Resources" : {
"JobServerGroup" : {
"Type" : "OS::Heat::InstanceGroup",
"Properties" : {
"LaunchConfigurationName" : { "Ref" : "JobServerConfig" },
"Size" : "10",
"AvailabilityZones" : ["nova"]
}
},
"JobServerConfig" : {
"Type" : "AWS::AutoScaling::LaunchConfiguration",
"Properties": {
"ImageId" : "foo",
"InstanceType" : "m1.medium",
"KeyName" : "test",
"SecurityGroups" : [ "sg-1" ],
"UserData" : "jsconfig data"
}
}
}
}
'''
ig_tmpl_with_bad_updt_policy = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to create multiple instances.",
"Parameters" : {},
"Resources" : {
"JobServerGroup" : {
"UpdatePolicy" : {
"RollingUpdate": "foo"
},
"Type" : "OS::Heat::InstanceGroup",
"Properties" : {
"LaunchConfigurationName" : { "Ref" : "JobServerConfig" },
"Size" : "10",
"AvailabilityZones" : ["nova"]
}
},
"JobServerConfig" : {
"Type" : "AWS::AutoScaling::LaunchConfiguration",
"Properties": {
"ImageId" : "foo",
"InstanceType" : "m1.medium",
"KeyName" : "test",
"SecurityGroups" : [ "sg-1" ],
"UserData" : "jsconfig data"
}
}
}
}
'''
ig_tmpl_with_default_updt_policy = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to create multiple instances.",
"Parameters" : {},
"Resources" : {
"JobServerGroup" : {
"UpdatePolicy" : {
"RollingUpdate" : {
}
},
"Type" : "OS::Heat::InstanceGroup",
"Properties" : {
"LaunchConfigurationName" : { "Ref" : "JobServerConfig" },
"Size" : "10",
"AvailabilityZones" : ["nova"]
}
},
"JobServerConfig" : {
"Type" : "AWS::AutoScaling::LaunchConfiguration",
"Properties": {
"ImageId" : "foo",
"InstanceType" : "m1.medium",
"KeyName" : "test",
"SecurityGroups" : [ "sg-1" ],
"UserData" : "jsconfig data"
}
}
}
}
'''
ig_tmpl_with_updt_policy = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to create multiple instances.",
"Parameters" : {},
"Resources" : {
"JobServerGroup" : {
"UpdatePolicy" : {
"RollingUpdate" : {
"MinInstancesInService" : "1",
"MaxBatchSize" : "2",
"PauseTime" : "PT1S"
}
},
"Type" : "OS::Heat::InstanceGroup",
"Properties" : {
"LaunchConfigurationName" : { "Ref" : "JobServerConfig" },
"Size" : "10",
"AvailabilityZones" : ["nova"]
}
},
"JobServerConfig" : {
"Type" : "AWS::AutoScaling::LaunchConfiguration",
"Properties": {
"ImageId" : "foo",
"InstanceType" : "m1.medium",
"KeyName" : "test",
"SecurityGroups" : [ "sg-1" ],
"UserData" : "jsconfig data"
}
}
}
}
'''
class InstanceGroupTest(HeatTestCase):
def setUp(self):
super(InstanceGroupTest, self).setUp()
self.fc = fakes.FakeClient()
utils.setup_dummy_db()
def _stub_validate(self):
self.m.StubOutWithMock(parser.Stack, 'validate')
parser.Stack.validate().MultipleTimes()
self.m.StubOutWithMock(nova_keypair.KeypairConstraint, 'validate')
nova_keypair.KeypairConstraint.validate(
mox.IgnoreArg(), mox.IgnoreArg()).MultipleTimes().AndReturn(True)
self.m.StubOutWithMock(image.ImageConstraint, 'validate')
image.ImageConstraint.validate(
mox.IgnoreArg(), mox.IgnoreArg()).MultipleTimes().AndReturn(True)
def _stub_grp_create(self, capacity):
self._stub_validate()
self.m.StubOutWithMock(instance.Instance, 'handle_create')
self.m.StubOutWithMock(instance.Instance, 'check_create_complete')
cookie = object()
for x in range(capacity):
instance.Instance.handle_create().AndReturn(cookie)
instance.Instance.check_create_complete(cookie).AndReturn(True)
def _stub_grp_replace(self,
num_creates_expected_on_updt=0,
num_deletes_expected_on_updt=0):
self._stub_validate()
self.m.StubOutWithMock(instance.Instance, 'handle_create')
self.m.StubOutWithMock(instance.Instance, 'check_create_complete')
self.m.StubOutWithMock(instance.Instance, 'destroy')
cookie = object()
for i in range(num_creates_expected_on_updt):
instance.Instance.handle_create().AndReturn(cookie)
instance.Instance.check_create_complete(cookie).AndReturn(True)
for i in range(num_deletes_expected_on_updt):
instance.Instance.destroy().AndReturn(None)
def _stub_grp_update(self,
num_creates_expected_on_updt=0,
num_deletes_expected_on_updt=0):
self.m.StubOutWithMock(instance.Instance, 'nova')
instance.Instance.nova().MultipleTimes().AndReturn(self.fc)
def activate_status(server):
server.status = 'VERIFY_RESIZE'
return_server = self.fc.servers.list()[1]
return_server.id = 1234
return_server.get = activate_status.__get__(return_server)
self.m.StubOutWithMock(self.fc.servers, 'get')
self.m.StubOutWithMock(self.fc.client, 'post_servers_1234_action')
self.fc.servers.get(mox.IgnoreArg()).\
MultipleTimes().AndReturn(return_server)
self.fc.client.post_servers_1234_action(
body={'resize': {'flavorRef': 3}}).\
MultipleTimes().AndReturn((202, None))
self.fc.client.post_servers_1234_action(
body={'confirmResize': None}).\
MultipleTimes().AndReturn((202, None))
self._stub_grp_replace(num_creates_expected_on_updt,
num_deletes_expected_on_updt)
def get_launch_conf_name(self, stack, ig_name):
return stack[ig_name].properties['LaunchConfigurationName']
def test_parse_without_update_policy(self):
tmpl = template_format.parse(ig_tmpl_without_updt_policy)
stack = utils.parse_stack(tmpl)
stack.validate()
grp = stack['JobServerGroup']
self.assertFalse(grp.update_policy['RollingUpdate'])
def test_parse_with_update_policy(self):
tmpl = template_format.parse(ig_tmpl_with_updt_policy)
stack = utils.parse_stack(tmpl)
stack.validate()
grp = stack['JobServerGroup']
self.assertTrue(grp.update_policy)
self.assertEqual(1, len(grp.update_policy))
self.assertIn('RollingUpdate', grp.update_policy)
policy = grp.update_policy['RollingUpdate']
self.assertTrue(policy and len(policy) > 0)
self.assertEqual(1, int(policy['MinInstancesInService']))
self.assertEqual(2, int(policy['MaxBatchSize']))
self.assertEqual('PT1S', policy['PauseTime'])
def test_parse_with_default_update_policy(self):
tmpl = template_format.parse(ig_tmpl_with_default_updt_policy)
stack = utils.parse_stack(tmpl)
stack.validate()
grp = stack['JobServerGroup']
self.assertTrue(grp.update_policy)
self.assertEqual(1, len(grp.update_policy))
self.assertIn('RollingUpdate', grp.update_policy)
policy = grp.update_policy['RollingUpdate']
self.assertTrue(policy and len(policy) > 0)
self.assertEqual(0, int(policy['MinInstancesInService']))
self.assertEqual(1, int(policy['MaxBatchSize']))
self.assertEqual('PT0S', policy['PauseTime'])
def test_parse_with_bad_update_policy(self):
tmpl = template_format.parse(ig_tmpl_with_bad_updt_policy)
stack = utils.parse_stack(tmpl)
self.assertRaises(exception.StackValidationFailed, stack.validate)
def test_parse_with_bad_pausetime_in_update_policy(self):
tmpl = template_format.parse(ig_tmpl_with_updt_policy)
group = tmpl['Resources']['JobServerGroup']
policy = group['UpdatePolicy']['RollingUpdate']
policy['PauseTime'] = 'ABCD1234'
stack = utils.parse_stack(tmpl)
self.assertRaises(exception.StackValidationFailed, stack.validate)
policy['PauseTime'] = 'P1YT1H'
stack = utils.parse_stack(tmpl)
self.assertRaises(exception.StackValidationFailed, stack.validate)
def validate_update_policy_diff(self, current, updated):
current_tmpl = template_format.parse(current)
current_stack = utils.parse_stack(current_tmpl)
current_grp = current_stack['JobServerGroup']
current_snippets = dict((n, r.parsed_template())
for n, r in current_stack.items())
current_grp_json = current_snippets[current_grp.name]
updated_tmpl = template_format.parse(updated)
updated_stack = utils.parse_stack(updated_tmpl)
updated_grp = updated_stack['JobServerGroup']
updated_grp_json = current_stack.resolve_runtime_data(updated_grp.t)
tmpl_diff = updated_grp.update_template_diff(
updated_grp_json, current_grp_json)
updated_policy = (updated_grp.t['UpdatePolicy']
if 'UpdatePolicy' in updated_grp.t else None)
expected = {u'UpdatePolicy': updated_policy}
self.assertEqual(expected, tmpl_diff)
def test_update_policy_added(self):
self.validate_update_policy_diff(ig_tmpl_without_updt_policy,
ig_tmpl_with_updt_policy)
def test_update_policy_updated(self):
updt_template = json.loads(ig_tmpl_with_updt_policy)
grp = updt_template['Resources']['JobServerGroup']
policy = grp['UpdatePolicy']['RollingUpdate']
policy['MinInstancesInService'] = '2'
policy['MaxBatchSize'] = '4'
policy['PauseTime'] = 'PT1M30S'
self.validate_update_policy_diff(ig_tmpl_with_updt_policy,
json.dumps(updt_template))
def test_update_policy_removed(self):
self.validate_update_policy_diff(ig_tmpl_with_updt_policy,
ig_tmpl_without_updt_policy)
def update_instance_group(self, init_template, updt_template,
num_updates_expected_on_updt,
num_creates_expected_on_updt,
num_deletes_expected_on_updt,
update_replace):
tmpl = template_format.parse(init_template)
stack = utils.parse_stack(tmpl)
stack.validate()
size = int(stack['JobServerGroup'].properties['Size'])
self._stub_grp_create(size)
self.m.ReplayAll()
stack.create()
self.m.VerifyAll()
self.assertEqual(('CREATE', 'COMPLETE'), stack.state)
current_grp = stack['JobServerGroup']
self.assertIn('RollingUpdate', current_grp.update_policy)
current_policy = current_grp.update_policy['RollingUpdate']
self.assertTrue(current_policy)
self.assertTrue(len(current_policy) > 0)
init_grp_tmpl = tmpl['Resources']['JobServerGroup']
init_roll_updt = init_grp_tmpl['UpdatePolicy']['RollingUpdate']
init_batch_sz = int(init_roll_updt['MaxBatchSize'])
self.assertEqual(init_batch_sz, int(current_policy['MaxBatchSize']))
conf = stack['JobServerConfig']
conf_name_pattern = '%s-JobServerConfig-[a-zA-Z0-9]+$' % stack.name
self.assertThat(conf.FnGetRefId(), MatchesRegex(conf_name_pattern))
conf_name = self.get_launch_conf_name(stack, 'JobServerGroup')
nested = stack['JobServerGroup'].nested()
self.assertEqual(size, len(nested.resources))
self.m.UnsetStubs()
init_instances = current_grp.get_instances()
init_names = current_grp.get_instance_names()
init_images = [(i.name, i.t['Properties']['ImageId'])
for i in init_instances]
init_flavors = [(i.name, i.t['Properties']['InstanceType'])
for i in init_instances]
updated_tmpl = template_format.parse(updt_template)
updated_stack = utils.parse_stack(updated_tmpl)
new_grp_tmpl = updated_tmpl['Resources']['JobServerGroup']
new_roll_updt = new_grp_tmpl['UpdatePolicy']['RollingUpdate']
new_batch_sz = int(new_roll_updt['MaxBatchSize'])
self.assertNotEqual(new_batch_sz, init_batch_sz)
if update_replace:
self._stub_grp_replace(size, size)
else:
self._stub_grp_update(num_creates_expected_on_updt,
num_deletes_expected_on_updt)
self.stub_wallclock()
self.m.ReplayAll()
stack.update(updated_stack)
self.m.VerifyAll()
self.assertEqual(('UPDATE', 'COMPLETE'), stack.state)
updated_grp = stack['JobServerGroup']
self.assertIn('RollingUpdate', updated_grp.update_policy)
updated_policy = updated_grp.update_policy['RollingUpdate']
self.assertTrue(updated_policy)
self.assertTrue(len(updated_policy) > 0)
self.assertEqual(new_batch_sz, int(updated_policy['MaxBatchSize']))
updated_conf_name = self.get_launch_conf_name(stack, 'JobServerGroup')
self.assertNotEqual(conf_name, updated_conf_name)
updt_instances = updated_grp.get_instances()
updt_names = updated_grp.get_instance_names()
self.assertEqual(len(init_names), len(updt_names))
matched_names = set(updt_names) & set(init_names)
self.assertEqual(num_updates_expected_on_updt, len(matched_names))
self.assertEqual(num_creates_expected_on_updt,
len(set(updt_names) - set(init_names)))
self.assertEqual(num_deletes_expected_on_updt,
len(set(init_names) - set(updt_names)))
if num_deletes_expected_on_updt > 0:
deletes_expected = init_names[:num_deletes_expected_on_updt]
self.assertNotIn(deletes_expected, updt_names)
if update_replace:
updt_images = [(i.name, i.t['Properties']['ImageId'])
for i in updt_instances]
self.assertEqual(0, len(set(updt_images) & set(init_images)))
else:
updt_flavors = [(i.name, i.t['Properties']['InstanceType'])
for i in updt_instances]
self.assertEqual(0, len(set(updt_flavors) & set(init_flavors)))
def test_instance_group_update_replace(self):
updt_template = json.loads(ig_tmpl_with_updt_policy)
grp = updt_template['Resources']['JobServerGroup']
policy = grp['UpdatePolicy']['RollingUpdate']
policy['MinInstancesInService'] = '1'
policy['MaxBatchSize'] = '3'
config = updt_template['Resources']['JobServerConfig']
config['Properties']['ImageId'] = 'bar'
self.update_instance_group(ig_tmpl_with_updt_policy,
json.dumps(updt_template),
num_updates_expected_on_updt=10,
num_creates_expected_on_updt=0,
num_deletes_expected_on_updt=0,
update_replace=True)
def test_instance_group_update_replace_with_adjusted_capacity(self):
updt_template = json.loads(ig_tmpl_with_updt_policy)
grp = updt_template['Resources']['JobServerGroup']
policy = grp['UpdatePolicy']['RollingUpdate']
policy['MinInstancesInService'] = '8'
policy['MaxBatchSize'] = '4'
config = updt_template['Resources']['JobServerConfig']
config['Properties']['ImageId'] = 'bar'
self.update_instance_group(ig_tmpl_with_updt_policy,
json.dumps(updt_template),
num_updates_expected_on_updt=8,
num_creates_expected_on_updt=2,
num_deletes_expected_on_updt=2,
update_replace=True)
def test_instance_group_update_replace_huge_batch_size(self):
updt_template = json.loads(ig_tmpl_with_updt_policy)
group = updt_template['Resources']['JobServerGroup']
policy = group['UpdatePolicy']['RollingUpdate']
policy['MinInstancesInService'] = '0'
policy['MaxBatchSize'] = '20'
config = updt_template['Resources']['JobServerConfig']
config['Properties']['ImageId'] = 'bar'
self.update_instance_group(ig_tmpl_with_updt_policy,
json.dumps(updt_template),
num_updates_expected_on_updt=10,
num_creates_expected_on_updt=0,
num_deletes_expected_on_updt=0,
update_replace=True)
def test_instance_group_update_replace_huge_min_in_service(self):
updt_template = json.loads(ig_tmpl_with_updt_policy)
group = updt_template['Resources']['JobServerGroup']
policy = group['UpdatePolicy']['RollingUpdate']
policy['MinInstancesInService'] = '20'
policy['MaxBatchSize'] = '1'
policy['PauseTime'] = 'PT0S'
config = updt_template['Resources']['JobServerConfig']
config['Properties']['ImageId'] = 'bar'
self.update_instance_group(ig_tmpl_with_updt_policy,
json.dumps(updt_template),
num_updates_expected_on_updt=9,
num_creates_expected_on_updt=1,
num_deletes_expected_on_updt=1,
update_replace=True)
def test_instance_group_update_no_replace(self):
updt_template = json.loads(copy.deepcopy(ig_tmpl_with_updt_policy))
group = updt_template['Resources']['JobServerGroup']
policy = group['UpdatePolicy']['RollingUpdate']
policy['MinInstancesInService'] = '1'
policy['MaxBatchSize'] = '3'
policy['PauseTime'] = 'PT0S'
config = updt_template['Resources']['JobServerConfig']
config['Properties']['InstanceType'] = 'm1.large'
self.update_instance_group(ig_tmpl_with_updt_policy,
json.dumps(updt_template),
num_updates_expected_on_updt=10,
num_creates_expected_on_updt=0,
num_deletes_expected_on_updt=0,
update_replace=False)
def test_instance_group_update_no_replace_with_adjusted_capacity(self):
updt_template = json.loads(copy.deepcopy(ig_tmpl_with_updt_policy))
group = updt_template['Resources']['JobServerGroup']
policy = group['UpdatePolicy']['RollingUpdate']
policy['MinInstancesInService'] = '8'
policy['MaxBatchSize'] = '4'
policy['PauseTime'] = 'PT0S'
config = updt_template['Resources']['JobServerConfig']
config['Properties']['InstanceType'] = 'm1.large'
self.update_instance_group(ig_tmpl_with_updt_policy,
json.dumps(updt_template),
num_updates_expected_on_updt=8,
num_creates_expected_on_updt=2,
num_deletes_expected_on_updt=2,
update_replace=False)
def test_instance_group_update_policy_removed(self):
tmpl = template_format.parse(ig_tmpl_with_updt_policy)
stack = utils.parse_stack(tmpl)
size = int(stack['JobServerGroup'].properties['Size'])
self._stub_grp_create(size)
self.m.ReplayAll()
stack.create()
self.m.VerifyAll()
self.assertEqual(('CREATE', 'COMPLETE'), stack.state)
current_grp = stack['JobServerGroup']
self.assertIn('RollingUpdate', current_grp.update_policy)
current_policy = current_grp.update_policy['RollingUpdate']
self.assertTrue(current_policy)
self.assertTrue(len(current_policy) > 0)
init_grp_tmpl = tmpl['Resources']['JobServerGroup']
init_roll_updt = init_grp_tmpl['UpdatePolicy']['RollingUpdate']
init_batch_sz = int(init_roll_updt['MaxBatchSize'])
self.assertEqual(init_batch_sz, int(current_policy['MaxBatchSize']))
conf = stack['JobServerConfig']
conf_name_pattern = '%s-JobServerConfig-[a-zA-Z0-9]+$' % stack.name
self.assertThat(conf.FnGetRefId(), MatchesRegex(conf_name_pattern))
nested = stack['JobServerGroup'].nested()
self.assertEqual(size, len(nested.resources))
updated_tmpl = template_format.parse(ig_tmpl_without_updt_policy)
updated_stack = utils.parse_stack(updated_tmpl)
stack.update(updated_stack)
self.assertEqual(('UPDATE', 'COMPLETE'), stack.state)
updated_grp = stack['JobServerGroup']
self.assertFalse(updated_grp.update_policy['RollingUpdate'])
def test_instance_group_update_policy_check_timeout(self):
tmpl = template_format.parse(ig_tmpl_with_updt_policy)
stack = utils.parse_stack(tmpl)
size = int(stack['JobServerGroup'].properties['Size'])
self._stub_grp_create(size)
self.m.ReplayAll()
stack.create()
self.m.VerifyAll()
self.assertEqual(('CREATE', 'COMPLETE'), stack.state)
current_grp = stack['JobServerGroup']
self.assertIn('RollingUpdate', current_grp.update_policy)
current_policy = current_grp.update_policy['RollingUpdate']
self.assertTrue(current_policy)
self.assertTrue(len(current_policy) > 0)
init_grp_tmpl = tmpl['Resources']['JobServerGroup']
init_roll_updt = init_grp_tmpl['UpdatePolicy']['RollingUpdate']
init_batch_sz = int(init_roll_updt['MaxBatchSize'])
self.assertEqual(init_batch_sz, int(current_policy['MaxBatchSize']))
nested = stack['JobServerGroup'].nested()
self.assertEqual(size, len(nested.resources))
self.m.UnsetStubs()
new_pause_time = 'PT30M'
updt_template = json.loads(copy.deepcopy(ig_tmpl_with_updt_policy))
group = updt_template['Resources']['JobServerGroup']
policy = group['UpdatePolicy']['RollingUpdate']
policy['PauseTime'] = new_pause_time
config = updt_template['Resources']['JobServerConfig']
config['Properties']['ImageId'] = 'bar'
updated_tmpl = template_format.parse(json.dumps(updt_template))
updated_stack = utils.parse_stack(updated_tmpl)
stack.update(updated_stack)
self.assertEqual(('UPDATE', 'FAILED'), stack.state)
updated_grp = stack['JobServerGroup']
self.assertIn('RollingUpdate', updated_grp.update_policy)
updated_policy = updated_grp.update_policy['RollingUpdate']
self.assertTrue(updated_policy)
self.assertTrue(len(updated_policy) > 0)
self.assertEqual(new_pause_time, updated_policy['PauseTime'])
expected_error_message = ('The current UpdatePolicy will result '
'in stack update timeout.')
self.assertIn(expected_error_message, stack.status_reason)
| true
| true
|
1c41e8791b5c99f8614c868a92b2094f833ec4b4
| 28,004
|
py
|
Python
|
salt/cloud/clouds/joyent.py
|
kev009/salt
|
aecd53203eca51e150128ae7c9ad2a979d004127
|
[
"Apache-2.0"
] | null | null | null |
salt/cloud/clouds/joyent.py
|
kev009/salt
|
aecd53203eca51e150128ae7c9ad2a979d004127
|
[
"Apache-2.0"
] | null | null | null |
salt/cloud/clouds/joyent.py
|
kev009/salt
|
aecd53203eca51e150128ae7c9ad2a979d004127
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
Joyent Cloud Module
===================
The Joyent Cloud module is used to interact with the Joyent cloud system.
Set up the cloud configuration at ``/etc/salt/cloud.providers`` or
``/etc/salt/cloud.providers.d/joyent.conf``:
.. code-block:: yaml
my-joyent-config:
provider: joyent
# The Joyent login user
user: fred
# The Joyent user's password
password: saltybacon
# The location of the ssh private key that can log into the new VM
private_key: /root/mykey.pem
# The name of the private key
    keyname: mykey
When creating your profiles for the joyent cloud, add the location attribute to
the profile; it will automatically be picked up when performing tasks
associated with that VM. An example profile might look like:
.. code-block:: yaml
joyent_512:
provider: my-joyent-config
size: Extra Small 512 MB
image: centos-6
location: us-east-1
This driver can also be used with the Joyent SmartDataCenter project. More
details can be found at:
.. _`SmartDataCenter`: https://github.com/joyent/sdc
Using SDC requires that an api_host_suffix is set. The default value for this is
`.api.joyentcloud.com`. All characters, including the leading `.`, should be
included:
.. code-block:: yaml
api_host_suffix: .api.myhostname.com
:depends: PyCrypto
'''
# pylint: disable=invalid-name,function-redefined
# Import python libs
from __future__ import absolute_import
import os
import json
import logging
import base64
import pprint
import inspect
import yaml
import datetime
from Crypto.Hash import SHA256
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
# Import salt libs
import salt.ext.six as six
import salt.utils.http
import salt.utils.cloud
import salt.config as config
from salt.utils.cloud import is_public_ip
from salt.cloud.libcloudfuncs import node_state
from salt.exceptions import (
SaltCloudSystemExit,
SaltCloudExecutionFailure,
SaltCloudExecutionTimeout,
SaltCloudNotFound,
)
# Import 3rd-party libs
import salt.ext.six as six
from salt.ext.six.moves import http_client # pylint: disable=import-error,no-name-in-module
# Get logging started
log = logging.getLogger(__name__)
JOYENT_API_HOST_SUFFIX = '.api.joyentcloud.com'
JOYENT_API_VERSION = '~7.2'
JOYENT_LOCATIONS = {
'us-east-1': 'North Virginia, USA',
'us-west-1': 'Bay Area, California, USA',
'us-sw-1': 'Las Vegas, Nevada, USA',
'eu-ams-1': 'Amsterdam, Netherlands'
}
DEFAULT_LOCATION = 'us-east-1'
# joyent no longer reports on all data centers, so setting this value to true
# causes the list_nodes function to get information on machines from all
# data centers
POLL_ALL_LOCATIONS = True
VALID_RESPONSE_CODES = [
http_client.OK,
http_client.ACCEPTED,
http_client.CREATED,
http_client.NO_CONTENT
]
# Only load in this module if the Joyent configurations are in place
def __virtual__():
'''
Check for Joyent configs
'''
if get_configured_provider() is False:
return False
conn = None
return True
def get_configured_provider():
'''
Return the first configured instance.
'''
return config.is_provider_configured(
__opts__,
__active_provider_name__ or 'joyent',
('user', 'password')
)
def get_image(vm_):
'''
Return the image object to use
'''
images = avail_images()
vm_image = config.get_cloud_config_value('image', vm_, __opts__)
if vm_image and str(vm_image) in images:
images[vm_image]['name'] = images[vm_image]['id']
return images[vm_image]
raise SaltCloudNotFound(
'The specified image, {0!r}, could not be found.'.format(vm_image)
)
def get_size(vm_):
'''
Return the VM's size object
'''
sizes = avail_sizes()
vm_size = config.get_cloud_config_value('size', vm_, __opts__)
if not vm_size:
raise SaltCloudNotFound('No size specified for this VM.')
if vm_size and str(vm_size) in sizes:
return sizes[vm_size]
raise SaltCloudNotFound(
'The specified size, {0!r}, could not be found.'.format(vm_size)
)
def query_instance(vm_=None, call=None):
'''
Query an instance upon creation from the Joyent API
'''
if isinstance(vm_, six.string_types) and call == 'action':
vm_ = {'name': vm_, 'provider': 'joyent'}
if call == 'function':
# Technically this function may be called other ways too, but it
# definitely cannot be called with --function.
raise SaltCloudSystemExit(
'The query_instance action must be called with -a or --action.'
)
salt.utils.cloud.fire_event(
'event',
'querying instance',
'salt/cloud/{0}/querying'.format(vm_['name']),
transport=__opts__['transport']
)
def _query_ip_address():
data = show_instance(vm_['name'], call='action')
if not data:
log.error(
'There was an error while querying Joyent. Empty response'
)
# Trigger a failure in the wait for IP function
return False
if isinstance(data, dict) and 'error' in data:
log.warn(
'There was an error in the query {0}'.format(data['error']) # pylint: disable=E1126
)
# Trigger a failure in the wait for IP function
return False
log.debug('Returned query data: {0}'.format(data))
if 'primaryIp' in data[1]:
return data[1]['primaryIp']
return None
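    # Poll show_instance() for the machine's primaryIp, honouring the
    # configurable wait_for_ip timeout, interval and multiplier.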
try:
data = salt.utils.cloud.wait_for_ip(
_query_ip_address,
timeout=config.get_cloud_config_value(
'wait_for_ip_timeout', vm_, __opts__, default=10 * 60),
interval=config.get_cloud_config_value(
'wait_for_ip_interval', vm_, __opts__, default=10),
interval_multiplier=config.get_cloud_config_value(
'wait_for_ip_interval_multiplier', vm_, __opts__, default=1),
)
except (SaltCloudExecutionTimeout, SaltCloudExecutionFailure) as exc:
try:
# It might be already up, let's destroy it!
pass
#destroy(vm_['name'])
except SaltCloudSystemExit:
pass
finally:
raise SaltCloudSystemExit(str(exc))
return data
def create(vm_):
'''
Create a single VM from a data dict
CLI Example:
.. code-block:: bash
salt-cloud -p profile_name vm_name
'''
key_filename = config.get_cloud_config_value(
'private_key', vm_, __opts__, search_global=False, default=None
)
salt.utils.cloud.fire_event(
'event',
'starting create',
'salt/cloud/{0}/creating'.format(vm_['name']),
{
'name': vm_['name'],
'profile': vm_['profile'],
'provider': vm_['provider'],
},
transport=__opts__['transport']
)
log.info(
'Creating Cloud VM {0} in {1}'.format(
vm_['name'],
vm_.get('location', DEFAULT_LOCATION)
)
)
# added . for fqdn hostnames
salt.utils.cloud.check_name(vm_['name'], 'a-zA-Z0-9-.')
kwargs = {
'name': vm_['name'],
'image': get_image(vm_),
'size': get_size(vm_),
'location': vm_.get('location', DEFAULT_LOCATION)
}
salt.utils.cloud.fire_event(
'event',
'requesting instance',
'salt/cloud/{0}/requesting'.format(vm_['name']),
{'kwargs': kwargs},
transport=__opts__['transport']
)
try:
data = create_node(**kwargs)
except Exception as exc:
log.error(
'Error creating {0} on JOYENT\n\n'
'The following exception was thrown when trying to '
'run the initial deployment: \n{1}'.format(
vm_['name'], str(exc)
),
# Show the traceback if the debug logging level is enabled
exc_info_on_loglevel=logging.DEBUG
)
return False
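    # Wait for the new machine to report an IP address, then bootstrap it over
    # SSH using the primaryIp returned by show_instance().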
query_instance(vm_)
data = show_instance(vm_['name'], call='action')
vm_['key_filename'] = key_filename
vm_['ssh_host'] = data[1]['primaryIp']
salt.utils.cloud.bootstrap(vm_, __opts__)
salt.utils.cloud.fire_event(
'event',
'created instance',
'salt/cloud/{0}/created'.format(vm_['name']),
{
'name': vm_['name'],
'profile': vm_['profile'],
'provider': vm_['provider'],
},
transport=__opts__['transport']
)
return data[1]
def create_node(**kwargs):
'''
convenience function to make the rest api call for node creation.
'''
name = kwargs['name']
size = kwargs['size']
image = kwargs['image']
location = kwargs['location']
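    # Build the JSON body for the POST to /my/machines; the package (size) and
    # image are passed by the 'name' values resolved in get_size()/get_image().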
data = json.dumps({
'name': name,
'package': size['name'],
'image': image['name']
})
try:
ret = query(command='/my/machines', data=data, method='POST',
location=location)
if ret[0] in VALID_RESPONSE_CODES:
return ret[1]
except Exception as exc:
log.error(
'Failed to create node {0}: {1}'.format(name, exc)
)
return {}
def destroy(name, call=None):
'''
destroy a machine by name
:param name: name given to the machine
:param call: call value in this case is 'action'
    :return: True if the delete request returned a valid response code
CLI Example:
.. code-block:: bash
salt-cloud -d vm_name
'''
if call == 'function':
raise SaltCloudSystemExit(
'The destroy action must be called with -d, --destroy, '
'-a or --action.'
)
salt.utils.cloud.fire_event(
'event',
'destroying instance',
'salt/cloud/{0}/destroying'.format(name),
{'name': name},
transport=__opts__['transport']
)
node = get_node(name)
ret = query(command='my/machines/{0}'.format(node['id']),
location=node['location'], method='DELETE')
salt.utils.cloud.fire_event(
'event',
'destroyed instance',
'salt/cloud/{0}/destroyed'.format(name),
{'name': name},
transport=__opts__['transport']
)
if __opts__.get('update_cachedir', False) is True:
salt.utils.cloud.delete_minion_cachedir(name, __active_provider_name__.split(':')[0], __opts__)
return ret[0] in VALID_RESPONSE_CODES
def reboot(name, call=None):
'''
reboot a machine by name
:param name: name given to the machine
:param call: call value in this case is 'action'
:return: true if successful
CLI Example:
.. code-block:: bash
salt-cloud -a reboot vm_name
'''
node = get_node(name)
ret = take_action(name=name, call=call, method='POST',
command='/my/machines/{0}'.format(node['id']),
location=node['location'], data={'action': 'reboot'})
return ret[0] in VALID_RESPONSE_CODES
def stop(name, call=None):
'''
stop a machine by name
:param name: name given to the machine
:param call: call value in this case is 'action'
:return: true if successful
CLI Example:
.. code-block:: bash
salt-cloud -a stop vm_name
'''
node = get_node(name)
ret = take_action(name=name, call=call, method='POST',
command='/my/machines/{0}'.format(node['id']),
location=node['location'], data={'action': 'stop'})
return ret[0] in VALID_RESPONSE_CODES
def start(name, call=None):
'''
start a machine by name
:param name: name given to the machine
:param call: call value in this case is 'action'
:return: true if successful
CLI Example:
.. code-block:: bash
salt-cloud -a start vm_name
'''
node = get_node(name)
ret = take_action(name=name, call=call, method='POST',
command='/my/machines/{0}'.format(node['id']),
location=node['location'], data={'action': 'start'})
return ret[0] in VALID_RESPONSE_CODES
def take_action(name=None, call=None, command=None, data=None, method='GET',
location=DEFAULT_LOCATION):
'''
    Take an action on a machine; used by the start, stop and reboot actions
:param name: name given to the machine
:param call: call value in this case is 'action'
    :param command: api path
    :param data: any data to be passed to the api, must be in json format
    :param method: GET, POST, or DELETE
    :param location: data center to execute the command on
:return: true if successful
'''
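    # inspect.stack()[1][3] is the name of the calling function (start, stop or
    # reboot); it is only used in the log messages below.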
caller = inspect.stack()[1][3]
if call != 'action':
raise SaltCloudSystemExit(
'This action must be called with -a or --action.'
)
if data:
data = json.dumps(data)
ret = []
try:
ret = query(command=command, data=data, method=method,
location=location)
log.info('Success {0} for node {1}'.format(caller, name))
except Exception as exc:
if 'InvalidState' in str(exc):
ret = [200, {}]
else:
log.error(
'Failed to invoke {0} node {1}: {2}'.format(caller, name, exc),
# Show the traceback if the debug logging level is enabled
exc_info_on_loglevel=logging.DEBUG
)
ret = [100, {}]
return ret
def ssh_interface(vm_):
'''
Return the ssh_interface type to connect to. Either 'public_ips' (default)
or 'private_ips'.
'''
return config.get_cloud_config_value(
'ssh_interface', vm_, __opts__, default='public_ips',
search_global=False
)
def get_location(vm_=None):
'''
Return the joyent data center to use, in this order:
- CLI parameter
- VM parameter
- Cloud profile setting
'''
return __opts__.get(
'location',
config.get_cloud_config_value(
'location',
vm_ or get_configured_provider(),
__opts__,
default=DEFAULT_LOCATION,
search_global=False
)
)
def avail_locations(call=None):
'''
List all available locations
'''
if call == 'action':
raise SaltCloudSystemExit(
'The avail_locations function must be called with '
'-f or --function, or with the --list-locations option'
)
ret = {}
for key in JOYENT_LOCATIONS:
ret[key] = {
'name': key,
'region': JOYENT_LOCATIONS[key]
}
# this can be enabled when the bug in the joyent get data centers call is
# corrected, currently only the European dc (new api) returns the correct
# values
# ret = {}
# rcode, datacenters = query(
# command='my/datacenters', location=DEFAULT_LOCATION, method='GET'
# )
# if rcode in VALID_RESPONSE_CODES and isinstance(datacenters, dict):
# for key in datacenters:
# ret[key] = {
# 'name': key,
# 'url': datacenters[key]
# }
return ret
def has_method(obj, method_name):
'''
Find if the provided object has a specific method
'''
if method_name in dir(obj):
return True
log.error(
'Method {0!r} not yet supported!'.format(
method_name
)
)
return False
def key_list(items=None):
'''
    convert a list of items to a dictionary keyed on each item's name
:param items: array to iterate over
:return: dictionary
'''
if items is None:
items = []
ret = {}
if items and isinstance(items, list):
for item in items:
if 'name' in item:
# added for consistency with old code
if 'id' not in item:
item['id'] = item['name']
ret[item['name']] = item
return ret
def get_node(name):
'''
gets the node from the full node list by name
:param name: name of the vm
:return: node object
'''
nodes = list_nodes()
if name in nodes:
return nodes[name]
return None
def show_instance(name, call=None):
'''
get details about a machine
:param name: name given to the machine
:param call: call value in this case is 'action'
:return: machine information
CLI Example:
.. code-block:: bash
salt-cloud -a show_instance vm_name
'''
node = get_node(name)
ret = query(command='my/machines/{0}'.format(node['id']),
location=node['location'], method='GET')
return ret
def joyent_node_state(id_):
'''
    Convert the state returned by Joyent to the common state used for other
    data centers, for consistency
:param id_: joyent state value
:return: state value
'''
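    # Map Joyent state strings to libcloud-style NodeState indexes (0 RUNNING,
    # 2 TERMINATED, 3 PENDING, 4 UNKNOWN) before converting them to the common
    # textual state with node_state().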
states = {'running': 0,
'stopped': 2,
'stopping': 2,
'provisioning': 3,
'deleted': 2,
'unknown': 4}
if id_ not in states:
id_ = 'unknown'
return node_state(states[id_])
def reformat_node(item=None, full=False):
'''
Reformat the returned data from joyent, determine public/private IPs and
strip out fields if necessary to provide either full or brief content.
:param item: node dictionary
:param full: full or brief output
:return: dict
'''
desired_keys = [
'id', 'name', 'state', 'public_ips', 'private_ips', 'size', 'image',
'location'
]
item['private_ips'] = []
item['public_ips'] = []
if 'ips' in item:
for ip in item['ips']:
if is_public_ip(ip):
item['public_ips'].append(ip)
else:
item['private_ips'].append(ip)
# add any undefined desired keys
for key in desired_keys:
if key not in item:
item[key] = None
# remove all the extra key value pairs to provide a brief listing
if not full:
        for key in list(item):  # iterate over a copy of the keys so deletion is safe
if key not in desired_keys:
del item[key]
if 'state' in item:
item['state'] = joyent_node_state(item['state'])
return item
def list_nodes(full=False, call=None):
'''
list of nodes, keeping only a brief listing
CLI Example:
.. code-block:: bash
salt-cloud -Q
'''
if call == 'action':
raise SaltCloudSystemExit(
'The list_nodes function must be called with -f or --function.'
)
ret = {}
if POLL_ALL_LOCATIONS:
for location in JOYENT_LOCATIONS:
result = query(command='my/machines', location=location,
method='GET')
nodes = result[1]
for node in nodes:
if 'name' in node:
node['location'] = location
ret[node['name']] = reformat_node(item=node, full=full)
else:
result = query(command='my/machines', location=DEFAULT_LOCATION,
method='GET')
nodes = result[1]
for node in nodes:
if 'name' in node:
node['location'] = DEFAULT_LOCATION
ret[node['name']] = reformat_node(item=node, full=full)
return ret
def list_nodes_full(call=None):
'''
list of nodes, maintaining all content provided from joyent listings
CLI Example:
.. code-block:: bash
salt-cloud -F
'''
if call == 'action':
raise SaltCloudSystemExit(
'The list_nodes_full function must be called with -f or --function.'
)
return list_nodes(full=True)
def list_nodes_select(call=None):
'''
Return a list of the VMs that are on the provider, with select fields
'''
return salt.utils.cloud.list_nodes_select(
list_nodes_full('function'), __opts__['query.selection'], call,
)
def _get_proto():
'''
Checks configuration to see whether the user has SSL turned on. Default is:
.. code-block:: yaml
use_ssl: True
'''
use_ssl = config.get_cloud_config_value(
'use_ssl',
get_configured_provider(),
__opts__,
search_global=False,
default=True
)
if use_ssl is True:
return 'https'
return 'http'
def avail_images(call=None):
'''
Get list of available images
CLI Example:
.. code-block:: bash
salt-cloud --list-images
    A custom ``image_url`` can be set in the provider configuration to fetch
    image metadata from elsewhere, for example:
    .. code-block:: yaml
        image_url: images.joyent.com/images
    If ``image_url`` is not set, it is built from the default location, the
    API host suffix and the configured user.
'''
if call == 'action':
raise SaltCloudSystemExit(
'The avail_images function must be called with '
'-f or --function, or with the --list-images option'
)
user = config.get_cloud_config_value(
'user', get_configured_provider(), __opts__, search_global=False
)
img_url = config.get_cloud_config_value(
'image_url',
get_configured_provider(),
__opts__,
search_global=False,
default='{0}{1}/{2}/images'.format(DEFAULT_LOCATION, JOYENT_API_HOST_SUFFIX, user)
)
if not img_url.startswith('http://') and not img_url.startswith('https://'):
img_url = '{0}://{1}'.format(_get_proto(), img_url)
rcode, data = query(command='my/images', method='GET')
log.debug(data)
ret = {}
for image in data:
ret[image['name']] = image
return ret
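# Hedged configuration sketch (the block name and value below are placeholders,
# not defaults shipped with the driver): a provider block may override where
# image metadata is fetched from, e.g.
#
#     my-joyent-config:
#       image_url: images.joyent.com/images
#
# When image_url is omitted, the URL is assembled from DEFAULT_LOCATION,
# JOYENT_API_HOST_SUFFIX and the configured user, as shown in avail_images().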
def avail_sizes(call=None):
'''
get list of available packages
CLI Example:
.. code-block:: bash
salt-cloud --list-sizes
'''
if call == 'action':
raise SaltCloudSystemExit(
'The avail_sizes function must be called with '
'-f or --function, or with the --list-sizes option'
)
rcode, items = query(command='/my/packages')
if rcode not in VALID_RESPONSE_CODES:
return {}
return key_list(items=items)
def list_keys(kwargs=None, call=None):
'''
List the keys available
'''
if call != 'function':
log.error(
'The list_keys function must be called with -f or --function.'
)
return False
if not kwargs:
kwargs = {}
ret = {}
rcode, data = query(command='my/keys', method='GET')
for pair in data:
ret[pair['name']] = pair['key']
return {'keys': ret}
def show_key(kwargs=None, call=None):
'''
    Show the details of a specific key
'''
if call != 'function':
log.error(
            'The show_key function must be called with -f or --function.'
)
return False
if not kwargs:
kwargs = {}
if 'keyname' not in kwargs:
log.error('A keyname is required.')
return False
rcode, data = query(
command='my/keys/{0}'.format(kwargs['keyname']),
method='GET',
)
return {'keys': {data['name']: data['key']}}
def import_key(kwargs=None, call=None):
'''
    Import an SSH public key to the account
CLI Example:
.. code-block:: bash
salt-cloud -f import_key joyent keyname=mykey keyfile=/tmp/mykey.pub
'''
if call != 'function':
log.error(
'The import_key function must be called with -f or --function.'
)
return False
if not kwargs:
kwargs = {}
if 'keyname' not in kwargs:
log.error('A keyname is required.')
return False
if 'keyfile' not in kwargs:
log.error('The location of the SSH keyfile is required.')
return False
if not os.path.isfile(kwargs['keyfile']):
log.error('The specified keyfile ({0}) does not exist.'.format(
kwargs['keyfile']
))
return False
with salt.utils.fopen(kwargs['keyfile'], 'r') as fp_:
kwargs['key'] = fp_.read()
send_data = {'name': kwargs['keyname'], 'key': kwargs['key']}
kwargs['data'] = json.dumps(send_data)
rcode, data = query(
command='my/keys',
method='POST',
data=kwargs['data'],
)
log.debug(pprint.pformat(data))
return {'keys': {data['name']: data['key']}}
def delete_key(kwargs=None, call=None):
'''
    Delete an SSH key from the account
CLI Example:
.. code-block:: bash
salt-cloud -f delete_key joyent keyname=mykey
'''
if call != 'function':
log.error(
            'The delete_key function must be called with -f or --function.'
)
return False
if not kwargs:
kwargs = {}
if 'keyname' not in kwargs:
log.error('A keyname is required.')
return False
rcode, data = query(
command='my/keys/{0}'.format(kwargs['keyname']),
method='DELETE',
)
return data
def get_location_path(location=DEFAULT_LOCATION, api_host_suffix=JOYENT_API_HOST_SUFFIX):
'''
create url from location variable
:param location: joyent data center location
:return: url
'''
return '{0}://{1}{2}'.format(_get_proto(), location, api_host_suffix)
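# Worked example, assuming the module defaults above (use_ssl left at True):
#
#     >>> get_location_path('us-east-1')
#     'https://us-east-1.api.joyentcloud.com'
#
# query() then appends the command path to this base URL, e.g. '/my/machines'.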
def query(action=None,
command=None,
args=None,
method='GET',
location=None,
data=None):
'''
Make a web call to Joyent
'''
user = config.get_cloud_config_value(
'user', get_configured_provider(), __opts__, search_global=False
)
password = config.get_cloud_config_value(
'password', get_configured_provider(), __opts__,
search_global=False
)
verify_ssl = config.get_cloud_config_value(
'verify_ssl', get_configured_provider(), __opts__,
search_global=False, default=True
)
ssh_keyfile = config.get_cloud_config_value(
'private_key', get_configured_provider(), __opts__,
search_global=False, default=True
)
ssh_keyname = config.get_cloud_config_value(
'keyname', get_configured_provider(), __opts__,
search_global=False, default=True
)
if not location:
location = get_location()
api_host_suffix = config.get_cloud_config_value(
'api_host_suffix', get_configured_provider(), __opts__,
search_global=False, default=JOYENT_API_HOST_SUFFIX
)
path = get_location_path(location=location, api_host_suffix=api_host_suffix)
if action:
path += action
if command:
path += '/{0}'.format(command)
log.debug('User: {0!r} on PATH: {1}'.format(user, path))
timenow = datetime.datetime.utcnow()
timestamp = timenow.strftime('%a, %d %b %Y %H:%M:%S %Z').strip()
with salt.utils.fopen(ssh_keyfile, 'r') as kh_:
rsa_key = RSA.importKey(kh_)
rsa_ = PKCS1_v1_5.new(rsa_key)
hash_ = SHA256.new()
hash_.update(timestamp)
signed = base64.b64encode(rsa_.sign(hash_))
keyid = '/{0}/keys/{1}'.format(user, ssh_keyname)
headers = {
'Content-Type': 'application/json',
'Accept': 'application/json',
'X-Api-Version': JOYENT_API_VERSION,
'Date': timestamp,
'Authorization': 'Signature keyId="{0}",algorithm="rsa-sha256" {1}'.format(
keyid,
signed
),
}
if not isinstance(args, dict):
args = {}
# post form data
if not data:
data = json.dumps({})
return_content = None
result = salt.utils.http.query(
path,
method,
params=args,
header_dict=headers,
data=data,
decode=False,
text=True,
status=True,
headers=True,
verify=verify_ssl,
opts=__opts__,
)
log.debug(
'Joyent Response Status Code: {0}'.format(
result['status']
)
)
if 'Content-Length' in result['headers']:
content = result['text']
return_content = yaml.safe_load(content)
return [result['status'], return_content]
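# Minimal usage sketch (illustrative only; actual return values depend on the
# live CloudAPI): query() signs the Date header with the configured private
# key and returns [status, decoded_body], so callers typically unpack it as:
#
#     rcode, body = query(command='my/machines', method='GET')
#     if rcode in VALID_RESPONSE_CODES:
#         ...  # body holds the decoded machine list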
| 25.953661
| 103
| 0.595094
|
from __future__ import absolute_import
import os
import json
import logging
import base64
import pprint
import inspect
import yaml
import datetime
from Crypto.Hash import SHA256
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
import salt.ext.six as six
import salt.utils.http
import salt.utils.cloud
import salt.config as config
from salt.utils.cloud import is_public_ip
from salt.cloud.libcloudfuncs import node_state
from salt.exceptions import (
SaltCloudSystemExit,
SaltCloudExecutionFailure,
SaltCloudExecutionTimeout,
SaltCloudNotFound,
)
from salt.ext.six.moves import http_client
log = logging.getLogger(__name__)
JOYENT_API_HOST_SUFFIX = '.api.joyentcloud.com'
JOYENT_API_VERSION = '~7.2'
JOYENT_LOCATIONS = {
'us-east-1': 'North Virginia, USA',
'us-west-1': 'Bay Area, California, USA',
'us-sw-1': 'Las Vegas, Nevada, USA',
'eu-ams-1': 'Amsterdam, Netherlands'
}
DEFAULT_LOCATION = 'us-east-1'
POLL_ALL_LOCATIONS = True
VALID_RESPONSE_CODES = [
http_client.OK,
http_client.ACCEPTED,
http_client.CREATED,
http_client.NO_CONTENT
]
def __virtual__():
if get_configured_provider() is False:
return False
conn = None
return True
def get_configured_provider():
return config.is_provider_configured(
__opts__,
__active_provider_name__ or 'joyent',
('user', 'password')
)
def get_image(vm_):
images = avail_images()
vm_image = config.get_cloud_config_value('image', vm_, __opts__)
if vm_image and str(vm_image) in images:
images[vm_image]['name'] = images[vm_image]['id']
return images[vm_image]
raise SaltCloudNotFound(
'The specified image, {0!r}, could not be found.'.format(vm_image)
)
def get_size(vm_):
sizes = avail_sizes()
vm_size = config.get_cloud_config_value('size', vm_, __opts__)
if not vm_size:
raise SaltCloudNotFound('No size specified for this VM.')
if vm_size and str(vm_size) in sizes:
return sizes[vm_size]
raise SaltCloudNotFound(
'The specified size, {0!r}, could not be found.'.format(vm_size)
)
def query_instance(vm_=None, call=None):
if isinstance(vm_, six.string_types) and call == 'action':
vm_ = {'name': vm_, 'provider': 'joyent'}
if call == 'function':
raise SaltCloudSystemExit(
'The query_instance action must be called with -a or --action.'
)
salt.utils.cloud.fire_event(
'event',
'querying instance',
'salt/cloud/{0}/querying'.format(vm_['name']),
transport=__opts__['transport']
)
def _query_ip_address():
data = show_instance(vm_['name'], call='action')
if not data:
log.error(
'There was an error while querying Joyent. Empty response'
)
return False
if isinstance(data, dict) and 'error' in data:
            log.warning(
'There was an error in the query {0}'.format(data['error'])
)
return False
log.debug('Returned query data: {0}'.format(data))
if 'primaryIp' in data[1]:
return data[1]['primaryIp']
return None
try:
data = salt.utils.cloud.wait_for_ip(
_query_ip_address,
timeout=config.get_cloud_config_value(
'wait_for_ip_timeout', vm_, __opts__, default=10 * 60),
interval=config.get_cloud_config_value(
'wait_for_ip_interval', vm_, __opts__, default=10),
interval_multiplier=config.get_cloud_config_value(
'wait_for_ip_interval_multiplier', vm_, __opts__, default=1),
)
except (SaltCloudExecutionTimeout, SaltCloudExecutionFailure) as exc:
try:
pass
#destroy(vm_['name'])
except SaltCloudSystemExit:
pass
finally:
raise SaltCloudSystemExit(str(exc))
return data
def create(vm_):
key_filename = config.get_cloud_config_value(
'private_key', vm_, __opts__, search_global=False, default=None
)
salt.utils.cloud.fire_event(
'event',
'starting create',
'salt/cloud/{0}/creating'.format(vm_['name']),
{
'name': vm_['name'],
'profile': vm_['profile'],
'provider': vm_['provider'],
},
transport=__opts__['transport']
)
log.info(
'Creating Cloud VM {0} in {1}'.format(
vm_['name'],
vm_.get('location', DEFAULT_LOCATION)
)
)
# added . for fqdn hostnames
salt.utils.cloud.check_name(vm_['name'], 'a-zA-Z0-9-.')
kwargs = {
'name': vm_['name'],
'image': get_image(vm_),
'size': get_size(vm_),
'location': vm_.get('location', DEFAULT_LOCATION)
}
salt.utils.cloud.fire_event(
'event',
'requesting instance',
'salt/cloud/{0}/requesting'.format(vm_['name']),
{'kwargs': kwargs},
transport=__opts__['transport']
)
try:
data = create_node(**kwargs)
except Exception as exc:
log.error(
'Error creating {0} on JOYENT\n\n'
'The following exception was thrown when trying to '
'run the initial deployment: \n{1}'.format(
vm_['name'], str(exc)
),
# Show the traceback if the debug logging level is enabled
exc_info_on_loglevel=logging.DEBUG
)
return False
query_instance(vm_)
data = show_instance(vm_['name'], call='action')
vm_['key_filename'] = key_filename
vm_['ssh_host'] = data[1]['primaryIp']
salt.utils.cloud.bootstrap(vm_, __opts__)
salt.utils.cloud.fire_event(
'event',
'created instance',
'salt/cloud/{0}/created'.format(vm_['name']),
{
'name': vm_['name'],
'profile': vm_['profile'],
'provider': vm_['provider'],
},
transport=__opts__['transport']
)
return data[1]
def create_node(**kwargs):
name = kwargs['name']
size = kwargs['size']
image = kwargs['image']
location = kwargs['location']
data = json.dumps({
'name': name,
'package': size['name'],
'image': image['name']
})
try:
ret = query(command='/my/machines', data=data, method='POST',
location=location)
if ret[0] in VALID_RESPONSE_CODES:
return ret[1]
except Exception as exc:
log.error(
'Failed to create node {0}: {1}'.format(name, exc)
)
return {}
def destroy(name, call=None):
if call == 'function':
raise SaltCloudSystemExit(
'The destroy action must be called with -d, --destroy, '
'-a or --action.'
)
salt.utils.cloud.fire_event(
'event',
'destroying instance',
'salt/cloud/{0}/destroying'.format(name),
{'name': name},
transport=__opts__['transport']
)
node = get_node(name)
ret = query(command='my/machines/{0}'.format(node['id']),
location=node['location'], method='DELETE')
salt.utils.cloud.fire_event(
'event',
'destroyed instance',
'salt/cloud/{0}/destroyed'.format(name),
{'name': name},
transport=__opts__['transport']
)
if __opts__.get('update_cachedir', False) is True:
salt.utils.cloud.delete_minion_cachedir(name, __active_provider_name__.split(':')[0], __opts__)
return ret[0] in VALID_RESPONSE_CODES
def reboot(name, call=None):
node = get_node(name)
ret = take_action(name=name, call=call, method='POST',
command='/my/machines/{0}'.format(node['id']),
location=node['location'], data={'action': 'reboot'})
return ret[0] in VALID_RESPONSE_CODES
def stop(name, call=None):
node = get_node(name)
ret = take_action(name=name, call=call, method='POST',
command='/my/machines/{0}'.format(node['id']),
location=node['location'], data={'action': 'stop'})
return ret[0] in VALID_RESPONSE_CODES
def start(name, call=None):
node = get_node(name)
ret = take_action(name=name, call=call, method='POST',
command='/my/machines/{0}'.format(node['id']),
location=node['location'], data={'action': 'start'})
return ret[0] in VALID_RESPONSE_CODES
def take_action(name=None, call=None, command=None, data=None, method='GET',
location=DEFAULT_LOCATION):
caller = inspect.stack()[1][3]
if call != 'action':
raise SaltCloudSystemExit(
'This action must be called with -a or --action.'
)
if data:
data = json.dumps(data)
ret = []
try:
ret = query(command=command, data=data, method=method,
location=location)
log.info('Success {0} for node {1}'.format(caller, name))
except Exception as exc:
if 'InvalidState' in str(exc):
ret = [200, {}]
else:
log.error(
'Failed to invoke {0} node {1}: {2}'.format(caller, name, exc),
# Show the traceback if the debug logging level is enabled
exc_info_on_loglevel=logging.DEBUG
)
ret = [100, {}]
return ret
def ssh_interface(vm_):
return config.get_cloud_config_value(
'ssh_interface', vm_, __opts__, default='public_ips',
search_global=False
)
def get_location(vm_=None):
return __opts__.get(
'location',
config.get_cloud_config_value(
'location',
vm_ or get_configured_provider(),
__opts__,
default=DEFAULT_LOCATION,
search_global=False
)
)
def avail_locations(call=None):
if call == 'action':
raise SaltCloudSystemExit(
'The avail_locations function must be called with '
'-f or --function, or with the --list-locations option'
)
ret = {}
for key in JOYENT_LOCATIONS:
ret[key] = {
'name': key,
'region': JOYENT_LOCATIONS[key]
}
# this can be enabled when the bug in the joyent get data centers call is
# corrected, currently only the European dc (new api) returns the correct
# values
# ret = {}
# rcode, datacenters = query(
# command='my/datacenters', location=DEFAULT_LOCATION, method='GET'
# )
# if rcode in VALID_RESPONSE_CODES and isinstance(datacenters, dict):
# for key in datacenters:
# ret[key] = {
# 'name': key,
# 'url': datacenters[key]
# }
return ret
def has_method(obj, method_name):
if method_name in dir(obj):
return True
log.error(
'Method {0!r} not yet supported!'.format(
method_name
)
)
return False
def key_list(items=None):
if items is None:
items = []
ret = {}
if items and isinstance(items, list):
for item in items:
if 'name' in item:
# added for consistency with old code
if 'id' not in item:
item['id'] = item['name']
ret[item['name']] = item
return ret
def get_node(name):
nodes = list_nodes()
if name in nodes:
return nodes[name]
return None
def show_instance(name, call=None):
node = get_node(name)
ret = query(command='my/machines/{0}'.format(node['id']),
location=node['location'], method='GET')
return ret
def joyent_node_state(id_):
states = {'running': 0,
'stopped': 2,
'stopping': 2,
'provisioning': 3,
'deleted': 2,
'unknown': 4}
if id_ not in states:
id_ = 'unknown'
return node_state(states[id_])
def reformat_node(item=None, full=False):
desired_keys = [
'id', 'name', 'state', 'public_ips', 'private_ips', 'size', 'image',
'location'
]
item['private_ips'] = []
item['public_ips'] = []
if 'ips' in item:
for ip in item['ips']:
if is_public_ip(ip):
item['public_ips'].append(ip)
else:
item['private_ips'].append(ip)
# add any undefined desired keys
for key in desired_keys:
if key not in item:
item[key] = None
# remove all the extra key value pairs to provide a brief listing
if not full:
for key in six.iterkeys(item): # iterate over a copy of the keys
if key not in desired_keys:
del item[key]
if 'state' in item:
item['state'] = joyent_node_state(item['state'])
return item
def list_nodes(full=False, call=None):
if call == 'action':
raise SaltCloudSystemExit(
'The list_nodes function must be called with -f or --function.'
)
ret = {}
if POLL_ALL_LOCATIONS:
for location in JOYENT_LOCATIONS:
result = query(command='my/machines', location=location,
method='GET')
nodes = result[1]
for node in nodes:
if 'name' in node:
node['location'] = location
ret[node['name']] = reformat_node(item=node, full=full)
else:
result = query(command='my/machines', location=DEFAULT_LOCATION,
method='GET')
nodes = result[1]
for node in nodes:
if 'name' in node:
node['location'] = DEFAULT_LOCATION
ret[node['name']] = reformat_node(item=node, full=full)
return ret
def list_nodes_full(call=None):
if call == 'action':
raise SaltCloudSystemExit(
'The list_nodes_full function must be called with -f or --function.'
)
return list_nodes(full=True)
def list_nodes_select(call=None):
return salt.utils.cloud.list_nodes_select(
list_nodes_full('function'), __opts__['query.selection'], call,
)
def _get_proto():
use_ssl = config.get_cloud_config_value(
'use_ssl',
get_configured_provider(),
__opts__,
search_global=False,
default=True
)
if use_ssl is True:
return 'https'
return 'http'
def avail_images(call=None):
if call == 'action':
raise SaltCloudSystemExit(
'The avail_images function must be called with '
'-f or --function, or with the --list-images option'
)
user = config.get_cloud_config_value(
'user', get_configured_provider(), __opts__, search_global=False
)
img_url = config.get_cloud_config_value(
'image_url',
get_configured_provider(),
__opts__,
search_global=False,
default='{0}{1}/{2}/images'.format(DEFAULT_LOCATION, JOYENT_API_HOST_SUFFIX, user)
)
if not img_url.startswith('http://') and not img_url.startswith('https://'):
img_url = '{0}://{1}'.format(_get_proto(), img_url)
rcode, data = query(command='my/images', method='GET')
log.debug(data)
ret = {}
for image in data:
ret[image['name']] = image
return ret
def avail_sizes(call=None):
if call == 'action':
raise SaltCloudSystemExit(
'The avail_sizes function must be called with '
'-f or --function, or with the --list-sizes option'
)
rcode, items = query(command='/my/packages')
if rcode not in VALID_RESPONSE_CODES:
return {}
return key_list(items=items)
def list_keys(kwargs=None, call=None):
if call != 'function':
log.error(
'The list_keys function must be called with -f or --function.'
)
return False
if not kwargs:
kwargs = {}
ret = {}
rcode, data = query(command='my/keys', method='GET')
for pair in data:
ret[pair['name']] = pair['key']
return {'keys': ret}
def show_key(kwargs=None, call=None):
if call != 'function':
log.error(
            'The show_key function must be called with -f or --function.'
)
return False
if not kwargs:
kwargs = {}
if 'keyname' not in kwargs:
log.error('A keyname is required.')
return False
rcode, data = query(
command='my/keys/{0}'.format(kwargs['keyname']),
method='GET',
)
return {'keys': {data['name']: data['key']}}
def import_key(kwargs=None, call=None):
if call != 'function':
log.error(
'The import_key function must be called with -f or --function.'
)
return False
if not kwargs:
kwargs = {}
if 'keyname' not in kwargs:
log.error('A keyname is required.')
return False
if 'keyfile' not in kwargs:
log.error('The location of the SSH keyfile is required.')
return False
if not os.path.isfile(kwargs['keyfile']):
log.error('The specified keyfile ({0}) does not exist.'.format(
kwargs['keyfile']
))
return False
with salt.utils.fopen(kwargs['keyfile'], 'r') as fp_:
kwargs['key'] = fp_.read()
send_data = {'name': kwargs['keyname'], 'key': kwargs['key']}
kwargs['data'] = json.dumps(send_data)
rcode, data = query(
command='my/keys',
method='POST',
data=kwargs['data'],
)
log.debug(pprint.pformat(data))
return {'keys': {data['name']: data['key']}}
def delete_key(kwargs=None, call=None):
if call != 'function':
log.error(
            'The delete_key function must be called with -f or --function.'
)
return False
if not kwargs:
kwargs = {}
if 'keyname' not in kwargs:
log.error('A keyname is required.')
return False
rcode, data = query(
command='my/keys/{0}'.format(kwargs['keyname']),
method='DELETE',
)
return data
def get_location_path(location=DEFAULT_LOCATION, api_host_suffix=JOYENT_API_HOST_SUFFIX):
return '{0}://{1}{2}'.format(_get_proto(), location, api_host_suffix)
def query(action=None,
command=None,
args=None,
method='GET',
location=None,
data=None):
user = config.get_cloud_config_value(
'user', get_configured_provider(), __opts__, search_global=False
)
password = config.get_cloud_config_value(
'password', get_configured_provider(), __opts__,
search_global=False
)
verify_ssl = config.get_cloud_config_value(
'verify_ssl', get_configured_provider(), __opts__,
search_global=False, default=True
)
ssh_keyfile = config.get_cloud_config_value(
'private_key', get_configured_provider(), __opts__,
search_global=False, default=True
)
ssh_keyname = config.get_cloud_config_value(
'keyname', get_configured_provider(), __opts__,
search_global=False, default=True
)
if not location:
location = get_location()
api_host_suffix = config.get_cloud_config_value(
'api_host_suffix', get_configured_provider(), __opts__,
search_global=False, default=JOYENT_API_HOST_SUFFIX
)
path = get_location_path(location=location, api_host_suffix=api_host_suffix)
if action:
path += action
if command:
path += '/{0}'.format(command)
log.debug('User: {0!r} on PATH: {1}'.format(user, path))
timenow = datetime.datetime.utcnow()
timestamp = timenow.strftime('%a, %d %b %Y %H:%M:%S %Z').strip()
with salt.utils.fopen(ssh_keyfile, 'r') as kh_:
rsa_key = RSA.importKey(kh_)
rsa_ = PKCS1_v1_5.new(rsa_key)
hash_ = SHA256.new()
hash_.update(timestamp)
signed = base64.b64encode(rsa_.sign(hash_))
keyid = '/{0}/keys/{1}'.format(user, ssh_keyname)
headers = {
'Content-Type': 'application/json',
'Accept': 'application/json',
'X-Api-Version': JOYENT_API_VERSION,
'Date': timestamp,
'Authorization': 'Signature keyId="{0}",algorithm="rsa-sha256" {1}'.format(
keyid,
signed
),
}
if not isinstance(args, dict):
args = {}
# post form data
if not data:
data = json.dumps({})
return_content = None
result = salt.utils.http.query(
path,
method,
params=args,
header_dict=headers,
data=data,
decode=False,
text=True,
status=True,
headers=True,
verify=verify_ssl,
opts=__opts__,
)
log.debug(
'Joyent Response Status Code: {0}'.format(
result['status']
)
)
if 'Content-Length' in result['headers']:
content = result['text']
return_content = yaml.safe_load(content)
return [result['status'], return_content]
| true
| true
|
1c41e8d446e7e12112fc42785b64eb62c8407fb0
| 3,705
|
py
|
Python
|
test/test_special_iocommands.py
|
LinusCDE/okcli
|
e52e1a7d5272f0d4b0cdeaa66af9d4a4de953319
|
[
"BSD-3-Clause"
] | 29
|
2020-01-19T10:36:36.000Z
|
2022-03-10T15:17:43.000Z
|
test/test_special_iocommands.py
|
LinusCDE/okcli
|
e52e1a7d5272f0d4b0cdeaa66af9d4a4de953319
|
[
"BSD-3-Clause"
] | 4
|
2019-11-10T04:03:42.000Z
|
2021-03-26T03:14:37.000Z
|
test/test_special_iocommands.py
|
LinusCDE/okcli
|
e52e1a7d5272f0d4b0cdeaa66af9d4a4de953319
|
[
"BSD-3-Clause"
] | 4
|
2019-11-08T13:59:05.000Z
|
2022-02-09T22:47:20.000Z
|
# coding: utf-8
import os
import stat
import tempfile
import pytest
import okcli.packages.special
from okcli.packages.special.main import CommandNotFound
from utils import db_connection, dbtest
def test_set_get_pager():
okcli.packages.special.set_pager_enabled(True)
assert okcli.packages.special.is_pager_enabled()
okcli.packages.special.set_pager_enabled(False)
assert not okcli.packages.special.is_pager_enabled()
okcli.packages.special.set_pager('less')
assert os.environ['PAGER'] == "less"
okcli.packages.special.set_pager(False)
assert os.environ['PAGER'] == "less"
del os.environ['PAGER']
okcli.packages.special.set_pager(False)
okcli.packages.special.disable_pager()
assert not okcli.packages.special.is_pager_enabled()
def test_set_get_timing():
okcli.packages.special.set_timing_enabled(True)
assert okcli.packages.special.is_timing_enabled()
okcli.packages.special.set_timing_enabled(False)
assert not okcli.packages.special.is_timing_enabled()
def test_set_get_expanded_output():
okcli.packages.special.set_expanded_output(True)
assert okcli.packages.special.is_expanded_output()
okcli.packages.special.set_expanded_output(False)
assert not okcli.packages.special.is_expanded_output()
def test_editor_command():
assert okcli.packages.special.editor_command(r'ed hello')
assert not okcli.packages.special.editor_command(r'hello')
assert okcli.packages.special.get_filename(r'ed filename') == "filename"
os.environ['EDITOR'] = 'true'
okcli.packages.special.open_external_editor(r'select 1') == "select 1"
def test_spool_command():
okcli.packages.special.write_tee(u"hello world") # write without file set
with tempfile.NamedTemporaryFile() as f:
okcli.packages.special.execute(None, u"spool " + f.name)
okcli.packages.special.write_tee(u"hello world")
assert f.read() == b"hello world"
okcli.packages.special.execute(None, u"spool -o " + f.name)
okcli.packages.special.write_tee(u"hello world")
f.seek(0)
assert f.read() == b"hello world"
okcli.packages.special.execute(None, u"nospool")
okcli.packages.special.write_tee(u"hello world")
f.seek(0)
assert f.read() == b"hello world"
def test_tee_command_error():
with pytest.raises(TypeError):
okcli.packages.special.execute(None, 'tee')
with pytest.raises(OSError):
with tempfile.NamedTemporaryFile() as f:
os.chmod(f.name, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
okcli.packages.special.execute(None, 'tee {}'.format(f.name))
@dbtest
def test_favorite_query():
with db_connection().cursor() as cur:
query = u'select "✔"'
okcli.packages.special.execute(cur, u'\\fs check {0}'.format(query))
assert next(okcli.packages.special.execute(
cur, u'\\f check'))[0] == "> " + query
def test_once_command():
with pytest.raises(TypeError):
        okcli.packages.special.execute(None, u"\\once")
    okcli.packages.special.execute(None, u"\\once /proc/access-denied")
with pytest.raises(OSError):
okcli.packages.special.write_once(u"hello world")
okcli.packages.special.write_once(u"hello world") # write without file set
with tempfile.NamedTemporaryFile() as f:
        okcli.packages.special.execute(None, u"\\once " + f.name)
okcli.packages.special.write_once(u"hello world")
assert f.read() == b"hello world\n"
        okcli.packages.special.execute(None, u"\\once -o " + f.name)
okcli.packages.special.write_once(u"hello world")
f.seek(0)
assert f.read() == b"hello world\n"
| 34.305556
| 79
| 0.700675
|
import os
import stat
import tempfile
import pytest
import okcli.packages.special
from okcli.packages.special.main import CommandNotFound
from utils import db_connection, dbtest
def test_set_get_pager():
okcli.packages.special.set_pager_enabled(True)
assert okcli.packages.special.is_pager_enabled()
okcli.packages.special.set_pager_enabled(False)
assert not okcli.packages.special.is_pager_enabled()
okcli.packages.special.set_pager('less')
assert os.environ['PAGER'] == "less"
okcli.packages.special.set_pager(False)
assert os.environ['PAGER'] == "less"
del os.environ['PAGER']
okcli.packages.special.set_pager(False)
okcli.packages.special.disable_pager()
assert not okcli.packages.special.is_pager_enabled()
def test_set_get_timing():
okcli.packages.special.set_timing_enabled(True)
assert okcli.packages.special.is_timing_enabled()
okcli.packages.special.set_timing_enabled(False)
assert not okcli.packages.special.is_timing_enabled()
def test_set_get_expanded_output():
okcli.packages.special.set_expanded_output(True)
assert okcli.packages.special.is_expanded_output()
okcli.packages.special.set_expanded_output(False)
assert not okcli.packages.special.is_expanded_output()
def test_editor_command():
assert okcli.packages.special.editor_command(r'ed hello')
assert not okcli.packages.special.editor_command(r'hello')
assert okcli.packages.special.get_filename(r'ed filename') == "filename"
os.environ['EDITOR'] = 'true'
okcli.packages.special.open_external_editor(r'select 1') == "select 1"
def test_spool_command():
okcli.packages.special.write_tee(u"hello world")
with tempfile.NamedTemporaryFile() as f:
okcli.packages.special.execute(None, u"spool " + f.name)
okcli.packages.special.write_tee(u"hello world")
assert f.read() == b"hello world"
okcli.packages.special.execute(None, u"spool -o " + f.name)
okcli.packages.special.write_tee(u"hello world")
f.seek(0)
assert f.read() == b"hello world"
okcli.packages.special.execute(None, u"nospool")
okcli.packages.special.write_tee(u"hello world")
f.seek(0)
assert f.read() == b"hello world"
def test_tee_command_error():
with pytest.raises(TypeError):
okcli.packages.special.execute(None, 'tee')
with pytest.raises(OSError):
with tempfile.NamedTemporaryFile() as f:
os.chmod(f.name, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
okcli.packages.special.execute(None, 'tee {}'.format(f.name))
@dbtest
def test_favorite_query():
with db_connection().cursor() as cur:
query = u'select "✔"'
okcli.packages.special.execute(cur, u'\\fs check {0}'.format(query))
assert next(okcli.packages.special.execute(
cur, u'\\f check'))[0] == "> " + query
def test_once_command():
with pytest.raises(TypeError):
        okcli.packages.special.execute(None, u"\\once")
    okcli.packages.special.execute(None, u"\\once /proc/access-denied")
with pytest.raises(OSError):
okcli.packages.special.write_once(u"hello world")
okcli.packages.special.write_once(u"hello world")
with tempfile.NamedTemporaryFile() as f:
        okcli.packages.special.execute(None, u"\\once " + f.name)
okcli.packages.special.write_once(u"hello world")
assert f.read() == b"hello world\n"
        okcli.packages.special.execute(None, u"\\once -o " + f.name)
okcli.packages.special.write_once(u"hello world")
f.seek(0)
assert f.read() == b"hello world\n"
| true
| true
|
1c41e93c89e0bc46ad2a5fa43da6421ec55bfee9
| 536
|
py
|
Python
|
recipes/libtasn1/all/test_package/conanfile.py
|
rockandsalt/conan-center-index
|
d739adcec3e4dd4c250eff559ceb738e420673dd
|
[
"MIT"
] | 562
|
2019-09-04T12:23:43.000Z
|
2022-03-29T16:41:43.000Z
|
recipes/libtasn1/all/test_package/conanfile.py
|
rockandsalt/conan-center-index
|
d739adcec3e4dd4c250eff559ceb738e420673dd
|
[
"MIT"
] | 9,799
|
2019-09-04T12:02:11.000Z
|
2022-03-31T23:55:45.000Z
|
recipes/libtasn1/all/test_package/conanfile.py
|
rockandsalt/conan-center-index
|
d739adcec3e4dd4c250eff559ceb738e420673dd
|
[
"MIT"
] | 1,126
|
2019-09-04T11:57:46.000Z
|
2022-03-31T16:43:38.000Z
|
from conans import ConanFile, CMake, tools
import os
class TestPackageConan(ConanFile):
settings = "os", "compiler", "build_type", "arch"
generators = "cmake"
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def test(self):
if not tools.cross_building(self.settings):
bin_path = os.path.join("bin", "test_package")
asn = os.path.join(self.source_folder, "pkix.asn")
self.run("{} {}".format(bin_path, asn), run_environment=True)
| 28.210526
| 73
| 0.615672
|
from conans import ConanFile, CMake, tools
import os
class TestPackageConan(ConanFile):
settings = "os", "compiler", "build_type", "arch"
generators = "cmake"
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def test(self):
if not tools.cross_building(self.settings):
bin_path = os.path.join("bin", "test_package")
asn = os.path.join(self.source_folder, "pkix.asn")
self.run("{} {}".format(bin_path, asn), run_environment=True)
| true
| true
|
1c41eaa03ba7d884eb4c6be59cf06a01ad694350
| 126
|
py
|
Python
|
setup.py
|
gditzler/ProtoShotXAI-1
|
223d3da37397d35096238bbf49f6315ac8c2894c
|
[
"MIT"
] | 1
|
2021-11-17T08:43:50.000Z
|
2021-11-17T08:43:50.000Z
|
setup.py
|
gditzler/ProtoShotXAI-1
|
223d3da37397d35096238bbf49f6315ac8c2894c
|
[
"MIT"
] | null | null | null |
setup.py
|
gditzler/ProtoShotXAI-1
|
223d3da37397d35096238bbf49f6315ac8c2894c
|
[
"MIT"
] | 2
|
2021-12-02T23:35:38.000Z
|
2021-12-02T23:35:53.000Z
|
from setuptools import setup, find_packages
setup(
name='ProtoShotXAI',
version='1.0',
packages=find_packages()
)
| 18
| 43
| 0.706349
|
from setuptools import setup, find_packages
setup(
name='ProtoShotXAI',
version='1.0',
packages=find_packages()
)
| true
| true
|
1c41eb030bae28e89f34356c9f0d71679d6a4f15
| 22,940
|
py
|
Python
|
tests/integration/modules/test_pip.py
|
springborland/salt
|
bee85e477d57e9a171884e54fefb9a59d0835ed0
|
[
"Apache-2.0"
] | 1
|
2020-04-09T03:25:10.000Z
|
2020-04-09T03:25:10.000Z
|
tests/integration/modules/test_pip.py
|
springborland/salt
|
bee85e477d57e9a171884e54fefb9a59d0835ed0
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/modules/test_pip.py
|
springborland/salt
|
bee85e477d57e9a171884e54fefb9a59d0835ed0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
tests.integration.modules.pip
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
import os
import pprint
import re
import shutil
import sys
import tempfile
# Import salt libs
import salt.utils.files
import salt.utils.path
import salt.utils.platform
from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES
from tests.support.case import ModuleCase
from tests.support.helpers import patched_environ
# Import Salt Testing libs
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import skipIf
@skipIf(
salt.utils.path.which_bin(KNOWN_BINARY_NAMES) is None, "virtualenv not installed"
)
class PipModuleTest(ModuleCase):
def setUp(self):
super(PipModuleTest, self).setUp()
self.venv_test_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
# Remove the venv test directory
self.addCleanup(shutil.rmtree, self.venv_test_dir, ignore_errors=True)
self.venv_dir = os.path.join(self.venv_test_dir, "venv")
self.pip_temp = os.path.join(self.venv_test_dir, ".pip-temp")
if not os.path.isdir(self.pip_temp):
os.makedirs(self.pip_temp)
self.patched_environ = patched_environ(
PIP_SOURCE_DIR="",
PIP_BUILD_DIR="",
__cleanup__=[k for k in os.environ if k.startswith("PIP_")],
)
self.patched_environ.__enter__()
self.addCleanup(self.patched_environ.__exit__)
def _create_virtualenv(self, path):
"""
        Virtualenv creation is proxied through this function for two reasons:
        under Windows we cannot reliably create a virtualenv from within
        another virtualenv (we can on Linux), and we do not want to test
        virtualenv creation off of another virtualenv anyway; we want a
        virtualenv created from the original python.
        Also, on Windows we must point to a virtualenv binary outside the
        existing virtualenv, because it fails otherwise.
"""
try:
if salt.utils.platform.is_windows():
python = os.path.join(sys.real_prefix, os.path.basename(sys.executable))
else:
python_binary_names = [
"python{}.{}".format(*sys.version_info),
"python{}".format(*sys.version_info),
"python",
]
for binary_name in python_binary_names:
python = os.path.join(sys.real_prefix, "bin", binary_name)
if os.path.exists(python):
break
else:
self.fail(
"Couldn't find a python binary name under '{}' matching: {}".format(
os.path.join(sys.real_prefix, "bin"), python_binary_names
)
)
# We're running off a virtualenv, and we don't want to create a virtualenv off of
# a virtualenv
kwargs = {"python": python}
except AttributeError:
# We're running off of the system python
kwargs = {}
self.run_function("virtualenv.create", [path], **kwargs)
def _check_download_error(self, ret):
"""
Checks to see if a download error looks transitory
"""
return any(w in ret for w in ["URLError", "Download error"])
def pip_successful_install(self, target, expect=("irc3-plugins-test", "pep8",)):
"""
        Isolate the regex for extracting pip's ``Successfully installed``
        message and verify that all expected packages appear in it
"""
expect = set(expect)
expect_str = "|".join(expect)
success = re.search(
r"^.*Successfully installed\s([^\n]+)(?:Clean.*)?", target, re.M | re.S
)
success_for = (
re.findall(
r"({0})(?:-(?:[\d\.-]))?".format(expect_str), success.groups()[0]
)
if success
else []
)
return expect.issubset(set(success_for))
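    # Hedged example of the regex above (the pip output line is a mock-up, not
    # captured output):
    #
    #     out = "Successfully installed irc3-plugins-test-0.1 pep8-1.7.0\n"
    #     self.pip_successful_install(out)  # -> True, both expected names found
    #
    # Only the package names listed in ``expect`` are matched; trailing version
    # numbers are ignored by the second regex.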
def test_issue_2087_missing_pip(self):
# Let's create the testing virtualenv
self._create_virtualenv(self.venv_dir)
# Let's remove the pip binary
pip_bin = os.path.join(self.venv_dir, "bin", "pip")
site_dir = self.run_function(
"virtualenv.get_distribution_path", [self.venv_dir, "pip"]
)
if salt.utils.platform.is_windows():
pip_bin = os.path.join(self.venv_dir, "Scripts", "pip.exe")
site_dir = os.path.join(self.venv_dir, "lib", "site-packages")
if not os.path.isfile(pip_bin):
self.skipTest("Failed to find the pip binary to the test virtualenv")
os.remove(pip_bin)
# Also remove the pip dir from site-packages
# This is needed now that we're using python -m pip instead of the
# pip binary directly. python -m pip will still work even if the
# pip binary is missing
shutil.rmtree(os.path.join(site_dir, "pip"))
# Let's run a pip depending functions
for func in ("pip.freeze", "pip.list"):
ret = self.run_function(func, bin_env=self.venv_dir)
self.assertIn(
"Command required for '{0}' not found: "
"Could not find a `pip` binary".format(func),
ret,
)
def test_requirements_as_list_of_chains__cwd_set__absolute_file_path(self):
self._create_virtualenv(self.venv_dir)
# Create a requirements file that depends on another one.
req1_filename = os.path.join(self.venv_dir, "requirements1.txt")
req1b_filename = os.path.join(self.venv_dir, "requirements1b.txt")
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
req2b_filename = os.path.join(self.venv_dir, "requirements2b.txt")
with salt.utils.files.fopen(req1_filename, "w") as f:
f.write("-r requirements1b.txt\n")
with salt.utils.files.fopen(req1b_filename, "w") as f:
f.write("irc3-plugins-test\n")
with salt.utils.files.fopen(req2_filename, "w") as f:
f.write("-r requirements2b.txt\n")
with salt.utils.files.fopen(req2b_filename, "w") as f:
f.write("pep8\n")
requirements_list = [req1_filename, req2_filename]
ret = self.run_function(
"pip.install",
requirements=requirements_list,
bin_env=self.venv_dir,
cwd=self.venv_dir,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
self.assertEqual(ret["retcode"], 0)
found = self.pip_successful_install(ret["stdout"])
self.assertTrue(found)
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_requirements_as_list_of_chains__cwd_not_set__absolute_file_path(self):
self._create_virtualenv(self.venv_dir)
# Create a requirements file that depends on another one.
req1_filename = os.path.join(self.venv_dir, "requirements1.txt")
req1b_filename = os.path.join(self.venv_dir, "requirements1b.txt")
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
req2b_filename = os.path.join(self.venv_dir, "requirements2b.txt")
with salt.utils.files.fopen(req1_filename, "w") as f:
f.write("-r requirements1b.txt\n")
with salt.utils.files.fopen(req1b_filename, "w") as f:
f.write("irc3-plugins-test\n")
with salt.utils.files.fopen(req2_filename, "w") as f:
f.write("-r requirements2b.txt\n")
with salt.utils.files.fopen(req2b_filename, "w") as f:
f.write("pep8\n")
requirements_list = [req1_filename, req2_filename]
ret = self.run_function(
"pip.install", requirements=requirements_list, bin_env=self.venv_dir
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
self.assertEqual(ret["retcode"], 0)
found = self.pip_successful_install(ret["stdout"])
self.assertTrue(found)
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_requirements_as_list__absolute_file_path(self):
self._create_virtualenv(self.venv_dir)
req1_filename = os.path.join(self.venv_dir, "requirements.txt")
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
with salt.utils.files.fopen(req1_filename, "w") as f:
f.write("irc3-plugins-test\n")
with salt.utils.files.fopen(req2_filename, "w") as f:
f.write("pep8\n")
requirements_list = [req1_filename, req2_filename]
ret = self.run_function(
"pip.install", requirements=requirements_list, bin_env=self.venv_dir
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
self.assertEqual(ret["retcode"], 0)
found = self.pip_successful_install(ret["stdout"])
self.assertTrue(found)
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_requirements_as_list__non_absolute_file_path(self):
self._create_virtualenv(self.venv_dir)
# Create a requirements file that depends on another one.
req1_filename = "requirements.txt"
req2_filename = "requirements2.txt"
req_cwd = self.venv_dir
req1_filepath = os.path.join(req_cwd, req1_filename)
req2_filepath = os.path.join(req_cwd, req2_filename)
with salt.utils.files.fopen(req1_filepath, "w") as f:
f.write("irc3-plugins-test\n")
with salt.utils.files.fopen(req2_filepath, "w") as f:
f.write("pep8\n")
requirements_list = [req1_filename, req2_filename]
ret = self.run_function(
"pip.install",
requirements=requirements_list,
bin_env=self.venv_dir,
cwd=req_cwd,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
self.assertEqual(ret["retcode"], 0)
found = self.pip_successful_install(ret["stdout"])
self.assertTrue(found)
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_chained_requirements__absolute_file_path(self):
self._create_virtualenv(self.venv_dir)
# Create a requirements file that depends on another one.
req1_filename = os.path.join(self.venv_dir, "requirements.txt")
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
with salt.utils.files.fopen(req1_filename, "w") as f:
f.write("-r requirements2.txt")
with salt.utils.files.fopen(req2_filename, "w") as f:
f.write("pep8")
ret = self.run_function(
"pip.install", requirements=req1_filename, bin_env=self.venv_dir
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
self.assertEqual(ret["retcode"], 0)
self.assertIn("installed pep8", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_chained_requirements__non_absolute_file_path(self):
self._create_virtualenv(self.venv_dir)
# Create a requirements file that depends on another one.
req_basepath = self.venv_dir
req1_filename = "requirements.txt"
req2_filename = "requirements2.txt"
req1_file = os.path.join(self.venv_dir, req1_filename)
req2_file = os.path.join(self.venv_dir, req2_filename)
with salt.utils.files.fopen(req1_file, "w") as f:
f.write("-r requirements2.txt")
with salt.utils.files.fopen(req2_file, "w") as f:
f.write("pep8")
ret = self.run_function(
"pip.install",
requirements=req1_filename,
cwd=req_basepath,
bin_env=self.venv_dir,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
self.assertEqual(ret["retcode"], 0)
self.assertIn("installed pep8", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_issue_4805_nested_requirements(self):
self._create_virtualenv(self.venv_dir)
# Create a requirements file that depends on another one.
req1_filename = os.path.join(self.venv_dir, "requirements.txt")
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
with salt.utils.files.fopen(req1_filename, "w") as f:
f.write("-r requirements2.txt")
with salt.utils.files.fopen(req2_filename, "w") as f:
f.write("pep8")
ret = self.run_function(
"pip.install",
requirements=req1_filename,
bin_env=self.venv_dir,
timeout=300,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
self.assertEqual(ret["retcode"], 0)
self.assertIn("installed pep8", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_pip_uninstall(self):
# Let's create the testing virtualenv
self._create_virtualenv(self.venv_dir)
ret = self.run_function("pip.install", ["pep8"], bin_env=self.venv_dir)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
self.assertEqual(ret["retcode"], 0)
self.assertIn("installed pep8", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
ret = self.run_function("pip.uninstall", ["pep8"], bin_env=self.venv_dir)
if not isinstance(ret, dict):
self.fail(
"The 'pip.uninstall' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
self.assertEqual(ret["retcode"], 0)
self.assertIn("uninstalled pep8", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_pip_install_upgrade(self):
# Create the testing virtualenv
self._create_virtualenv(self.venv_dir)
ret = self.run_function("pip.install", ["pep8==1.3.4"], bin_env=self.venv_dir)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
self.assertEqual(ret["retcode"], 0)
self.assertIn("installed pep8", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
ret = self.run_function(
"pip.install", ["pep8"], bin_env=self.venv_dir, upgrade=True
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
self.assertEqual(ret["retcode"], 0)
self.assertIn("installed pep8", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
ret = self.run_function("pip.uninstall", ["pep8"], bin_env=self.venv_dir)
if not isinstance(ret, dict):
self.fail(
"The 'pip.uninstall' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
self.assertEqual(ret["retcode"], 0)
self.assertIn("uninstalled pep8", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_pip_install_multiple_editables(self):
editables = [
"git+https://github.com/jek/blinker.git#egg=Blinker",
"git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
]
# Create the testing virtualenv
self._create_virtualenv(self.venv_dir)
ret = self.run_function(
"pip.install",
[],
editable="{0}".format(",".join(editables)),
bin_env=self.venv_dir,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
self.assertEqual(ret["retcode"], 0)
self.assertIn("Successfully installed Blinker SaltTesting", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_pip_install_multiple_editables_and_pkgs(self):
editables = [
"git+https://github.com/jek/blinker.git#egg=Blinker",
"git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
]
# Create the testing virtualenv
self._create_virtualenv(self.venv_dir)
ret = self.run_function(
"pip.install",
["pep8"],
editable="{0}".format(",".join(editables)),
bin_env=self.venv_dir,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
self.assertEqual(ret["retcode"], 0)
for package in ("Blinker", "SaltTesting", "pep8"):
self.assertRegex(
ret["stdout"],
r"(?:.*)(Successfully installed)(?:.*)({0})(?:.*)".format(package),
)
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
@skipIf(not os.path.isfile("pip3"), "test where pip3 is installed")
@skipIf(
salt.utils.platform.is_windows(), "test specific for linux usage of /bin/python"
)
def test_system_pip3(self):
self.run_function(
"pip.install", pkgs=["lazyimport==0.0.1"], bin_env="/bin/pip3"
)
ret1 = self.run_function("cmd.run", "/bin/pip3 freeze | grep lazyimport")
self.run_function("pip.uninstall", pkgs=["lazyimport"], bin_env="/bin/pip3")
ret2 = self.run_function("cmd.run", "/bin/pip3 freeze | grep lazyimport")
assert "lazyimport==0.0.1" in ret1
assert ret2 == ""
| 37.179903
| 105
| 0.564037
|
from __future__ import absolute_import, print_function, unicode_literals
import os
import pprint
import re
import shutil
import sys
import tempfile
import salt.utils.files
import salt.utils.path
import salt.utils.platform
from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES
from tests.support.case import ModuleCase
from tests.support.helpers import patched_environ
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import skipIf
@skipIf(
salt.utils.path.which_bin(KNOWN_BINARY_NAMES) is None, "virtualenv not installed"
)
class PipModuleTest(ModuleCase):
def setUp(self):
super(PipModuleTest, self).setUp()
self.venv_test_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
self.addCleanup(shutil.rmtree, self.venv_test_dir, ignore_errors=True)
self.venv_dir = os.path.join(self.venv_test_dir, "venv")
self.pip_temp = os.path.join(self.venv_test_dir, ".pip-temp")
if not os.path.isdir(self.pip_temp):
os.makedirs(self.pip_temp)
self.patched_environ = patched_environ(
PIP_SOURCE_DIR="",
PIP_BUILD_DIR="",
__cleanup__=[k for k in os.environ if k.startswith("PIP_")],
)
self.patched_environ.__enter__()
self.addCleanup(self.patched_environ.__exit__)
def _create_virtualenv(self, path):
try:
if salt.utils.platform.is_windows():
python = os.path.join(sys.real_prefix, os.path.basename(sys.executable))
else:
python_binary_names = [
"python{}.{}".format(*sys.version_info),
"python{}".format(*sys.version_info),
"python",
]
for binary_name in python_binary_names:
python = os.path.join(sys.real_prefix, "bin", binary_name)
if os.path.exists(python):
break
else:
self.fail(
"Couldn't find a python binary name under '{}' matching: {}".format(
os.path.join(sys.real_prefix, "bin"), python_binary_names
)
)
# We're running off a virtualenv, and we don't want to create a virtualenv off of
# a virtualenv
kwargs = {"python": python}
except AttributeError:
# We're running off of the system python
kwargs = {}
self.run_function("virtualenv.create", [path], **kwargs)
def _check_download_error(self, ret):
return any(w in ret for w in ["URLError", "Download error"])
def pip_successful_install(self, target, expect=("irc3-plugins-test", "pep8",)):
expect = set(expect)
expect_str = "|".join(expect)
success = re.search(
r"^.*Successfully installed\s([^\n]+)(?:Clean.*)?", target, re.M | re.S
)
success_for = (
re.findall(
r"({0})(?:-(?:[\d\.-]))?".format(expect_str), success.groups()[0]
)
if success
else []
)
return expect.issubset(set(success_for))
def test_issue_2087_missing_pip(self):
self._create_virtualenv(self.venv_dir)
# Let's remove the pip binary
pip_bin = os.path.join(self.venv_dir, "bin", "pip")
site_dir = self.run_function(
"virtualenv.get_distribution_path", [self.venv_dir, "pip"]
)
if salt.utils.platform.is_windows():
pip_bin = os.path.join(self.venv_dir, "Scripts", "pip.exe")
site_dir = os.path.join(self.venv_dir, "lib", "site-packages")
if not os.path.isfile(pip_bin):
self.skipTest("Failed to find the pip binary to the test virtualenv")
os.remove(pip_bin)
# pip binary directly. python -m pip will still work even if the
# pip binary is missing
shutil.rmtree(os.path.join(site_dir, "pip"))
# Let's run a pip depending functions
for func in ("pip.freeze", "pip.list"):
ret = self.run_function(func, bin_env=self.venv_dir)
self.assertIn(
"Command required for '{0}' not found: "
"Could not find a `pip` binary".format(func),
ret,
)
def test_requirements_as_list_of_chains__cwd_set__absolute_file_path(self):
self._create_virtualenv(self.venv_dir)
req1_filename = os.path.join(self.venv_dir, "requirements1.txt")
req1b_filename = os.path.join(self.venv_dir, "requirements1b.txt")
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
req2b_filename = os.path.join(self.venv_dir, "requirements2b.txt")
with salt.utils.files.fopen(req1_filename, "w") as f:
f.write("-r requirements1b.txt\n")
with salt.utils.files.fopen(req1b_filename, "w") as f:
f.write("irc3-plugins-test\n")
with salt.utils.files.fopen(req2_filename, "w") as f:
f.write("-r requirements2b.txt\n")
with salt.utils.files.fopen(req2b_filename, "w") as f:
f.write("pep8\n")
requirements_list = [req1_filename, req2_filename]
ret = self.run_function(
"pip.install",
requirements=requirements_list,
bin_env=self.venv_dir,
cwd=self.venv_dir,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
self.assertEqual(ret["retcode"], 0)
found = self.pip_successful_install(ret["stdout"])
self.assertTrue(found)
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_requirements_as_list_of_chains__cwd_not_set__absolute_file_path(self):
self._create_virtualenv(self.venv_dir)
req1_filename = os.path.join(self.venv_dir, "requirements1.txt")
req1b_filename = os.path.join(self.venv_dir, "requirements1b.txt")
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
req2b_filename = os.path.join(self.venv_dir, "requirements2b.txt")
with salt.utils.files.fopen(req1_filename, "w") as f:
f.write("-r requirements1b.txt\n")
with salt.utils.files.fopen(req1b_filename, "w") as f:
f.write("irc3-plugins-test\n")
with salt.utils.files.fopen(req2_filename, "w") as f:
f.write("-r requirements2b.txt\n")
with salt.utils.files.fopen(req2b_filename, "w") as f:
f.write("pep8\n")
requirements_list = [req1_filename, req2_filename]
ret = self.run_function(
"pip.install", requirements=requirements_list, bin_env=self.venv_dir
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
self.assertEqual(ret["retcode"], 0)
found = self.pip_successful_install(ret["stdout"])
self.assertTrue(found)
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_requirements_as_list__absolute_file_path(self):
self._create_virtualenv(self.venv_dir)
req1_filename = os.path.join(self.venv_dir, "requirements.txt")
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
with salt.utils.files.fopen(req1_filename, "w") as f:
f.write("irc3-plugins-test\n")
with salt.utils.files.fopen(req2_filename, "w") as f:
f.write("pep8\n")
requirements_list = [req1_filename, req2_filename]
ret = self.run_function(
"pip.install", requirements=requirements_list, bin_env=self.venv_dir
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
self.assertEqual(ret["retcode"], 0)
found = self.pip_successful_install(ret["stdout"])
self.assertTrue(found)
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_requirements_as_list__non_absolute_file_path(self):
self._create_virtualenv(self.venv_dir)
req1_filename = "requirements.txt"
req2_filename = "requirements2.txt"
req_cwd = self.venv_dir
req1_filepath = os.path.join(req_cwd, req1_filename)
req2_filepath = os.path.join(req_cwd, req2_filename)
with salt.utils.files.fopen(req1_filepath, "w") as f:
f.write("irc3-plugins-test\n")
with salt.utils.files.fopen(req2_filepath, "w") as f:
f.write("pep8\n")
requirements_list = [req1_filename, req2_filename]
ret = self.run_function(
"pip.install",
requirements=requirements_list,
bin_env=self.venv_dir,
cwd=req_cwd,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
self.assertEqual(ret["retcode"], 0)
found = self.pip_successful_install(ret["stdout"])
self.assertTrue(found)
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_chained_requirements__absolute_file_path(self):
self._create_virtualenv(self.venv_dir)
req1_filename = os.path.join(self.venv_dir, "requirements.txt")
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
with salt.utils.files.fopen(req1_filename, "w") as f:
f.write("-r requirements2.txt")
with salt.utils.files.fopen(req2_filename, "w") as f:
f.write("pep8")
ret = self.run_function(
"pip.install", requirements=req1_filename, bin_env=self.venv_dir
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
self.assertEqual(ret["retcode"], 0)
self.assertIn("installed pep8", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_chained_requirements__non_absolute_file_path(self):
self._create_virtualenv(self.venv_dir)
req_basepath = self.venv_dir
req1_filename = "requirements.txt"
req2_filename = "requirements2.txt"
req1_file = os.path.join(self.venv_dir, req1_filename)
req2_file = os.path.join(self.venv_dir, req2_filename)
with salt.utils.files.fopen(req1_file, "w") as f:
f.write("-r requirements2.txt")
with salt.utils.files.fopen(req2_file, "w") as f:
f.write("pep8")
ret = self.run_function(
"pip.install",
requirements=req1_filename,
cwd=req_basepath,
bin_env=self.venv_dir,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
self.assertEqual(ret["retcode"], 0)
self.assertIn("installed pep8", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_issue_4805_nested_requirements(self):
self._create_virtualenv(self.venv_dir)
req1_filename = os.path.join(self.venv_dir, "requirements.txt")
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
with salt.utils.files.fopen(req1_filename, "w") as f:
f.write("-r requirements2.txt")
with salt.utils.files.fopen(req2_filename, "w") as f:
f.write("pep8")
ret = self.run_function(
"pip.install",
requirements=req1_filename,
bin_env=self.venv_dir,
timeout=300,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
self.assertEqual(ret["retcode"], 0)
self.assertIn("installed pep8", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_pip_uninstall(self):
self._create_virtualenv(self.venv_dir)
ret = self.run_function("pip.install", ["pep8"], bin_env=self.venv_dir)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
self.assertEqual(ret["retcode"], 0)
self.assertIn("installed pep8", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
ret = self.run_function("pip.uninstall", ["pep8"], bin_env=self.venv_dir)
if not isinstance(ret, dict):
self.fail(
"The 'pip.uninstall' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
self.assertEqual(ret["retcode"], 0)
self.assertIn("uninstalled pep8", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_pip_install_upgrade(self):
# Create the testing virtualenv
self._create_virtualenv(self.venv_dir)
ret = self.run_function("pip.install", ["pep8==1.3.4"], bin_env=self.venv_dir)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
self.assertEqual(ret["retcode"], 0)
self.assertIn("installed pep8", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
ret = self.run_function(
"pip.install", ["pep8"], bin_env=self.venv_dir, upgrade=True
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
self.assertEqual(ret["retcode"], 0)
self.assertIn("installed pep8", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
ret = self.run_function("pip.uninstall", ["pep8"], bin_env=self.venv_dir)
if not isinstance(ret, dict):
self.fail(
"The 'pip.uninstall' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
self.assertEqual(ret["retcode"], 0)
self.assertIn("uninstalled pep8", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_pip_install_multiple_editables(self):
editables = [
"git+https://github.com/jek/blinker.git#egg=Blinker",
"git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
]
# Create the testing virtualenv
self._create_virtualenv(self.venv_dir)
ret = self.run_function(
"pip.install",
[],
editable="{0}".format(",".join(editables)),
bin_env=self.venv_dir,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
self.assertEqual(ret["retcode"], 0)
self.assertIn("Successfully installed Blinker SaltTesting", ret["stdout"])
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
def test_pip_install_multiple_editables_and_pkgs(self):
editables = [
"git+https://github.com/jek/blinker.git#egg=Blinker",
"git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
]
# Create the testing virtualenv
self._create_virtualenv(self.venv_dir)
ret = self.run_function(
"pip.install",
["pep8"],
editable="{0}".format(",".join(editables)),
bin_env=self.venv_dir,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary. Output:\n{}".format(
ret
)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
self.assertEqual(ret["retcode"], 0)
for package in ("Blinker", "SaltTesting", "pep8"):
self.assertRegex(
ret["stdout"],
r"(?:.*)(Successfully installed)(?:.*)({0})(?:.*)".format(package),
)
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'. Dictionary: {}".format(
exc, pprint.pformat(ret)
)
)
@skipIf(not os.path.isfile("pip3"), "test where pip3 is installed")
@skipIf(
salt.utils.platform.is_windows(), "test specific for linux usage of /bin/python"
)
def test_system_pip3(self):
self.run_function(
"pip.install", pkgs=["lazyimport==0.0.1"], bin_env="/bin/pip3"
)
ret1 = self.run_function("cmd.run", "/bin/pip3 freeze | grep lazyimport")
self.run_function("pip.uninstall", pkgs=["lazyimport"], bin_env="/bin/pip3")
ret2 = self.run_function("cmd.run", "/bin/pip3 freeze | grep lazyimport")
assert "lazyimport==0.0.1" in ret1
assert ret2 == ""
| true
| true
|
1c41ebabc106d700de6df5026715d4856ee4b861
| 1,481
|
py
|
Python
|
auto_editor/interpolate.py
|
cysk003/auto-editor
|
e027d897fd5ee842deaf2db3e4fea81d909b343b
|
[
"Unlicense"
] | 1
|
2021-03-17T03:44:51.000Z
|
2021-03-17T03:44:51.000Z
|
auto_editor/interpolate.py
|
cysk003/auto-editor
|
e027d897fd5ee842deaf2db3e4fea81d909b343b
|
[
"Unlicense"
] | null | null | null |
auto_editor/interpolate.py
|
cysk003/auto-editor
|
e027d897fd5ee842deaf2db3e4fea81d909b343b
|
[
"Unlicense"
] | null | null | null |
'''interpolate.py'''
import math
def linear(x: int, y: int, n: int) -> list:
b = [x]
step = (y - x) / n
incre = x
for _ in range(n - 2):
incre += step
b.append(incre)
b.append(y)
return b
# See how these formulas are derived:
# - https://www.desmos.com/calculator/jj4tociyb4
def sine(x, y, n: int) -> list:
# slow -> fast -> slow
b = [x]
incre = 0
for _ in range(n - 2):
incre += math.pi / n
val = ((y - x)/2) * math.sin(incre - (math.pi / 2)) + ((y - x)/2) + x
b.append(val)
b.append(y)
return b
def start_sine(x, y, n: int) -> list:
# slow -> fast
b = [x]
incre = 0
for _ in range(n - 2):
incre += math.pi / n
val = ((y - x)/2) * math.sin(incre - (math.pi / 2)) + ((y - x)/2) + x
b.append(val)
b.append(y)
return b
def end_sine(x, y, n: int) -> list:
# fast -> slow
b = [x]
incre = 0
for _ in range(n - 2):
incre += (math.pi / 2) / n
val = x + math.sin(incre) * (y - x)
b.append(val)
b.append(y)
return b
def interpolate(x, y, n, log, method='linear') -> list:
if(method == 'linear'):
return linear(x, y, n)
elif(method == 'sine'):
return sine(x, y, n)
elif(method == 'start_sine'):
return start_sine(x, y, n)
elif(method == 'end_sine'):
return end_sine(x, y, n)
else:
log.error(f"Method: {method} isn't implemented.")
| 20.859155
| 77
| 0.488859
|
import math
def linear(x: int, y: int, n: int) -> list:
b = [x]
step = (y - x) / n
incre = x
for _ in range(n - 2):
incre += step
b.append(incre)
b.append(y)
return b
def sine(x, y, n: int) -> list:
b = [x]
incre = 0
for _ in range(n - 2):
incre += math.pi / n
val = ((y - x)/2) * math.sin(incre - (math.pi / 2)) + ((y - x)/2) + x
b.append(val)
b.append(y)
return b
def start_sine(x, y, n: int) -> list:
b = [x]
incre = 0
for _ in range(n - 2):
incre += math.pi / n
val = ((y - x)/2) * math.sin(incre - (math.pi / 2)) + ((y - x)/2) + x
b.append(val)
b.append(y)
return b
def end_sine(x, y, n: int) -> list:
b = [x]
incre = 0
for _ in range(n - 2):
incre += (math.pi / 2) / n
val = x + math.sin(incre) * (y - x)
b.append(val)
b.append(y)
return b
def interpolate(x, y, n, log, method='linear') -> list:
if(method == 'linear'):
return linear(x, y, n)
elif(method == 'sine'):
return sine(x, y, n)
elif(method == 'start_sine'):
return start_sine(x, y, n)
elif(method == 'end_sine'):
return end_sine(x, y, n)
else:
log.error(f"Method: {method} isn't implemented.")
| true
| true
|
1c41ec8e80bab40b9157f039fb6c7beb59ca8cd6
| 468
|
py
|
Python
|
frappe-bench/apps/erpnext/erpnext/patches/v7_0/update_prevdoc_values_for_supplier_quotation_item.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/patches/v7_0/update_prevdoc_values_for_supplier_quotation_item.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/patches/v7_0/update_prevdoc_values_for_supplier_quotation_item.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
import frappe
def execute():
frappe.reload_doctype('Supplier Quotation Item')
for data in frappe.db.sql(""" select prevdoc_docname, prevdoc_detail_docname, name
from `tabSupplier Quotation Item` where prevdoc_docname is not null""", as_dict=True):
frappe.db.set_value("Supplier Quotation Item", data.name, "material_request", data.prevdoc_docname)
frappe.db.set_value("Supplier Quotation Item", data.name, "material_request_item", data.prevdoc_detail_docname)
| 58.5
| 113
| 0.794872
|
import frappe
def execute():
frappe.reload_doctype('Supplier Quotation Item')
for data in frappe.db.sql(""" select prevdoc_docname, prevdoc_detail_docname, name
from `tabSupplier Quotation Item` where prevdoc_docname is not null""", as_dict=True):
frappe.db.set_value("Supplier Quotation Item", data.name, "material_request", data.prevdoc_docname)
frappe.db.set_value("Supplier Quotation Item", data.name, "material_request_item", data.prevdoc_detail_docname)
| true
| true
|
1c41eddbcb665b20c36629b49fac1572c0ff3214
| 3,424
|
py
|
Python
|
elmextensions/embeddedterminal.py
|
JeffHoogland/python-elm-extensions
|
165ba2f0df38382b3e2bb7e8dbed9724633501ab
|
[
"BSD-3-Clause"
] | 8
|
2015-02-01T15:14:17.000Z
|
2018-08-12T21:57:05.000Z
|
elmextensions/embeddedterminal.py
|
JeffHoogland/python-elm-extensions
|
165ba2f0df38382b3e2bb7e8dbed9724633501ab
|
[
"BSD-3-Clause"
] | 5
|
2015-02-04T03:02:51.000Z
|
2017-08-06T16:07:52.000Z
|
elmextensions/embeddedterminal.py
|
JeffHoogland/python-elm-extensions
|
165ba2f0df38382b3e2bb7e8dbed9724633501ab
|
[
"BSD-3-Clause"
] | 6
|
2015-02-03T23:46:28.000Z
|
2020-04-22T14:25:33.000Z
|
from efl import ecore
from efl.elementary.box import Box
from efl.elementary.frame import Frame
from efl.elementary.button import Button
from efl.elementary.entry import Entry, markup_to_utf8
from efl.evas import EVAS_HINT_EXPAND, EVAS_HINT_FILL
EXPAND_BOTH = EVAS_HINT_EXPAND, EVAS_HINT_EXPAND
EXPAND_HORIZ = EVAS_HINT_EXPAND, 0.0
FILL_BOTH = EVAS_HINT_FILL, EVAS_HINT_FILL
FILL_HORIZ = EVAS_HINT_FILL, 0.5
class EmbeddedTerminal(Box):
def __init__(self, parent_widget, titles=None, *args, **kwargs):
Box.__init__(self, parent_widget, *args, **kwargs)
self.outPut = Entry(self, size_hint_weight=EXPAND_BOTH, size_hint_align=FILL_BOTH)
self.outPut.editable_set(False)
self.outPut.scrollable_set(True)
self.outPut.callback_changed_add(self.changedCb)
self.outPut.show()
frame = Frame(self, size_hint_weight=EXPAND_HORIZ, size_hint_align=FILL_HORIZ)
frame.text = "Input:"
frame.autocollapse_set(True)
frame.collapse_go(True)
frame.show()
bx = Box(self, size_hint_weight=EXPAND_HORIZ, size_hint_align=FILL_HORIZ)
bx.horizontal = True
bx.show()
frame.content = bx
self.inPut = Entry(self, size_hint_weight=EXPAND_BOTH, size_hint_align=FILL_BOTH)
self.inPut.single_line_set(True)
self.inPut.callback_activated_add(self.enterPressed)
self.inPut.show()
enterButton = Button(self)
enterButton.text = "Execute"
enterButton.callback_pressed_add(self.enterPressed)
enterButton.show()
bx.pack_end(self.inPut)
bx.pack_end(enterButton)
self.pack_end(self.outPut)
self.pack_end(frame)
self.cmd_exe = None
self.done_cb = None
def changedCb(self, obj):
obj.cursor_end_set()
def enterPressed(self, btn):
if not self.cmd_exe:
self.runCommand(self.inPut.text)
self.inPut.text = ""
else:
ourResult = self.cmd_exe.send("%s\n"%self.inPut.text)
self.inPut.text = ""
def runCommand(self, command, done_cb=None):
command = markup_to_utf8(command)
self.cmd_exe = cmd = ecore.Exe(
command,
ecore.ECORE_EXE_PIPE_READ |
ecore.ECORE_EXE_PIPE_ERROR |
ecore.ECORE_EXE_PIPE_WRITE
)
cmd.on_add_event_add(self.command_started)
cmd.on_data_event_add(self.received_data)
cmd.on_error_event_add(self.received_error)
cmd.on_del_event_add(self.command_done)
self.done_cb = done_cb
def command_started(self, cmd, event, *args, **kwargs):
self.outPut.entry_append("---------------------------------")
self.outPut.entry_append("<br>")
def received_data(self, cmd, event, *args, **kwargs):
self.outPut.entry_append("%s"%event.data)
self.outPut.entry_append("<br>")
def received_error(self, cmd, event, *args, **kwargs):
self.outPut.entry_append("Error: %s" % event.data)
def command_done(self, cmd, event, *args, **kwargs):
self.outPut.entry_append("---------------------------------")
self.outPut.entry_append("<br>")
self.cmd_exe = None
if self.done_cb:
if callable(self.done_cb):
self.done_cb()
| 34.938776
| 90
| 0.625876
|
from efl import ecore
from efl.elementary.box import Box
from efl.elementary.frame import Frame
from efl.elementary.button import Button
from efl.elementary.entry import Entry, markup_to_utf8
from efl.evas import EVAS_HINT_EXPAND, EVAS_HINT_FILL
EXPAND_BOTH = EVAS_HINT_EXPAND, EVAS_HINT_EXPAND
EXPAND_HORIZ = EVAS_HINT_EXPAND, 0.0
FILL_BOTH = EVAS_HINT_FILL, EVAS_HINT_FILL
FILL_HORIZ = EVAS_HINT_FILL, 0.5
class EmbeddedTerminal(Box):
def __init__(self, parent_widget, titles=None, *args, **kwargs):
Box.__init__(self, parent_widget, *args, **kwargs)
self.outPut = Entry(self, size_hint_weight=EXPAND_BOTH, size_hint_align=FILL_BOTH)
self.outPut.editable_set(False)
self.outPut.scrollable_set(True)
self.outPut.callback_changed_add(self.changedCb)
self.outPut.show()
frame = Frame(self, size_hint_weight=EXPAND_HORIZ, size_hint_align=FILL_HORIZ)
frame.text = "Input:"
frame.autocollapse_set(True)
frame.collapse_go(True)
frame.show()
bx = Box(self, size_hint_weight=EXPAND_HORIZ, size_hint_align=FILL_HORIZ)
bx.horizontal = True
bx.show()
frame.content = bx
self.inPut = Entry(self, size_hint_weight=EXPAND_BOTH, size_hint_align=FILL_BOTH)
self.inPut.single_line_set(True)
self.inPut.callback_activated_add(self.enterPressed)
self.inPut.show()
enterButton = Button(self)
enterButton.text = "Execute"
enterButton.callback_pressed_add(self.enterPressed)
enterButton.show()
bx.pack_end(self.inPut)
bx.pack_end(enterButton)
self.pack_end(self.outPut)
self.pack_end(frame)
self.cmd_exe = None
self.done_cb = None
def changedCb(self, obj):
obj.cursor_end_set()
def enterPressed(self, btn):
if not self.cmd_exe:
self.runCommand(self.inPut.text)
self.inPut.text = ""
else:
ourResult = self.cmd_exe.send("%s\n"%self.inPut.text)
self.inPut.text = ""
def runCommand(self, command, done_cb=None):
command = markup_to_utf8(command)
self.cmd_exe = cmd = ecore.Exe(
command,
ecore.ECORE_EXE_PIPE_READ |
ecore.ECORE_EXE_PIPE_ERROR |
ecore.ECORE_EXE_PIPE_WRITE
)
cmd.on_add_event_add(self.command_started)
cmd.on_data_event_add(self.received_data)
cmd.on_error_event_add(self.received_error)
cmd.on_del_event_add(self.command_done)
self.done_cb = done_cb
def command_started(self, cmd, event, *args, **kwargs):
self.outPut.entry_append("---------------------------------")
self.outPut.entry_append("<br>")
def received_data(self, cmd, event, *args, **kwargs):
self.outPut.entry_append("%s"%event.data)
self.outPut.entry_append("<br>")
def received_error(self, cmd, event, *args, **kwargs):
self.outPut.entry_append("Error: %s" % event.data)
def command_done(self, cmd, event, *args, **kwargs):
self.outPut.entry_append("---------------------------------")
self.outPut.entry_append("<br>")
self.cmd_exe = None
if self.done_cb:
if callable(self.done_cb):
self.done_cb()
| true
| true
|
1c41ee729bf77b324242364358d2d779a37b648f
| 2,683
|
py
|
Python
|
apps/utils/base_crypt.py
|
kiritoscs/bk-log
|
4801b14182ba7cb108d968cd4f33668ee2d16dbc
|
[
"MIT"
] | null | null | null |
apps/utils/base_crypt.py
|
kiritoscs/bk-log
|
4801b14182ba7cb108d968cd4f33668ee2d16dbc
|
[
"MIT"
] | null | null | null |
apps/utils/base_crypt.py
|
kiritoscs/bk-log
|
4801b14182ba7cb108d968cd4f33668ee2d16dbc
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-LOG 蓝鲸日志平台 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-LOG 蓝鲸日志平台 is licensed under the MIT License.
License for BK-LOG 蓝鲸日志平台:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
We undertake not to change the open source license (MIT license) applicable to the current version of
the project delivered to anyone in the future.
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import base64
from Crypto.Cipher import AES
from django.conf import settings
class BaseCrypt(object):
_bk_crypt = False
# KEY 和 IV 的长度需等于16
ROOT_KEY = b"TencentBkApp-Key"
ROOT_IV = b"TencentBkApp--Iv"
def __init__(self, instance_key=settings.SECRET_KEY):
self.INSTANCE_KEY = instance_key
def encrypt(self, plaintext):
"""
加密
:param plaintext: 需要加密的内容
:return:
"""
decrypt_key = self.__parse_key()
secret_txt = AES.new(decrypt_key, AES.MODE_CFB, self.ROOT_IV).encrypt(plaintext)
return base64.b64encode(secret_txt).decode("utf-8")
def decrypt(self, ciphertext):
"""
解密
:param ciphertext: 需要解密的内容
:return:
"""
decrypt_key = self.__parse_key()
# 先解base64
secret_txt = base64.b64decode(ciphertext)
# 再解对称加密
plain = AES.new(decrypt_key, AES.MODE_CFB, self.ROOT_IV).decrypt(secret_txt)
return plain.decode(encoding="utf-8")
def __parse_key(self):
return self.INSTANCE_KEY[:24].encode()
| 40.651515
| 111
| 0.710026
|
from __future__ import absolute_import
from __future__ import unicode_literals
import base64
from Crypto.Cipher import AES
from django.conf import settings
class BaseCrypt(object):
_bk_crypt = False
ROOT_KEY = b"TencentBkApp-Key"
ROOT_IV = b"TencentBkApp--Iv"
def __init__(self, instance_key=settings.SECRET_KEY):
self.INSTANCE_KEY = instance_key
def encrypt(self, plaintext):
decrypt_key = self.__parse_key()
secret_txt = AES.new(decrypt_key, AES.MODE_CFB, self.ROOT_IV).encrypt(plaintext)
return base64.b64encode(secret_txt).decode("utf-8")
def decrypt(self, ciphertext):
decrypt_key = self.__parse_key()
secret_txt = base64.b64decode(ciphertext)
plain = AES.new(decrypt_key, AES.MODE_CFB, self.ROOT_IV).decrypt(secret_txt)
return plain.decode(encoding="utf-8")
def __parse_key(self):
return self.INSTANCE_KEY[:24].encode()
| true
| true
|
1c41ef2c5dea949cf3dbf13ee2295b6900f98748
| 1,556
|
py
|
Python
|
logreader/lineage.py
|
thundersen/ohol-data
|
6a8f9a3572e3149e222f38d226b1572ac25c07b3
|
[
"MIT"
] | null | null | null |
logreader/lineage.py
|
thundersen/ohol-data
|
6a8f9a3572e3149e222f38d226b1572ac25c07b3
|
[
"MIT"
] | null | null | null |
logreader/lineage.py
|
thundersen/ohol-data
|
6a8f9a3572e3149e222f38d226b1572ac25c07b3
|
[
"MIT"
] | null | null | null |
from datetime import timedelta
from datetimerange import DateTimeRange
class Lineage:
max_descendants = 0
max_duration = timedelta(seconds=0)
def __init__(self, eve):
self.eve = eve
eve_descendants = self.eve.descendants()
self._characters = [self.eve] + eve_descendants
self._log_max_eve_descendants(eve_descendants)
self._duration = self._calculate_duration()
self._log_max_duration()
def _log_max_eve_descendants(self, eve_descendants):
if len(eve_descendants) > Lineage.max_descendants:
Lineage.max_descendants = len(eve_descendants)
print(
f'max descendants so far: {Lineage.max_descendants} '
f'for {self.eve.name} ({self.id()}) born at {self.eve.birth}')
def _log_max_duration(self):
if self._duration.timedelta > Lineage.max_duration:
Lineage.max_duration = self._duration.timedelta
print(
f'max duration so far: {Lineage.max_duration} '
f'for {self.eve.name} ({self.id()}) born at {self.eve.birth}')
def __str__(self):
return '%s | %s' % (self.duration(), self.eve)
def id(self):
return self.eve.id
def characters(self):
return self._characters
def duration(self):
return self._duration
def _calculate_duration(self):
last_fertility_end = max(d.fertility_period().end_datetime for d in self.characters())
return DateTimeRange(self.eve.birth, last_fertility_end)
| 27.298246
| 94
| 0.643959
|
from datetime import timedelta
from datetimerange import DateTimeRange
class Lineage:
max_descendants = 0
max_duration = timedelta(seconds=0)
def __init__(self, eve):
self.eve = eve
eve_descendants = self.eve.descendants()
self._characters = [self.eve] + eve_descendants
self._log_max_eve_descendants(eve_descendants)
self._duration = self._calculate_duration()
self._log_max_duration()
def _log_max_eve_descendants(self, eve_descendants):
if len(eve_descendants) > Lineage.max_descendants:
Lineage.max_descendants = len(eve_descendants)
print(
f'max descendants so far: {Lineage.max_descendants} '
f'for {self.eve.name} ({self.id()}) born at {self.eve.birth}')
def _log_max_duration(self):
if self._duration.timedelta > Lineage.max_duration:
Lineage.max_duration = self._duration.timedelta
print(
f'max duration so far: {Lineage.max_duration} '
f'for {self.eve.name} ({self.id()}) born at {self.eve.birth}')
def __str__(self):
return '%s | %s' % (self.duration(), self.eve)
def id(self):
return self.eve.id
def characters(self):
return self._characters
def duration(self):
return self._duration
def _calculate_duration(self):
last_fertility_end = max(d.fertility_period().end_datetime for d in self.characters())
return DateTimeRange(self.eve.birth, last_fertility_end)
| true
| true
|
1c41f0d7e3d1f1d8c4780884483f3c9dd0e530d8
| 1,858
|
py
|
Python
|
ecs/numpy/core/fromnumeric.py
|
gongqingfeng/huawei-codecraft
|
89b9bff78aa90a09ab391e20570ebbe660336f38
|
[
"MIT"
] | null | null | null |
ecs/numpy/core/fromnumeric.py
|
gongqingfeng/huawei-codecraft
|
89b9bff78aa90a09ab391e20570ebbe660336f38
|
[
"MIT"
] | null | null | null |
ecs/numpy/core/fromnumeric.py
|
gongqingfeng/huawei-codecraft
|
89b9bff78aa90a09ab391e20570ebbe660336f38
|
[
"MIT"
] | null | null | null |
"""Module containing non-deprecated functions borrowed from Numeric.
"""
from __future__ import division, absolute_import, print_function
import types
from .numeric import asarray
# functions that are methods
__all__ = [
'amax', 'amin',
'mean',
'round_'
]
try:
_gentype = types.GeneratorType
except AttributeError:
_gentype = type(None)
# save away Python sum
def amax(a, axis=None, out=None, keepdims=False):
if type(a) is not mu.ndarray:
try:
amax = a.max
except AttributeError:
return _methods._amax(a, axis=axis,
out=out, keepdims=keepdims)
# NOTE: Dropping the keepdims parameter
return amax(axis=axis, out=out)
else:
return _methods._amax(a, axis=axis,
out=out, keepdims=keepdims)
def amin(a, axis=None, out=None, keepdims=False):
if type(a) is not mu.ndarray:
try:
amin = a.min
except AttributeError:
return _methods._amin(a, axis=axis,
out=out, keepdims=keepdims)
# NOTE: Dropping the keepdims parameter
return amin(axis=axis, out=out)
else:
return _methods._amin(a, axis=axis,
out=out, keepdims=keepdims)
def round_(a, decimals=0, out=None):
try:
round = a.round
except AttributeError:
return _wrapit(a, 'round', decimals, out)
return round(decimals, out)
def mean(a, axis=None, dtype=None, out=None, keepdims=False):
if type(a) is not mu.ndarray:
try:
mean = a.mean
return mean(axis=axis, dtype=dtype, out=out)
except AttributeError:
pass
return _methods._mean(a, axis=axis, dtype=dtype,
out=out, keepdims=keepdims)
| 27.323529
| 68
| 0.582885
|
from __future__ import division, absolute_import, print_function
import types
from .numeric import asarray
__all__ = [
'amax', 'amin',
'mean',
'round_'
]
try:
_gentype = types.GeneratorType
except AttributeError:
_gentype = type(None)
def amax(a, axis=None, out=None, keepdims=False):
if type(a) is not mu.ndarray:
try:
amax = a.max
except AttributeError:
return _methods._amax(a, axis=axis,
out=out, keepdims=keepdims)
return amax(axis=axis, out=out)
else:
return _methods._amax(a, axis=axis,
out=out, keepdims=keepdims)
def amin(a, axis=None, out=None, keepdims=False):
if type(a) is not mu.ndarray:
try:
amin = a.min
except AttributeError:
return _methods._amin(a, axis=axis,
out=out, keepdims=keepdims)
return amin(axis=axis, out=out)
else:
return _methods._amin(a, axis=axis,
out=out, keepdims=keepdims)
def round_(a, decimals=0, out=None):
try:
round = a.round
except AttributeError:
return _wrapit(a, 'round', decimals, out)
return round(decimals, out)
def mean(a, axis=None, dtype=None, out=None, keepdims=False):
if type(a) is not mu.ndarray:
try:
mean = a.mean
return mean(axis=axis, dtype=dtype, out=out)
except AttributeError:
pass
return _methods._mean(a, axis=axis, dtype=dtype,
out=out, keepdims=keepdims)
| true
| true
|
1c41f2b441c93c4135dbb4c2c26daffe49f2da7f
| 943
|
py
|
Python
|
scripts/dataset/common.py
|
heptaliane/CycleGanTrainerPytorch
|
63380cd4129b959a86a8ce1b30fd4c13830ac6f3
|
[
"MIT"
] | null | null | null |
scripts/dataset/common.py
|
heptaliane/CycleGanTrainerPytorch
|
63380cd4129b959a86a8ce1b30fd4c13830ac6f3
|
[
"MIT"
] | null | null | null |
scripts/dataset/common.py
|
heptaliane/CycleGanTrainerPytorch
|
63380cd4129b959a86a8ce1b30fd4c13830ac6f3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import glob
class DatasetDirectory():
def __init__(self, dirpath, ext='', rm_ext=True):
self._path = dirpath
self.ext = ext
self._rm_ext = rm_ext
os.makedirs(dirpath, exist_ok=True)
def __str__(self):
return os.path.join(self._path, '*%s' % self.ext)
@property
def names(self):
names = glob.glob(str(self))
names = [os.path.basename(name) for name in names]
if self._rm_ext:
names = [name[:-len(self.ext)] for name in names]
return names
@property
def path(self):
return self._path
@path.setter
def path(self, dirpath):
self._path
os.makedirs(dirpath, exist_ok=True)
def name_to_path(self, name):
if self._rm_ext:
return os.path.join(self._path, '%s%s' % (name, self.ext))
else:
return os.path.join(self._path, name)
| 24.179487
| 70
| 0.576882
|
import os
import glob
class DatasetDirectory():
def __init__(self, dirpath, ext='', rm_ext=True):
self._path = dirpath
self.ext = ext
self._rm_ext = rm_ext
os.makedirs(dirpath, exist_ok=True)
def __str__(self):
return os.path.join(self._path, '*%s' % self.ext)
@property
def names(self):
names = glob.glob(str(self))
names = [os.path.basename(name) for name in names]
if self._rm_ext:
names = [name[:-len(self.ext)] for name in names]
return names
@property
def path(self):
return self._path
@path.setter
def path(self, dirpath):
self._path
os.makedirs(dirpath, exist_ok=True)
def name_to_path(self, name):
if self._rm_ext:
return os.path.join(self._path, '%s%s' % (name, self.ext))
else:
return os.path.join(self._path, name)
| true
| true
|
1c41f353898b15f4887409f1dd7b968e62b34230
| 2,098
|
py
|
Python
|
services/ingest-file/ingestors/support/encoding.py
|
jalmquist/aleph
|
28a56bc7edd77dbd7910c94dac40916032edc177
|
[
"MIT"
] | 7
|
2020-02-18T14:35:10.000Z
|
2020-03-09T22:53:10.000Z
|
services/ingest-file/ingestors/support/encoding.py
|
jalmquist/aleph
|
28a56bc7edd77dbd7910c94dac40916032edc177
|
[
"MIT"
] | 4
|
2021-09-08T02:07:35.000Z
|
2022-03-12T00:33:46.000Z
|
services/ingest-file/ingestors/support/encoding.py
|
jalmquist/aleph
|
28a56bc7edd77dbd7910c94dac40916032edc177
|
[
"MIT"
] | null | null | null |
import logging
import chardet
from normality import stringify, guess_encoding
from normality.encoding import guess_file_encoding, normalize_result
from normality.encoding import normalize_encoding
from ingestors.exc import ProcessingException
log = logging.getLogger(__name__)
class EncodingSupport(object):
"""Decode the contents of the given file as plain text by guessing its
encoding."""
DEFAULT_ENCODING = 'utf-8'
def decode_string(self, text, encoding=DEFAULT_ENCODING):
if not isinstance(text, bytes):
return stringify(text)
encoding = normalize_encoding(encoding)
try:
return text.decode(encoding, 'strict')
except Exception:
try:
detected = guess_encoding(text)
return text.decode(detected, 'strict')
except Exception:
return text.decode(encoding, 'replace')
def detect_stream_encoding(self, fh, default=DEFAULT_ENCODING):
return guess_file_encoding(fh, default=default)
def detect_list_encoding(self, items, default=DEFAULT_ENCODING):
detector = chardet.UniversalDetector()
for text in items:
if not isinstance(text, bytes):
continue
detector.feed(text)
if detector.done:
break
detector.close()
return normalize_result(detector.result, default)
def read_file_decoded(self, entity, file_path):
with open(file_path, 'rb') as fh:
body = fh.read()
if not entity.has('encoding'):
entity.set('encoding', guess_encoding(body))
for encoding in entity.get('encoding'):
try:
body = body.decode(encoding)
if encoding != self.DEFAULT_ENCODING:
log.info("Decoding [%r] as: %s", entity, encoding)
return body
except UnicodeDecodeError as ude:
raise ProcessingException('Error decoding file as %s: %s' %
(encoding, ude)) from ude
| 34.393443
| 75
| 0.618684
|
import logging
import chardet
from normality import stringify, guess_encoding
from normality.encoding import guess_file_encoding, normalize_result
from normality.encoding import normalize_encoding
from ingestors.exc import ProcessingException
log = logging.getLogger(__name__)
class EncodingSupport(object):
DEFAULT_ENCODING = 'utf-8'
def decode_string(self, text, encoding=DEFAULT_ENCODING):
if not isinstance(text, bytes):
return stringify(text)
encoding = normalize_encoding(encoding)
try:
return text.decode(encoding, 'strict')
except Exception:
try:
detected = guess_encoding(text)
return text.decode(detected, 'strict')
except Exception:
return text.decode(encoding, 'replace')
def detect_stream_encoding(self, fh, default=DEFAULT_ENCODING):
return guess_file_encoding(fh, default=default)
def detect_list_encoding(self, items, default=DEFAULT_ENCODING):
detector = chardet.UniversalDetector()
for text in items:
if not isinstance(text, bytes):
continue
detector.feed(text)
if detector.done:
break
detector.close()
return normalize_result(detector.result, default)
def read_file_decoded(self, entity, file_path):
with open(file_path, 'rb') as fh:
body = fh.read()
if not entity.has('encoding'):
entity.set('encoding', guess_encoding(body))
for encoding in entity.get('encoding'):
try:
body = body.decode(encoding)
if encoding != self.DEFAULT_ENCODING:
log.info("Decoding [%r] as: %s", entity, encoding)
return body
except UnicodeDecodeError as ude:
raise ProcessingException('Error decoding file as %s: %s' %
(encoding, ude)) from ude
| true
| true
|
1c41f35fbb870118e1cc69183ab2e3fb1f90a153
| 28,671
|
py
|
Python
|
tronapi/trx.py
|
sonicskye/tron-api-python
|
eba4d5d1eb27b95ea69e2f9e0fbf05c819012b7e
|
[
"MIT"
] | 1
|
2018-12-20T22:09:39.000Z
|
2018-12-20T22:09:39.000Z
|
tronapi/trx.py
|
sonicskye/tron-api-python
|
eba4d5d1eb27b95ea69e2f9e0fbf05c819012b7e
|
[
"MIT"
] | null | null | null |
tronapi/trx.py
|
sonicskye/tron-api-python
|
eba4d5d1eb27b95ea69e2f9e0fbf05c819012b7e
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------
# Copyright (c) iEXBase. All rights reserved.
# Licensed under the MIT License.
# See License.txt in the project root for license information.
# --------------------------------------------------------------------
"""
tronapi.trx
===============
Work with basic methods
:copyright: © 2018 by the iEXBase.
:license: MIT License
"""
import math
from typing import Any
from eth_account import Account
from tronapi.base.transactions import wait_for_transaction_id
from tronapi.contract import Contract
from tronapi.exceptions import InvalidTronError, TronError, TimeExhausted
from tronapi.module import Module
from tronapi.utils.blocks import select_method_for_block
from tronapi.utils.hexadecimal import is_hex
from tronapi.utils.types import is_integer, is_object, is_string
from tronapi.base.toolz import (
assoc
)
TRX_MESSAGE_HEADER = '\x19TRON Signed Message:\n'
ETH_MESSAGE_HEADER = '\x19Ethereum Signed Message:\n'
class Trx(Module):
default_contract_factory = Contract
def get_current_block(self):
"""Query the latest block"""
return self.tron.manager.request(url='/wallet/getnowblock')
def get_block(self, block: Any = None):
"""Get block details using HashString or blockNumber
Args:
block (Any): ID or height for the block
"""
# If the block identifier is not specified,
# we take the default
if block is None:
block = self.tron.default_block
if block == 'latest':
return self.get_current_block()
elif block == 'earliest':
return self.get_block(0)
method = select_method_for_block(
block,
if_hash={'url': '/wallet/getblockbyid', 'field': 'value'},
if_number={'url': '/wallet/getblockbynum', 'field': 'num'},
)
response = self.tron.manager.request(method['url'], {
method['field']: block
})
return response
def get_block_transaction_count(self, block: Any):
"""Total number of transactions in a block
Args:
block (Any): Number or Hash Block
"""
transaction = self.get_block(block)
if 'transactions' not in transaction:
raise TronError('Parameter "transactions" not found')
return len(transaction)
def get_transaction_from_block(self, block: Any, index: int = 0):
"""Get transaction details from Block
Args:
block (Any): Number or Hash Block
index (int) Position
"""
if not is_integer(index) or index < 0:
raise InvalidTronError('Invalid transaction index provided')
transactions = self.get_block(block).get('transactions')
if not transactions or len(transactions) < index:
raise TronError('Transaction not found in block')
return transactions[index]
def wait_for_transaction_id(self, transaction_hash: str, timeout: int = 120):
"""
Waits for the transaction specified by transaction_hash
to be included in a block, then returns its transaction receipt.
Optionally, specify a timeout in seconds.
If timeout elapses before the transaction is added to a block,
then wait_for_transaction_id() raises a Timeout exception.
Args:
transaction_hash (str): Transaction Hash
timeout (int): TimeOut
"""
try:
return wait_for_transaction_id(self.tron, transaction_hash, timeout)
except TimeoutError:
raise TimeExhausted(
"Transaction {} is not in the chain, after {} seconds".format(
transaction_hash,
timeout,
)
)
def get_transaction(self, transaction_id: str,
is_confirm: bool = False):
"""Query transaction based on id
Args:
transaction_id (str): transaction id
is_confirm (bool):
"""
method = 'walletsolidity' if is_confirm else 'wallet'
response = self.tron.manager.request('/{}/gettransactionbyid'.format(method), {
'value': transaction_id
})
if not response:
raise TronError('Transaction not found')
return response
def get_account_resource(self, address=None):
"""Query the resource information of the account
Args:
address (str): Address
Results:
Resource information of the account
"""
if address is None:
address = self.tron.default_address.hex
if not self.tron.isAddress(address):
raise InvalidTronError('Invalid address provided')
return self.tron.manager.request('/wallet/getaccountresource', {
'address': self.tron.address.to_hex(address)
})
def get_account(self, address=None):
"""Query information about an account
Args:
address (str): Address
"""
if address is None:
address = self.tron.default_address.hex
if not self.tron.isAddress(address):
raise InvalidTronError('Invalid address provided')
return self.tron.manager.request('/walletsolidity/getaccount', {
'address': self.tron.address.to_hex(address)
})
def get_balance(self, address=None, is_float=False):
"""Getting a balance
Args:
address (str): Address
is_float (bool): Convert to float format
"""
response = self.get_account(address)
if 'balance' not in response:
return 0
if is_float:
return self.tron.fromSun(response['balance'])
return response['balance']
def get_transactions_related(self, address, direction='all', limit=30, offset=0):
"""Getting data in the "from", "to" and "all" directions
Args:
address (str): Address
direction (str): Type direction
address (str): address
limit (int): number of transactions expected to be returned
offset (int): index of the starting transaction
"""
if direction not in ['from', 'to', 'all']:
raise InvalidTronError('Invalid direction provided: Expected "to", "from" or "all"')
if direction == 'all':
from_direction = {'from': self.get_transactions_related(address, 'from', limit, offset)}
to_direction = {'to': self.get_transactions_related(address, 'to', limit, offset)}
callback = from_direction
callback.update(to_direction)
return callback
if address is None:
address = self.tron.default_address.hex
if not self.tron.isAddress(address):
raise InvalidTronError('Invalid address provided')
if not isinstance(limit, int) or limit < 0 or (offset and limit < 1):
raise InvalidTronError('Invalid limit provided')
if not isinstance(offset, int) or offset < 0:
raise InvalidTronError('Invalid offset provided')
path = '/walletextension/gettransactions{0}this'.format(direction)
response = self.tron.manager.request(path, {
'account': {
'address': self.tron.address.to_hex(address)
},
'limit': limit,
'offset': offset
})
# response.update({'direction': direction})
return response
def get_transactions_to_address(self, address=None, limit=30, offset=0):
"""Query the list of transactions received by an address
Args:
address (str): address
limit (int): number of transactions expected to be returned
offset (int): index of the starting transaction
Returns:
Transactions list
"""
return self.get_transactions_related(address, 'to', limit, offset)
def get_transactions_from_address(self, address=None, limit=30, offset=0):
"""Query the list of transactions sent by an address
Args:
address (str): address
limit (int): number of transactions expected to be returned
offset (int): index of the starting transaction
Returns:
Transactions list
"""
return self.get_transactions_related(address, 'from', limit, offset)
def get_transaction_info(self, tx_id):
"""Query transaction fee based on id
Args:
tx_id (str): Transaction Id
Returns:
Transaction fee,block height and block creation time
"""
response = self.tron.manager.request('/walletsolidity/gettransactioninfobyid', {
'value': tx_id
})
return response
def get_band_width(self, address=None):
"""Query bandwidth information.
Args:
address (str): address
Returns:
Bandwidth information for the account.
If a field doesn't appear, then the corresponding value is 0.
{
"freeNetUsed": 557,
"freeNetLimit": 5000,
"NetUsed": 353,
"NetLimit": 5239157853,
"TotalNetLimit": 43200000000,
"TotalNetWeight": 41228
}
"""
if address is None:
address = self.tron.default_address.hex
if not self.tron.isAddress(address):
raise InvalidTronError('Invalid address provided')
response = self.tron.manager.request('/wallet/getaccountnet', {
'address': self.tron.address.to_hex(address)
})
free_net_limit = 0 if 'freeNetLimit' not in response else response['freeNetLimit']
free_net_used = 0 if 'freeNetUsed' not in response else response['freeNetUsed']
net_limit = 0 if 'NetLimit' not in response else response['NetLimit']
net_used = 0 if 'NetUsed' not in response else response['NetUsed']
return (free_net_limit - free_net_used) + (net_limit - net_used)
def get_transaction_count(self):
"""Count all transactions on the network
Note: Possible delays
Returns:
Total number of transactions.
"""
response = self.tron.manager.request('/wallet/totaltransaction')
return response.get('num')
def send(self, to, amount, options=None):
"""Send funds to the Tron account (option 2)"""
return self.send_transaction(to, amount, options)
def send_trx(self, to, amount, options=None):
"""Send funds to the Tron account (option 3)"""
return self.send_transaction(to, amount, options)
def send_transaction(self, to, amount, options=None):
"""Send an asset to another account.
Will create and broadcast the transaction if a private key is provided.
Args:
to (str): Address to send TRX to.
amount (float): Amount of TRX to send.
options (Any, optional): Options
"""
if options is None:
options = {}
if 'from' not in options:
options = assoc(options, 'from', self.tron.default_address.hex)
tx = self.tron.transaction_builder.send_transaction(
to,
amount,
options['from']
)
# If a comment is attached to the transaction,
# in this case adding to the object
if 'message' in options:
tx['raw_data']['data'] = self.tron.toHex(text=str(options['message']))
sign = self.sign(tx)
result = self.broadcast(sign)
return result
def send_token(self, to, amount, token_id=None, account=None, options=None):
"""Transfer Token
Args:
to (str): is the recipient address
amount (int): is the amount of token to transfer. must be integer instead of float
token_id (str): Token Name(NOT SYMBOL)
account: (str): is the address of the withdrawal account
options (Any, optional): Options
Returns:
Token transfer Transaction raw data
"""
if account is None:
account = self.tron.default_address.hex
tx = self.tron.transaction_builder.send_token(
to,
amount,
token_id,
account
)
# If a comment is attached to the transaction,
# in this case adding to the object
if 'message' in options:
tx['raw_data']['data'] = self.tron.toHex(text=str(options['message']))
sign = self.sign(tx)
result = self.broadcast(sign)
return result
def freeze_balance(self, amount=0, duration=3, resource='BANDWIDTH', account=None):
"""
Freezes an amount of TRX.
Will give bandwidth OR Energy and TRON Power(voting rights)
to the owner of the frozen tokens.
Args:
amount (int): number of frozen trx
duration (int): duration in days to be frozen
resource (str): type of resource, must be either "ENERGY" or "BANDWIDTH"
account (str): address that is freezing trx account
"""
if account is None:
account = self.tron.default_address.hex
transaction = self.tron.transaction_builder.freeze_balance(
amount,
duration,
resource,
account
)
sign = self.sign(transaction)
response = self.broadcast(sign)
return response
def unfreeze_balance(self, resource='BANDWIDTH', account=None):
"""
Unfreeze TRX that has passed the minimum freeze duration.
Unfreezing will remove bandwidth and TRON Power.
Args:
resource (str): type of resource, must be either "ENERGY" or "BANDWIDTH"
account (str): address that is freezing trx account
"""
if account is None:
account = self.tron.default_address.hex
transaction = self.tron.transaction_builder.unfreeze_balance(
resource,
account
)
sign = self.sign(transaction)
response = self.broadcast(sign)
return response
def online_sign(self, transaction: dict):
"""Online transaction signature
Sign the transaction, the api has the risk of leaking the private key,
please make sure to call the api in a secure environment
Warnings:
Do not use this in any web / user-facing applications.
This will expose the private key.
Args:
transaction (dict): transaction details
"""
if 'signature' in transaction:
raise TronError('Transaction is already signed')
address = self.tron.address.from_private_key(self.tron.private_key).hex.lower()
owner_address = transaction['raw_data']['contract'][0]['parameter']['value']['owner_address']
if address != owner_address:
raise ValueError('Private key does not match address in transaction')
return self.tron.manager.request('/wallet/gettransactionsign', {
'transaction': transaction,
'privateKey': self.tron.private_key
})
def sign(self, transaction: Any, use_tron: bool = True):
"""Sign the transaction, the api has the risk of leaking the private key,
please make sure to call the api in a secure environment
Warnings:
Do not use this in any web / user-facing applications.
This will expose the private key.
Args:
transaction (Any): transaction details
use_tron (bool): is Tron header
"""
if is_string(transaction):
if not is_hex(transaction):
raise TronError('Expected hex message input')
# Determine which header to attach to the message
# before encrypting or decrypting
header = TRX_MESSAGE_HEADER if use_tron else ETH_MESSAGE_HEADER
header += str(len(transaction))
message_hash = self.tron.sha3(
text=header + transaction
)
signed_message = Account.signHash(message_hash, private_key=self.tron.private_key)
return signed_message
if 'signature' in transaction:
raise TronError('Transaction is already signed')
address = self.tron.address.from_private_key(self.tron.private_key).hex.lower()
owner_address = transaction['raw_data']['contract'][0]['parameter']['value']['owner_address']
if address != owner_address:
raise ValueError('Private key does not match address in transaction')
# This option deals with signing of transactions, and writing to the array
signed_tx = Account.signHash(message_hash=transaction['txID'],
private_key=self.tron.private_key)
transaction['signature'] = [signed_tx['signature'].hex()[2:]]
return transaction
def broadcast(self, signed_transaction):
"""Broadcast the signed transaction
Args:
signed_transaction (object): signed transaction contract data
"""
if not is_object(signed_transaction):
raise InvalidTronError('Invalid transaction provided')
if 'signature' not in signed_transaction:
raise TronError('Transaction is not signed')
response = self.tron.manager.request('/wallet/broadcasttransaction',
signed_transaction)
if 'result' in response:
response.update({
'transaction': signed_transaction
})
return response
def verify_message(self, message, signed_message=None, address=None, use_tron: bool = True):
""" Get the address of the account that signed the message with the given hash.
You must specify exactly one of: vrs or signature
Args:
message (str): The message in the format "hex"
signed_message (AttributeDict): Signature
address (str): is Address
use_tron (bool): is Tron header
"""
if address is None:
address = self.tron.default_address.base58
if not is_hex(message):
raise TronError('Expected hex message input')
# Determine which header to attach to the message
# before encrypting or decrypting
header = TRX_MESSAGE_HEADER if use_tron else ETH_MESSAGE_HEADER
message_hash = self.tron.sha3(text=header + message)
recovered = Account.recoverHash(message_hash, signature=signed_message.signature)
tron_address = '41' + recovered[2:]
base58address = self.tron.address.from_hex(tron_address).decode()
if base58address == address:
return True
raise ValueError('Signature does not match')
def update_account(self, account_name, address=None):
"""Modify account name
Note: Username is allowed to edit only once.
Args:
account_name (str): name of the account
address (str): address
"""
if address is None:
address = self.tron.default_address.hex
transaction = self.tron.transaction_builder.update_account(
account_name,
address
)
sign = self.sign(transaction)
response = self.broadcast(sign)
return response
def apply_for_sr(self, url, address):
"""Apply to become a super representative
Note: Applied to become a super representative. Cost 9999 TRX.
Args:
url (str): official website address
address (str): address
"""
if address is None:
address = self.tron.default_address.hex
transaction = self.tron.transaction_builder.apply_for_sr(
url,
address
)
sign = self.sign(transaction)
response = self.broadcast(sign)
return response
def list_nodes(self):
"""List the nodes which the api fullnode is connecting on the network"""
response = self.tron.manager.request('/wallet/listnodes')
callback = map(lambda x: {
'address': '{}:{}'.format(self.tron.toText(x['address']['host']),
str(x['address']['port']))
}, response['nodes'])
return list(callback)
def get_tokens_issued_by_address(self, address):
"""List the tokens issued by an account.
Args:
address (str): address
Returns:
The token issued by the account.
An account can issue only one token.
"""
if not self.tron.isAddress(address):
raise InvalidTronError('Invalid address provided')
address = self.tron.address.to_hex(address)
return self.tron.manager.request('/wallet/getassetissuebyaccount', {
'address': address
})
def get_token_from_id(self, token_id: str):
"""Query token by name.
Args:
token_id (str): The name of the token
"""
if not isinstance(token_id, str) or not len(token_id):
raise InvalidTronError('Invalid token ID provided')
return self.tron.manager.request('/wallet/getassetissuebyname', {
'value': self.tron.toHex(text=token_id)
})
def get_block_range(self, start, end):
"""Query a range of blocks by block height
Args:
start (int): starting block height, including this block
end (int): ending block height, excluding that block
"""
if not is_integer(start) or start < 0:
raise InvalidTronError('Invalid start of range provided')
if not is_integer(end) or end <= start:
raise InvalidTronError('Invalid end of range provided')
response = self.tron.manager.request('/wallet/getblockbylimitnext', {
'startNum': int(start),
'endNum': int(end) + 1
}, 'post')
return response.get('block')
def get_latest_blocks(self, num=1):
"""Query the latest blocks
Args:
num (int): the number of blocks to query
"""
if not is_integer(num) or num <= 0:
raise InvalidTronError('Invalid limit provided')
response = self.tron.manager.request('/wallet/getblockbylatestnum', {
'num': num
})
return response.get('block')
def list_super_representatives(self):
"""Query the list of Super Representatives"""
response = self.tron.manager.request('/wallet/listwitnesses')
return response.get('witnesses')
def list_tokens(self, limit=0, offset=0):
"""Query the list of Tokens with pagination
Args:
limit (int): index of the starting Token
offset (int): number of Tokens expected to be returned
Returns:
List of Tokens
"""
if not is_integer(limit) or (limit and offset < 1):
raise InvalidTronError('Invalid limit provided')
if not is_integer(offset) or offset < 0:
raise InvalidTronError('Invalid offset provided')
if not limit:
return self.tron.manager.request('/wallet/getassetissuelist').get('assetIssue')
return self.tron.manager.request('/wallet/getpaginatedassetissuelist', {
'limit': int(limit),
'offset': int(offset)
})
def time_until_next_vote_cycle(self):
"""Get the time of the next Super Representative vote
Returns:
Number of milliseconds until the next voting time.
"""
num = self.tron.manager.request('/wallet/getnextmaintenancetime').get('num')
if num == -1:
raise Exception('Failed to get time until next vote cycle')
return math.floor(num / 1000)
def get_contract(self, contract_address):
"""Queries a contract's information from the blockchain.
Args:
contract_address (str): contract address
Returns:
SmartContract object.
"""
if not self.tron.isAddress(contract_address):
raise InvalidTronError('Invalid contract address provided')
return self.tron.manager.request('/wallet/getcontract', {
'value': self.tron.address.to_hex(contract_address)
})
def contract(self, address=None, **kwargs):
contract_factory_class = kwargs.pop('contract_factory_class',
self.default_contract_factory)
contract_factory = contract_factory_class.factory(self.tron, **kwargs)
if address:
return contract_factory(address)
else:
return contract_factory
def validate_address(self, address, _is_hex=False):
"""Validate address
Args:
address (str): The address, should be in base58checksum
_is_hex (bool): hexString or base64 format
"""
if _is_hex:
address = self.tron.address.to_hex(address)
return self.tron.manager.request('/wallet/validateaddress', {
'address': address
})
def get_chain_parameters(self):
"""Getting chain parameters"""
return self.tron.manager.request('/wallet/getchainparameters')
def get_exchange_by_id(self, exchange_id):
"""Find exchange by id
Args:
exchange_id (str): ID Exchange
"""
if not isinstance(exchange_id, int) or exchange_id < 0:
raise InvalidTronError('Invalid exchangeID provided')
return self.tron.manager.request('/wallet/getexchangebyid', {
'id': exchange_id
})
def get_list_exchangers(self):
"""Get list exchangers"""
return self.tron.manager.request('/wallet/listexchanges')
def get_proposal(self, proposal_id):
"""Query proposal based on id
Args:
proposal_id (int): ID
"""
if not isinstance(proposal_id, int) or proposal_id < 0:
raise InvalidTronError('Invalid proposalID provided')
return self.tron.manager.request('/wallet/getproposalbyid', {
'id': int(proposal_id)
})
def list_proposals(self):
"""Query all proposals
Returns:
Proposal list information
"""
return self.tron.manager.request('/wallet/listproposals')
def vote_proposal(self, proposal_id, has_approval, voter_address):
"""Proposal approval
Args:
proposal_id (int): proposal id
has_approval (bool): Approved
voter_address (str): Approve address
Returns:
Approval of the proposed transaction
"""
if voter_address is None:
voter_address = self.tron.default_address.hex
transaction = self.tron.transaction_builder.vote_proposal(
proposal_id,
has_approval,
voter_address
)
sign = self.sign(transaction)
response = self.broadcast(sign)
return response
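    # Hedged usage sketch: approving proposal 12 from the default address; the
    # proposal id is a placeholder. Signing and broadcasting happen internally.
    #
    #   receipt = tron.trx.vote_proposal(12, True, tron.default_address.hex)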
def proposal_delete(self, proposal_id: int, issuer_address: str):
"""Delete proposal
Args:
proposal_id (int): proposal id
            issuer_address (str): address of the account deleting the proposal
        Returns:
            Transaction that deletes the proposal
"""
if issuer_address is None:
issuer_address = self.tron.default_address.hex
transaction = self.tron.transaction_builder.delete_proposal(
proposal_id,
issuer_address
)
sign = self.sign(transaction)
response = self.broadcast(sign)
return response
def list_exchanges_paginated(self, limit=10, offset=0):
"""Paged query transaction pair list
Args:
limit (int): number of trading pairs expected to be returned.
offset (int): index of the starting trading pair
"""
return self.tron.manager.request('/wallet/listexchangespaginated', {
'limit': limit,
'offset': offset
})
def get_node_info(self):
"""Get info about thre node"""
return self.tron.manager.request('wallet/getnodeinfo', {})
| 31.232026
| 101
| 0.5997
|
import math
from typing import Any
from eth_account import Account
from tronapi.base.transactions import wait_for_transaction_id
from tronapi.contract import Contract
from tronapi.exceptions import InvalidTronError, TronError, TimeExhausted
from tronapi.module import Module
from tronapi.utils.blocks import select_method_for_block
from tronapi.utils.hexadecimal import is_hex
from tronapi.utils.types import is_integer, is_object, is_string
from tronapi.base.toolz import (
assoc
)
TRX_MESSAGE_HEADER = '\x19TRON Signed Message:\n'
ETH_MESSAGE_HEADER = '\x19Ethereum Signed Message:\n'
class Trx(Module):
default_contract_factory = Contract
def get_current_block(self):
return self.tron.manager.request(url='/wallet/getnowblock')
def get_block(self, block: Any = None):
if block is None:
block = self.tron.default_block
if block == 'latest':
return self.get_current_block()
elif block == 'earliest':
return self.get_block(0)
method = select_method_for_block(
block,
if_hash={'url': '/wallet/getblockbyid', 'field': 'value'},
if_number={'url': '/wallet/getblockbynum', 'field': 'num'},
)
response = self.tron.manager.request(method['url'], {
method['field']: block
})
return response
def get_block_transaction_count(self, block: Any):
transaction = self.get_block(block)
if 'transactions' not in transaction:
raise TronError('Parameter "transactions" not found')
        return len(transaction['transactions'])
def get_transaction_from_block(self, block: Any, index: int = 0):
if not is_integer(index) or index < 0:
raise InvalidTronError('Invalid transaction index provided')
transactions = self.get_block(block).get('transactions')
        if not transactions or len(transactions) <= index:
raise TronError('Transaction not found in block')
return transactions[index]
def wait_for_transaction_id(self, transaction_hash: str, timeout: int = 120):
try:
return wait_for_transaction_id(self.tron, transaction_hash, timeout)
except TimeoutError:
raise TimeExhausted(
"Transaction {} is not in the chain, after {} seconds".format(
transaction_hash,
timeout,
)
)
def get_transaction(self, transaction_id: str,
is_confirm: bool = False):
method = 'walletsolidity' if is_confirm else 'wallet'
response = self.tron.manager.request('/{}/gettransactionbyid'.format(method), {
'value': transaction_id
})
if not response:
raise TronError('Transaction not found')
return response
def get_account_resource(self, address=None):
if address is None:
address = self.tron.default_address.hex
if not self.tron.isAddress(address):
raise InvalidTronError('Invalid address provided')
return self.tron.manager.request('/wallet/getaccountresource', {
'address': self.tron.address.to_hex(address)
})
def get_account(self, address=None):
if address is None:
address = self.tron.default_address.hex
if not self.tron.isAddress(address):
raise InvalidTronError('Invalid address provided')
return self.tron.manager.request('/walletsolidity/getaccount', {
'address': self.tron.address.to_hex(address)
})
def get_balance(self, address=None, is_float=False):
response = self.get_account(address)
if 'balance' not in response:
return 0
if is_float:
return self.tron.fromSun(response['balance'])
return response['balance']
def get_transactions_related(self, address, direction='all', limit=30, offset=0):
if direction not in ['from', 'to', 'all']:
raise InvalidTronError('Invalid direction provided: Expected "to", "from" or "all"')
if direction == 'all':
from_direction = {'from': self.get_transactions_related(address, 'from', limit, offset)}
to_direction = {'to': self.get_transactions_related(address, 'to', limit, offset)}
callback = from_direction
callback.update(to_direction)
return callback
if address is None:
address = self.tron.default_address.hex
if not self.tron.isAddress(address):
raise InvalidTronError('Invalid address provided')
if not isinstance(limit, int) or limit < 0 or (offset and limit < 1):
raise InvalidTronError('Invalid limit provided')
if not isinstance(offset, int) or offset < 0:
raise InvalidTronError('Invalid offset provided')
path = '/walletextension/gettransactions{0}this'.format(direction)
response = self.tron.manager.request(path, {
'account': {
'address': self.tron.address.to_hex(address)
},
'limit': limit,
'offset': offset
})
return response
def get_transactions_to_address(self, address=None, limit=30, offset=0):
return self.get_transactions_related(address, 'to', limit, offset)
def get_transactions_from_address(self, address=None, limit=30, offset=0):
return self.get_transactions_related(address, 'from', limit, offset)
def get_transaction_info(self, tx_id):
response = self.tron.manager.request('/walletsolidity/gettransactioninfobyid', {
'value': tx_id
})
return response
def get_band_width(self, address=None):
if address is None:
address = self.tron.default_address.hex
if not self.tron.isAddress(address):
raise InvalidTronError('Invalid address provided')
response = self.tron.manager.request('/wallet/getaccountnet', {
'address': self.tron.address.to_hex(address)
})
free_net_limit = 0 if 'freeNetLimit' not in response else response['freeNetLimit']
free_net_used = 0 if 'freeNetUsed' not in response else response['freeNetUsed']
net_limit = 0 if 'NetLimit' not in response else response['NetLimit']
net_used = 0 if 'NetUsed' not in response else response['NetUsed']
return (free_net_limit - free_net_used) + (net_limit - net_used)
def get_transaction_count(self):
response = self.tron.manager.request('/wallet/totaltransaction')
return response.get('num')
def send(self, to, amount, options=None):
return self.send_transaction(to, amount, options)
def send_trx(self, to, amount, options=None):
return self.send_transaction(to, amount, options)
def send_transaction(self, to, amount, options=None):
if options is None:
options = {}
if 'from' not in options:
options = assoc(options, 'from', self.tron.default_address.hex)
tx = self.tron.transaction_builder.send_transaction(
to,
amount,
options['from']
)
if 'message' in options:
tx['raw_data']['data'] = self.tron.toHex(text=str(options['message']))
sign = self.sign(tx)
result = self.broadcast(sign)
return result
def send_token(self, to, amount, token_id=None, account=None, options=None):
if account is None:
account = self.tron.default_address.hex
tx = self.tron.transaction_builder.send_token(
to,
amount,
token_id,
account
)
        if options and 'message' in options:
tx['raw_data']['data'] = self.tron.toHex(text=str(options['message']))
sign = self.sign(tx)
result = self.broadcast(sign)
return result
def freeze_balance(self, amount=0, duration=3, resource='BANDWIDTH', account=None):
if account is None:
account = self.tron.default_address.hex
transaction = self.tron.transaction_builder.freeze_balance(
amount,
duration,
resource,
account
)
sign = self.sign(transaction)
response = self.broadcast(sign)
return response
def unfreeze_balance(self, resource='BANDWIDTH', account=None):
if account is None:
account = self.tron.default_address.hex
transaction = self.tron.transaction_builder.unfreeze_balance(
resource,
account
)
sign = self.sign(transaction)
response = self.broadcast(sign)
return response
def online_sign(self, transaction: dict):
if 'signature' in transaction:
raise TronError('Transaction is already signed')
address = self.tron.address.from_private_key(self.tron.private_key).hex.lower()
owner_address = transaction['raw_data']['contract'][0]['parameter']['value']['owner_address']
if address != owner_address:
raise ValueError('Private key does not match address in transaction')
return self.tron.manager.request('/wallet/gettransactionsign', {
'transaction': transaction,
'privateKey': self.tron.private_key
})
def sign(self, transaction: Any, use_tron: bool = True):
if is_string(transaction):
if not is_hex(transaction):
raise TronError('Expected hex message input')
header = TRX_MESSAGE_HEADER if use_tron else ETH_MESSAGE_HEADER
header += str(len(transaction))
message_hash = self.tron.sha3(
text=header + transaction
)
signed_message = Account.signHash(message_hash, private_key=self.tron.private_key)
return signed_message
if 'signature' in transaction:
raise TronError('Transaction is already signed')
address = self.tron.address.from_private_key(self.tron.private_key).hex.lower()
owner_address = transaction['raw_data']['contract'][0]['parameter']['value']['owner_address']
if address != owner_address:
raise ValueError('Private key does not match address in transaction')
signed_tx = Account.signHash(message_hash=transaction['txID'],
private_key=self.tron.private_key)
transaction['signature'] = [signed_tx['signature'].hex()[2:]]
return transaction
def broadcast(self, signed_transaction):
if not is_object(signed_transaction):
raise InvalidTronError('Invalid transaction provided')
if 'signature' not in signed_transaction:
raise TronError('Transaction is not signed')
response = self.tron.manager.request('/wallet/broadcasttransaction',
signed_transaction)
if 'result' in response:
response.update({
'transaction': signed_transaction
})
return response
def verify_message(self, message, signed_message=None, address=None, use_tron: bool = True):
if address is None:
address = self.tron.default_address.base58
if not is_hex(message):
raise TronError('Expected hex message input')
header = TRX_MESSAGE_HEADER if use_tron else ETH_MESSAGE_HEADER
message_hash = self.tron.sha3(text=header + message)
recovered = Account.recoverHash(message_hash, signature=signed_message.signature)
tron_address = '41' + recovered[2:]
base58address = self.tron.address.from_hex(tron_address).decode()
if base58address == address:
return True
raise ValueError('Signature does not match')
def update_account(self, account_name, address=None):
if address is None:
address = self.tron.default_address.hex
transaction = self.tron.transaction_builder.update_account(
account_name,
address
)
sign = self.sign(transaction)
response = self.broadcast(sign)
return response
def apply_for_sr(self, url, address):
if address is None:
address = self.tron.default_address.hex
transaction = self.tron.transaction_builder.apply_for_sr(
url,
address
)
sign = self.sign(transaction)
response = self.broadcast(sign)
return response
def list_nodes(self):
response = self.tron.manager.request('/wallet/listnodes')
callback = map(lambda x: {
'address': '{}:{}'.format(self.tron.toText(x['address']['host']),
str(x['address']['port']))
}, response['nodes'])
return list(callback)
def get_tokens_issued_by_address(self, address):
if not self.tron.isAddress(address):
raise InvalidTronError('Invalid address provided')
address = self.tron.address.to_hex(address)
return self.tron.manager.request('/wallet/getassetissuebyaccount', {
'address': address
})
def get_token_from_id(self, token_id: str):
if not isinstance(token_id, str) or not len(token_id):
raise InvalidTronError('Invalid token ID provided')
return self.tron.manager.request('/wallet/getassetissuebyname', {
'value': self.tron.toHex(text=token_id)
})
def get_block_range(self, start, end):
if not is_integer(start) or start < 0:
raise InvalidTronError('Invalid start of range provided')
if not is_integer(end) or end <= start:
raise InvalidTronError('Invalid end of range provided')
response = self.tron.manager.request('/wallet/getblockbylimitnext', {
'startNum': int(start),
'endNum': int(end) + 1
}, 'post')
return response.get('block')
def get_latest_blocks(self, num=1):
if not is_integer(num) or num <= 0:
raise InvalidTronError('Invalid limit provided')
response = self.tron.manager.request('/wallet/getblockbylatestnum', {
'num': num
})
return response.get('block')
def list_super_representatives(self):
response = self.tron.manager.request('/wallet/listwitnesses')
return response.get('witnesses')
def list_tokens(self, limit=0, offset=0):
if not is_integer(limit) or (limit and offset < 1):
raise InvalidTronError('Invalid limit provided')
if not is_integer(offset) or offset < 0:
raise InvalidTronError('Invalid offset provided')
if not limit:
return self.tron.manager.request('/wallet/getassetissuelist').get('assetIssue')
return self.tron.manager.request('/wallet/getpaginatedassetissuelist', {
'limit': int(limit),
'offset': int(offset)
})
def time_until_next_vote_cycle(self):
num = self.tron.manager.request('/wallet/getnextmaintenancetime').get('num')
if num == -1:
raise Exception('Failed to get time until next vote cycle')
return math.floor(num / 1000)
def get_contract(self, contract_address):
if not self.tron.isAddress(contract_address):
raise InvalidTronError('Invalid contract address provided')
return self.tron.manager.request('/wallet/getcontract', {
'value': self.tron.address.to_hex(contract_address)
})
def contract(self, address=None, **kwargs):
contract_factory_class = kwargs.pop('contract_factory_class',
self.default_contract_factory)
contract_factory = contract_factory_class.factory(self.tron, **kwargs)
if address:
return contract_factory(address)
else:
return contract_factory
def validate_address(self, address, _is_hex=False):
if _is_hex:
address = self.tron.address.to_hex(address)
return self.tron.manager.request('/wallet/validateaddress', {
'address': address
})
def get_chain_parameters(self):
return self.tron.manager.request('/wallet/getchainparameters')
def get_exchange_by_id(self, exchange_id):
if not isinstance(exchange_id, int) or exchange_id < 0:
raise InvalidTronError('Invalid exchangeID provided')
return self.tron.manager.request('/wallet/getexchangebyid', {
'id': exchange_id
})
def get_list_exchangers(self):
return self.tron.manager.request('/wallet/listexchanges')
def get_proposal(self, proposal_id):
if not isinstance(proposal_id, int) or proposal_id < 0:
raise InvalidTronError('Invalid proposalID provided')
return self.tron.manager.request('/wallet/getproposalbyid', {
'id': int(proposal_id)
})
def list_proposals(self):
return self.tron.manager.request('/wallet/listproposals')
def vote_proposal(self, proposal_id, has_approval, voter_address):
if voter_address is None:
voter_address = self.tron.default_address.hex
transaction = self.tron.transaction_builder.vote_proposal(
proposal_id,
has_approval,
voter_address
)
sign = self.sign(transaction)
response = self.broadcast(sign)
return response
def proposal_delete(self, proposal_id: int, issuer_address: str):
if issuer_address is None:
issuer_address = self.tron.default_address.hex
transaction = self.tron.transaction_builder.delete_proposal(
proposal_id,
issuer_address
)
sign = self.sign(transaction)
response = self.broadcast(sign)
return response
def list_exchanges_paginated(self, limit=10, offset=0):
return self.tron.manager.request('/wallet/listexchangespaginated', {
'limit': limit,
'offset': offset
})
def get_node_info(self):
return self.tron.manager.request('wallet/getnodeinfo', {})
| true
| true
|
1c41f38a304fee9acbe8dcd174694925a69d4026
| 737
|
py
|
Python
|
gc_apps/geo_utils/file_hasher.py
|
IQSS/geoconnect
|
09c91be7ffbc04fbfa9850f6b740277d971ac8a7
|
[
"Apache-2.0"
] | 6
|
2015-10-28T15:35:04.000Z
|
2020-08-20T10:18:33.000Z
|
gc_apps/geo_utils/file_hasher.py
|
IQSS/geoconnect
|
09c91be7ffbc04fbfa9850f6b740277d971ac8a7
|
[
"Apache-2.0"
] | 50
|
2015-01-05T15:09:00.000Z
|
2021-06-30T04:11:36.000Z
|
gc_apps/geo_utils/file_hasher.py
|
IQSS/geoconnect
|
09c91be7ffbc04fbfa9850f6b740277d971ac8a7
|
[
"Apache-2.0"
] | 2
|
2017-02-28T02:18:49.000Z
|
2017-07-14T02:40:43.000Z
|
import hashlib
def hashfile(file_path, hasher=None, blocksize=65536):
    """For verifying checksums of bigger files
    From: http://stackoverflow.com/questions/3431825/generating-a-md5-checksum-of-a-file
    :param file_path: full path to file to check
    :type file_path: string or unicode
    :param hasher: hash algorithm instance, e.g. hashlib.md5(), hashlib.sha256();
        defaults to a fresh hashlib.md5() per call
    :returns: checksum
    :rtype: str or unicode
    """
    if file_path is None:
        return None
    if hasher is None:
        # A fresh hasher per call avoids the shared-state pitfall of a mutable default.
        hasher = hashlib.md5()
    with open(file_path, 'rb') as fhandler:
        buf = fhandler.read(blocksize)
        while len(buf) > 0:
            hasher.update(buf)
            buf = fhandler.read(blocksize)
    return hasher.hexdigest()
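# Hedged usage sketch (not part of the original module): hashing a file with
# SHA-256 instead of the default MD5; the command-line argument is optional.
if __name__ == '__main__':
    import sys
    target = sys.argv[1] if len(sys.argv) > 1 else __file__
    print(hashfile(target, hasher=hashlib.sha256()))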
| 30.708333
| 88
| 0.660787
|
import hashlib
def hashfile(file_path, hasher=None, blocksize=65536):
    if file_path is None:
        return None
    if hasher is None:
        hasher = hashlib.md5()
    with open(file_path, 'rb') as fhandler:
        buf = fhandler.read(blocksize)
        while len(buf) > 0:
            hasher.update(buf)
            buf = fhandler.read(blocksize)
    return hasher.hexdigest()
| true
| true
|
1c41f3db279e3c4e09bf5816b442f61dd9fd181e
| 6,771
|
py
|
Python
|
git_stacktrace/server.py
|
fakeNetflix/pinterest-repo-git-stacktrace
|
3c9681c167f0b07288743e2fb65cf37db6023377
|
[
"Apache-2.0"
] | null | null | null |
git_stacktrace/server.py
|
fakeNetflix/pinterest-repo-git-stacktrace
|
3c9681c167f0b07288743e2fb65cf37db6023377
|
[
"Apache-2.0"
] | null | null | null |
git_stacktrace/server.py
|
fakeNetflix/pinterest-repo-git-stacktrace
|
3c9681c167f0b07288743e2fb65cf37db6023377
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import print_function
import json
import logging
import os
from cgi import escape
from git_stacktrace import api
from six.moves.html_parser import HTMLParser
from six.moves.urllib_parse import parse_qs
from string import Template
log = logging.getLogger(__name__)
dir_path = os.path.dirname(os.path.realpath(__file__))
class Args(object):
@staticmethod
def from_json_body(body):
return Args(json.loads(body))
@staticmethod
def from_qs(query_string):
return Args(parse_qs(query_string.lstrip('?')))
def __init__(self, params):
self.params = params
def _get_field(self, field, default=''):
val = self.params.get(field, [default])
val = val[0] if isinstance(val, list) else val
return HTMLParser().unescape(val)
@property
def type(self):
return self._get_field('option-type')
@property
def range(self):
return self._get_field('range')
@property
def branch(self):
return self._get_field('branch')
@property
def since(self):
return self._get_field('since')
@property
def trace(self):
return self._get_field('trace')
@property
def fast(self):
return self._get_field('fast') == 'on'
def validate(self):
if not self.type:
return None
if self.type == 'by-date':
if not self.since:
                return 'Missing `since` value. Please specify a date.'
self.git_range = api.convert_since(self.since, branch=self.branch)
if not api.valid_range(self.git_range):
return "Found no commits in '%s'" % self.git_range
elif self.type == 'by-range':
self.git_range = self.range
if not api.valid_range(self.git_range):
return "Found no commits in '%s'" % self.git_range
else:
return 'Invalid `type` value. Expected `by-date` or `by-range`.'
return None
def get_results(self):
if self.trace:
traceback = api.parse_trace(self.trace)
return api.lookup_stacktrace(traceback, self.git_range, fast=self.fast)
else:
return None
class ResultsOutput(object):
def __init__(self, args):
self.cwd = os.getcwd()
self.args = args
try:
self.messages = args.validate()
self.results = args.get_results()
except Exception as e:
            self.messages = str(e)
self.results = None
def results_as_json(self):
if self.results is None:
return json.dumps({
'errors': self.messages,
'commits': [],
})
elif len(self.results) == 0:
return json.dumps({
'errors': 'No matches found',
'commits': [],
})
else:
return json.dumps({
'errors': None,
'commits': self.results.get_sorted_results_by_dict(),
})
def results_as_html(self):
if not self.results:
return ''
else:
sorted_results = self.results.get_sorted_results()
return '\n<hr/>\n'.join(
['<pre><code>' + escape(str(result)) + '</code></pre>' for result in sorted_results]
)
def messages_as_html(self):
if self.messages is None:
return ''
with open(os.path.join(dir_path, 'templates', 'messages.html')) as f:
return Template(f.read()).substitute(
messages=escape(self.messages)
).encode('utf-8')
def render_page(self):
optionType = 'by-date' if not self.args.type else self.args.type
with open(os.path.join(dir_path, 'templates', 'page.html')) as f:
return Template(f.read()).substitute(
pwd=escape(self.cwd),
messages=self.messages_as_html(),
range=escape(self.args.range),
branch=escape(self.args.branch),
since=escape(self.args.since),
trace=escape(self.args.trace),
fast='checked' if self.args.fast else '',
optionType=escape(optionType),
isByDate='true' if optionType == 'by-date' else 'false',
isByRange='true' if optionType == 'by-range' else 'false',
byDateClass='active' if optionType == 'by-date' else '',
byRangeClass='active' if optionType == 'by-range' else '',
results=self.results_as_html(),
).encode('utf-8')
class GitStacktraceApplication(object):
def __init__(self, environ, start_response):
self.environ = environ
self.start_response = start_response
self.path = environ['PATH_INFO']
def __iter__(self):
method = self.environ['REQUEST_METHOD']
if method == 'GET':
yield self.do_GET() or ''
elif method == 'POST':
yield self.do_POST() or ''
elif method == 'HEAD':
self._set_headers()
yield ''
else:
self._set_headers(500)
yield ''
def _set_headers(self, code=200, content_type='text/html'):
codes = {
200: "200 OK",
404: "404 Not Found",
}
self.start_response(
codes.get(code, "500 Internal Server Error"),
[('Content-type', content_type)])
def _request_body(self):
content_length = int(self.environ['CONTENT_LENGTH'])
return self.environ['wsgi.input'].read(content_length)
def do_GET(self):
if self.path == '/favicon.ico':
self._set_headers()
elif self.path == '/':
try:
args = Args.from_qs(self.environ['QUERY_STRING'])
out = ResultsOutput(args).render_page()
self._set_headers()
return out
except Exception:
log.exception('Unable to render trace page as html')
self._set_headers(500)
else:
self._set_headers(404)
def do_POST(self):
if self.path == '/':
try:
args = Args.from_json_body(self._request_body())
out = ResultsOutput(args).results_as_json()
self._set_headers(200, 'application/json')
return out
except Exception as e:
log.exception('Unable to load trace results as json')
self._set_headers(500, 'application/json')
return json.dumps({'error': str(e)})
else:
self._set_headers(404, 'application/json')
application = GitStacktraceApplication
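# Hedged usage sketch (not part of the original module): serving the app locally
# with the standard-library reference WSGI server; port 8000 is arbitrary.
if __name__ == '__main__':
    from wsgiref.simple_server import make_server
    httpd = make_server('', 8000, application)
    print('Serving git-stacktrace UI on http://localhost:8000')
    httpd.serve_forever()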
| 31.938679
| 100
| 0.560331
|
from __future__ import print_function
import json
import logging
import os
from cgi import escape
from git_stacktrace import api
from six.moves.html_parser import HTMLParser
from six.moves.urllib_parse import parse_qs
from string import Template
log = logging.getLogger(__name__)
dir_path = os.path.dirname(os.path.realpath(__file__))
class Args(object):
@staticmethod
def from_json_body(body):
return Args(json.loads(body))
@staticmethod
def from_qs(query_string):
return Args(parse_qs(query_string.lstrip('?')))
def __init__(self, params):
self.params = params
def _get_field(self, field, default=''):
val = self.params.get(field, [default])
val = val[0] if isinstance(val, list) else val
return HTMLParser().unescape(val)
@property
def type(self):
return self._get_field('option-type')
@property
def range(self):
return self._get_field('range')
@property
def branch(self):
return self._get_field('branch')
@property
def since(self):
return self._get_field('since')
@property
def trace(self):
return self._get_field('trace')
@property
def fast(self):
return self._get_field('fast') == 'on'
def validate(self):
if not self.type:
return None
if self.type == 'by-date':
if not self.since:
                return 'Missing `since` value. Please specify a date.'
self.git_range = api.convert_since(self.since, branch=self.branch)
if not api.valid_range(self.git_range):
return "Found no commits in '%s'" % self.git_range
elif self.type == 'by-range':
self.git_range = self.range
if not api.valid_range(self.git_range):
return "Found no commits in '%s'" % self.git_range
else:
return 'Invalid `type` value. Expected `by-date` or `by-range`.'
return None
def get_results(self):
if self.trace:
traceback = api.parse_trace(self.trace)
return api.lookup_stacktrace(traceback, self.git_range, fast=self.fast)
else:
return None
class ResultsOutput(object):
def __init__(self, args):
self.cwd = os.getcwd()
self.args = args
try:
self.messages = args.validate()
self.results = args.get_results()
except Exception as e:
            self.messages = str(e)
self.results = None
def results_as_json(self):
if self.results is None:
return json.dumps({
'errors': self.messages,
'commits': [],
})
elif len(self.results) == 0:
return json.dumps({
'errors': 'No matches found',
'commits': [],
})
else:
return json.dumps({
'errors': None,
'commits': self.results.get_sorted_results_by_dict(),
})
def results_as_html(self):
if not self.results:
return ''
else:
sorted_results = self.results.get_sorted_results()
return '\n<hr/>\n'.join(
['<pre><code>' + escape(str(result)) + '</code></pre>' for result in sorted_results]
)
def messages_as_html(self):
if self.messages is None:
return ''
with open(os.path.join(dir_path, 'templates', 'messages.html')) as f:
return Template(f.read()).substitute(
messages=escape(self.messages)
).encode('utf-8')
def render_page(self):
optionType = 'by-date' if not self.args.type else self.args.type
with open(os.path.join(dir_path, 'templates', 'page.html')) as f:
return Template(f.read()).substitute(
pwd=escape(self.cwd),
messages=self.messages_as_html(),
range=escape(self.args.range),
branch=escape(self.args.branch),
since=escape(self.args.since),
trace=escape(self.args.trace),
fast='checked' if self.args.fast else '',
optionType=escape(optionType),
isByDate='true' if optionType == 'by-date' else 'false',
isByRange='true' if optionType == 'by-range' else 'false',
byDateClass='active' if optionType == 'by-date' else '',
byRangeClass='active' if optionType == 'by-range' else '',
results=self.results_as_html(),
).encode('utf-8')
class GitStacktraceApplication(object):
def __init__(self, environ, start_response):
self.environ = environ
self.start_response = start_response
self.path = environ['PATH_INFO']
def __iter__(self):
method = self.environ['REQUEST_METHOD']
if method == 'GET':
yield self.do_GET() or ''
elif method == 'POST':
yield self.do_POST() or ''
elif method == 'HEAD':
self._set_headers()
yield ''
else:
self._set_headers(500)
yield ''
def _set_headers(self, code=200, content_type='text/html'):
codes = {
200: "200 OK",
404: "404 Not Found",
}
self.start_response(
codes.get(code, "500 Internal Server Error"),
[('Content-type', content_type)])
def _request_body(self):
content_length = int(self.environ['CONTENT_LENGTH'])
return self.environ['wsgi.input'].read(content_length)
def do_GET(self):
if self.path == '/favicon.ico':
self._set_headers()
elif self.path == '/':
try:
args = Args.from_qs(self.environ['QUERY_STRING'])
out = ResultsOutput(args).render_page()
self._set_headers()
return out
except Exception:
log.exception('Unable to render trace page as html')
self._set_headers(500)
else:
self._set_headers(404)
def do_POST(self):
if self.path == '/':
try:
args = Args.from_json_body(self._request_body())
out = ResultsOutput(args).results_as_json()
self._set_headers(200, 'application/json')
return out
except Exception as e:
log.exception('Unable to load trace results as json')
self._set_headers(500, 'application/json')
return json.dumps({'error': str(e)})
else:
self._set_headers(404, 'application/json')
application = GitStacktraceApplication
| true
| true
|
1c41f440650c94402b8519babfeabcfcf87ebd3a
| 1,314
|
py
|
Python
|
app/recipe/serializers.py
|
mahinm20/recipe-app-api
|
f9d6c69ae71cdd3c265f50b503cb027c6cb307a9
|
[
"MIT"
] | null | null | null |
app/recipe/serializers.py
|
mahinm20/recipe-app-api
|
f9d6c69ae71cdd3c265f50b503cb027c6cb307a9
|
[
"MIT"
] | null | null | null |
app/recipe/serializers.py
|
mahinm20/recipe-app-api
|
f9d6c69ae71cdd3c265f50b503cb027c6cb307a9
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
from core.models import Tag, Ingredient, Recipe
class TagSerializer(serializers.ModelSerializer):
class Meta:
model = Tag
fields = ('id','name')
        read_only_fields = ('id',)
class IngredientSerializer(serializers.ModelSerializer):
class Meta:
model = Ingredient
fields = ('id','name')
        read_only_fields = ('id',)
class RecipeSerializer(serializers.ModelSerializer):
"""Serialize a recipe"""
ingredients = serializers.PrimaryKeyRelatedField(
many=True,
queryset=Ingredient.objects.all()
)
tags = serializers.PrimaryKeyRelatedField(
many=True,
queryset=Tag.objects.all()
)
class Meta:
model = Recipe
fields = (
'id', 'title', 'ingredients', 'tags', 'time_minutes', 'price',
'link',
)
read_only_fields = ('id',)
class RecipeDetailSerializer(RecipeSerializer):
ingredients = IngredientSerializer(many=True, read_only=True)
tags = TagSerializer(many=True, read_only=True)
class RecipeImageSerializer(serializers.ModelSerializer):
"""Serializer for uploading images to recipe"""
class Meta:
model = Recipe
fields = ('id', 'image')
read_only_fields = ('id',)
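# Hedged usage sketch (not runnable outside a configured Django project): how
# these serializers are typically wired to a viewset; the viewset name and the
# action-based switch are illustrative assumptions, not part of this module.
#
#   class RecipeViewSet(viewsets.ModelViewSet):
#       queryset = Recipe.objects.all()
#       serializer_class = RecipeSerializer
#
#       def get_serializer_class(self):
#           if self.action == 'retrieve':
#               return RecipeDetailSerializer
#           if self.action == 'upload_image':
#               return RecipeImageSerializer
#           return self.serializer_class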
| 23.890909
| 74
| 0.636225
|
from rest_framework import serializers
from core.models import Tag, Ingredient, Recipe
class TagSerializer(serializers.ModelSerializer):
class Meta:
model = Tag
fields = ('id','name')
        read_only_fields = ('id',)
class IngredientSerializer(serializers.ModelSerializer):
class Meta:
model = Ingredient
fields = ('id','name')
        read_only_fields = ('id',)
class RecipeSerializer(serializers.ModelSerializer):
ingredients = serializers.PrimaryKeyRelatedField(
many=True,
queryset=Ingredient.objects.all()
)
tags = serializers.PrimaryKeyRelatedField(
many=True,
queryset=Tag.objects.all()
)
class Meta:
model = Recipe
fields = (
'id', 'title', 'ingredients', 'tags', 'time_minutes', 'price',
'link',
)
read_only_fields = ('id',)
class RecipeDetailSerializer(RecipeSerializer):
ingredients = IngredientSerializer(many=True, read_only=True)
tags = TagSerializer(many=True, read_only=True)
class RecipeImageSerializer(serializers.ModelSerializer):
class Meta:
model = Recipe
fields = ('id', 'image')
read_only_fields = ('id',)
| true
| true
|
1c41f4b8767f74cf84df48568599996ec6265e81
| 581
|
py
|
Python
|
igamelister/amiga/kickstart.py
|
chris-vg/igamelister
|
807f4d504911341edbc7ffc187c3a19b29a72ace
|
[
"MIT"
] | null | null | null |
igamelister/amiga/kickstart.py
|
chris-vg/igamelister
|
807f4d504911341edbc7ffc187c3a19b29a72ace
|
[
"MIT"
] | null | null | null |
igamelister/amiga/kickstart.py
|
chris-vg/igamelister
|
807f4d504911341edbc7ffc187c3a19b29a72ace
|
[
"MIT"
] | null | null | null |
class Kickstart:
"""Represents a Kickstart ROM used by a WHDLoad Slave.
"""
name = None
checksum = None
def __init__(self, name: str, checksum: int) -> None:
"""Initialize a new instance of the Kickstart class.
:param name: The name of the Kickstart ROM.
:param checksum: The checksum (CRC16) of the Kickstart ROM.
"""
self.name = name
self.checksum = checksum
def __hash__(self):
return hash((self.name, self.checksum))
def __str__(self):
return f"{self.name} ({hex(self.checksum)})"
| 27.666667
| 67
| 0.609294
|
class Kickstart:
name = None
checksum = None
def __init__(self, name: str, checksum: int) -> None:
self.name = name
self.checksum = checksum
def __hash__(self):
return hash((self.name, self.checksum))
def __str__(self):
return f"{self.name} ({hex(self.checksum)})"
| true
| true
|
1c41f5160840f5e973981924338d7cbdf89f91b9
| 5,139
|
py
|
Python
|
app/project/settings/base.py
|
Thibaut-Deveraux/explore.ac
|
b7a536101cb0560239e81d919e6f290b687710b9
|
[
"MIT"
] | 5
|
2020-10-28T12:51:05.000Z
|
2022-02-26T01:25:11.000Z
|
app/project/settings/base.py
|
Thibaut-Deveraux/explore.ac
|
b7a536101cb0560239e81d919e6f290b687710b9
|
[
"MIT"
] | 1
|
2019-07-25T10:57:53.000Z
|
2019-07-25T10:57:53.000Z
|
app/project/settings/base.py
|
Thibdx/explore.ac
|
b7a536101cb0560239e81d919e6f290b687710b9
|
[
"MIT"
] | null | null | null |
"""
Django settings for project project.
Generated by 'django-admin startproject' using Django 2.2.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BASE_DIR = os.path.dirname(PROJECT_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# MODIFIED !
SECRET_KEY = os.getenv('SECRET_KEY','oxdmu06k18^8wb6m+z3+9^5p-qlapr0!w0$5n#rye@ojf@jb')
# SECURITY WARNING: don't run with debug turned on in production!
# MODIFIED !
if os.getenv('PRODUCTION','') == 'true' :
DEBUG = False
else:
DEBUG = True
ALLOWED_HOSTS = os.getenv('ALLOWED_HOSTS', 'localhost').split(',')
# Application definition
INSTALLED_APPS = [
'home',
'search',
'wagtail.contrib.forms',
'wagtail.contrib.redirects',
'wagtail.embeds',
'wagtail.sites',
'wagtail.users',
'wagtail.snippets',
'wagtail.documents',
'wagtail.images',
'wagtail.search',
'wagtail.admin',
'wagtail.core',
'modelcluster',
'taggit',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'wagtail.api.v2',
]
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'wagtail.core.middleware.SiteMiddleware',
'wagtail.contrib.redirects.middleware.RedirectMiddleware',
]
ROOT_URLCONF = 'project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(PROJECT_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
# MODIFIED !
DATABASES = {
'default': {
'ENGINE': os.getenv('SQL_ENGINE', 'django.db.backends.sqlite3'),
'NAME': os.environ.get('POSTGRES_NAME', os.path.join(BASE_DIR, 'db.sqlite3')),
'USER': os.environ.get('POSTGRES_USER', 'user'),
'PASSWORD': os.environ.get('POSTGRES_PASSWORD', 'password'),
'HOST': os.environ.get('POSTGRES_HOST', 'localhost'),
'PORT': os.environ.get('POSTGRES_PORT', '5432'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
STATICFILES_DIRS = [
os.path.join(PROJECT_DIR, 'static'),
]
# ManifestStaticFilesStorage is recommended in production, to prevent outdated
# Javascript / CSS assets being served from cache (e.g. after a Wagtail upgrade).
# See https://docs.djangoproject.com/en/2.2/ref/contrib/staticfiles/#manifeststaticfilesstorage
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
STATIC_ROOT = os.path.join(os.path.dirname(BASE_DIR), 'static')
STATIC_URL = '/static/'
MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), 'media')
MEDIA_URL = '/media/'
# Wagtail settings
WAGTAIL_SITE_NAME = "project"
# Base URL to use when referring to full URLs within the Wagtail admin backend -
# e.g. in notification emails. Don't include '/admin' or a trailing slash
# MODIFIED
BASE_URL = os.getenv('BASE_URL', 'http://example.com')
| 27.778378
| 95
| 0.69605
|
import os
PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BASE_DIR = os.path.dirname(PROJECT_DIR)
SECRET_KEY = os.getenv('SECRET_KEY','oxdmu06k18^8wb6m+z3+9^5p-qlapr0!w0$5n#rye@ojf@jb')
# MODIFIED !
if os.getenv('PRODUCTION','') == 'true' :
DEBUG = False
else:
DEBUG = True
ALLOWED_HOSTS = os.getenv('ALLOWED_HOSTS', 'localhost').split(',')
# Application definition
INSTALLED_APPS = [
'home',
'search',
'wagtail.contrib.forms',
'wagtail.contrib.redirects',
'wagtail.embeds',
'wagtail.sites',
'wagtail.users',
'wagtail.snippets',
'wagtail.documents',
'wagtail.images',
'wagtail.search',
'wagtail.admin',
'wagtail.core',
'modelcluster',
'taggit',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'wagtail.api.v2',
]
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'wagtail.core.middleware.SiteMiddleware',
'wagtail.contrib.redirects.middleware.RedirectMiddleware',
]
ROOT_URLCONF = 'project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(PROJECT_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
# MODIFIED !
DATABASES = {
'default': {
'ENGINE': os.getenv('SQL_ENGINE', 'django.db.backends.sqlite3'),
'NAME': os.environ.get('POSTGRES_NAME', os.path.join(BASE_DIR, 'db.sqlite3')),
'USER': os.environ.get('POSTGRES_USER', 'user'),
'PASSWORD': os.environ.get('POSTGRES_PASSWORD', 'password'),
'HOST': os.environ.get('POSTGRES_HOST', 'localhost'),
'PORT': os.environ.get('POSTGRES_PORT', '5432'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
STATICFILES_DIRS = [
os.path.join(PROJECT_DIR, 'static'),
]
# ManifestStaticFilesStorage is recommended in production, to prevent outdated
# Javascript / CSS assets being served from cache (e.g. after a Wagtail upgrade).
# See https://docs.djangoproject.com/en/2.2/ref/contrib/staticfiles/#manifeststaticfilesstorage
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
STATIC_ROOT = os.path.join(os.path.dirname(BASE_DIR), 'static')
STATIC_URL = '/static/'
MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), 'media')
MEDIA_URL = '/media/'
# Wagtail settings
WAGTAIL_SITE_NAME = "project"
# Base URL to use when referring to full URLs within the Wagtail admin backend -
# e.g. in notification emails. Don't include '/admin' or a trailing slash
BASE_URL = os.getenv('BASE_URL', 'http://example.com')
| true
| true
|
1c41f54fbf7c3f3a79cb23d6fac3f1966d269193
| 3,156
|
py
|
Python
|
tests/unit/states/zcbuildout_test.py
|
preoctopus/salt
|
aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d
|
[
"Apache-2.0"
] | 18
|
2015-02-22T12:53:50.000Z
|
2019-03-15T16:45:10.000Z
|
tests/unit/states/zcbuildout_test.py
|
preoctopus/salt
|
aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d
|
[
"Apache-2.0"
] | 20
|
2015-01-20T22:35:02.000Z
|
2017-11-06T11:17:34.000Z
|
tests/unit/states/zcbuildout_test.py
|
preoctopus/salt
|
aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d
|
[
"Apache-2.0"
] | 5
|
2015-01-13T04:23:09.000Z
|
2019-01-03T17:00:31.000Z
|
# -*- coding: utf-8 -*-
# Import python libs
from __future__ import absolute_import
import os
# Import Salt Testing libs
from salttesting import skipIf
from salttesting.helpers import (
ensure_in_syspath,
requires_network,
)
ensure_in_syspath('../../')
import integration
# Import Salt libs
import salt.utils
from unit.modules.zcbuildout_test import Base, KNOWN_VIRTUALENV_BINARY_NAMES
from salt.modules import zcbuildout as modbuildout
from salt.states import zcbuildout as buildout
from salt.modules import cmdmod as cmd
ROOT = os.path.join(os.path.dirname(integration.__file__),
'files/file/base/buildout')
modbuildout.__env__ = 'base'
modbuildout.__opts__ = {'test': False}
modbuildout.__salt__ = {
'cmd.run_all': cmd.run_all,
'cmd.run': cmd.run,
'cmd.retcode': cmd.retcode,
'buildout.buildout': modbuildout.buildout,
}
buildout.__env__ = 'base'
buildout.__opts__ = {'test': False}
buildout.__salt__ = {
'cmd.run_all': cmd.run_all,
'cmd.run': cmd.run,
'cmd.retcode': cmd.retcode,
'buildout.buildout': modbuildout.buildout,
}
@skipIf(salt.utils.which_bin(KNOWN_VIRTUALENV_BINARY_NAMES) is None,
        'The \'virtualenv\' package needs to be installed')
class BuildoutTestCase(Base):
@requires_network()
def test_quiet(self):
c_dir = os.path.join(self.tdir, 'c')
cret = buildout.installed(c_dir, python=self.py_st, quiet=True)
self.assertTrue(cret['result'])
self.assertFalse('OUTPUT:' in cret['comment'])
self.assertFalse('Log summary:' in cret['comment'])
@requires_network()
def test_error(self):
b_dir = os.path.join(self.tdir, 'e')
ret = buildout.installed(b_dir, python=self.py_st)
self.assertTrue(
'We did not get any expectable '
'answer from buildout'
in ret['comment'])
self.assertTrue(
'An internal error occurred due to a bug in'
' either zc.buildout '
in ret['comment'])
self.assertFalse(ret['result'])
@requires_network()
def test_installed(self):
b_dir = os.path.join(self.tdir, 'b')
ret = buildout.installed(b_dir,
python=self.py_st,
onlyif='/bin/false')
self.assertEqual(ret['comment'], '\nonlyif execution failed')
self.assertEqual(ret['result'], True)
self.assertTrue('/b' in ret['name'])
b_dir = os.path.join(self.tdir, 'b')
ret = buildout.installed(b_dir,
python=self.py_st,
unless='/bin/true')
self.assertEqual(ret['comment'], '\nunless execution succeeded')
self.assertEqual(ret['result'], True)
self.assertTrue('/b' in ret['name'])
ret = buildout.installed(b_dir, python=self.py_st)
self.assertEqual(ret['result'], True)
self.assertTrue('OUTPUT:' in ret['comment'])
self.assertTrue('Log summary:' in ret['comment'])
if __name__ == '__main__':
from integration import run_tests
run_tests(BuildoutTestCase, needs_daemon=False)
| 32.536082
| 76
| 0.635615
|
from __future__ import absolute_import
import os
from salttesting import skipIf
from salttesting.helpers import (
ensure_in_syspath,
requires_network,
)
ensure_in_syspath('../../')
import integration
import salt.utils
from unit.modules.zcbuildout_test import Base, KNOWN_VIRTUALENV_BINARY_NAMES
from salt.modules import zcbuildout as modbuildout
from salt.states import zcbuildout as buildout
from salt.modules import cmdmod as cmd
ROOT = os.path.join(os.path.dirname(integration.__file__),
'files/file/base/buildout')
modbuildout.__env__ = 'base'
modbuildout.__opts__ = {'test': False}
modbuildout.__salt__ = {
'cmd.run_all': cmd.run_all,
'cmd.run': cmd.run,
'cmd.retcode': cmd.retcode,
'buildout.buildout': modbuildout.buildout,
}
buildout.__env__ = 'base'
buildout.__opts__ = {'test': False}
buildout.__salt__ = {
'cmd.run_all': cmd.run_all,
'cmd.run': cmd.run,
'cmd.retcode': cmd.retcode,
'buildout.buildout': modbuildout.buildout,
}
@skipIf(salt.utils.which_bin(KNOWN_VIRTUALENV_BINARY_NAMES) is None,
'The \'virtualenv\' packaged needs to be installed')
class BuildoutTestCase(Base):
@requires_network()
def test_quiet(self):
c_dir = os.path.join(self.tdir, 'c')
cret = buildout.installed(c_dir, python=self.py_st, quiet=True)
self.assertTrue(cret['result'])
self.assertFalse('OUTPUT:' in cret['comment'])
self.assertFalse('Log summary:' in cret['comment'])
@requires_network()
def test_error(self):
b_dir = os.path.join(self.tdir, 'e')
ret = buildout.installed(b_dir, python=self.py_st)
self.assertTrue(
'We did not get any expectable '
'answer from buildout'
in ret['comment'])
self.assertTrue(
'An internal error occurred due to a bug in'
' either zc.buildout '
in ret['comment'])
self.assertFalse(ret['result'])
@requires_network()
def test_installed(self):
b_dir = os.path.join(self.tdir, 'b')
ret = buildout.installed(b_dir,
python=self.py_st,
onlyif='/bin/false')
self.assertEqual(ret['comment'], '\nonlyif execution failed')
self.assertEqual(ret['result'], True)
self.assertTrue('/b' in ret['name'])
b_dir = os.path.join(self.tdir, 'b')
ret = buildout.installed(b_dir,
python=self.py_st,
unless='/bin/true')
self.assertEqual(ret['comment'], '\nunless execution succeeded')
self.assertEqual(ret['result'], True)
self.assertTrue('/b' in ret['name'])
ret = buildout.installed(b_dir, python=self.py_st)
self.assertEqual(ret['result'], True)
self.assertTrue('OUTPUT:' in ret['comment'])
self.assertTrue('Log summary:' in ret['comment'])
if __name__ == '__main__':
from integration import run_tests
run_tests(BuildoutTestCase, needs_daemon=False)
| true
| true
|
1c41f61bd8a46c108a8e9aa698a2b42f2628b203
| 581
|
py
|
Python
|
sonia_navigation_states/src/sonia_navigation_states/modules/navigation_utilities.py
|
sonia-auv/sonia-behaviors
|
28519551f954616e83b474e6cab6ba3762d238f2
|
[
"BSD-3-Clause"
] | null | null | null |
sonia_navigation_states/src/sonia_navigation_states/modules/navigation_utilities.py
|
sonia-auv/sonia-behaviors
|
28519551f954616e83b474e6cab6ba3762d238f2
|
[
"BSD-3-Clause"
] | 1
|
2022-02-16T01:31:51.000Z
|
2022-02-21T22:30:46.000Z
|
sonia_navigation_states/src/sonia_navigation_states/modules/navigation_utilities.py
|
sonia-auv/sonia-behaviors
|
28519551f954616e83b474e6cab6ba3762d238f2
|
[
"BSD-3-Clause"
] | 1
|
2021-11-09T13:34:43.000Z
|
2021-11-09T13:34:43.000Z
|
# Navigation function libraries.
# Useful for navigation states
# includes
from sonia_common.msg import AddPose, MultiAddPose
# fill and return an addpose object
def addpose( x, y, z, rx, ry,rz, frame, speed, fine, rot):
buffer = AddPose()
buffer.position.x = x
buffer.position.y = y
buffer.position.z = z
buffer.orientation.x = rx
buffer.orientation.y = ry
buffer.orientation.z = rz
buffer.frame = frame
buffer.speed = speed
buffer.fine = fine
buffer.rotation = rot
return buffer
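# Hedged usage sketch: assembling a two-waypoint trajectory. The MultiAddPose
# field name `pose` and the numeric values are illustrative assumptions only.
#
#   traj = MultiAddPose()
#   traj.pose.append(addpose(1.0, 0.0, 0.5, 0.0, 0.0, 0.0, 1, 0.5, False, False))
#   traj.pose.append(addpose(2.0, 0.0, 0.5, 0.0, 0.0, 90.0, 1, 0.5, True, True))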
| 27.666667
| 58
| 0.628227
|
from sonia_common.msg import AddPose, MultiAddPose
def addpose( x, y, z, rx, ry,rz, frame, speed, fine, rot):
buffer = AddPose()
buffer.position.x = x
buffer.position.y = y
buffer.position.z = z
buffer.orientation.x = rx
buffer.orientation.y = ry
buffer.orientation.z = rz
buffer.frame = frame
buffer.speed = speed
buffer.fine = fine
buffer.rotation = rot
return buffer
| true
| true
|
1c41f62ab5976273b38fd1d35a4484ea7b232115
| 8,866
|
py
|
Python
|
siamese_tracking/test_siamrpn.py
|
FMsunyh/SiamDW
|
ef7a97ee6bdf732edbb7dc2943daf15b92535019
|
[
"MIT"
] | null | null | null |
siamese_tracking/test_siamrpn.py
|
FMsunyh/SiamDW
|
ef7a97ee6bdf732edbb7dc2943daf15b92535019
|
[
"MIT"
] | null | null | null |
siamese_tracking/test_siamrpn.py
|
FMsunyh/SiamDW
|
ef7a97ee6bdf732edbb7dc2943daf15b92535019
|
[
"MIT"
] | null | null | null |
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by Houwen Peng and Zhipeng Zhang
# Email: houwen.peng@microsoft.com
# Clean testing scripts for SiamRPN
# New: support GENE and TPE tuning
# ------------------------------------------------------------------------------
import _init_paths
import os
import cv2
import random
import argparse
import numpy as np
# import matlab.engine
from os.path import exists, join
import models.models as models
from tracker.siamrpn import SiamRPN
from torch.autograd import Variable
from easydict import EasyDict as edict
from utils.utils import load_pretrain, cxy_wh_2_rect, get_axis_aligned_bbox, load_dataset, poly_iou
# eng = matlab.engine.start_matlab()
def parse_args():
"""
args for rpn testing.
"""
parser = argparse.ArgumentParser(description='PyTorch SiamRPN Tracking Test')
parser.add_argument('--arch', dest='arch', default='SiamRPNIncep22', help='backbone architecture')
parser.add_argument('--resume', required=True, type=str, help='pretrained model')
parser.add_argument('--dataset', default='VOT2017', help='dataset test')
parser.add_argument('--anchor_nums', default=5, type=int, help='anchor numbers')
    parser.add_argument('--cls_type', default="thinner", type=str, help='cls/loss type: thicker, thinner, or one you define')
parser.add_argument('--epoch_test', default=False, type=bool, help='multi-gpu epoch test flag')
args = parser.parse_args()
return args
def track(tracker, net, video, args):
start_frame, lost_times, toc = 0, 0, 0
# save result to evaluate
if args.epoch_test:
suffix = args.resume.split('/')[-1]
suffix = suffix.split('.')[0]
tracker_path = os.path.join('result', args.dataset, args.arch + suffix)
else:
tracker_path = os.path.join('result', args.dataset, args.arch)
if not os.path.exists(tracker_path):
os.makedirs(tracker_path)
if 'VOT' in args.dataset:
baseline_path = join(tracker_path, 'baseline')
video_path = join(baseline_path, video['name'])
if not os.path.exists(video_path):
os.makedirs(video_path)
result_path = join(video_path, video['name'] + '_001.txt')
else:
result_path = join(tracker_path, '{:s}.txt'.format(video['name']))
if os.path.exists(result_path):
        return 0  # for multi-GPU testing
regions = [] # result and states[1 init / 2 lost / 0 skip]
image_files, gt = video['image_files'], video['gt']
for f, image_file in enumerate(image_files):
im = cv2.imread(image_file)
if len(im.shape) == 2:
im = cv2.cvtColor(im, cv2.COLOR_GRAY2BGR)
tic = cv2.getTickCount()
if f == start_frame: # init
cx, cy, w, h = get_axis_aligned_bbox(gt[f])
target_pos = np.array([cx, cy])
target_sz = np.array([w, h])
state = tracker.init(im, target_pos, target_sz, net) # init tracker
regions.append(1 if 'VOT' in args.dataset else gt[f])
elif f > start_frame: # tracking
state = tracker.track(state, im) # track
location = cxy_wh_2_rect(state['target_pos'], state['target_sz'])
b_overlap = poly_iou(gt[f], location) if 'VOT' in args.dataset else 1
if b_overlap > 0:
regions.append(location)
else:
regions.append(2)
lost_times += 1
start_frame = f + 5 # skip 5 frames
else: # skip
regions.append(0)
toc += cv2.getTickCount() - tic
toc /= cv2.getTickFrequency()
with open(result_path, "w") as fin:
if 'VOT' in args.dataset:
for x in regions:
if isinstance(x, int):
fin.write("{:d}\n".format(x))
else:
p_bbox = x.copy()
fin.write(','.join([str(i) for i in p_bbox]) + '\n')
else:
for x in regions:
p_bbox = x.copy()
fin.write(','.join([str(i + 1) if idx == 0 or idx == 1 else str(i) for idx, i in enumerate(p_bbox)]) + '\n')
print('Video: {:12s} Time: {:2.1f}s Speed: {:3.1f}fps Lost: {:d}'.format(video['name'], toc, f / toc, lost_times))
return lost_times
def main():
args = parse_args()
total_lost = 0
# prepare model
net = models.__dict__[args.arch](anchors_nums=args.anchor_nums, cls_type=args.cls_type)
net = load_pretrain(net, args.resume)
net.eval()
net = net.cuda()
# prepare video
dataset = load_dataset(args.dataset)
video_keys = list(dataset.keys()).copy()
# prepare tracker
info = edict()
info.arch = args.arch
info.cls_type = args.cls_type
info.dataset = args.dataset
info.epoch_test = args.epoch_test
tracker = SiamRPN(info)
for video in video_keys:
total_lost += track(tracker, net, dataset[video], args)
print('Total Lost: {:d}'.format(total_lost))
# ------------------------------------------------------------
# The next few functions are utilized for tuning
# Only VOT is supported
# About 1000 - 3000 group is needed
# ------------------------------------------------------------
def track_tune(tracker, net, video, config):
arch = config['arch']
benchmark_name = config['benchmark']
resume = config['resume']
hp = config['hp'] # penalty_k, scale_lr, window_influence, adaptive size (for vot2017 or later)
tracker_path = join('test', (benchmark_name + resume.split('/')[-1].split('.')[0] +
'_small_size_{:.4f}'.format(hp['small_sz']) +
'_big_size_{:.4f}'.format(hp['big_sz']) +
'_penalty_k_{:.4f}'.format(hp['penalty_k']) +
'_w_influence_{:.4f}'.format(hp['window_influence']) +
'_scale_lr_{:.4f}'.format(hp['lr'])).replace('.', '_')) # no .
if not os.path.exists(tracker_path):
os.makedirs(tracker_path)
if 'VOT' in benchmark_name:
baseline_path = join(tracker_path, 'baseline')
video_path = join(baseline_path, video['name'])
if not os.path.exists(video_path):
os.makedirs(video_path)
result_path = join(video_path, video['name'] + '_001.txt')
else:
raise ValueError('Only VOT is supported')
# occ for parallel running
if not os.path.exists(result_path):
fin = open(result_path, 'w')
fin.close()
else:
if benchmark_name.startswith('VOT'):
return 0
else:
raise ValueError('Only VOT is supported')
start_frame, lost_times, toc = 0, 0, 0
regions = [] # result and states[1 init / 2 lost / 0 skip]
image_files, gt = video['image_files'], video['gt']
for f, image_file in enumerate(image_files):
im = cv2.imread(image_file)
if len(im.shape) == 2:
im = cv2.cvtColor(im, cv2.COLOR_GRAY2BGR)
if f == start_frame: # init
cx, cy, w, h = get_axis_aligned_bbox(gt[f])
target_pos = np.array([cx, cy])
target_sz = np.array([w, h])
state = tracker.init(im, target_pos, target_sz, net, hp=hp) # init tracker
regions.append([float(1)] if 'VOT' in benchmark_name else gt[f])
elif f > start_frame: # tracking
state = tracker.track(state, im) # track
location = cxy_wh_2_rect(state['target_pos'], state['target_sz'])
b_overlap = poly_iou(gt[f], location) if 'VOT' in benchmark_name else 1
if b_overlap > 0:
regions.append(location)
else:
regions.append([float(2)])
lost_times += 1
start_frame = f + 5 # skip 5 frames
else: # skip
regions.append([float(0)])
# save results for OTB
if benchmark_name.startswith('VOT'):
return regions
else:
raise ValueError('Only VOT is supported')
def eao_vot_rpn(tracker, net, config):
dataset = load_dataset(config['benchmark'])
video_keys = sorted(list(dataset.keys()).copy())
results = []
for video in video_keys:
video_result = track_tune(tracker, net, dataset[video], config)
results.append(video_result)
    year = config['benchmark'][-4:]  # need a str, instead of an int
eng.cd('./lib/core')
eao = eng.get_eao(results, year)
return eao
if __name__ == '__main__':
main()
| 37.252101
| 129
| 0.565531
|
import _init_paths
import os
import cv2
import random
import argparse
import numpy as np
from os.path import exists, join
import models.models as models
from tracker.siamrpn import SiamRPN
from torch.autograd import Variable
from easydict import EasyDict as edict
from utils.utils import load_pretrain, cxy_wh_2_rect, get_axis_aligned_bbox, load_dataset, poly_iou
def parse_args():
parser = argparse.ArgumentParser(description='PyTorch SiamRPN Tracking Test')
parser.add_argument('--arch', dest='arch', default='SiamRPNIncep22', help='backbone architecture')
parser.add_argument('--resume', required=True, type=str, help='pretrained model')
parser.add_argument('--dataset', default='VOT2017', help='dataset test')
parser.add_argument('--anchor_nums', default=5, type=int, help='anchor numbers')
parser.add_argument('--cls_type', default="thinner", type=str, help='cls/loss type, thicker or thinner or else you defined')
parser.add_argument('--epoch_test', default=False, type=bool, help='multi-gpu epoch test flag')
args = parser.parse_args()
return args
def track(tracker, net, video, args):
start_frame, lost_times, toc = 0, 0, 0
if args.epoch_test:
suffix = args.resume.split('/')[-1]
suffix = suffix.split('.')[0]
tracker_path = os.path.join('result', args.dataset, args.arch + suffix)
else:
tracker_path = os.path.join('result', args.dataset, args.arch)
if not os.path.exists(tracker_path):
os.makedirs(tracker_path)
if 'VOT' in args.dataset:
baseline_path = join(tracker_path, 'baseline')
video_path = join(baseline_path, video['name'])
if not os.path.exists(video_path):
os.makedirs(video_path)
result_path = join(video_path, video['name'] + '_001.txt')
else:
result_path = join(tracker_path, '{:s}.txt'.format(video['name']))
if os.path.exists(result_path):
return 0
regions = []
image_files, gt = video['image_files'], video['gt']
for f, image_file in enumerate(image_files):
im = cv2.imread(image_file)
if len(im.shape) == 2:
im = cv2.cvtColor(im, cv2.COLOR_GRAY2BGR)
tic = cv2.getTickCount()
if f == start_frame:
cx, cy, w, h = get_axis_aligned_bbox(gt[f])
target_pos = np.array([cx, cy])
target_sz = np.array([w, h])
state = tracker.init(im, target_pos, target_sz, net)
regions.append(1 if 'VOT' in args.dataset else gt[f])
elif f > start_frame:
state = tracker.track(state, im)
location = cxy_wh_2_rect(state['target_pos'], state['target_sz'])
b_overlap = poly_iou(gt[f], location) if 'VOT' in args.dataset else 1
if b_overlap > 0:
regions.append(location)
else:
regions.append(2)
lost_times += 1
start_frame = f + 5
else:
regions.append(0)
toc += cv2.getTickCount() - tic
toc /= cv2.getTickFrequency()
with open(result_path, "w") as fin:
if 'VOT' in args.dataset:
for x in regions:
if isinstance(x, int):
fin.write("{:d}\n".format(x))
else:
p_bbox = x.copy()
fin.write(','.join([str(i) for i in p_bbox]) + '\n')
else:
for x in regions:
p_bbox = x.copy()
fin.write(','.join([str(i + 1) if idx == 0 or idx == 1 else str(i) for idx, i in enumerate(p_bbox)]) + '\n')
print('Video: {:12s} Time: {:2.1f}s Speed: {:3.1f}fps Lost: {:d}'.format(video['name'], toc, f / toc, lost_times))
return lost_times
def main():
args = parse_args()
total_lost = 0
net = models.__dict__[args.arch](anchors_nums=args.anchor_nums, cls_type=args.cls_type)
net = load_pretrain(net, args.resume)
net.eval()
net = net.cuda()
dataset = load_dataset(args.dataset)
video_keys = list(dataset.keys()).copy()
info = edict()
info.arch = args.arch
info.cls_type = args.cls_type
info.dataset = args.dataset
info.epoch_test = args.epoch_test
tracker = SiamRPN(info)
for video in video_keys:
total_lost += track(tracker, net, dataset[video], args)
print('Total Lost: {:d}'.format(total_lost))
def track_tune(tracker, net, video, config):
arch = config['arch']
benchmark_name = config['benchmark']
resume = config['resume']
hp = config['hp']
tracker_path = join('test', (benchmark_name + resume.split('/')[-1].split('.')[0] +
'_small_size_{:.4f}'.format(hp['small_sz']) +
'_big_size_{:.4f}'.format(hp['big_sz']) +
'_penalty_k_{:.4f}'.format(hp['penalty_k']) +
'_w_influence_{:.4f}'.format(hp['window_influence']) +
'_scale_lr_{:.4f}'.format(hp['lr'])).replace('.', '_'))
if not os.path.exists(tracker_path):
os.makedirs(tracker_path)
if 'VOT' in benchmark_name:
baseline_path = join(tracker_path, 'baseline')
video_path = join(baseline_path, video['name'])
if not os.path.exists(video_path):
os.makedirs(video_path)
result_path = join(video_path, video['name'] + '_001.txt')
else:
raise ValueError('Only VOT is supported')
if not os.path.exists(result_path):
fin = open(result_path, 'w')
fin.close()
else:
if benchmark_name.startswith('VOT'):
return 0
else:
raise ValueError('Only VOT is supported')
start_frame, lost_times, toc = 0, 0, 0
regions = []
image_files, gt = video['image_files'], video['gt']
for f, image_file in enumerate(image_files):
im = cv2.imread(image_file)
if len(im.shape) == 2:
im = cv2.cvtColor(im, cv2.COLOR_GRAY2BGR)
if f == start_frame:
cx, cy, w, h = get_axis_aligned_bbox(gt[f])
target_pos = np.array([cx, cy])
target_sz = np.array([w, h])
state = tracker.init(im, target_pos, target_sz, net, hp=hp)
regions.append([float(1)] if 'VOT' in benchmark_name else gt[f])
elif f > start_frame:
state = tracker.track(state, im)
location = cxy_wh_2_rect(state['target_pos'], state['target_sz'])
b_overlap = poly_iou(gt[f], location) if 'VOT' in benchmark_name else 1
if b_overlap > 0:
regions.append(location)
else:
regions.append([float(2)])
lost_times += 1
start_frame = f + 5
else:
regions.append([float(0)])
if benchmark_name.startswith('VOT'):
return regions
else:
raise ValueError('Only VOT is supported')
def eao_vot_rpn(tracker, net, config):
dataset = load_dataset(config['benchmark'])
video_keys = sorted(list(dataset.keys()).copy())
results = []
for video in video_keys:
video_result = track_tune(tracker, net, dataset[video], config)
results.append(video_result)
year = config['benchmark'][-4:]
eng.cd('./lib/core')
eao = eng.get_eao(results, year)
return eao
if __name__ == '__main__':
main()
| true
| true
|
1c41f6623efb264d5446ee55e530428b257b58c3
| 626
|
py
|
Python
|
ros/build/waypoint_follower/catkin_generated/pkg.develspace.context.pc.py
|
Emad-W/CarND-Capstone-Project
|
d058533d0815559918f4128051b12d47b995980d
|
[
"MIT"
] | null | null | null |
ros/build/waypoint_follower/catkin_generated/pkg.develspace.context.pc.py
|
Emad-W/CarND-Capstone-Project
|
d058533d0815559918f4128051b12d47b995980d
|
[
"MIT"
] | 10
|
2019-12-16T22:12:07.000Z
|
2022-02-10T00:24:31.000Z
|
ros/build/waypoint_follower/catkin_generated/pkg.develspace.context.pc.py
|
Emad-W/CarND-Capstone-Project
|
d058533d0815559918f4128051b12d47b995980d
|
[
"MIT"
] | null | null | null |
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/student/capstone/CarND-Capstone/ros/src/waypoint_follower/include".split(';') if "/home/student/capstone/CarND-Capstone/ros/src/waypoint_follower/include" != "" else []
PROJECT_CATKIN_DEPENDS = "roscpp;std_msgs;tf;geometry_msgs;styx_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-llibwaypoint_follower".split(';') if "-llibwaypoint_follower" != "" else []
PROJECT_NAME = "waypoint_follower"
PROJECT_SPACE_DIR = "/home/student/capstone/CarND-Capstone/ros/devel"
PROJECT_VERSION = "0.0.0"
| 69.555556
| 209
| 0.78115
|
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/student/capstone/CarND-Capstone/ros/src/waypoint_follower/include".split(';') if "/home/student/capstone/CarND-Capstone/ros/src/waypoint_follower/include" != "" else []
PROJECT_CATKIN_DEPENDS = "roscpp;std_msgs;tf;geometry_msgs;styx_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-llibwaypoint_follower".split(';') if "-llibwaypoint_follower" != "" else []
PROJECT_NAME = "waypoint_follower"
PROJECT_SPACE_DIR = "/home/student/capstone/CarND-Capstone/ros/devel"
PROJECT_VERSION = "0.0.0"
| true
| true
|
1c41f7aace29301015d66f27f3003cf64a0e2c9b
| 769
|
py
|
Python
|
tests/test_pool.py
|
evgenia-ch/happybase-mock
|
6fbf4a4f9685829b32ad8dc3de3e01b2a9fba964
|
[
"MIT"
] | 9
|
2015-06-08T02:26:37.000Z
|
2018-12-13T09:45:04.000Z
|
tests/test_pool.py
|
evgenia-ch/happybase-mock
|
6fbf4a4f9685829b32ad8dc3de3e01b2a9fba964
|
[
"MIT"
] | 5
|
2017-02-17T20:35:29.000Z
|
2019-12-27T09:33:14.000Z
|
tests/test_pool.py
|
evgenia-ch/happybase-mock
|
6fbf4a4f9685829b32ad8dc3de3e01b2a9fba964
|
[
"MIT"
] | 7
|
2016-03-16T17:25:40.000Z
|
2019-10-15T13:00:05.000Z
|
from .base import BaseTestCase
from happybase_mock.pool import Connection, ConnectionPool
class TestConnectionPool(BaseTestCase):
def tearDown(self):
Connection._instances.clear()
def test_connection(self):
pool = ConnectionPool(5, host='myhost', port=9999, table_prefix='test')
with pool.connection() as conn:
self.assertEqual(conn.host, 'myhost')
self.assertEqual(conn.port, 9999)
self.assertEqual(conn.table_prefix, 'test')
# Test creating table and putting data
conn.create_table('hello', {'d': dict()})
table = conn.table('hello')
table.put(b'key', {b'd:data': b'world'})
self.assertEqual(table.row(b'key'), {b'd:data': b'world'})
| 34.954545
| 79
| 0.626788
|
from .base import BaseTestCase
from happybase_mock.pool import Connection, ConnectionPool
class TestConnectionPool(BaseTestCase):
def tearDown(self):
Connection._instances.clear()
def test_connection(self):
pool = ConnectionPool(5, host='myhost', port=9999, table_prefix='test')
with pool.connection() as conn:
self.assertEqual(conn.host, 'myhost')
self.assertEqual(conn.port, 9999)
self.assertEqual(conn.table_prefix, 'test')
conn.create_table('hello', {'d': dict()})
table = conn.table('hello')
table.put(b'key', {b'd:data': b'world'})
self.assertEqual(table.row(b'key'), {b'd:data': b'world'})
| true
| true
|
1c41f7c69e44158c4efe46da3c150864b32701aa
| 27,495
|
py
|
Python
|
sdk/python/kfp/components/_structures.py
|
PabloRR100/pipelines
|
475b6e165f7cd4d6a330b5e3849377f3cba6f70e
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/kfp/components/_structures.py
|
PabloRR100/pipelines
|
475b6e165f7cd4d6a330b5e3849377f3cba6f70e
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/kfp/components/_structures.py
|
PabloRR100/pipelines
|
475b6e165f7cd4d6a330b5e3849377f3cba6f70e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'InputSpec',
'OutputSpec',
'InputValuePlaceholder',
'InputPathPlaceholder',
'OutputPathPlaceholder',
'InputUriPlaceholder',
'OutputUriPlaceholder',
'InputMetadataPlaceholder',
'InputOutputPortNamePlaceholder',
'OutputMetadataPlaceholder',
'ExecutorInputPlaceholder',
'ConcatPlaceholder',
'IsPresentPlaceholder',
'IfPlaceholderStructure',
'IfPlaceholder',
'ContainerSpec',
'ContainerImplementation',
'ComponentSpec',
'ComponentReference',
'GraphInputReference',
'GraphInputArgument',
'TaskOutputReference',
'TaskOutputArgument',
'EqualsPredicate',
'NotEqualsPredicate',
'GreaterThanPredicate',
'GreaterThanOrEqualPredicate',
'LessThenPredicate',
'LessThenOrEqualPredicate',
'NotPredicate',
'AndPredicate',
'OrPredicate',
'RetryStrategySpec',
'CachingStrategySpec',
'ExecutionOptionsSpec',
'TaskSpec',
'GraphSpec',
'GraphImplementation',
'PipelineRunSpec',
]
from collections import OrderedDict
from typing import Any, Dict, List, Mapping, Optional, Sequence, Union
from .modelbase import ModelBase
PrimitiveTypes = Union[str, int, float, bool]
PrimitiveTypesIncludingNone = Optional[PrimitiveTypes]
TypeSpecType = Union[str, Dict, List]
class InputSpec(ModelBase):
'''Describes the component input specification'''
def __init__(self,
name: str,
type: Optional[TypeSpecType] = None,
description: Optional[str] = None,
default: Optional[PrimitiveTypes] = None,
optional: Optional[bool] = False,
annotations: Optional[Dict[str, Any]] = None,
):
super().__init__(locals())
class OutputSpec(ModelBase):
'''Describes the component output specification'''
def __init__(self,
name: str,
type: Optional[TypeSpecType] = None,
description: Optional[str] = None,
annotations: Optional[Dict[str, Any]] = None,
):
super().__init__(locals())
class InputValuePlaceholder(ModelBase): #Non-standard attr names
'''Represents the command-line argument placeholder that will be replaced at run-time by the input argument value.'''
_serialized_names = {
'input_name': 'inputValue',
}
def __init__(self,
input_name: str,
):
super().__init__(locals())
class InputPathPlaceholder(ModelBase): #Non-standard attr names
'''Represents the command-line argument placeholder that will be replaced at run-time by a local file path pointing to a file containing the input argument value.'''
_serialized_names = {
'input_name': 'inputPath',
}
def __init__(self,
input_name: str,
):
super().__init__(locals())
class OutputPathPlaceholder(ModelBase): #Non-standard attr names
'''Represents the command-line argument placeholder that will be replaced at run-time by a local file path pointing to a file where the program should write its output data.'''
_serialized_names = {
'output_name': 'outputPath',
}
def __init__(self,
output_name: str,
):
super().__init__(locals())
class InputUriPlaceholder(ModelBase): # Non-standard attr names
"""Represents a placeholder for the URI of an input artifact.
Represents the command-line argument placeholder that will be replaced at
run-time by the URI of the input artifact argument.
"""
_serialized_names = {
'input_name': 'inputUri',
}
def __init__(self,
input_name: str,
):
super().__init__(locals())
class OutputUriPlaceholder(ModelBase): # Non-standard attr names
"""Represents a placeholder for the URI of an output artifact.
Represents the command-line argument placeholder that will be replaced at
    run-time by the URI of the output artifact where the program should write its
output data.
"""
_serialized_names = {
'output_name': 'outputUri',
}
def __init__(self,
output_name: str,
):
super().__init__(locals())
class InputMetadataPlaceholder(ModelBase): # Non-standard attr names
"""Represents the file path to an input artifact metadata.
During runtime, this command-line argument placeholder will be replaced
by the path where the metadata file associated with this artifact has been
written to. Currently only supported in v2 components.
"""
_serialized_names = {
'input_name': 'inputMetadata',
}
def __init__(self, input_name: str):
super().__init__(locals())
class InputOutputPortNamePlaceholder(ModelBase): # Non-standard attr names
"""Represents the output port name of an input artifact.
During compile time, this command-line argument placeholder will be replaced
by the actual output port name used by the producer task. Currently only
supported in v2 components.
"""
_serialized_names = {
'input_name': 'inputOutputPortName',
}
def __init__(self, input_name: str):
super().__init__(locals())
class OutputMetadataPlaceholder(ModelBase): # Non-standard attr names
"""Represents the output metadata JSON file location of this task.
This file will encode the metadata information produced by this task:
- Artifacts metadata, but not the content of the artifact, and
- output parameters.
Only supported in v2 components.
"""
_serialized_names = {
'output_metadata': 'outputMetadata',
}
def __init__(self, output_metadata: type(None) = None):
if output_metadata:
raise RuntimeError(
'Output metadata placeholder cannot be associated with key')
super().__init__(locals())
def to_dict(self) -> Mapping[str, Any]:
# Override parent implementation. Otherwise it always returns {}.
return {'outputMetadata': None}
class ExecutorInputPlaceholder(ModelBase): # Non-standard attr names
"""Represents the serialized ExecutorInput message at runtime.
This placeholder will be replaced by a serialized
[ExecutorInput](https://github.com/kubeflow/pipelines/blob/61f9c2c328d245d89c9d9b8c923f24dbbd08cdc9/api/v2alpha1/pipeline_spec.proto#L730)
proto message at runtime, which includes parameters of the task, artifact
URIs and metadata.
"""
_serialized_names = {
'executor_input': 'executorInput',
}
def __init__(self, executor_input: type(None) = None):
if executor_input:
raise RuntimeError(
'Executor input placeholder cannot be associated with input key'
'. Got %s' % executor_input)
super().__init__(locals())
def to_dict(self) -> Mapping[str, Any]:
# Override parent implementation. Otherwise it always returns {}.
return {'executorInput': None}
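# Editor's note: illustrative sketch only, not part of the original module.
# It shows how the v2-style placeholders defined above might appear in a
# container command line; the input/output names are hypothetical.
def _example_v2_command():
    return [
        '--examples-uri', InputUriPlaceholder('examples'),
        '--model-uri', OutputUriPlaceholder('model'),
        '--executor-input', ExecutorInputPlaceholder(),
    ]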
CommandlineArgumentType = Union[
str,
InputValuePlaceholder,
InputPathPlaceholder,
OutputPathPlaceholder,
InputUriPlaceholder,
OutputUriPlaceholder,
InputMetadataPlaceholder,
InputOutputPortNamePlaceholder,
OutputMetadataPlaceholder,
ExecutorInputPlaceholder,
'ConcatPlaceholder',
'IfPlaceholder',
]
class ConcatPlaceholder(ModelBase): #Non-standard attr names
'''Represents the command-line argument placeholder that will be replaced at run-time by the concatenated values of its items.'''
_serialized_names = {
'items': 'concat',
}
def __init__(self,
items: List[CommandlineArgumentType],
):
super().__init__(locals())
class IsPresentPlaceholder(ModelBase): #Non-standard attr names
'''Represents the command-line argument placeholder that will be replaced at run-time by a boolean value specifying whether the caller has passed an argument for the specified optional input.'''
_serialized_names = {
'input_name': 'isPresent',
}
def __init__(self,
input_name: str,
):
super().__init__(locals())
IfConditionArgumentType = Union[bool, str, IsPresentPlaceholder, InputValuePlaceholder]
class IfPlaceholderStructure(ModelBase): #Non-standard attr names
    '''Used by the IfPlaceholder: the command-line argument placeholder that will be replaced at run-time by the expanded value of either "then_value" or "else_value", depending on the submission-time resolved value of the "cond" predicate.'''
_serialized_names = {
'condition': 'cond',
'then_value': 'then',
'else_value': 'else',
}
def __init__(self,
condition: IfConditionArgumentType,
then_value: Union[CommandlineArgumentType, List[CommandlineArgumentType]],
else_value: Optional[Union[CommandlineArgumentType, List[CommandlineArgumentType]]] = None,
):
super().__init__(locals())
class IfPlaceholder(ModelBase): #Non-standard attr names
    '''Represents the command-line argument placeholder that will be replaced at run-time by the expanded value of either "then_value" or "else_value", depending on the submission-time resolved value of the "cond" predicate.'''
_serialized_names = {
'if_structure': 'if',
}
def __init__(self,
if_structure: IfPlaceholderStructure,
):
super().__init__(locals())
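# Editor's note: illustrative sketch only, not part of the original module.
# It composes the placeholder classes defined above; the input name "flag"
# is hypothetical.
def _example_if_placeholder():
    # Emit "--flag=<value>" only when the optional input "flag" was supplied.
    return IfPlaceholder(IfPlaceholderStructure(
        condition=IsPresentPlaceholder('flag'),
        then_value=ConcatPlaceholder(['--flag=', InputValuePlaceholder('flag')]),
    ))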
class ContainerSpec(ModelBase):
'''Describes the container component implementation.'''
_serialized_names = {
'file_outputs': 'fileOutputs', #TODO: rename to something like legacy_unconfigurable_output_paths
}
def __init__(self,
image: str,
command: Optional[List[CommandlineArgumentType]] = None,
args: Optional[List[CommandlineArgumentType]] = None,
env: Optional[Mapping[str, str]] = None,
file_outputs: Optional[Mapping[str, str]] = None, #TODO: rename to something like legacy_unconfigurable_output_paths
):
super().__init__(locals())
class ContainerImplementation(ModelBase):
'''Represents the container component implementation.'''
def __init__(self,
container: ContainerSpec,
):
super().__init__(locals())
ImplementationType = Union[ContainerImplementation, 'GraphImplementation']
class MetadataSpec(ModelBase):
def __init__(self,
annotations: Optional[Dict[str, str]] = None,
labels: Optional[Dict[str, str]] = None,
):
super().__init__(locals())
class ComponentSpec(ModelBase):
'''Component specification. Describes the metadata (name, description, annotations and labels), the interface (inputs and outputs) and the implementation of the component.'''
def __init__(
self,
name: Optional[str] = None, #? Move to metadata?
description: Optional[str] = None, #? Move to metadata?
metadata: Optional[MetadataSpec] = None,
inputs: Optional[List[InputSpec]] = None,
outputs: Optional[List[OutputSpec]] = None,
implementation: Optional[ImplementationType] = None,
version: Optional[str] = 'google.com/cloud/pipelines/component/v1',
#tags: Optional[Set[str]] = None,
):
super().__init__(locals())
self._post_init()
def _post_init(self):
#Checking input names for uniqueness
self._inputs_dict = {}
if self.inputs:
for input in self.inputs:
if input.name in self._inputs_dict:
raise ValueError('Non-unique input name "{}"'.format(input.name))
self._inputs_dict[input.name] = input
#Checking output names for uniqueness
self._outputs_dict = {}
if self.outputs:
for output in self.outputs:
if output.name in self._outputs_dict:
raise ValueError('Non-unique output name "{}"'.format(output.name))
self._outputs_dict[output.name] = output
if isinstance(self.implementation, ContainerImplementation):
container = self.implementation.container
if container.file_outputs:
for output_name, path in container.file_outputs.items():
if output_name not in self._outputs_dict:
raise TypeError('Unconfigurable output entry "{}" references non-existing output.'.format({output_name: path}))
def verify_arg(arg):
if arg is None:
pass
elif isinstance(
arg, (str, int, float, bool,
OutputMetadataPlaceholder, ExecutorInputPlaceholder)):
pass
elif isinstance(arg, list):
for arg2 in arg:
verify_arg(arg2)
elif isinstance(
arg, (InputUriPlaceholder, InputValuePlaceholder,
InputPathPlaceholder, IsPresentPlaceholder,
InputMetadataPlaceholder,
InputOutputPortNamePlaceholder)):
if arg.input_name not in self._inputs_dict:
raise TypeError(
'Argument "{}" references non-existing input.'.format(arg))
elif isinstance(arg, (OutputUriPlaceholder, OutputPathPlaceholder)):
if arg.output_name not in self._outputs_dict:
raise TypeError(
'Argument "{}" references non-existing output.'.format(arg))
elif isinstance(arg, ConcatPlaceholder):
for arg2 in arg.items:
verify_arg(arg2)
elif isinstance(arg, IfPlaceholder):
verify_arg(arg.if_structure.condition)
verify_arg(arg.if_structure.then_value)
verify_arg(arg.if_structure.else_value)
else:
raise TypeError('Unexpected argument "{}"'.format(arg))
verify_arg(container.command)
verify_arg(container.args)
if isinstance(self.implementation, GraphImplementation):
graph = self.implementation.graph
if graph.output_values is not None:
for output_name, argument in graph.output_values.items():
if output_name not in self._outputs_dict:
raise TypeError('Graph output argument entry "{}" references non-existing output.'.format({output_name: argument}))
if graph.tasks is not None:
for task in graph.tasks.values():
if task.arguments is not None:
for argument in task.arguments.values():
if isinstance(argument, GraphInputArgument) and argument.graph_input.input_name not in self._inputs_dict:
raise TypeError('Argument "{}" references non-existing input.'.format(argument))
def save(self, file_path: str):
'''Saves the component definition to file. It can be shared online and later loaded using the load_component function.'''
from ._yaml_utils import dump_yaml
component_yaml = dump_yaml(self.to_dict())
with open(file_path, 'w') as f:
f.write(component_yaml)
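# Editor's note: illustrative sketch only, not part of the original module.
# It wires the classes defined above into a complete component specification;
# the component name, image and port names are hypothetical.
def _example_component_spec():
    return ComponentSpec(
        name='Echo message',
        inputs=[InputSpec(name='message', type='String')],
        outputs=[OutputSpec(name='result', type='String')],
        implementation=ContainerImplementation(container=ContainerSpec(
            image='alpine',
            command=['sh', '-c', 'echo "$0" > "$1"',
                     InputValuePlaceholder('message'),
                     OutputPathPlaceholder('result')],
        )),
    )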
class ComponentReference(ModelBase):
'''Component reference. Contains information that can be used to locate and load a component by name, digest or URL'''
def __init__(self,
name: Optional[str] = None,
digest: Optional[str] = None,
tag: Optional[str] = None,
url: Optional[str] = None,
spec: Optional[ComponentSpec] = None,
):
super().__init__(locals())
self._post_init()
def _post_init(self) -> None:
if not any([self.name, self.digest, self.tag, self.url, self.spec]):
raise TypeError('Need at least one argument.')
class GraphInputReference(ModelBase):
'''References the input of the graph (the scope is a single graph).'''
_serialized_names = {
'input_name': 'inputName',
}
def __init__(self,
input_name: str,
type: Optional[TypeSpecType] = None, # Can be used to override the reference data type
):
super().__init__(locals())
def as_argument(self) -> 'GraphInputArgument':
return GraphInputArgument(graph_input=self)
def with_type(self, type_spec: TypeSpecType) -> 'GraphInputReference':
return GraphInputReference(
input_name=self.input_name,
type=type_spec,
)
def without_type(self) -> 'GraphInputReference':
return self.with_type(None)
class GraphInputArgument(ModelBase):
'''Represents the component argument value that comes from the graph component input.'''
_serialized_names = {
'graph_input': 'graphInput',
}
def __init__(self,
graph_input: GraphInputReference,
):
super().__init__(locals())
class TaskOutputReference(ModelBase):
'''References the output of some task (the scope is a single graph).'''
_serialized_names = {
'task_id': 'taskId',
'output_name': 'outputName',
}
def __init__(self,
output_name: str,
task_id: Optional[str] = None, # Used for linking to the upstream task in serialized component file.
task: Optional['TaskSpec'] = None, # Used for linking to the upstream task in runtime since Task does not have an ID until inserted into a graph.
type: Optional[TypeSpecType] = None, # Can be used to override the reference data type
):
super().__init__(locals())
if self.task_id is None and self.task is None:
raise TypeError('task_id and task cannot be None at the same time.')
def with_type(self, type_spec: TypeSpecType) -> 'TaskOutputReference':
return TaskOutputReference(
output_name=self.output_name,
task_id=self.task_id,
task=self.task,
type=type_spec,
)
def without_type(self) -> 'TaskOutputReference':
return self.with_type(None)
class TaskOutputArgument(ModelBase): #Has additional constructor for convenience
'''Represents the component argument value that comes from the output of another task.'''
_serialized_names = {
'task_output': 'taskOutput',
}
def __init__(self,
task_output: TaskOutputReference,
):
super().__init__(locals())
@staticmethod
def construct(
task_id: str,
output_name: str,
) -> 'TaskOutputArgument':
return TaskOutputArgument(TaskOutputReference(
task_id=task_id,
output_name=output_name,
))
def with_type(self, type_spec: TypeSpecType) -> 'TaskOutputArgument':
return TaskOutputArgument(
task_output=self.task_output.with_type(type_spec),
)
def without_type(self) -> 'TaskOutputArgument':
return self.with_type(None)
ArgumentType = Union[PrimitiveTypes, GraphInputArgument, TaskOutputArgument]
class TwoOperands(ModelBase):
def __init__(self,
op1: ArgumentType,
op2: ArgumentType,
):
super().__init__(locals())
class BinaryPredicate(ModelBase): #abstract base type
def __init__(self,
operands: TwoOperands
):
super().__init__(locals())
class EqualsPredicate(BinaryPredicate):
'''Represents the "equals" comparison predicate.'''
_serialized_names = {'operands': '=='}
class NotEqualsPredicate(BinaryPredicate):
'''Represents the "not equals" comparison predicate.'''
_serialized_names = {'operands': '!='}
class GreaterThanPredicate(BinaryPredicate):
'''Represents the "greater than" comparison predicate.'''
_serialized_names = {'operands': '>'}
class GreaterThanOrEqualPredicate(BinaryPredicate):
'''Represents the "greater than or equal" comparison predicate.'''
_serialized_names = {'operands': '>='}
class LessThenPredicate(BinaryPredicate):
'''Represents the "less than" comparison predicate.'''
_serialized_names = {'operands': '<'}
class LessThenOrEqualPredicate(BinaryPredicate):
'''Represents the "less than or equal" comparison predicate.'''
_serialized_names = { 'operands': '<='}
PredicateType = Union[
ArgumentType,
EqualsPredicate, NotEqualsPredicate, GreaterThanPredicate, GreaterThanOrEqualPredicate, LessThenPredicate, LessThenOrEqualPredicate,
'NotPredicate', 'AndPredicate', 'OrPredicate',
]
class TwoBooleanOperands(ModelBase):
def __init__(self,
op1: PredicateType,
op2: PredicateType,
):
super().__init__(locals())
class NotPredicate(ModelBase):
'''Represents the "not" logical operation.'''
_serialized_names = {'operand': 'not'}
def __init__(self,
operand: PredicateType
):
super().__init__(locals())
class AndPredicate(ModelBase):
'''Represents the "and" logical operation.'''
_serialized_names = {'operands': 'and'}
def __init__(self,
operands: TwoBooleanOperands
) :
super().__init__(locals())
class OrPredicate(ModelBase):
'''Represents the "or" logical operation.'''
_serialized_names = {'operands': 'or'}
def __init__(self,
operands: TwoBooleanOperands
):
super().__init__(locals())
class RetryStrategySpec(ModelBase):
_serialized_names = {
'max_retries': 'maxRetries',
}
def __init__(self,
max_retries: int,
):
super().__init__(locals())
class CachingStrategySpec(ModelBase):
_serialized_names = {
'max_cache_staleness': 'maxCacheStaleness',
}
def __init__(self,
max_cache_staleness: Optional[str] = None, # RFC3339 compliant duration: P30DT1H22M3S
):
super().__init__(locals())
class ExecutionOptionsSpec(ModelBase):
_serialized_names = {
'retry_strategy': 'retryStrategy',
'caching_strategy': 'cachingStrategy',
}
def __init__(self,
retry_strategy: Optional[RetryStrategySpec] = None,
caching_strategy: Optional[CachingStrategySpec] = None,
):
super().__init__(locals())
class TaskSpec(ModelBase):
'''Task specification. Task is a "configured" component - a component supplied with arguments and other applied configuration changes.'''
_serialized_names = {
'component_ref': 'componentRef',
'is_enabled': 'isEnabled',
'execution_options': 'executionOptions'
}
def __init__(self,
component_ref: ComponentReference,
arguments: Optional[Mapping[str, ArgumentType]] = None,
is_enabled: Optional[PredicateType] = None,
execution_options: Optional[ExecutionOptionsSpec] = None,
annotations: Optional[Dict[str, Any]] = None,
):
super().__init__(locals())
#TODO: If component_ref is resolved to component spec, then check that the arguments correspond to the inputs
def _init_outputs(self):
#Adding output references to the task
if self.component_ref.spec is None:
return
task_outputs = OrderedDict()
for output in self.component_ref.spec.outputs or []:
task_output_ref = TaskOutputReference(
output_name=output.name,
task=self,
type=output.type, # TODO: Resolve type expressions. E.g. type: {TypeOf: Input 1}
)
task_output_arg = TaskOutputArgument(task_output=task_output_ref)
task_outputs[output.name] = task_output_arg
self.outputs = task_outputs
if len(task_outputs) == 1:
self.output = list(task_outputs.values())[0]
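# Editor's note: illustrative sketch only, not part of the original module.
# A task pairs a component reference with concrete arguments; the URL and the
# input name used here are hypothetical.
def _example_task_spec():
    return TaskSpec(
        component_ref=ComponentReference(url='https://example.com/component.yaml'),
        arguments={'message': GraphInputReference('message').as_argument()},
        execution_options=ExecutionOptionsSpec(
            retry_strategy=RetryStrategySpec(max_retries=3),
        ),
    )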
class GraphSpec(ModelBase):
'''Describes the graph component implementation. It represents a graph of component tasks connected to the upstream sources of data using the argument specifications. It also describes the sources of graph output values.'''
_serialized_names = {
'output_values': 'outputValues',
}
def __init__(self,
tasks: Mapping[str, TaskSpec],
output_values: Mapping[str, ArgumentType] = None,
):
super().__init__(locals())
self._post_init()
def _post_init(self):
#Checking task output references and preparing the dependency table
task_dependencies = {}
for task_id, task in self.tasks.items():
dependencies = set()
task_dependencies[task_id] = dependencies
if task.arguments is not None:
for argument in task.arguments.values():
if isinstance(argument, TaskOutputArgument):
dependencies.add(argument.task_output.task_id)
if argument.task_output.task_id not in self.tasks:
raise TypeError('Argument "{}" references non-existing task.'.format(argument))
#Topologically sorting tasks to detect cycles
task_dependents = {k: set() for k in task_dependencies.keys()}
for task_id, dependencies in task_dependencies.items():
for dependency in dependencies:
task_dependents[dependency].add(task_id)
task_number_of_remaining_dependencies = {k: len(v) for k, v in task_dependencies.items()}
sorted_tasks = OrderedDict()
def process_task(task_id):
if task_number_of_remaining_dependencies[task_id] == 0 and task_id not in sorted_tasks:
sorted_tasks[task_id] = self.tasks[task_id]
for dependent_task in task_dependents[task_id]:
task_number_of_remaining_dependencies[dependent_task] = task_number_of_remaining_dependencies[dependent_task] - 1
process_task(dependent_task)
for task_id in task_dependencies.keys():
process_task(task_id)
if len(sorted_tasks) != len(task_dependencies):
tasks_with_unsatisfied_dependencies = {k: v for k, v in task_number_of_remaining_dependencies.items() if v > 0}
task_wth_minimal_number_of_unsatisfied_dependencies = min(tasks_with_unsatisfied_dependencies.keys(), key=lambda task_id: tasks_with_unsatisfied_dependencies[task_id])
raise ValueError('Task "{}" has cyclical dependency.'.format(task_wth_minimal_number_of_unsatisfied_dependencies))
self._toposorted_tasks = sorted_tasks
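# Editor's note: illustrative sketch only, not part of the original module.
# Two hypothetical tasks where "consumer" depends on the output of "producer";
# this is the kind of edge the dependency/topological-sort checks in
# GraphSpec._post_init above operate on.
def _example_graph_spec():
    producer = TaskSpec(component_ref=ComponentReference(name='Producer'))
    consumer = TaskSpec(
        component_ref=ComponentReference(name='Consumer'),
        arguments={'data': TaskOutputArgument.construct(task_id='producer',
                                                        output_name='out')},
    )
    return GraphSpec(tasks={'producer': producer, 'consumer': consumer})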
class GraphImplementation(ModelBase):
'''Represents the graph component implementation.'''
def __init__(self,
graph: GraphSpec,
):
super().__init__(locals())
class PipelineRunSpec(ModelBase):
'''The object that can be sent to the backend to start a new Run.'''
_serialized_names = {
'root_task': 'rootTask',
#'on_exit_task': 'onExitTask',
}
def __init__(self,
root_task: TaskSpec,
#on_exit_task: Optional[TaskSpec] = None,
):
super().__init__(locals())
| 34.36875
| 245
| 0.657065
|
__all__ = [
'InputSpec',
'OutputSpec',
'InputValuePlaceholder',
'InputPathPlaceholder',
'OutputPathPlaceholder',
'InputUriPlaceholder',
'OutputUriPlaceholder',
'InputMetadataPlaceholder',
'InputOutputPortNamePlaceholder',
'OutputMetadataPlaceholder',
'ExecutorInputPlaceholder',
'ConcatPlaceholder',
'IsPresentPlaceholder',
'IfPlaceholderStructure',
'IfPlaceholder',
'ContainerSpec',
'ContainerImplementation',
'ComponentSpec',
'ComponentReference',
'GraphInputReference',
'GraphInputArgument',
'TaskOutputReference',
'TaskOutputArgument',
'EqualsPredicate',
'NotEqualsPredicate',
'GreaterThanPredicate',
'GreaterThanOrEqualPredicate',
'LessThenPredicate',
'LessThenOrEqualPredicate',
'NotPredicate',
'AndPredicate',
'OrPredicate',
'RetryStrategySpec',
'CachingStrategySpec',
'ExecutionOptionsSpec',
'TaskSpec',
'GraphSpec',
'GraphImplementation',
'PipelineRunSpec',
]
from collections import OrderedDict
from typing import Any, Dict, List, Mapping, Optional, Sequence, Union
from .modelbase import ModelBase
PrimitiveTypes = Union[str, int, float, bool]
PrimitiveTypesIncludingNone = Optional[PrimitiveTypes]
TypeSpecType = Union[str, Dict, List]
class InputSpec(ModelBase):
def __init__(self,
name: str,
type: Optional[TypeSpecType] = None,
description: Optional[str] = None,
default: Optional[PrimitiveTypes] = None,
optional: Optional[bool] = False,
annotations: Optional[Dict[str, Any]] = None,
):
super().__init__(locals())
class OutputSpec(ModelBase):
def __init__(self,
name: str,
type: Optional[TypeSpecType] = None,
description: Optional[str] = None,
annotations: Optional[Dict[str, Any]] = None,
):
super().__init__(locals())
class InputValuePlaceholder(ModelBase):
_serialized_names = {
'input_name': 'inputValue',
}
def __init__(self,
input_name: str,
):
super().__init__(locals())
class InputPathPlaceholder(ModelBase):
_serialized_names = {
'input_name': 'inputPath',
}
def __init__(self,
input_name: str,
):
super().__init__(locals())
class OutputPathPlaceholder(ModelBase):
_serialized_names = {
'output_name': 'outputPath',
}
def __init__(self,
output_name: str,
):
super().__init__(locals())
class InputUriPlaceholder(ModelBase):
_serialized_names = {
'input_name': 'inputUri',
}
def __init__(self,
input_name: str,
):
super().__init__(locals())
class OutputUriPlaceholder(ModelBase):
_serialized_names = {
'output_name': 'outputUri',
}
def __init__(self,
output_name: str,
):
super().__init__(locals())
class InputMetadataPlaceholder(ModelBase):
_serialized_names = {
'input_name': 'inputMetadata',
}
def __init__(self, input_name: str):
super().__init__(locals())
class InputOutputPortNamePlaceholder(ModelBase):
_serialized_names = {
'input_name': 'inputOutputPortName',
}
def __init__(self, input_name: str):
super().__init__(locals())
class OutputMetadataPlaceholder(ModelBase):
_serialized_names = {
'output_metadata': 'outputMetadata',
}
def __init__(self, output_metadata: type(None) = None):
if output_metadata:
raise RuntimeError(
'Output metadata placeholder cannot be associated with key')
super().__init__(locals())
def to_dict(self) -> Mapping[str, Any]:
return {'outputMetadata': None}
class ExecutorInputPlaceholder(ModelBase):
_serialized_names = {
'executor_input': 'executorInput',
}
def __init__(self, executor_input: type(None) = None):
if executor_input:
raise RuntimeError(
'Executor input placeholder cannot be associated with input key'
'. Got %s' % executor_input)
super().__init__(locals())
def to_dict(self) -> Mapping[str, Any]:
return {'executorInput': None}
CommandlineArgumentType = Union[
str,
InputValuePlaceholder,
InputPathPlaceholder,
OutputPathPlaceholder,
InputUriPlaceholder,
OutputUriPlaceholder,
InputMetadataPlaceholder,
InputOutputPortNamePlaceholder,
OutputMetadataPlaceholder,
ExecutorInputPlaceholder,
'ConcatPlaceholder',
'IfPlaceholder',
]
class ConcatPlaceholder(ModelBase):
_serialized_names = {
'items': 'concat',
}
def __init__(self,
items: List[CommandlineArgumentType],
):
super().__init__(locals())
class IsPresentPlaceholder(ModelBase):
_serialized_names = {
'input_name': 'isPresent',
}
def __init__(self,
input_name: str,
):
super().__init__(locals())
IfConditionArgumentType = Union[bool, str, IsPresentPlaceholder, InputValuePlaceholder]
class IfPlaceholderStructure(ModelBase):
_serialized_names = {
'condition': 'cond',
'then_value': 'then',
'else_value': 'else',
}
def __init__(self,
condition: IfConditionArgumentType,
then_value: Union[CommandlineArgumentType, List[CommandlineArgumentType]],
else_value: Optional[Union[CommandlineArgumentType, List[CommandlineArgumentType]]] = None,
):
super().__init__(locals())
class IfPlaceholder(ModelBase):
_serialized_names = {
'if_structure': 'if',
}
def __init__(self,
if_structure: IfPlaceholderStructure,
):
super().__init__(locals())
class ContainerSpec(ModelBase):
_serialized_names = {
'file_outputs': 'fileOutputs',
}
def __init__(self,
image: str,
command: Optional[List[CommandlineArgumentType]] = None,
args: Optional[List[CommandlineArgumentType]] = None,
env: Optional[Mapping[str, str]] = None,
file_outputs: Optional[Mapping[str, str]] = None,
):
super().__init__(locals())
class ContainerImplementation(ModelBase):
def __init__(self,
container: ContainerSpec,
):
super().__init__(locals())
ImplementationType = Union[ContainerImplementation, 'GraphImplementation']
class MetadataSpec(ModelBase):
def __init__(self,
annotations: Optional[Dict[str, str]] = None,
labels: Optional[Dict[str, str]] = None,
):
super().__init__(locals())
class ComponentSpec(ModelBase):
def __init__(
self,
name: Optional[str] = None,
description: Optional[str] = None,
metadata: Optional[MetadataSpec] = None,
inputs: Optional[List[InputSpec]] = None,
outputs: Optional[List[OutputSpec]] = None,
implementation: Optional[ImplementationType] = None,
version: Optional[str] = 'google.com/cloud/pipelines/component/v1',
):
super().__init__(locals())
self._post_init()
def _post_init(self):
self._inputs_dict = {}
if self.inputs:
for input in self.inputs:
if input.name in self._inputs_dict:
raise ValueError('Non-unique input name "{}"'.format(input.name))
self._inputs_dict[input.name] = input
self._outputs_dict = {}
if self.outputs:
for output in self.outputs:
if output.name in self._outputs_dict:
raise ValueError('Non-unique output name "{}"'.format(output.name))
self._outputs_dict[output.name] = output
if isinstance(self.implementation, ContainerImplementation):
container = self.implementation.container
if container.file_outputs:
for output_name, path in container.file_outputs.items():
if output_name not in self._outputs_dict:
raise TypeError('Unconfigurable output entry "{}" references non-existing output.'.format({output_name: path}))
def verify_arg(arg):
if arg is None:
pass
elif isinstance(
arg, (str, int, float, bool,
OutputMetadataPlaceholder, ExecutorInputPlaceholder)):
pass
elif isinstance(arg, list):
for arg2 in arg:
verify_arg(arg2)
elif isinstance(
arg, (InputUriPlaceholder, InputValuePlaceholder,
InputPathPlaceholder, IsPresentPlaceholder,
InputMetadataPlaceholder,
InputOutputPortNamePlaceholder)):
if arg.input_name not in self._inputs_dict:
raise TypeError(
'Argument "{}" references non-existing input.'.format(arg))
elif isinstance(arg, (OutputUriPlaceholder, OutputPathPlaceholder)):
if arg.output_name not in self._outputs_dict:
raise TypeError(
'Argument "{}" references non-existing output.'.format(arg))
elif isinstance(arg, ConcatPlaceholder):
for arg2 in arg.items:
verify_arg(arg2)
elif isinstance(arg, IfPlaceholder):
verify_arg(arg.if_structure.condition)
verify_arg(arg.if_structure.then_value)
verify_arg(arg.if_structure.else_value)
else:
raise TypeError('Unexpected argument "{}"'.format(arg))
verify_arg(container.command)
verify_arg(container.args)
if isinstance(self.implementation, GraphImplementation):
graph = self.implementation.graph
if graph.output_values is not None:
for output_name, argument in graph.output_values.items():
if output_name not in self._outputs_dict:
raise TypeError('Graph output argument entry "{}" references non-existing output.'.format({output_name: argument}))
if graph.tasks is not None:
for task in graph.tasks.values():
if task.arguments is not None:
for argument in task.arguments.values():
if isinstance(argument, GraphInputArgument) and argument.graph_input.input_name not in self._inputs_dict:
raise TypeError('Argument "{}" references non-existing input.'.format(argument))
def save(self, file_path: str):
from ._yaml_utils import dump_yaml
component_yaml = dump_yaml(self.to_dict())
with open(file_path, 'w') as f:
f.write(component_yaml)
class ComponentReference(ModelBase):
def __init__(self,
name: Optional[str] = None,
digest: Optional[str] = None,
tag: Optional[str] = None,
url: Optional[str] = None,
spec: Optional[ComponentSpec] = None,
):
super().__init__(locals())
self._post_init()
def _post_init(self) -> None:
if not any([self.name, self.digest, self.tag, self.url, self.spec]):
raise TypeError('Need at least one argument.')
class GraphInputReference(ModelBase):
_serialized_names = {
'input_name': 'inputName',
}
def __init__(self,
input_name: str,
type: Optional[TypeSpecType] = None,
):
super().__init__(locals())
def as_argument(self) -> 'GraphInputArgument':
return GraphInputArgument(graph_input=self)
def with_type(self, type_spec: TypeSpecType) -> 'GraphInputReference':
return GraphInputReference(
input_name=self.input_name,
type=type_spec,
)
def without_type(self) -> 'GraphInputReference':
return self.with_type(None)
class GraphInputArgument(ModelBase):
_serialized_names = {
'graph_input': 'graphInput',
}
def __init__(self,
graph_input: GraphInputReference,
):
super().__init__(locals())
class TaskOutputReference(ModelBase):
_serialized_names = {
'task_id': 'taskId',
'output_name': 'outputName',
}
def __init__(self,
output_name: str,
task_id: Optional[str] = None,
task: Optional['TaskSpec'] = None,
type: Optional[TypeSpecType] = None,
):
super().__init__(locals())
if self.task_id is None and self.task is None:
raise TypeError('task_id and task cannot be None at the same time.')
def with_type(self, type_spec: TypeSpecType) -> 'TaskOutputReference':
return TaskOutputReference(
output_name=self.output_name,
task_id=self.task_id,
task=self.task,
type=type_spec,
)
def without_type(self) -> 'TaskOutputReference':
return self.with_type(None)
class TaskOutputArgument(ModelBase):
_serialized_names = {
'task_output': 'taskOutput',
}
def __init__(self,
task_output: TaskOutputReference,
):
super().__init__(locals())
@staticmethod
def construct(
task_id: str,
output_name: str,
) -> 'TaskOutputArgument':
return TaskOutputArgument(TaskOutputReference(
task_id=task_id,
output_name=output_name,
))
def with_type(self, type_spec: TypeSpecType) -> 'TaskOutputArgument':
return TaskOutputArgument(
task_output=self.task_output.with_type(type_spec),
)
def without_type(self) -> 'TaskOutputArgument':
return self.with_type(None)
ArgumentType = Union[PrimitiveTypes, GraphInputArgument, TaskOutputArgument]
class TwoOperands(ModelBase):
def __init__(self,
op1: ArgumentType,
op2: ArgumentType,
):
super().__init__(locals())
class BinaryPredicate(ModelBase):
def __init__(self,
operands: TwoOperands
):
super().__init__(locals())
class EqualsPredicate(BinaryPredicate):
_serialized_names = {'operands': '=='}
class NotEqualsPredicate(BinaryPredicate):
_serialized_names = {'operands': '!='}
class GreaterThanPredicate(BinaryPredicate):
_serialized_names = {'operands': '>'}
class GreaterThanOrEqualPredicate(BinaryPredicate):
_serialized_names = {'operands': '>='}
class LessThenPredicate(BinaryPredicate):
_serialized_names = {'operands': '<'}
class LessThenOrEqualPredicate(BinaryPredicate):
_serialized_names = { 'operands': '<='}
PredicateType = Union[
ArgumentType,
EqualsPredicate, NotEqualsPredicate, GreaterThanPredicate, GreaterThanOrEqualPredicate, LessThenPredicate, LessThenOrEqualPredicate,
'NotPredicate', 'AndPredicate', 'OrPredicate',
]
class TwoBooleanOperands(ModelBase):
def __init__(self,
op1: PredicateType,
op2: PredicateType,
):
super().__init__(locals())
class NotPredicate(ModelBase):
_serialized_names = {'operand': 'not'}
def __init__(self,
operand: PredicateType
):
super().__init__(locals())
class AndPredicate(ModelBase):
_serialized_names = {'operands': 'and'}
def __init__(self,
operands: TwoBooleanOperands
) :
super().__init__(locals())
class OrPredicate(ModelBase):
_serialized_names = {'operands': 'or'}
def __init__(self,
operands: TwoBooleanOperands
):
super().__init__(locals())
class RetryStrategySpec(ModelBase):
_serialized_names = {
'max_retries': 'maxRetries',
}
def __init__(self,
max_retries: int,
):
super().__init__(locals())
class CachingStrategySpec(ModelBase):
_serialized_names = {
'max_cache_staleness': 'maxCacheStaleness',
}
def __init__(self,
max_cache_staleness: Optional[str] = None,
):
super().__init__(locals())
class ExecutionOptionsSpec(ModelBase):
_serialized_names = {
'retry_strategy': 'retryStrategy',
'caching_strategy': 'cachingStrategy',
}
def __init__(self,
retry_strategy: Optional[RetryStrategySpec] = None,
caching_strategy: Optional[CachingStrategySpec] = None,
):
super().__init__(locals())
class TaskSpec(ModelBase):
_serialized_names = {
'component_ref': 'componentRef',
'is_enabled': 'isEnabled',
'execution_options': 'executionOptions'
}
def __init__(self,
component_ref: ComponentReference,
arguments: Optional[Mapping[str, ArgumentType]] = None,
is_enabled: Optional[PredicateType] = None,
execution_options: Optional[ExecutionOptionsSpec] = None,
annotations: Optional[Dict[str, Any]] = None,
):
super().__init__(locals())
def _init_outputs(self):
if self.component_ref.spec is None:
return
task_outputs = OrderedDict()
for output in self.component_ref.spec.outputs or []:
task_output_ref = TaskOutputReference(
output_name=output.name,
task=self,
type=output.type,
)
task_output_arg = TaskOutputArgument(task_output=task_output_ref)
task_outputs[output.name] = task_output_arg
self.outputs = task_outputs
if len(task_outputs) == 1:
self.output = list(task_outputs.values())[0]
class GraphSpec(ModelBase):
_serialized_names = {
'output_values': 'outputValues',
}
def __init__(self,
tasks: Mapping[str, TaskSpec],
output_values: Mapping[str, ArgumentType] = None,
):
super().__init__(locals())
self._post_init()
def _post_init(self):
task_dependencies = {}
for task_id, task in self.tasks.items():
dependencies = set()
task_dependencies[task_id] = dependencies
if task.arguments is not None:
for argument in task.arguments.values():
if isinstance(argument, TaskOutputArgument):
dependencies.add(argument.task_output.task_id)
if argument.task_output.task_id not in self.tasks:
raise TypeError('Argument "{}" references non-existing task.'.format(argument))
task_dependents = {k: set() for k in task_dependencies.keys()}
for task_id, dependencies in task_dependencies.items():
for dependency in dependencies:
task_dependents[dependency].add(task_id)
task_number_of_remaining_dependencies = {k: len(v) for k, v in task_dependencies.items()}
sorted_tasks = OrderedDict()
def process_task(task_id):
if task_number_of_remaining_dependencies[task_id] == 0 and task_id not in sorted_tasks:
sorted_tasks[task_id] = self.tasks[task_id]
for dependent_task in task_dependents[task_id]:
task_number_of_remaining_dependencies[dependent_task] = task_number_of_remaining_dependencies[dependent_task] - 1
process_task(dependent_task)
for task_id in task_dependencies.keys():
process_task(task_id)
if len(sorted_tasks) != len(task_dependencies):
tasks_with_unsatisfied_dependencies = {k: v for k, v in task_number_of_remaining_dependencies.items() if v > 0}
task_wth_minimal_number_of_unsatisfied_dependencies = min(tasks_with_unsatisfied_dependencies.keys(), key=lambda task_id: tasks_with_unsatisfied_dependencies[task_id])
raise ValueError('Task "{}" has cyclical dependency.'.format(task_wth_minimal_number_of_unsatisfied_dependencies))
self._toposorted_tasks = sorted_tasks
class GraphImplementation(ModelBase):
def __init__(self,
graph: GraphSpec,
):
super().__init__(locals())
class PipelineRunSpec(ModelBase):
_serialized_names = {
'root_task': 'rootTask',
}
def __init__(self,
root_task: TaskSpec,
):
super().__init__(locals())
| true
| true
|
1c41f8c588032f50019617be31c2dd1199f80b94
| 21,038
|
py
|
Python
|
wsme/tests/protocol.py
|
mail2nsrajesh/wsme
|
9f84e4c7c59b172a09341af2ab496d07e29e01aa
|
[
"MIT"
] | 2
|
2020-11-05T06:09:20.000Z
|
2021-01-03T07:22:18.000Z
|
wsme/tests/protocol.py
|
mail2nsrajesh/wsme
|
9f84e4c7c59b172a09341af2ab496d07e29e01aa
|
[
"MIT"
] | null | null | null |
wsme/tests/protocol.py
|
mail2nsrajesh/wsme
|
9f84e4c7c59b172a09341af2ab496d07e29e01aa
|
[
"MIT"
] | null | null | null |
# coding=utf-8
import unittest
import warnings
import datetime
import decimal
import six
from six import u, b
from webtest import TestApp
from wsme import WSRoot, Unset
from wsme import expose, validate
import wsme.types
import wsme.utils
warnings.filterwarnings('ignore', module='webob.dec')
binarysample = b('\x00\xff\x43')
try:
1 / 0
except ZeroDivisionError as e:
zerodivisionerrormsg = str(e)
class CallException(RuntimeError):
def __init__(self, faultcode, faultstring, debuginfo):
self.faultcode = faultcode
self.faultstring = faultstring
self.debuginfo = debuginfo
def __str__(self):
return 'faultcode=%s, faultstring=%s, debuginfo=%s' % (
self.faultcode, self.faultstring, self.debuginfo
)
myenumtype = wsme.types.Enum(wsme.types.bytes, 'v1', 'v2')
class NestedInner(object):
aint = int
def __init__(self, aint=None):
self.aint = aint
class NestedOuter(object):
inner = NestedInner
inner_array = wsme.types.wsattr([NestedInner])
inner_dict = {wsme.types.text: NestedInner}
def __init__(self):
self.inner = NestedInner(0)
class NamedAttrsObject(object):
def __init__(self, v1=Unset, v2=Unset):
self.attr_1 = v1
self.attr_2 = v2
attr_1 = wsme.types.wsattr(int, name='attr.1')
attr_2 = wsme.types.wsattr(int, name='attr.2')
class CustomObject(object):
aint = int
name = wsme.types.text
class ExtendedInt(wsme.types.UserType):
basetype = int
name = "Extended integer"
class NestedInnerApi(object):
@expose(bool)
def deepfunction(self):
return True
class NestedOuterApi(object):
inner = NestedInnerApi()
class ReturnTypes(object):
@expose(wsme.types.bytes)
def getbytes(self):
return b("astring")
@expose(wsme.types.text)
def gettext(self):
return u('\xe3\x81\xae')
@expose(int)
def getint(self):
return 2
@expose(float)
def getfloat(self):
return 3.14159265
@expose(decimal.Decimal)
def getdecimal(self):
return decimal.Decimal('3.14159265')
@expose(datetime.date)
def getdate(self):
return datetime.date(1994, 1, 26)
@expose(bool)
def getbooltrue(self):
return True
@expose(bool)
def getboolfalse(self):
return False
@expose(datetime.time)
def gettime(self):
return datetime.time(12, 0, 0)
@expose(datetime.datetime)
def getdatetime(self):
return datetime.datetime(1994, 1, 26, 12, 0, 0)
@expose(wsme.types.binary)
def getbinary(self):
return binarysample
@expose(NestedOuter)
def getnested(self):
n = NestedOuter()
return n
@expose([wsme.types.bytes])
def getbytesarray(self):
return [b("A"), b("B"), b("C")]
@expose([NestedOuter])
def getnestedarray(self):
return [NestedOuter(), NestedOuter()]
@expose({wsme.types.bytes: NestedOuter})
def getnesteddict(self):
return {b('a'): NestedOuter(), b('b'): NestedOuter()}
@expose(NestedOuter)
def getobjectarrayattribute(self):
obj = NestedOuter()
obj.inner_array = [NestedInner(12), NestedInner(13)]
return obj
@expose(NestedOuter)
def getobjectdictattribute(self):
obj = NestedOuter()
obj.inner_dict = {
'12': NestedInner(12),
'13': NestedInner(13)
}
return obj
@expose(myenumtype)
def getenum(self):
return b('v2')
@expose(NamedAttrsObject)
def getnamedattrsobj(self):
return NamedAttrsObject(5, 6)
class ArgTypes(object):
def assertEqual(self, a, b):
if not (a == b):
raise AssertionError('%s != %s' % (a, b))
def assertIsInstance(self, value, v_type):
assert isinstance(value, v_type), ("%s is not instance of type %s" %
(value, v_type))
@expose(wsme.types.bytes)
@validate(wsme.types.bytes)
def setbytes(self, value):
print(repr(value))
self.assertEqual(type(value), wsme.types.bytes)
return value
@expose(wsme.types.text)
@validate(wsme.types.text)
def settext(self, value):
print(repr(value))
self.assertEqual(type(value), wsme.types.text)
return value
@expose(wsme.types.text)
@validate(wsme.types.text)
def settextnone(self, value):
print(repr(value))
self.assertEqual(type(value), type(None))
return value
@expose(bool)
@validate(bool)
def setbool(self, value):
print(repr(value))
self.assertEqual(type(value), bool)
return value
@expose(int)
@validate(int)
def setint(self, value):
print(repr(value))
self.assertEqual(type(value), int)
return value
@expose(float)
@validate(float)
def setfloat(self, value):
print(repr(value))
self.assertEqual(type(value), float)
return value
@expose(decimal.Decimal)
@validate(decimal.Decimal)
def setdecimal(self, value):
print(repr(value))
self.assertEqual(type(value), decimal.Decimal)
return value
@expose(datetime.date)
@validate(datetime.date)
def setdate(self, value):
print(repr(value))
self.assertEqual(type(value), datetime.date)
return value
@expose(datetime.time)
@validate(datetime.time)
def settime(self, value):
print(repr(value))
self.assertEqual(type(value), datetime.time)
return value
@expose(datetime.datetime)
@validate(datetime.datetime)
def setdatetime(self, value):
print(repr(value))
self.assertEqual(type(value), datetime.datetime)
return value
@expose(wsme.types.binary)
@validate(wsme.types.binary)
def setbinary(self, value):
print(repr(value))
self.assertEqual(type(value), six.binary_type)
return value
@expose([wsme.types.bytes])
@validate([wsme.types.bytes])
def setbytesarray(self, value):
print(repr(value))
self.assertEqual(type(value), list)
self.assertEqual(type(value[0]), wsme.types.bytes)
return value
@expose([wsme.types.text])
@validate([wsme.types.text])
def settextarray(self, value):
print(repr(value))
self.assertEqual(type(value), list)
self.assertEqual(type(value[0]), wsme.types.text)
return value
@expose([datetime.datetime])
@validate([datetime.datetime])
def setdatetimearray(self, value):
print(repr(value))
self.assertEqual(type(value), list)
self.assertEqual(type(value[0]), datetime.datetime)
return value
@expose(NestedOuter)
@validate(NestedOuter)
def setnested(self, value):
print(repr(value))
self.assertEqual(type(value), NestedOuter)
return value
@expose([NestedOuter])
@validate([NestedOuter])
def setnestedarray(self, value):
print(repr(value))
self.assertEqual(type(value), list)
self.assertEqual(type(value[0]), NestedOuter)
return value
@expose({wsme.types.bytes: NestedOuter})
@validate({wsme.types.bytes: NestedOuter})
def setnesteddict(self, value):
print(repr(value))
self.assertEqual(type(value), dict)
self.assertEqual(type(list(value.keys())[0]), wsme.types.bytes)
self.assertEqual(type(list(value.values())[0]), NestedOuter)
return value
@expose(myenumtype)
@validate(myenumtype)
def setenum(self, value):
print(value)
self.assertEqual(type(value), wsme.types.bytes)
return value
@expose(NamedAttrsObject)
@validate(NamedAttrsObject)
def setnamedattrsobj(self, value):
print(value)
self.assertEqual(type(value), NamedAttrsObject)
self.assertEqual(value.attr_1, 10)
self.assertEqual(value.attr_2, 20)
return value
@expose(CustomObject)
@validate(CustomObject)
def setcustomobject(self, value):
self.assertIsInstance(value, CustomObject)
self.assertIsInstance(value.name, wsme.types.text)
self.assertIsInstance(value.aint, int)
return value
@expose(ExtendedInt())
@validate(ExtendedInt())
def setextendedint(self, value):
self.assertEqual(isinstance(value, ExtendedInt.basetype), True)
return value
class BodyTypes(object):
def assertEqual(self, a, b):
if not (a == b):
raise AssertionError('%s != %s' % (a, b))
@expose(int, body={wsme.types.text: int})
@validate(int)
def setdict(self, body):
print(body)
self.assertEqual(type(body), dict)
self.assertEqual(type(body['test']), int)
self.assertEqual(body['test'], 10)
return body['test']
@expose(int, body=[int])
@validate(int)
def setlist(self, body):
print(body)
self.assertEqual(type(body), list)
self.assertEqual(type(body[0]), int)
self.assertEqual(body[0], 10)
return body[0]
class WithErrors(object):
@expose()
def divide_by_zero(self):
1 / 0
class MiscFunctions(object):
@expose(int)
@validate(int, int)
def multiply(self, a, b):
return a * b
class WSTestRoot(WSRoot):
argtypes = ArgTypes()
returntypes = ReturnTypes()
bodytypes = BodyTypes()
witherrors = WithErrors()
nested = NestedOuterApi()
misc = MiscFunctions()
def reset(self):
self._touched = False
@expose()
def touch(self):
self._touched = True
class ProtocolTestCase(unittest.TestCase):
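    # Shared protocol test suite; concrete subclasses set `protocol` (and
    # optionally `protocol_options`) and setUp wires them to a WSTestRoot app.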
protocol_options = {}
def assertTypedEquals(self, a, b, convert):
if isinstance(a, six.string_types):
a = convert(a)
if isinstance(b, six.string_types):
b = convert(b)
self.assertEqual(a, b)
def assertDateEquals(self, a, b):
self.assertTypedEquals(a, b, wsme.utils.parse_isodate)
def assertTimeEquals(self, a, b):
self.assertTypedEquals(a, b, wsme.utils.parse_isotime)
def assertDateTimeEquals(self, a, b):
self.assertTypedEquals(a, b, wsme.utils.parse_isodatetime)
def assertIntEquals(self, a, b):
self.assertTypedEquals(a, b, int)
def assertFloatEquals(self, a, b):
self.assertTypedEquals(a, b, float)
def assertDecimalEquals(self, a, b):
self.assertTypedEquals(a, b, decimal.Decimal)
def setUp(self):
if self.__class__.__name__ != 'ProtocolTestCase':
self.root = WSTestRoot()
self.root.getapi()
self.root.addprotocol(self.protocol, **self.protocol_options)
self.app = TestApp(self.root.wsgiapp())
def test_invalid_path(self):
try:
res = self.call('invalid_function')
print(res)
assert "No error raised"
except CallException as e:
self.assertEqual(e.faultcode, 'Client')
self.assertEqual(e.faultstring.lower(),
u('unknown function name: invalid_function'))
def test_serverside_error(self):
try:
res = self.call('witherrors/divide_by_zero')
print(res)
assert "No error raised"
except CallException as e:
self.assertEqual(e.faultcode, 'Server')
self.assertEqual(e.faultstring, zerodivisionerrormsg)
assert e.debuginfo is not None
def test_serverside_error_nodebug(self):
self.root._debug = False
try:
res = self.call('witherrors/divide_by_zero')
print(res)
assert "No error raised"
except CallException as e:
self.assertEqual(e.faultcode, 'Server')
self.assertEqual(e.faultstring, zerodivisionerrormsg)
assert e.debuginfo is None
def test_touch(self):
r = self.call('touch')
assert r is None, r
def test_return_bytes(self):
r = self.call('returntypes/getbytes', _rt=wsme.types.bytes)
self.assertEqual(r, b('astring'))
def test_return_text(self):
r = self.call('returntypes/gettext', _rt=wsme.types.text)
self.assertEqual(r, u('\xe3\x81\xae'))
def test_return_int(self):
r = self.call('returntypes/getint')
self.assertIntEquals(r, 2)
def test_return_float(self):
r = self.call('returntypes/getfloat')
self.assertFloatEquals(r, 3.14159265)
def test_return_decimal(self):
r = self.call('returntypes/getdecimal')
self.assertDecimalEquals(r, '3.14159265')
def test_return_bool_true(self):
r = self.call('returntypes/getbooltrue', _rt=bool)
assert r
def test_return_bool_false(self):
r = self.call('returntypes/getboolfalse', _rt=bool)
assert not r
def test_return_date(self):
r = self.call('returntypes/getdate')
self.assertDateEquals(r, datetime.date(1994, 1, 26))
def test_return_time(self):
r = self.call('returntypes/gettime')
self.assertTimeEquals(r, datetime.time(12))
def test_return_datetime(self):
r = self.call('returntypes/getdatetime')
self.assertDateTimeEquals(r, datetime.datetime(1994, 1, 26, 12))
def test_return_binary(self):
r = self.call('returntypes/getbinary', _rt=wsme.types.binary)
self.assertEqual(r, binarysample)
def test_return_nested(self):
r = self.call('returntypes/getnested', _rt=NestedOuter)
self.assertEqual(r, {'inner': {'aint': 0}})
def test_return_bytesarray(self):
r = self.call('returntypes/getbytesarray', _rt=[six.binary_type])
self.assertEqual(r, [b('A'), b('B'), b('C')])
def test_return_nestedarray(self):
r = self.call('returntypes/getnestedarray', _rt=[NestedOuter])
self.assertEqual(r, [{'inner': {'aint': 0}}, {'inner': {'aint': 0}}])
def test_return_nesteddict(self):
r = self.call('returntypes/getnesteddict',
_rt={wsme.types.bytes: NestedOuter})
self.assertEqual(r, {
b('a'): {'inner': {'aint': 0}},
b('b'): {'inner': {'aint': 0}}
})
def test_return_objectarrayattribute(self):
r = self.call('returntypes/getobjectarrayattribute', _rt=NestedOuter)
self.assertEqual(r, {
'inner': {'aint': 0},
'inner_array': [{'aint': 12}, {'aint': 13}]
})
def test_return_objectdictattribute(self):
r = self.call('returntypes/getobjectdictattribute', _rt=NestedOuter)
self.assertEqual(r, {
'inner': {'aint': 0},
'inner_dict': {
'12': {'aint': 12},
'13': {'aint': 13}
}
})
def test_return_enum(self):
r = self.call('returntypes/getenum', _rt=myenumtype)
self.assertEqual(r, b('v2'), r)
def test_return_namedattrsobj(self):
r = self.call('returntypes/getnamedattrsobj', _rt=NamedAttrsObject)
self.assertEqual(r, {'attr.1': 5, 'attr.2': 6})
def test_setbytes(self):
assert self.call('argtypes/setbytes', value=b('astring'),
_rt=wsme.types.bytes) == b('astring')
def test_settext(self):
assert self.call('argtypes/settext', value=u('\xe3\x81\xae'),
_rt=wsme.types.text) == u('\xe3\x81\xae')
def test_settext_empty(self):
assert self.call('argtypes/settext', value=u(''),
_rt=wsme.types.text) == u('')
def test_settext_none(self):
self.assertEqual(
None,
self.call('argtypes/settextnone', value=None, _rt=wsme.types.text)
)
def test_setint(self):
r = self.call('argtypes/setint', value=3, _rt=int)
self.assertEqual(r, 3)
def test_setfloat(self):
assert self.call('argtypes/setfloat', value=3.54,
_rt=float) == 3.54
def test_setbool_true(self):
r = self.call('argtypes/setbool', value=True, _rt=bool)
assert r
def test_setbool_false(self):
r = self.call('argtypes/setbool', value=False, _rt=bool)
assert not r
def test_setdecimal(self):
value = decimal.Decimal('3.14')
assert self.call('argtypes/setdecimal', value=value,
_rt=decimal.Decimal) == value
def test_setdate(self):
value = datetime.date(2008, 4, 6)
r = self.call('argtypes/setdate', value=value,
_rt=datetime.date)
self.assertEqual(r, value)
def test_settime(self):
value = datetime.time(12, 12, 15)
r = self.call('argtypes/settime', value=value,
_rt=datetime.time)
self.assertEqual(r, datetime.time(12, 12, 15))
def test_setdatetime(self):
value = datetime.datetime(2008, 4, 6, 12, 12, 15)
r = self.call('argtypes/setdatetime', value=value,
_rt=datetime.datetime)
self.assertEqual(r, datetime.datetime(2008, 4, 6, 12, 12, 15))
def test_setbinary(self):
value = binarysample
        r = self.call('argtypes/setbinary', value=(value, wsme.types.binary),
                      _rt=wsme.types.binary)
        print(r)
        self.assertEqual(r, value)
def test_setnested(self):
value = {'inner': {'aint': 54}}
r = self.call('argtypes/setnested',
value=(value, NestedOuter),
_rt=NestedOuter)
self.assertEqual(r, value)
def test_setnested_nullobj(self):
value = {'inner': None}
r = self.call(
'argtypes/setnested',
value=(value, NestedOuter),
_rt=NestedOuter
)
self.assertEqual(r, value)
def test_setbytesarray(self):
value = [b("1"), b("2"), b("three")]
r = self.call('argtypes/setbytesarray',
value=(value, [wsme.types.bytes]),
_rt=[wsme.types.bytes])
self.assertEqual(r, value)
def test_settextarray(self):
value = [u("1")]
r = self.call('argtypes/settextarray',
value=(value, [wsme.types.text]),
_rt=[wsme.types.text])
self.assertEqual(r, value)
def test_setdatetimearray(self):
value = [
datetime.datetime(2008, 3, 6, 12, 12, 15),
datetime.datetime(2008, 4, 6, 2, 12, 15),
]
r = self.call('argtypes/setdatetimearray',
value=(value, [datetime.datetime]),
_rt=[datetime.datetime])
self.assertEqual(r, value)
def test_setnestedarray(self):
value = [
{'inner': {'aint': 54}},
{'inner': {'aint': 55}},
]
r = self.call('argtypes/setnestedarray',
value=(value, [NestedOuter]),
_rt=[NestedOuter])
self.assertEqual(r, value)
def test_setnesteddict(self):
value = {
b('o1'): {'inner': {'aint': 54}},
b('o2'): {'inner': {'aint': 55}},
}
r = self.call('argtypes/setnesteddict',
value=(value, {six.binary_type: NestedOuter}),
_rt={six.binary_type: NestedOuter})
print(r)
self.assertEqual(r, value)
def test_setenum(self):
value = b('v1')
r = self.call('argtypes/setenum', value=value,
_rt=myenumtype)
self.assertEqual(r, value)
def test_setnamedattrsobj(self):
value = {'attr.1': 10, 'attr.2': 20}
r = self.call('argtypes/setnamedattrsobj',
value=(value, NamedAttrsObject),
_rt=NamedAttrsObject)
self.assertEqual(r, value)
def test_nested_api(self):
r = self.call('nested/inner/deepfunction', _rt=bool)
assert r is True
def test_missing_argument(self):
try:
r = self.call('argtypes/setdatetime')
print(r)
assert "No error raised"
except CallException as e:
self.assertEqual(e.faultcode, 'Client')
self.assertEqual(e.faultstring, u('Missing argument: "value"'))
def test_misc_multiply(self):
self.assertEqual(self.call('misc/multiply', a=5, b=2, _rt=int), 10)
def test_html_format(self):
res = self.call('argtypes/setdatetime', _accept="text/html",
_no_result_decode=True)
self.assertEqual(res.content_type, 'text/html')
class RestOnlyProtocolTestCase(ProtocolTestCase):
def test_body_list(self):
r = self.call('bodytypes/setlist', body=([10], [int]), _rt=int)
self.assertEqual(r, 10)
def test_body_dict(self):
r = self.call('bodytypes/setdict',
body=({'test': 10}, {wsme.types.text: int}),
_rt=int)
self.assertEqual(r, 10)
| 29.178918
| 78
| 0.596445
|
import unittest
import warnings
import datetime
import decimal
import six
from six import u, b
from webtest import TestApp
from wsme import WSRoot, Unset
from wsme import expose, validate
import wsme.types
import wsme.utils
warnings.filterwarnings('ignore', module='webob.dec')
binarysample = b('\x00\xff\x43')
try:
1 / 0
except ZeroDivisionError as e:
zerodivisionerrormsg = str(e)
class CallException(RuntimeError):
def __init__(self, faultcode, faultstring, debuginfo):
self.faultcode = faultcode
self.faultstring = faultstring
self.debuginfo = debuginfo
def __str__(self):
return 'faultcode=%s, faultstring=%s, debuginfo=%s' % (
self.faultcode, self.faultstring, self.debuginfo
)
myenumtype = wsme.types.Enum(wsme.types.bytes, 'v1', 'v2')
class NestedInner(object):
aint = int
def __init__(self, aint=None):
self.aint = aint
class NestedOuter(object):
inner = NestedInner
inner_array = wsme.types.wsattr([NestedInner])
inner_dict = {wsme.types.text: NestedInner}
def __init__(self):
self.inner = NestedInner(0)
class NamedAttrsObject(object):
def __init__(self, v1=Unset, v2=Unset):
self.attr_1 = v1
self.attr_2 = v2
attr_1 = wsme.types.wsattr(int, name='attr.1')
attr_2 = wsme.types.wsattr(int, name='attr.2')
class CustomObject(object):
aint = int
name = wsme.types.text
class ExtendedInt(wsme.types.UserType):
basetype = int
name = "Extended integer"
class NestedInnerApi(object):
@expose(bool)
def deepfunction(self):
return True
class NestedOuterApi(object):
inner = NestedInnerApi()
class ReturnTypes(object):
@expose(wsme.types.bytes)
def getbytes(self):
return b("astring")
@expose(wsme.types.text)
def gettext(self):
return u('\xe3\x81\xae')
@expose(int)
def getint(self):
return 2
@expose(float)
def getfloat(self):
return 3.14159265
@expose(decimal.Decimal)
def getdecimal(self):
return decimal.Decimal('3.14159265')
@expose(datetime.date)
def getdate(self):
return datetime.date(1994, 1, 26)
@expose(bool)
def getbooltrue(self):
return True
@expose(bool)
def getboolfalse(self):
return False
@expose(datetime.time)
def gettime(self):
return datetime.time(12, 0, 0)
@expose(datetime.datetime)
def getdatetime(self):
return datetime.datetime(1994, 1, 26, 12, 0, 0)
@expose(wsme.types.binary)
def getbinary(self):
return binarysample
@expose(NestedOuter)
def getnested(self):
n = NestedOuter()
return n
@expose([wsme.types.bytes])
def getbytesarray(self):
return [b("A"), b("B"), b("C")]
@expose([NestedOuter])
def getnestedarray(self):
return [NestedOuter(), NestedOuter()]
@expose({wsme.types.bytes: NestedOuter})
def getnesteddict(self):
return {b('a'): NestedOuter(), b('b'): NestedOuter()}
@expose(NestedOuter)
def getobjectarrayattribute(self):
obj = NestedOuter()
obj.inner_array = [NestedInner(12), NestedInner(13)]
return obj
@expose(NestedOuter)
def getobjectdictattribute(self):
obj = NestedOuter()
obj.inner_dict = {
'12': NestedInner(12),
'13': NestedInner(13)
}
return obj
@expose(myenumtype)
def getenum(self):
return b('v2')
@expose(NamedAttrsObject)
def getnamedattrsobj(self):
return NamedAttrsObject(5, 6)
class ArgTypes(object):
def assertEqual(self, a, b):
if not (a == b):
raise AssertionError('%s != %s' % (a, b))
def assertIsInstance(self, value, v_type):
assert isinstance(value, v_type), ("%s is not instance of type %s" %
(value, v_type))
@expose(wsme.types.bytes)
@validate(wsme.types.bytes)
def setbytes(self, value):
print(repr(value))
self.assertEqual(type(value), wsme.types.bytes)
return value
@expose(wsme.types.text)
@validate(wsme.types.text)
def settext(self, value):
print(repr(value))
self.assertEqual(type(value), wsme.types.text)
return value
@expose(wsme.types.text)
@validate(wsme.types.text)
def settextnone(self, value):
print(repr(value))
self.assertEqual(type(value), type(None))
return value
@expose(bool)
@validate(bool)
def setbool(self, value):
print(repr(value))
self.assertEqual(type(value), bool)
return value
@expose(int)
@validate(int)
def setint(self, value):
print(repr(value))
self.assertEqual(type(value), int)
return value
@expose(float)
@validate(float)
def setfloat(self, value):
print(repr(value))
self.assertEqual(type(value), float)
return value
@expose(decimal.Decimal)
@validate(decimal.Decimal)
def setdecimal(self, value):
print(repr(value))
self.assertEqual(type(value), decimal.Decimal)
return value
@expose(datetime.date)
@validate(datetime.date)
def setdate(self, value):
print(repr(value))
self.assertEqual(type(value), datetime.date)
return value
@expose(datetime.time)
@validate(datetime.time)
def settime(self, value):
print(repr(value))
self.assertEqual(type(value), datetime.time)
return value
@expose(datetime.datetime)
@validate(datetime.datetime)
def setdatetime(self, value):
print(repr(value))
self.assertEqual(type(value), datetime.datetime)
return value
@expose(wsme.types.binary)
@validate(wsme.types.binary)
def setbinary(self, value):
print(repr(value))
self.assertEqual(type(value), six.binary_type)
return value
@expose([wsme.types.bytes])
@validate([wsme.types.bytes])
def setbytesarray(self, value):
print(repr(value))
self.assertEqual(type(value), list)
self.assertEqual(type(value[0]), wsme.types.bytes)
return value
@expose([wsme.types.text])
@validate([wsme.types.text])
def settextarray(self, value):
print(repr(value))
self.assertEqual(type(value), list)
self.assertEqual(type(value[0]), wsme.types.text)
return value
@expose([datetime.datetime])
@validate([datetime.datetime])
def setdatetimearray(self, value):
print(repr(value))
self.assertEqual(type(value), list)
self.assertEqual(type(value[0]), datetime.datetime)
return value
@expose(NestedOuter)
@validate(NestedOuter)
def setnested(self, value):
print(repr(value))
self.assertEqual(type(value), NestedOuter)
return value
@expose([NestedOuter])
@validate([NestedOuter])
def setnestedarray(self, value):
print(repr(value))
self.assertEqual(type(value), list)
self.assertEqual(type(value[0]), NestedOuter)
return value
@expose({wsme.types.bytes: NestedOuter})
@validate({wsme.types.bytes: NestedOuter})
def setnesteddict(self, value):
print(repr(value))
self.assertEqual(type(value), dict)
self.assertEqual(type(list(value.keys())[0]), wsme.types.bytes)
self.assertEqual(type(list(value.values())[0]), NestedOuter)
return value
@expose(myenumtype)
@validate(myenumtype)
def setenum(self, value):
print(value)
self.assertEqual(type(value), wsme.types.bytes)
return value
@expose(NamedAttrsObject)
@validate(NamedAttrsObject)
def setnamedattrsobj(self, value):
print(value)
self.assertEqual(type(value), NamedAttrsObject)
self.assertEqual(value.attr_1, 10)
self.assertEqual(value.attr_2, 20)
return value
@expose(CustomObject)
@validate(CustomObject)
def setcustomobject(self, value):
self.assertIsInstance(value, CustomObject)
self.assertIsInstance(value.name, wsme.types.text)
self.assertIsInstance(value.aint, int)
return value
@expose(ExtendedInt())
@validate(ExtendedInt())
def setextendedint(self, value):
self.assertEqual(isinstance(value, ExtendedInt.basetype), True)
return value
class BodyTypes(object):
def assertEqual(self, a, b):
if not (a == b):
raise AssertionError('%s != %s' % (a, b))
@expose(int, body={wsme.types.text: int})
@validate(int)
def setdict(self, body):
print(body)
self.assertEqual(type(body), dict)
self.assertEqual(type(body['test']), int)
self.assertEqual(body['test'], 10)
return body['test']
@expose(int, body=[int])
@validate(int)
def setlist(self, body):
print(body)
self.assertEqual(type(body), list)
self.assertEqual(type(body[0]), int)
self.assertEqual(body[0], 10)
return body[0]
class WithErrors(object):
@expose()
def divide_by_zero(self):
1 / 0
class MiscFunctions(object):
@expose(int)
@validate(int, int)
def multiply(self, a, b):
return a * b
class WSTestRoot(WSRoot):
argtypes = ArgTypes()
returntypes = ReturnTypes()
bodytypes = BodyTypes()
witherrors = WithErrors()
nested = NestedOuterApi()
misc = MiscFunctions()
def reset(self):
self._touched = False
@expose()
def touch(self):
self._touched = True
class ProtocolTestCase(unittest.TestCase):
protocol_options = {}
def assertTypedEquals(self, a, b, convert):
if isinstance(a, six.string_types):
a = convert(a)
if isinstance(b, six.string_types):
b = convert(b)
self.assertEqual(a, b)
def assertDateEquals(self, a, b):
self.assertTypedEquals(a, b, wsme.utils.parse_isodate)
def assertTimeEquals(self, a, b):
self.assertTypedEquals(a, b, wsme.utils.parse_isotime)
def assertDateTimeEquals(self, a, b):
self.assertTypedEquals(a, b, wsme.utils.parse_isodatetime)
def assertIntEquals(self, a, b):
self.assertTypedEquals(a, b, int)
def assertFloatEquals(self, a, b):
self.assertTypedEquals(a, b, float)
def assertDecimalEquals(self, a, b):
self.assertTypedEquals(a, b, decimal.Decimal)
def setUp(self):
if self.__class__.__name__ != 'ProtocolTestCase':
self.root = WSTestRoot()
self.root.getapi()
self.root.addprotocol(self.protocol, **self.protocol_options)
self.app = TestApp(self.root.wsgiapp())
def test_invalid_path(self):
try:
res = self.call('invalid_function')
print(res)
assert "No error raised"
except CallException as e:
self.assertEqual(e.faultcode, 'Client')
self.assertEqual(e.faultstring.lower(),
u('unknown function name: invalid_function'))
def test_serverside_error(self):
try:
res = self.call('witherrors/divide_by_zero')
print(res)
assert "No error raised"
except CallException as e:
self.assertEqual(e.faultcode, 'Server')
self.assertEqual(e.faultstring, zerodivisionerrormsg)
assert e.debuginfo is not None
def test_serverside_error_nodebug(self):
self.root._debug = False
try:
res = self.call('witherrors/divide_by_zero')
print(res)
assert "No error raised"
except CallException as e:
self.assertEqual(e.faultcode, 'Server')
self.assertEqual(e.faultstring, zerodivisionerrormsg)
assert e.debuginfo is None
def test_touch(self):
r = self.call('touch')
assert r is None, r
def test_return_bytes(self):
r = self.call('returntypes/getbytes', _rt=wsme.types.bytes)
self.assertEqual(r, b('astring'))
def test_return_text(self):
r = self.call('returntypes/gettext', _rt=wsme.types.text)
self.assertEqual(r, u('\xe3\x81\xae'))
def test_return_int(self):
r = self.call('returntypes/getint')
self.assertIntEquals(r, 2)
def test_return_float(self):
r = self.call('returntypes/getfloat')
self.assertFloatEquals(r, 3.14159265)
def test_return_decimal(self):
r = self.call('returntypes/getdecimal')
self.assertDecimalEquals(r, '3.14159265')
def test_return_bool_true(self):
r = self.call('returntypes/getbooltrue', _rt=bool)
assert r
def test_return_bool_false(self):
r = self.call('returntypes/getboolfalse', _rt=bool)
assert not r
def test_return_date(self):
r = self.call('returntypes/getdate')
self.assertDateEquals(r, datetime.date(1994, 1, 26))
def test_return_time(self):
r = self.call('returntypes/gettime')
self.assertTimeEquals(r, datetime.time(12))
def test_return_datetime(self):
r = self.call('returntypes/getdatetime')
self.assertDateTimeEquals(r, datetime.datetime(1994, 1, 26, 12))
def test_return_binary(self):
r = self.call('returntypes/getbinary', _rt=wsme.types.binary)
self.assertEqual(r, binarysample)
def test_return_nested(self):
r = self.call('returntypes/getnested', _rt=NestedOuter)
self.assertEqual(r, {'inner': {'aint': 0}})
def test_return_bytesarray(self):
r = self.call('returntypes/getbytesarray', _rt=[six.binary_type])
self.assertEqual(r, [b('A'), b('B'), b('C')])
def test_return_nestedarray(self):
r = self.call('returntypes/getnestedarray', _rt=[NestedOuter])
self.assertEqual(r, [{'inner': {'aint': 0}}, {'inner': {'aint': 0}}])
def test_return_nesteddict(self):
r = self.call('returntypes/getnesteddict',
_rt={wsme.types.bytes: NestedOuter})
self.assertEqual(r, {
b('a'): {'inner': {'aint': 0}},
b('b'): {'inner': {'aint': 0}}
})
def test_return_objectarrayattribute(self):
r = self.call('returntypes/getobjectarrayattribute', _rt=NestedOuter)
self.assertEqual(r, {
'inner': {'aint': 0},
'inner_array': [{'aint': 12}, {'aint': 13}]
})
def test_return_objectdictattribute(self):
r = self.call('returntypes/getobjectdictattribute', _rt=NestedOuter)
self.assertEqual(r, {
'inner': {'aint': 0},
'inner_dict': {
'12': {'aint': 12},
'13': {'aint': 13}
}
})
def test_return_enum(self):
r = self.call('returntypes/getenum', _rt=myenumtype)
self.assertEqual(r, b('v2'), r)
def test_return_namedattrsobj(self):
r = self.call('returntypes/getnamedattrsobj', _rt=NamedAttrsObject)
self.assertEqual(r, {'attr.1': 5, 'attr.2': 6})
def test_setbytes(self):
assert self.call('argtypes/setbytes', value=b('astring'),
_rt=wsme.types.bytes) == b('astring')
def test_settext(self):
assert self.call('argtypes/settext', value=u('\xe3\x81\xae'),
_rt=wsme.types.text) == u('\xe3\x81\xae')
def test_settext_empty(self):
assert self.call('argtypes/settext', value=u(''),
_rt=wsme.types.text) == u('')
def test_settext_none(self):
self.assertEqual(
None,
self.call('argtypes/settextnone', value=None, _rt=wsme.types.text)
)
def test_setint(self):
r = self.call('argtypes/setint', value=3, _rt=int)
self.assertEqual(r, 3)
def test_setfloat(self):
assert self.call('argtypes/setfloat', value=3.54,
_rt=float) == 3.54
def test_setbool_true(self):
r = self.call('argtypes/setbool', value=True, _rt=bool)
assert r
def test_setbool_false(self):
r = self.call('argtypes/setbool', value=False, _rt=bool)
assert not r
def test_setdecimal(self):
value = decimal.Decimal('3.14')
assert self.call('argtypes/setdecimal', value=value,
_rt=decimal.Decimal) == value
def test_setdate(self):
value = datetime.date(2008, 4, 6)
r = self.call('argtypes/setdate', value=value,
_rt=datetime.date)
self.assertEqual(r, value)
def test_settime(self):
value = datetime.time(12, 12, 15)
r = self.call('argtypes/settime', value=value,
_rt=datetime.time)
self.assertEqual(r, datetime.time(12, 12, 15))
def test_setdatetime(self):
value = datetime.datetime(2008, 4, 6, 12, 12, 15)
r = self.call('argtypes/setdatetime', value=value,
_rt=datetime.datetime)
self.assertEqual(r, datetime.datetime(2008, 4, 6, 12, 12, 15))
def test_setbinary(self):
value = binarysample
        r = self.call('argtypes/setbinary', value=(value, wsme.types.binary),
                      _rt=wsme.types.binary)
        print(r)
        self.assertEqual(r, value)
def test_setnested(self):
value = {'inner': {'aint': 54}}
r = self.call('argtypes/setnested',
value=(value, NestedOuter),
_rt=NestedOuter)
self.assertEqual(r, value)
def test_setnested_nullobj(self):
value = {'inner': None}
r = self.call(
'argtypes/setnested',
value=(value, NestedOuter),
_rt=NestedOuter
)
self.assertEqual(r, value)
def test_setbytesarray(self):
value = [b("1"), b("2"), b("three")]
r = self.call('argtypes/setbytesarray',
value=(value, [wsme.types.bytes]),
_rt=[wsme.types.bytes])
self.assertEqual(r, value)
def test_settextarray(self):
value = [u("1")]
r = self.call('argtypes/settextarray',
value=(value, [wsme.types.text]),
_rt=[wsme.types.text])
self.assertEqual(r, value)
def test_setdatetimearray(self):
value = [
datetime.datetime(2008, 3, 6, 12, 12, 15),
datetime.datetime(2008, 4, 6, 2, 12, 15),
]
r = self.call('argtypes/setdatetimearray',
value=(value, [datetime.datetime]),
_rt=[datetime.datetime])
self.assertEqual(r, value)
def test_setnestedarray(self):
value = [
{'inner': {'aint': 54}},
{'inner': {'aint': 55}},
]
r = self.call('argtypes/setnestedarray',
value=(value, [NestedOuter]),
_rt=[NestedOuter])
self.assertEqual(r, value)
def test_setnesteddict(self):
value = {
b('o1'): {'inner': {'aint': 54}},
b('o2'): {'inner': {'aint': 55}},
}
r = self.call('argtypes/setnesteddict',
value=(value, {six.binary_type: NestedOuter}),
_rt={six.binary_type: NestedOuter})
print(r)
self.assertEqual(r, value)
def test_setenum(self):
value = b('v1')
r = self.call('argtypes/setenum', value=value,
_rt=myenumtype)
self.assertEqual(r, value)
def test_setnamedattrsobj(self):
value = {'attr.1': 10, 'attr.2': 20}
r = self.call('argtypes/setnamedattrsobj',
value=(value, NamedAttrsObject),
_rt=NamedAttrsObject)
self.assertEqual(r, value)
def test_nested_api(self):
r = self.call('nested/inner/deepfunction', _rt=bool)
assert r is True
def test_missing_argument(self):
try:
r = self.call('argtypes/setdatetime')
print(r)
assert "No error raised"
except CallException as e:
self.assertEqual(e.faultcode, 'Client')
self.assertEqual(e.faultstring, u('Missing argument: "value"'))
def test_misc_multiply(self):
self.assertEqual(self.call('misc/multiply', a=5, b=2, _rt=int), 10)
def test_html_format(self):
res = self.call('argtypes/setdatetime', _accept="text/html",
_no_result_decode=True)
self.assertEqual(res.content_type, 'text/html')
class RestOnlyProtocolTestCase(ProtocolTestCase):
def test_body_list(self):
r = self.call('bodytypes/setlist', body=([10], [int]), _rt=int)
self.assertEqual(r, 10)
def test_body_dict(self):
r = self.call('bodytypes/setdict',
body=({'test': 10}, {wsme.types.text: int}),
_rt=int)
self.assertEqual(r, 10)
| true
| true
|
1c41f97d960e0a2706e8365e7924db3b951a7459
| 688
|
py
|
Python
|
test/example_thirdparty/urls.py
|
concentricsky/pybbm
|
90147b74cff4740e6580a94b073f5eb576a93b4f
|
[
"BSD-2-Clause"
] | null | null | null |
test/example_thirdparty/urls.py
|
concentricsky/pybbm
|
90147b74cff4740e6580a94b073f5eb576a93b4f
|
[
"BSD-2-Clause"
] | null | null | null |
test/example_thirdparty/urls.py
|
concentricsky/pybbm
|
90147b74cff4740e6580a94b073f5eb576a93b4f
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from account.views import ChangePasswordView, SignupView, LoginView
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('account.urls')),
# aliases to match original django-registration urls
url(r"^accounts/password/$", ChangePasswordView.as_view(), name="auth_password_change"),
url(r"^accounts/signup/$", SignupView.as_view(), name="registration_register"),
url(r"^accounts/login/$", LoginView.as_view(), name="auth_login"),
url(r'^', include('pybb.urls', namespace='pybb')),
)
| 34.4
| 92
| 0.706395
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from account.views import ChangePasswordView, SignupView, LoginView
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('account.urls')),
url(r"^accounts/password/$", ChangePasswordView.as_view(), name="auth_password_change"),
url(r"^accounts/signup/$", SignupView.as_view(), name="registration_register"),
url(r"^accounts/login/$", LoginView.as_view(), name="auth_login"),
url(r'^', include('pybb.urls', namespace='pybb')),
)
| true
| true
|
1c41f99ef9bab31faa0ff0a5649e4712bb354f1f
| 3,347
|
py
|
Python
|
crafter/__init__.py
|
ahn-ml/crafter
|
1ff80195a6a60d97b3c6e5d88d1d0adc195a99a6
|
[
"MIT"
] | null | null | null |
crafter/__init__.py
|
ahn-ml/crafter
|
1ff80195a6a60d97b3c6e5d88d1d0adc195a99a6
|
[
"MIT"
] | null | null | null |
crafter/__init__.py
|
ahn-ml/crafter
|
1ff80195a6a60d97b3c6e5d88d1d0adc195a99a6
|
[
"MIT"
] | 1
|
2022-03-08T06:06:03.000Z
|
2022-03-08T06:06:03.000Z
|
from .env import Env
from .recorder import Recorder
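# Register Gym environment IDs (reward and no-reward variants, plus per-level
# variants) so the environments can be created via gym.make(); registration is
# skipped entirely when gym is not installed (see the ImportError handler below).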
try:
import gym
gym.register(
id="CrafterReward-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True},
)
gym.register(
id="CrafterNoReward-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": False},
)
gym.register(
id="CrafterReward-l0-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 0},
)
gym.register(
id="CrafterNoReward-l0-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": False, "level": 0},
)
gym.register(
id="CrafterReward-l1-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 1},
)
gym.register(
id="CrafterNoReward-l1-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": False, "level": 1},
)
gym.register(
id="CrafterReward-l2-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 2},
)
gym.register(
id="CrafterNoReward-l2-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": False, "level": 2},
)
gym.register(
id="CrafterReward-l3-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 3},
)
gym.register(
id="CrafterNoReward-l3-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": False, "level": 3},
)
gym.register(
id="CrafterReward-l4-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 4},
)
gym.register(
id="CrafterNoReward-l4-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": False, "level": 4},
)
gym.register(
id="CrafterReward-l5-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 5},
)
gym.register(
id="CrafterNoReward-l5-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": False, "level": 5},
)
gym.register(
id="CrafterReward-l6-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 7},
)
gym.register(
id="CrafterReward-l8-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 8},
)
gym.register(
id="CrafterReward-l9-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 9},
)
gym.register(
id="CrafterReward-l10-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 10},
)
gym.register(
id="CrafterReward-l11-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 11},
)
except ImportError:
pass
| 27.211382
| 45
| 0.564386
|
from .env import Env
from .recorder import Recorder
try:
import gym
gym.register(
id="CrafterReward-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True},
)
gym.register(
id="CrafterNoReward-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": False},
)
gym.register(
id="CrafterReward-l0-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 0},
)
gym.register(
id="CrafterNoReward-l0-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": False, "level": 0},
)
gym.register(
id="CrafterReward-l1-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 1},
)
gym.register(
id="CrafterNoReward-l1-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": False, "level": 1},
)
gym.register(
id="CrafterReward-l2-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 2},
)
gym.register(
id="CrafterNoReward-l2-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": False, "level": 2},
)
gym.register(
id="CrafterReward-l3-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 3},
)
gym.register(
id="CrafterNoReward-l3-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": False, "level": 3},
)
gym.register(
id="CrafterReward-l4-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 4},
)
gym.register(
id="CrafterNoReward-l4-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": False, "level": 4},
)
gym.register(
id="CrafterReward-l5-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 5},
)
gym.register(
id="CrafterNoReward-l5-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": False, "level": 5},
)
gym.register(
id="CrafterReward-l6-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 7},
)
gym.register(
id="CrafterReward-l8-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 8},
)
gym.register(
id="CrafterReward-l9-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 9},
)
gym.register(
id="CrafterReward-l10-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 10},
)
gym.register(
id="CrafterReward-l11-v1",
entry_point="crafter:Env",
max_episode_steps=10000,
kwargs={"reward": True, "level": 11},
)
except ImportError:
pass
| true
| true
|
1c41f9f333035b2a106ed7f03155e9cf748d4f20
| 958
|
py
|
Python
|
prediction/bimod/BACs_and_TERF/C06_featurize.py
|
dylanhross/dmccs
|
8b403a90b6cb7edd9d7abc172462e9d9b62b5dd3
|
[
"MIT"
] | 3
|
2021-05-17T20:19:41.000Z
|
2022-02-01T21:43:30.000Z
|
prediction/bimod/BACs_and_TERF/C06_featurize.py
|
dylanhross/dmccs
|
8b403a90b6cb7edd9d7abc172462e9d9b62b5dd3
|
[
"MIT"
] | null | null | null |
prediction/bimod/BACs_and_TERF/C06_featurize.py
|
dylanhross/dmccs
|
8b403a90b6cb7edd9d7abc172462e9d9b62b5dd3
|
[
"MIT"
] | null | null | null |
#!/usr/local/Cellar/python@3.9/3.9.1_6/bin/python3
"""
"""
from pickle import load as pload
from numpy import savetxt
import sys
#from DmimData.data import DMD
from helpers import featurize
def main():
""" main execution sequence """
n = 5
smis = ['C[N+](C)(C)CCCCCC' for _ in range(n)]
structures = []
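    # load the n pre-computed structure files for this compound (C06_c1 .. C06_c5)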
for i in range(1, n + 1):
fname = 'C06_c{}.out.mfj.xyzmq'.format(i)
with open(fname, 'r') as f:
structures.append(f.read())
X_cust = featurize(smis, structures, ['hac', 'c', 'adb', 'asv', 'ctv', 'hbam', 'hbd'], ['pmi1', 'pmi2', 'pmi3', 'rmd02'])
X_mqn = featurize(smis, structures, 'all', [])
X_md3d = featurize(smis, structures, [], 'all')
X_comb = featurize(smis, structures, 'all', 'all')
savetxt('C06_X_CUST.txt', X_cust)
savetxt('C06_X_MQN.txt', X_mqn)
savetxt('C06_X_MD3D.txt', X_md3d)
savetxt('C06_X_COMB.txt', X_comb)
if __name__ == '__main__':
main()
| 25.210526
| 125
| 0.605428
|
from pickle import load as pload
from numpy import savetxt
import sys
from helpers import featurize
def main():
n = 5
smis = ['C[N+](C)(C)CCCCCC' for _ in range(n)]
structures = []
for i in range(1, n + 1):
fname = 'C06_c{}.out.mfj.xyzmq'.format(i)
with open(fname, 'r') as f:
structures.append(f.read())
X_cust = featurize(smis, structures, ['hac', 'c', 'adb', 'asv', 'ctv', 'hbam', 'hbd'], ['pmi1', 'pmi2', 'pmi3', 'rmd02'])
X_mqn = featurize(smis, structures, 'all', [])
X_md3d = featurize(smis, structures, [], 'all')
X_comb = featurize(smis, structures, 'all', 'all')
savetxt('C06_X_CUST.txt', X_cust)
savetxt('C06_X_MQN.txt', X_mqn)
savetxt('C06_X_MD3D.txt', X_md3d)
savetxt('C06_X_COMB.txt', X_comb)
if __name__ == '__main__':
main()
| true
| true
|
1c41faae474a370da44723347057b4d2fa3cd34b
| 517
|
py
|
Python
|
app/marketing/migrations/0002_interestedperson.py
|
jasonrhaas/jumbobrew
|
b36b3404ee3db7c67ef53b7ab902743923cb6ffe
|
[
"MIT"
] | null | null | null |
app/marketing/migrations/0002_interestedperson.py
|
jasonrhaas/jumbobrew
|
b36b3404ee3db7c67ef53b7ab902743923cb6ffe
|
[
"MIT"
] | null | null | null |
app/marketing/migrations/0002_interestedperson.py
|
jasonrhaas/jumbobrew
|
b36b3404ee3db7c67ef53b7ab902743923cb6ffe
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.2 on 2018-02-04 20:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('marketing', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='InterestedPerson',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('email', models.EmailField(max_length=100)),
],
),
]
| 24.619048
| 114
| 0.582205
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('marketing', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='InterestedPerson',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('email', models.EmailField(max_length=100)),
],
),
]
| true
| true
|
1c41fd424828f0271d1d8b5dc77cbb913337a59f
| 405
|
py
|
Python
|
django/contrib/auth/migrations/0003_alter_user_email_max_length.py
|
Yoann-Vie/esgi-hearthstone
|
115d03426c7e8e80d89883b78ac72114c29bed12
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
django/contrib/auth/migrations/0003_alter_user_email_max_length.py
|
Yoann-Vie/esgi-hearthstone
|
115d03426c7e8e80d89883b78ac72114c29bed12
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
django/contrib/auth/migrations/0003_alter_user_email_max_length.py
|
Yoann-Vie/esgi-hearthstone
|
115d03426c7e8e80d89883b78ac72114c29bed12
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('auth', '0002_alter_permission_name_max_length'),
]
operations = [
migrations.AlterField(
model_name='user',
name='email',
field=models.EmailField(max_length=254, verbose_name='email address', blank=True),
),
]
| 23.823529
| 95
| 0.592593
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('auth', '0002_alter_permission_name_max_length'),
]
operations = [
migrations.AlterField(
model_name='user',
name='email',
field=models.EmailField(max_length=254, verbose_name='email address', blank=True),
),
]
| true
| true
|
1c41fd47a3343803bbe9b8b2460d63fc7e24a07c
| 2,484
|
py
|
Python
|
tests/algos/torch/test_awac_impl.py
|
meokz/d3rlpy
|
40504e2d8b424547558ab82786c523e8f4626a82
|
[
"MIT"
] | 2
|
2021-04-21T08:19:29.000Z
|
2021-05-17T09:08:06.000Z
|
tests/algos/torch/test_awac_impl.py
|
meokz/d3rlpy
|
40504e2d8b424547558ab82786c523e8f4626a82
|
[
"MIT"
] | null | null | null |
tests/algos/torch/test_awac_impl.py
|
meokz/d3rlpy
|
40504e2d8b424547558ab82786c523e8f4626a82
|
[
"MIT"
] | null | null | null |
import pytest
from d3rlpy.algos.torch.awac_impl import AWACImpl
from d3rlpy.augmentation import AugmentationPipeline
from tests.algos.algo_test import torch_impl_tester, DummyScaler
@pytest.mark.parametrize('observation_shape', [(100, ), (4, 84, 84)])
@pytest.mark.parametrize('action_size', [2])
@pytest.mark.parametrize('actor_learning_rate', [1e-3])
@pytest.mark.parametrize('critic_learning_rate', [1e-3])
@pytest.mark.parametrize('gamma', [0.99])
@pytest.mark.parametrize('tau', [0.05])
@pytest.mark.parametrize('lam', [1.0])
@pytest.mark.parametrize('n_action_samples', [10])
@pytest.mark.parametrize('max_weight', [20.0])
@pytest.mark.parametrize('actor_weight_decay', [1e-4])
@pytest.mark.parametrize('n_critics', [1])
@pytest.mark.parametrize('bootstrap', [False])
@pytest.mark.parametrize('share_encoder', [True])
@pytest.mark.parametrize('eps', [1e-4])
@pytest.mark.parametrize('use_batch_norm', [True, False])
@pytest.mark.parametrize('q_func_type', ['mean', 'qr', 'iqn', 'fqf'])
@pytest.mark.parametrize('scaler', [None, DummyScaler()])
@pytest.mark.parametrize('augmentation', [AugmentationPipeline()])
@pytest.mark.parametrize('n_augmentations', [1])
@pytest.mark.parametrize('encoder_params', [{}])
def test_awac_impl(observation_shape, action_size, actor_learning_rate,
critic_learning_rate, gamma, tau, lam, n_action_samples,
max_weight, actor_weight_decay, n_critics, bootstrap,
share_encoder, eps, use_batch_norm, q_func_type, scaler,
augmentation, n_augmentations, encoder_params):
impl = AWACImpl(observation_shape,
action_size,
actor_learning_rate,
critic_learning_rate,
gamma,
tau,
lam,
n_action_samples,
max_weight,
actor_weight_decay,
n_critics,
bootstrap,
share_encoder,
eps,
use_batch_norm,
q_func_type=q_func_type,
use_gpu=False,
scaler=scaler,
augmentation=augmentation,
n_augmentations=n_augmentations,
encoder_params=encoder_params)
torch_impl_tester(impl,
discrete=False,
deterministic_best_action=q_func_type != 'iqn')
| 43.578947
| 75
| 0.61876
|
import pytest
from d3rlpy.algos.torch.awac_impl import AWACImpl
from d3rlpy.augmentation import AugmentationPipeline
from tests.algos.algo_test import torch_impl_tester, DummyScaler
@pytest.mark.parametrize('observation_shape', [(100, ), (4, 84, 84)])
@pytest.mark.parametrize('action_size', [2])
@pytest.mark.parametrize('actor_learning_rate', [1e-3])
@pytest.mark.parametrize('critic_learning_rate', [1e-3])
@pytest.mark.parametrize('gamma', [0.99])
@pytest.mark.parametrize('tau', [0.05])
@pytest.mark.parametrize('lam', [1.0])
@pytest.mark.parametrize('n_action_samples', [10])
@pytest.mark.parametrize('max_weight', [20.0])
@pytest.mark.parametrize('actor_weight_decay', [1e-4])
@pytest.mark.parametrize('n_critics', [1])
@pytest.mark.parametrize('bootstrap', [False])
@pytest.mark.parametrize('share_encoder', [True])
@pytest.mark.parametrize('eps', [1e-4])
@pytest.mark.parametrize('use_batch_norm', [True, False])
@pytest.mark.parametrize('q_func_type', ['mean', 'qr', 'iqn', 'fqf'])
@pytest.mark.parametrize('scaler', [None, DummyScaler()])
@pytest.mark.parametrize('augmentation', [AugmentationPipeline()])
@pytest.mark.parametrize('n_augmentations', [1])
@pytest.mark.parametrize('encoder_params', [{}])
def test_awac_impl(observation_shape, action_size, actor_learning_rate,
critic_learning_rate, gamma, tau, lam, n_action_samples,
max_weight, actor_weight_decay, n_critics, bootstrap,
share_encoder, eps, use_batch_norm, q_func_type, scaler,
augmentation, n_augmentations, encoder_params):
impl = AWACImpl(observation_shape,
action_size,
actor_learning_rate,
critic_learning_rate,
gamma,
tau,
lam,
n_action_samples,
max_weight,
actor_weight_decay,
n_critics,
bootstrap,
share_encoder,
eps,
use_batch_norm,
q_func_type=q_func_type,
use_gpu=False,
scaler=scaler,
augmentation=augmentation,
n_augmentations=n_augmentations,
encoder_params=encoder_params)
torch_impl_tester(impl,
discrete=False,
deterministic_best_action=q_func_type != 'iqn')
| true
| true
|
1c41fd6ec43de71dd5eba76cd30cb0585524749e
| 4,980
|
py
|
Python
|
tests/test_backward_compat.py
|
gnomonsis/nncf_pytorch
|
9fc4a92b5cb1b2c240e633c4ffa69b4fae1917fb
|
[
"Apache-2.0"
] | null | null | null |
tests/test_backward_compat.py
|
gnomonsis/nncf_pytorch
|
9fc4a92b5cb1b2c240e633c4ffa69b4fae1917fb
|
[
"Apache-2.0"
] | 4
|
2020-07-17T11:12:35.000Z
|
2021-12-15T15:20:24.000Z
|
tests/test_backward_compat.py
|
gnomonsis/nncf_pytorch
|
9fc4a92b5cb1b2c240e633c4ffa69b4fae1917fb
|
[
"Apache-2.0"
] | null | null | null |
"""
Copyright (c) 2019-2020 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import pytest
import torch
from examples.common.distributed import configure_distributed
from examples.common.execution import ExecutionMode, prepare_model_for_execution, get_device
from examples.common.model_loader import load_model
from examples.common.sample_config import SampleConfig
from nncf.checkpoint_loading import load_state
from nncf.config import NNCFConfig
from tests.conftest import TEST_ROOT
from tests.test_compression_training import get_cli_dict_args, parse_best_acc1
from tests.test_helpers import create_compressed_model_and_algo_for_test
from tests.test_sanity_sample import Command, create_command_line
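# Maps each sample NNCF config to the legacy checkpoints (and execution modes)
# that must still load and evaluate correctly for backward compatibility.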
GLOBAL_CONFIG = {
TEST_ROOT.joinpath("data", "configs", "squeezenet1_1_cifar10_rb_sparsity_int8.json"): [
{
'checkpoint_name': 'squeezenet1_1_custom_cifar10_rb_sparsity_int8_dp.pth',
'dataset': "cifar10",
'execution_mode': ExecutionMode.GPU_DATAPARALLEL,
},
{
'checkpoint_name': 'squeezenet1_1_custom_cifar10_rb_sparsity_int8_ddp.pth',
'dataset': "cifar10",
'execution_mode': ExecutionMode.MULTIPROCESSING_DISTRIBUTED,
},
],
}
CONFIG_PARAMS = []
for config_path_, cases_list_ in GLOBAL_CONFIG.items():
for case_params_ in cases_list_:
CONFIG_PARAMS.append((config_path_, case_params_,))
@pytest.fixture(scope='module', params=CONFIG_PARAMS,
ids=['-'.join([str(p[0]), p[1]['execution_mode']]) for p in CONFIG_PARAMS])
def _params(request, backward_compat_models_path):
if backward_compat_models_path is None:
pytest.skip('Path to models weights for backward compatibility testing is not set,'
' use --backward-compat-models option.')
config_path, case_params = request.param
checkpoint_path = str(os.path.join(backward_compat_models_path, case_params['checkpoint_name']))
return {
'sample_config_path': config_path,
'checkpoint_path': checkpoint_path,
'execution_mode': case_params['execution_mode'],
'dataset': case_params['dataset']
}
def test_model_can_be_loaded_with_resume(_params):
p = _params
sample_config_path = p['sample_config_path']
checkpoint_path = p['checkpoint_path']
config = SampleConfig.from_json(str(sample_config_path))
nncf_config = NNCFConfig.from_json(str(sample_config_path))
config.execution_mode = p['execution_mode']
config.current_gpu = 0
config.device = get_device(config)
config.distributed = config.execution_mode in (ExecutionMode.DISTRIBUTED, ExecutionMode.MULTIPROCESSING_DISTRIBUTED)
if config.distributed:
config.dist_url = "tcp://127.0.0.1:9898"
config.dist_backend = "nccl"
config.rank = 0
config.world_size = 1
configure_distributed(config)
model_name = config['model']
model = load_model(model_name,
pretrained=False,
num_classes=config.get('num_classes', 1000),
model_params=config.get('model_params'))
model.to(config.device)
model, compression_ctrl = create_compressed_model_and_algo_for_test(model, nncf_config)
model, _ = prepare_model_for_execution(model, config)
if config.distributed:
compression_ctrl.distributed()
checkpoint = torch.load(checkpoint_path, map_location='cpu')
load_state(model, checkpoint['state_dict'], is_resume=True)
def test_loaded_model_evals_according_to_saved_acc(_params, tmp_path):
p = _params
config_path = p['sample_config_path']
checkpoint_path = p['checkpoint_path']
tmp_path = str(tmp_path)
args = {}
args['data'] = tmp_path + '/' + p['dataset']
args['dataset'] = p['dataset']
args['config'] = str(config_path)
args['mode'] = 'test'
args['log-dir'] = tmp_path
args['workers'] = 4
args['seed'] = 1
args['resume'] = checkpoint_path
if p['execution_mode'] == ExecutionMode.MULTIPROCESSING_DISTRIBUTED:
args['multiprocessing-distributed'] = ''
else:
pytest.skip("DataParallel eval takes too long for this test to be run during pre-commit")
runner = Command(create_command_line(get_cli_dict_args(args), "classification"))
res = runner.run()
assert res == 0
acc1 = parse_best_acc1(tmp_path)
assert torch.load(checkpoint_path)['best_acc1'] == pytest.approx(acc1)
| 38.307692
| 120
| 0.717269
|
import os
import pytest
import torch
from examples.common.distributed import configure_distributed
from examples.common.execution import ExecutionMode, prepare_model_for_execution, get_device
from examples.common.model_loader import load_model
from examples.common.sample_config import SampleConfig
from nncf.checkpoint_loading import load_state
from nncf.config import NNCFConfig
from tests.conftest import TEST_ROOT
from tests.test_compression_training import get_cli_dict_args, parse_best_acc1
from tests.test_helpers import create_compressed_model_and_algo_for_test
from tests.test_sanity_sample import Command, create_command_line
GLOBAL_CONFIG = {
TEST_ROOT.joinpath("data", "configs", "squeezenet1_1_cifar10_rb_sparsity_int8.json"): [
{
'checkpoint_name': 'squeezenet1_1_custom_cifar10_rb_sparsity_int8_dp.pth',
'dataset': "cifar10",
'execution_mode': ExecutionMode.GPU_DATAPARALLEL,
},
{
'checkpoint_name': 'squeezenet1_1_custom_cifar10_rb_sparsity_int8_ddp.pth',
'dataset': "cifar10",
'execution_mode': ExecutionMode.MULTIPROCESSING_DISTRIBUTED,
},
],
}
CONFIG_PARAMS = []
for config_path_, cases_list_ in GLOBAL_CONFIG.items():
for case_params_ in cases_list_:
CONFIG_PARAMS.append((config_path_, case_params_,))
@pytest.fixture(scope='module', params=CONFIG_PARAMS,
ids=['-'.join([str(p[0]), p[1]['execution_mode']]) for p in CONFIG_PARAMS])
def _params(request, backward_compat_models_path):
if backward_compat_models_path is None:
pytest.skip('Path to models weights for backward compatibility testing is not set,'
' use --backward-compat-models option.')
config_path, case_params = request.param
checkpoint_path = str(os.path.join(backward_compat_models_path, case_params['checkpoint_name']))
return {
'sample_config_path': config_path,
'checkpoint_path': checkpoint_path,
'execution_mode': case_params['execution_mode'],
'dataset': case_params['dataset']
}
def test_model_can_be_loaded_with_resume(_params):
p = _params
sample_config_path = p['sample_config_path']
checkpoint_path = p['checkpoint_path']
config = SampleConfig.from_json(str(sample_config_path))
nncf_config = NNCFConfig.from_json(str(sample_config_path))
config.execution_mode = p['execution_mode']
config.current_gpu = 0
config.device = get_device(config)
config.distributed = config.execution_mode in (ExecutionMode.DISTRIBUTED, ExecutionMode.MULTIPROCESSING_DISTRIBUTED)
if config.distributed:
config.dist_url = "tcp://127.0.0.1:9898"
config.dist_backend = "nccl"
config.rank = 0
config.world_size = 1
configure_distributed(config)
model_name = config['model']
model = load_model(model_name,
pretrained=False,
num_classes=config.get('num_classes', 1000),
model_params=config.get('model_params'))
model.to(config.device)
model, compression_ctrl = create_compressed_model_and_algo_for_test(model, nncf_config)
model, _ = prepare_model_for_execution(model, config)
if config.distributed:
compression_ctrl.distributed()
checkpoint = torch.load(checkpoint_path, map_location='cpu')
load_state(model, checkpoint['state_dict'], is_resume=True)
def test_loaded_model_evals_according_to_saved_acc(_params, tmp_path):
p = _params
config_path = p['sample_config_path']
checkpoint_path = p['checkpoint_path']
tmp_path = str(tmp_path)
args = {}
args['data'] = tmp_path + '/' + p['dataset']
args['dataset'] = p['dataset']
args['config'] = str(config_path)
args['mode'] = 'test'
args['log-dir'] = tmp_path
args['workers'] = 4
args['seed'] = 1
args['resume'] = checkpoint_path
if p['execution_mode'] == ExecutionMode.MULTIPROCESSING_DISTRIBUTED:
args['multiprocessing-distributed'] = ''
else:
pytest.skip("DataParallel eval takes too long for this test to be run during pre-commit")
runner = Command(create_command_line(get_cli_dict_args(args), "classification"))
res = runner.run()
assert res == 0
acc1 = parse_best_acc1(tmp_path)
assert torch.load(checkpoint_path)['best_acc1'] == pytest.approx(acc1)
| true
| true
|
1c41fe15bec83b66b7dc0ac3836337a91beddffe
| 3,310
|
py
|
Python
|
utils/pascal_utils.py
|
miltonbd/ECCV_2018_pedestrian_detection_challenege
|
24448247530555e8f34f8caa35dd7a3a40cc17c0
|
[
"MIT"
] | 18
|
2019-02-05T14:46:49.000Z
|
2020-08-12T21:48:45.000Z
|
utils/pascal_utils.py
|
miltonbd/ECCV_2018_pedestrian_detection_challenege
|
24448247530555e8f34f8caa35dd7a3a40cc17c0
|
[
"MIT"
] | null | null | null |
utils/pascal_utils.py
|
miltonbd/ECCV_2018_pedestrian_detection_challenege
|
24448247530555e8f34f8caa35dd7a3a40cc17c0
|
[
"MIT"
] | 3
|
2018-07-24T09:32:40.000Z
|
2019-03-19T09:27:41.000Z
|
from PIL import Image
import xml.etree.ElementTree as ET
from PIL import Image
from xml.dom import minidom
from statics import *
from data_reader import *
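# Helpers for writing and reading Pascal VOC style XML annotations of
# pedestrian bounding boxes ([xmin, ymin, xmax, ymax] plus a class label).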
def write_pascal_annotation(file_name,obj_list,xml_file):
annotation=ET.Element('annotation')
filename=ET.SubElement(annotation,'filename')
filename.text=file_name
size = ET.SubElement(annotation, 'size')
img=Image.open(file_name)
width, height = img.size
height_elem=ET.SubElement(size,'height')
width_elem=ET.SubElement(size,'width')
height_elem.text=str(height)
width_elem.text=str(width)
# print(obj_list)
for i in range(0, len(obj_list), 5):
class_index = obj_list[i]
obj_cord = obj_list[i + 1:i + 5]
obj_cord[2] = int(obj_cord[2]) + int(obj_cord[0])
obj_cord[3] = int(obj_cord[3]) + int(obj_cord[1])
object = ET.SubElement(annotation, 'object')
get_object(object, obj_cord)
# print(ET.dump(annotation))
anno_txt=minidom.parseString(ET.tostring(annotation)).toprettyxml()
text_file = open(xml_file, "w")
text_file.write(anno_txt)
text_file.close()
return
def write_pascal_annotation_aug(file_name,obj_list,xml_file):
annotation=ET.Element('annotation')
filename=ET.SubElement(annotation,'filename')
filename.text=file_name
size = ET.SubElement(annotation, 'size')
img=Image.open(file_name)
width, height = img.size
height_elem=ET.SubElement(size,'height')
width_elem=ET.SubElement(size,'width')
height_elem.text=str(height)
width_elem.text=str(width)
# print(obj_list)
for i,obj in enumerate(obj_list):
class_index = obj[4]
obj_cord = obj[0:4]
object = ET.SubElement(annotation, 'object')
get_object(object, obj_cord)
# print(ET.dump(annotation))
anno_txt=minidom.parseString(ET.tostring(annotation)).toprettyxml()
text_file = open(xml_file, "w")
text_file.write(anno_txt)
text_file.close()
return
def get_object(object, obj_cord):
name = ET.SubElement(object, 'name')
name.text = 'pedestrian'
bndbox = ET.SubElement(object, 'bndbox')
difficult=ET.SubElement(object,'difficult')
difficult.text=str(0)
xmin = ET.SubElement(bndbox, 'xmin')
ymin = ET.SubElement(bndbox, 'ymin')
xmax = ET.SubElement(bndbox, 'xmax')
ymax = ET.SubElement(bndbox, 'ymax')
xmin.text=str(obj_cord[0])
ymin.text=str(obj_cord[1])
xmax.text=str(obj_cord[2])
ymax.text=str(obj_cord[3])
return
def read_pascal_annotation(anno_file):
"""
:param anno_file:
:return:
"""
tree = ET.parse(anno_file)
root = tree.getroot()
filename=root.find('filename').text
height=int(root.find('size/height').text)
width=int(root.find('size/width').text)
objs=root.findall('object')
objects=[]
for obj in objs:
class_label=obj.find('name').text
xmin=int(float(obj.find('bndbox/xmin').text))
xmax=int(float(obj.find('bndbox/xmax').text))
ymin=int(float(obj.find('bndbox/ymin').text))
ymax=int(float(obj.find('bndbox/ymax').text))
objects.append([xmin,ymin,xmax,ymax,1])
res={
'filename':filename,
'height':height,
'width':width,
'objects':objects
}
return res
| 30.366972
| 71
| 0.661329
|
from PIL import Image
import xml.etree.ElementTree as ET
from PIL import Image
from xml.dom import minidom
from statics import *
from data_reader import *
def write_pascal_annotation(file_name,obj_list,xml_file):
annotation=ET.Element('annotation')
filename=ET.SubElement(annotation,'filename')
filename.text=file_name
size = ET.SubElement(annotation, 'size')
img=Image.open(file_name)
width, height = img.size
height_elem=ET.SubElement(size,'height')
width_elem=ET.SubElement(size,'width')
height_elem.text=str(height)
width_elem.text=str(width)
for i in range(0, len(obj_list), 5):
class_index = obj_list[i]
obj_cord = obj_list[i + 1:i + 5]
obj_cord[2] = int(obj_cord[2]) + int(obj_cord[0])
obj_cord[3] = int(obj_cord[3]) + int(obj_cord[1])
object = ET.SubElement(annotation, 'object')
get_object(object, obj_cord)
anno_txt=minidom.parseString(ET.tostring(annotation)).toprettyxml()
text_file = open(xml_file, "w")
text_file.write(anno_txt)
text_file.close()
return
def write_pascal_annotation_aug(file_name,obj_list,xml_file):
annotation=ET.Element('annotation')
filename=ET.SubElement(annotation,'filename')
filename.text=file_name
size = ET.SubElement(annotation, 'size')
img=Image.open(file_name)
width, height = img.size
height_elem=ET.SubElement(size,'height')
width_elem=ET.SubElement(size,'width')
height_elem.text=str(height)
width_elem.text=str(width)
for i,obj in enumerate(obj_list):
class_index = obj[4]
obj_cord = obj[0:4]
object = ET.SubElement(annotation, 'object')
get_object(object, obj_cord)
anno_txt=minidom.parseString(ET.tostring(annotation)).toprettyxml()
text_file = open(xml_file, "w")
text_file.write(anno_txt)
text_file.close()
return
def get_object(object, obj_cord):
name = ET.SubElement(object, 'name')
name.text = 'pedestrian'
bndbox = ET.SubElement(object, 'bndbox')
difficult=ET.SubElement(object,'difficult')
difficult.text=str(0)
xmin = ET.SubElement(bndbox, 'xmin')
ymin = ET.SubElement(bndbox, 'ymin')
xmax = ET.SubElement(bndbox, 'xmax')
ymax = ET.SubElement(bndbox, 'ymax')
xmin.text=str(obj_cord[0])
ymin.text=str(obj_cord[1])
xmax.text=str(obj_cord[2])
ymax.text=str(obj_cord[3])
return
def read_pascal_annotation(anno_file):
tree = ET.parse(anno_file)
root = tree.getroot()
filename=root.find('filename').text
height=int(root.find('size/height').text)
width=int(root.find('size/width').text)
objs=root.findall('object')
objects=[]
for obj in objs:
class_label=obj.find('name').text
xmin=int(float(obj.find('bndbox/xmin').text))
xmax=int(float(obj.find('bndbox/xmax').text))
ymin=int(float(obj.find('bndbox/ymin').text))
ymax=int(float(obj.find('bndbox/ymax').text))
objects.append([xmin,ymin,xmax,ymax,1])
res={
'filename':filename,
'height':height,
'width':width,
'objects':objects
}
return res
| true
| true
|
1c41fe4d2ad877055a0e41bf4c9cdff8b60444ab
| 546
|
py
|
Python
|
stubs/micropython-v1_18-rp2/json.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
stubs/micropython-v1_18-rp2/json.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
stubs/micropython-v1_18-rp2/json.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
"""
Module: 'json' on micropython-v1.18-rp2
"""
# MCU: {'family': 'micropython', 'sysname': 'rp2', 'version': '1.18.0', 'build': '', 'mpy': 5637, 'port': 'rp2', 'platform': 'rp2', 'name': 'micropython', 'arch': 'armv7m', 'machine': 'Arduino Nano RP2040 Connect with RP2040', 'nodename': 'rp2', 'ver': 'v1.18', 'release': '1.18.0'}
# Stubber: 1.5.3
from typing import Any
def dump(*args, **kwargs) -> Any:
...
def dumps(*args, **kwargs) -> Any:
...
def load(*args, **kwargs) -> Any:
...
def loads(*args, **kwargs) -> Any:
...
| 27.3
| 282
| 0.562271
|
from typing import Any
def dump(*args, **kwargs) -> Any:
...
def dumps(*args, **kwargs) -> Any:
...
def load(*args, **kwargs) -> Any:
...
def loads(*args, **kwargs) -> Any:
...
| true
| true
|
1c41feda7dafeca312b2738522ba59265eca075d
| 5,579
|
py
|
Python
|
homeassistant/components/deconz/logbook.py
|
andersop91/core
|
0e0ef0aa17073609eae7c974cf4c73306b7c414b
|
[
"Apache-2.0"
] | 22,481
|
2020-03-02T13:09:59.000Z
|
2022-03-31T23:34:28.000Z
|
homeassistant/components/deconz/logbook.py
|
andersop91/core
|
0e0ef0aa17073609eae7c974cf4c73306b7c414b
|
[
"Apache-2.0"
] | 31,101
|
2020-03-02T13:00:16.000Z
|
2022-03-31T23:57:36.000Z
|
homeassistant/components/deconz/logbook.py
|
andersop91/core
|
0e0ef0aa17073609eae7c974cf4c73306b7c414b
|
[
"Apache-2.0"
] | 11,411
|
2020-03-02T14:19:20.000Z
|
2022-03-31T22:46:07.000Z
|
"""Describe deCONZ logbook events."""
from __future__ import annotations
from collections.abc import Callable
from homeassistant.const import ATTR_DEVICE_ID, CONF_EVENT
from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.device_registry as dr
from homeassistant.helpers.event import Event
from .const import CONF_GESTURE, DOMAIN as DECONZ_DOMAIN
from .deconz_event import CONF_DECONZ_ALARM_EVENT, CONF_DECONZ_EVENT
from .device_trigger import (
CONF_BOTH_BUTTONS,
CONF_BOTTOM_BUTTONS,
CONF_BUTTON_1,
CONF_BUTTON_2,
CONF_BUTTON_3,
CONF_BUTTON_4,
CONF_CLOSE,
CONF_DIM_DOWN,
CONF_DIM_UP,
CONF_DOUBLE_PRESS,
CONF_DOUBLE_TAP,
CONF_LEFT,
CONF_LONG_PRESS,
CONF_LONG_RELEASE,
CONF_MOVE,
CONF_OPEN,
CONF_QUADRUPLE_PRESS,
CONF_QUINTUPLE_PRESS,
CONF_RIGHT,
CONF_ROTATE_FROM_SIDE_1,
CONF_ROTATE_FROM_SIDE_2,
CONF_ROTATE_FROM_SIDE_3,
CONF_ROTATE_FROM_SIDE_4,
CONF_ROTATE_FROM_SIDE_5,
CONF_ROTATE_FROM_SIDE_6,
CONF_ROTATED,
CONF_ROTATED_FAST,
CONF_ROTATION_STOPPED,
CONF_SHAKE,
CONF_SHORT_PRESS,
CONF_SHORT_RELEASE,
CONF_SIDE_1,
CONF_SIDE_2,
CONF_SIDE_3,
CONF_SIDE_4,
CONF_SIDE_5,
CONF_SIDE_6,
CONF_TOP_BUTTONS,
CONF_TRIPLE_PRESS,
CONF_TURN_OFF,
CONF_TURN_ON,
REMOTES,
_get_deconz_event_from_device,
)
ACTIONS = {
CONF_SHORT_PRESS: "Short press",
CONF_SHORT_RELEASE: "Short release",
CONF_LONG_PRESS: "Long press",
CONF_LONG_RELEASE: "Long release",
CONF_DOUBLE_PRESS: "Double press",
CONF_TRIPLE_PRESS: "Triple press",
CONF_QUADRUPLE_PRESS: "Quadruple press",
CONF_QUINTUPLE_PRESS: "Quintuple press",
CONF_ROTATED: "Rotated",
CONF_ROTATED_FAST: "Rotated fast",
CONF_ROTATION_STOPPED: "Rotated stopped",
CONF_MOVE: "Move",
CONF_DOUBLE_TAP: "Double tap",
CONF_SHAKE: "Shake",
CONF_ROTATE_FROM_SIDE_1: "Rotate from side 1",
CONF_ROTATE_FROM_SIDE_2: "Rotate from side 2",
CONF_ROTATE_FROM_SIDE_3: "Rotate from side 3",
CONF_ROTATE_FROM_SIDE_4: "Rotate from side 4",
CONF_ROTATE_FROM_SIDE_5: "Rotate from side 5",
CONF_ROTATE_FROM_SIDE_6: "Rotate from side 6",
}
INTERFACES = {
CONF_TURN_ON: "Turn on",
CONF_TURN_OFF: "Turn off",
CONF_DIM_UP: "Dim up",
CONF_DIM_DOWN: "Dim down",
CONF_LEFT: "Left",
CONF_RIGHT: "Right",
CONF_OPEN: "Open",
CONF_CLOSE: "Close",
CONF_BOTH_BUTTONS: "Both buttons",
CONF_TOP_BUTTONS: "Top buttons",
CONF_BOTTOM_BUTTONS: "Bottom buttons",
CONF_BUTTON_1: "Button 1",
CONF_BUTTON_2: "Button 2",
CONF_BUTTON_3: "Button 3",
CONF_BUTTON_4: "Button 4",
CONF_SIDE_1: "Side 1",
CONF_SIDE_2: "Side 2",
CONF_SIDE_3: "Side 3",
CONF_SIDE_4: "Side 4",
CONF_SIDE_5: "Side 5",
CONF_SIDE_6: "Side 6",
}
def _get_device_event_description(
modelid: str, event: int
) -> tuple[str | None, str | None]:
"""Get device event description."""
device_event_descriptions = REMOTES[modelid]
for event_type_tuple, event_dict in device_event_descriptions.items():
if event == event_dict.get(CONF_EVENT):
return event_type_tuple
if event == event_dict.get(CONF_GESTURE):
return event_type_tuple
return (None, None)
@callback
def async_describe_events(
hass: HomeAssistant,
async_describe_event: Callable[[str, str, Callable[[Event], dict[str, str]]], None],
) -> None:
"""Describe logbook events."""
device_registry = dr.async_get(hass)
@callback
def async_describe_deconz_alarm_event(event: Event) -> dict[str, str]:
"""Describe deCONZ logbook alarm event."""
device = device_registry.devices[event.data[ATTR_DEVICE_ID]]
deconz_alarm_event = _get_deconz_event_from_device(hass, device)
data = event.data[CONF_EVENT]
return {
"name": f"{deconz_alarm_event.device.name}",
"message": f"fired event '{data}'.",
}
@callback
def async_describe_deconz_event(event: Event) -> dict[str, str]:
"""Describe deCONZ logbook event."""
device = device_registry.devices[event.data[ATTR_DEVICE_ID]]
deconz_event = _get_deconz_event_from_device(hass, device)
action = None
interface = None
data = event.data.get(CONF_EVENT) or event.data.get(CONF_GESTURE, "")
if data and deconz_event.device.model_id in REMOTES:
action, interface = _get_device_event_description(
deconz_event.device.model_id, data
)
# Unknown event
if not data:
return {
"name": f"{deconz_event.device.name}",
"message": "fired an unknown event.",
}
# No device event match
if not action:
return {
"name": f"{deconz_event.device.name}",
"message": f"fired event '{data}'.",
}
# Gesture event
if not interface:
return {
"name": f"{deconz_event.device.name}",
"message": f"fired event '{ACTIONS[action]}'.",
}
return {
"name": f"{deconz_event.device.name}",
"message": f"'{ACTIONS[action]}' event for '{INTERFACES[interface]}' was fired.",
}
async_describe_event(
DECONZ_DOMAIN, CONF_DECONZ_ALARM_EVENT, async_describe_deconz_alarm_event
)
async_describe_event(DECONZ_DOMAIN, CONF_DECONZ_EVENT, async_describe_deconz_event)
| 29.675532
| 93
| 0.666965
|
from __future__ import annotations
from collections.abc import Callable
from homeassistant.const import ATTR_DEVICE_ID, CONF_EVENT
from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.device_registry as dr
from homeassistant.helpers.event import Event
from .const import CONF_GESTURE, DOMAIN as DECONZ_DOMAIN
from .deconz_event import CONF_DECONZ_ALARM_EVENT, CONF_DECONZ_EVENT
from .device_trigger import (
CONF_BOTH_BUTTONS,
CONF_BOTTOM_BUTTONS,
CONF_BUTTON_1,
CONF_BUTTON_2,
CONF_BUTTON_3,
CONF_BUTTON_4,
CONF_CLOSE,
CONF_DIM_DOWN,
CONF_DIM_UP,
CONF_DOUBLE_PRESS,
CONF_DOUBLE_TAP,
CONF_LEFT,
CONF_LONG_PRESS,
CONF_LONG_RELEASE,
CONF_MOVE,
CONF_OPEN,
CONF_QUADRUPLE_PRESS,
CONF_QUINTUPLE_PRESS,
CONF_RIGHT,
CONF_ROTATE_FROM_SIDE_1,
CONF_ROTATE_FROM_SIDE_2,
CONF_ROTATE_FROM_SIDE_3,
CONF_ROTATE_FROM_SIDE_4,
CONF_ROTATE_FROM_SIDE_5,
CONF_ROTATE_FROM_SIDE_6,
CONF_ROTATED,
CONF_ROTATED_FAST,
CONF_ROTATION_STOPPED,
CONF_SHAKE,
CONF_SHORT_PRESS,
CONF_SHORT_RELEASE,
CONF_SIDE_1,
CONF_SIDE_2,
CONF_SIDE_3,
CONF_SIDE_4,
CONF_SIDE_5,
CONF_SIDE_6,
CONF_TOP_BUTTONS,
CONF_TRIPLE_PRESS,
CONF_TURN_OFF,
CONF_TURN_ON,
REMOTES,
_get_deconz_event_from_device,
)
ACTIONS = {
CONF_SHORT_PRESS: "Short press",
CONF_SHORT_RELEASE: "Short release",
CONF_LONG_PRESS: "Long press",
CONF_LONG_RELEASE: "Long release",
CONF_DOUBLE_PRESS: "Double press",
CONF_TRIPLE_PRESS: "Triple press",
CONF_QUADRUPLE_PRESS: "Quadruple press",
CONF_QUINTUPLE_PRESS: "Quintuple press",
CONF_ROTATED: "Rotated",
CONF_ROTATED_FAST: "Rotated fast",
CONF_ROTATION_STOPPED: "Rotated stopped",
CONF_MOVE: "Move",
CONF_DOUBLE_TAP: "Double tap",
CONF_SHAKE: "Shake",
CONF_ROTATE_FROM_SIDE_1: "Rotate from side 1",
CONF_ROTATE_FROM_SIDE_2: "Rotate from side 2",
CONF_ROTATE_FROM_SIDE_3: "Rotate from side 3",
CONF_ROTATE_FROM_SIDE_4: "Rotate from side 4",
CONF_ROTATE_FROM_SIDE_5: "Rotate from side 5",
CONF_ROTATE_FROM_SIDE_6: "Rotate from side 6",
}
INTERFACES = {
CONF_TURN_ON: "Turn on",
CONF_TURN_OFF: "Turn off",
CONF_DIM_UP: "Dim up",
CONF_DIM_DOWN: "Dim down",
CONF_LEFT: "Left",
CONF_RIGHT: "Right",
CONF_OPEN: "Open",
CONF_CLOSE: "Close",
CONF_BOTH_BUTTONS: "Both buttons",
CONF_TOP_BUTTONS: "Top buttons",
CONF_BOTTOM_BUTTONS: "Bottom buttons",
CONF_BUTTON_1: "Button 1",
CONF_BUTTON_2: "Button 2",
CONF_BUTTON_3: "Button 3",
CONF_BUTTON_4: "Button 4",
CONF_SIDE_1: "Side 1",
CONF_SIDE_2: "Side 2",
CONF_SIDE_3: "Side 3",
CONF_SIDE_4: "Side 4",
CONF_SIDE_5: "Side 5",
CONF_SIDE_6: "Side 6",
}
def _get_device_event_description(
modelid: str, event: int
) -> tuple[str | None, str | None]:
device_event_descriptions = REMOTES[modelid]
for event_type_tuple, event_dict in device_event_descriptions.items():
if event == event_dict.get(CONF_EVENT):
return event_type_tuple
if event == event_dict.get(CONF_GESTURE):
return event_type_tuple
return (None, None)
@callback
def async_describe_events(
hass: HomeAssistant,
async_describe_event: Callable[[str, str, Callable[[Event], dict[str, str]]], None],
) -> None:
device_registry = dr.async_get(hass)
@callback
def async_describe_deconz_alarm_event(event: Event) -> dict[str, str]:
device = device_registry.devices[event.data[ATTR_DEVICE_ID]]
deconz_alarm_event = _get_deconz_event_from_device(hass, device)
data = event.data[CONF_EVENT]
return {
"name": f"{deconz_alarm_event.device.name}",
"message": f"fired event '{data}'.",
}
@callback
def async_describe_deconz_event(event: Event) -> dict[str, str]:
device = device_registry.devices[event.data[ATTR_DEVICE_ID]]
deconz_event = _get_deconz_event_from_device(hass, device)
action = None
interface = None
data = event.data.get(CONF_EVENT) or event.data.get(CONF_GESTURE, "")
if data and deconz_event.device.model_id in REMOTES:
action, interface = _get_device_event_description(
deconz_event.device.model_id, data
)
if not data:
return {
"name": f"{deconz_event.device.name}",
"message": "fired an unknown event.",
}
if not action:
return {
"name": f"{deconz_event.device.name}",
"message": f"fired event '{data}'.",
}
if not interface:
return {
"name": f"{deconz_event.device.name}",
"message": f"fired event '{ACTIONS[action]}'.",
}
return {
"name": f"{deconz_event.device.name}",
"message": f"'{ACTIONS[action]}' event for '{INTERFACES[interface]}' was fired.",
}
async_describe_event(
DECONZ_DOMAIN, CONF_DECONZ_ALARM_EVENT, async_describe_deconz_alarm_event
)
async_describe_event(DECONZ_DOMAIN, CONF_DECONZ_EVENT, async_describe_deconz_event)
| true
| true
|
1c41ff0f1e9374a337b72a555782b1b0a7bc7a72
| 2,269
|
py
|
Python
|
growler/responder.py
|
pyGrowler/Growler
|
5492466d8828115bb04c665917d6aeb4f4323f44
|
[
"Apache-2.0"
] | 806
|
2015-02-18T18:54:40.000Z
|
2021-12-28T20:14:13.000Z
|
growler/responder.py
|
akubera/Growler
|
5492466d8828115bb04c665917d6aeb4f4323f44
|
[
"Apache-2.0"
] | 16
|
2016-05-05T08:32:36.000Z
|
2020-03-08T08:01:43.000Z
|
growler/responder.py
|
pyGrowler/Growler
|
5492466d8828115bb04c665917d6aeb4f4323f44
|
[
"Apache-2.0"
] | 36
|
2015-08-02T11:52:02.000Z
|
2020-05-09T15:25:29.000Z
|
#
# growler/responder.py
#
"""
Event loop independent class for managing clients' requests and
server responses.
"""
from typing import Optional
from asyncio import BaseTransport
from socket import socket as Socket
from abc import ABC, abstractmethod
class GrowlerResponder(ABC):
"""
Abstract base class for 'responder' objects that handle the
stream of client data.
Responders are designed to be event-loop independent, so
applications may change backend without lots of effort.
Unfortunately, this means that responders should NOT use
constructs provided by specific libraries (such as asyncio) and
instead try to use as much from standard python as they can.
"""
@abstractmethod
def on_data(self, data):
raise NotImplementedError()
class CoroutineResponder(GrowlerResponder):
"""
Special responder object that will 'send' data to a coroutine
object for processing.
"""
def __init__(self, coro):
self._coro = coro
def on_data(self, data):
self._coro.send(data)
class ResponderHandler:
"""
A common interface for classes that handle GrowlerResponder
objects.
The default implementation is the protocol object found in
growler.aio.protocol.
"""
__slots__ = (
'transport',
)
transport: Optional[BaseTransport]
@property
def socket(self) -> Optional[Socket]:
return (self.transport.get_extra_info('socket')
if self.transport is not None
else None)
@property
def peername(self):
return (self.transport.get_extra_info('peername')
if self.transport is not None
else None)
@property
def cipher(self):
return (self.transport.get_extra_info('cipher')
if self.transport is not None
else None)
@property
def remote_hostname(self):
return (self.peername[0]
if self.transport is not None
else None)
@property
def remote_port(self):
return (self.peername[1]
if self.transport is not None
else None)
# clean namespace
del ABC
del abstractmethod
del BaseTransport
del Optional
del Socket
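# --- Illustrative usage sketch (not part of the original module). ---
# Shows how CoroutineResponder forwards incoming data chunks to a generator-based
# coroutine; the printer coroutine below is a hypothetical consumer.
def _example_printer():
    while True:
        chunk = yield
        print("got", chunk)
if __name__ == "__main__":
    coro = _example_printer()
    next(coro)                                   # prime the coroutine
    CoroutineResponder(coro).on_data(b"hello")   # prints: got b'hello'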
| 23.635417
| 67
| 0.655355
|
from typing import Optional
from asyncio import BaseTransport
from socket import socket as Socket
from abc import ABC, abstractmethod
class GrowlerResponder(ABC):
@abstractmethod
def on_data(self, data):
raise NotImplementedError()
class CoroutineResponder(GrowlerResponder):
def __init__(self, coro):
self._coro = coro
def on_data(self, data):
self._coro.send(data)
class ResponderHandler:
__slots__ = (
'transport',
)
transport: Optional[BaseTransport]
@property
def socket(self) -> Optional[Socket]:
return (self.transport.get_extra_info('socket')
if self.transport is not None
else None)
@property
def peername(self):
return (self.transport.get_extra_info('peername')
if self.transport is not None
else None)
@property
def cipher(self):
return (self.transport.get_extra_info('cipher')
if self.transport is not None
else None)
@property
def remote_hostname(self):
return (self.peername[0]
if self.transport is not None
else None)
@property
def remote_port(self):
return (self.peername[1]
if self.transport is not None
else None)
del ABC
del abstractmethod
del BaseTransport
del Optional
del Socket
| true
| true
|
1c41ffc484f89a80d534cd8c6d48239d3916eb00
| 6,420
|
py
|
Python
|
sdk/python/pulumi_azure_nextgen/storage/get_object_replication_policy.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 31
|
2020-09-21T09:41:01.000Z
|
2021-02-26T13:21:59.000Z
|
sdk/python/pulumi_azure_nextgen/storage/get_object_replication_policy.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 231
|
2020-09-21T09:38:45.000Z
|
2021-03-01T11:16:03.000Z
|
sdk/python/pulumi_azure_nextgen/storage/get_object_replication_policy.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 4
|
2020-09-29T14:14:59.000Z
|
2021-02-10T20:38:16.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
__all__ = [
'GetObjectReplicationPolicyResult',
'AwaitableGetObjectReplicationPolicyResult',
'get_object_replication_policy',
]
@pulumi.output_type
class GetObjectReplicationPolicyResult:
"""
The replication policy between two storage accounts. Multiple rules can be defined in one policy.
"""
def __init__(__self__, destination_account=None, enabled_time=None, id=None, name=None, policy_id=None, rules=None, source_account=None, type=None):
if destination_account and not isinstance(destination_account, str):
raise TypeError("Expected argument 'destination_account' to be a str")
pulumi.set(__self__, "destination_account", destination_account)
if enabled_time and not isinstance(enabled_time, str):
raise TypeError("Expected argument 'enabled_time' to be a str")
pulumi.set(__self__, "enabled_time", enabled_time)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if policy_id and not isinstance(policy_id, str):
raise TypeError("Expected argument 'policy_id' to be a str")
pulumi.set(__self__, "policy_id", policy_id)
if rules and not isinstance(rules, list):
raise TypeError("Expected argument 'rules' to be a list")
pulumi.set(__self__, "rules", rules)
if source_account and not isinstance(source_account, str):
raise TypeError("Expected argument 'source_account' to be a str")
pulumi.set(__self__, "source_account", source_account)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="destinationAccount")
def destination_account(self) -> str:
"""
Required. Destination account name.
"""
return pulumi.get(self, "destination_account")
@property
@pulumi.getter(name="enabledTime")
def enabled_time(self) -> str:
"""
Indicates when the policy is enabled on the source account.
"""
return pulumi.get(self, "enabled_time")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="policyId")
def policy_id(self) -> str:
"""
A unique id for object replication policy.
"""
return pulumi.get(self, "policy_id")
@property
@pulumi.getter
def rules(self) -> Optional[Sequence['outputs.ObjectReplicationPolicyRuleResponse']]:
"""
The storage account object replication rules.
"""
return pulumi.get(self, "rules")
@property
@pulumi.getter(name="sourceAccount")
def source_account(self) -> str:
"""
Required. Source account name.
"""
return pulumi.get(self, "source_account")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
class AwaitableGetObjectReplicationPolicyResult(GetObjectReplicationPolicyResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetObjectReplicationPolicyResult(
destination_account=self.destination_account,
enabled_time=self.enabled_time,
id=self.id,
name=self.name,
policy_id=self.policy_id,
rules=self.rules,
source_account=self.source_account,
type=self.type)
def get_object_replication_policy(account_name: Optional[str] = None,
object_replication_policy_id: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetObjectReplicationPolicyResult:
"""
The replication policy between two storage accounts. Multiple rules can be defined in one policy.
API Version: 2021-01-01.
:param str account_name: The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only.
:param str object_replication_policy_id: The ID of object replication policy or 'default' if the policy ID is unknown.
:param str resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
"""
__args__ = dict()
__args__['accountName'] = account_name
__args__['objectReplicationPolicyId'] = object_replication_policy_id
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:storage:getObjectReplicationPolicy', __args__, opts=opts, typ=GetObjectReplicationPolicyResult).value
return AwaitableGetObjectReplicationPolicyResult(
destination_account=__ret__.destination_account,
enabled_time=__ret__.enabled_time,
id=__ret__.id,
name=__ret__.name,
policy_id=__ret__.policy_id,
rules=__ret__.rules,
source_account=__ret__.source_account,
type=__ret__.type)
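# --- Illustrative usage sketch (not part of the generated SDK). ---
# The account, policy id and resource group names below are placeholders, and the
# call needs a configured Pulumi/Azure environment, so it is left commented out.
#
# policy = get_object_replication_policy(
#     account_name="mystorageaccount",
#     object_replication_policy_id="default",
#     resource_group_name="my-resource-group")
# pulumi.export("destination_account", policy.destination_account)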
| 39.62963
| 210
| 0.668224
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
__all__ = [
'GetObjectReplicationPolicyResult',
'AwaitableGetObjectReplicationPolicyResult',
'get_object_replication_policy',
]
@pulumi.output_type
class GetObjectReplicationPolicyResult:
def __init__(__self__, destination_account=None, enabled_time=None, id=None, name=None, policy_id=None, rules=None, source_account=None, type=None):
if destination_account and not isinstance(destination_account, str):
raise TypeError("Expected argument 'destination_account' to be a str")
pulumi.set(__self__, "destination_account", destination_account)
if enabled_time and not isinstance(enabled_time, str):
raise TypeError("Expected argument 'enabled_time' to be a str")
pulumi.set(__self__, "enabled_time", enabled_time)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if policy_id and not isinstance(policy_id, str):
raise TypeError("Expected argument 'policy_id' to be a str")
pulumi.set(__self__, "policy_id", policy_id)
if rules and not isinstance(rules, list):
raise TypeError("Expected argument 'rules' to be a list")
pulumi.set(__self__, "rules", rules)
if source_account and not isinstance(source_account, str):
raise TypeError("Expected argument 'source_account' to be a str")
pulumi.set(__self__, "source_account", source_account)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="destinationAccount")
def destination_account(self) -> str:
return pulumi.get(self, "destination_account")
@property
@pulumi.getter(name="enabledTime")
def enabled_time(self) -> str:
return pulumi.get(self, "enabled_time")
@property
@pulumi.getter
def id(self) -> str:
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
return pulumi.get(self, "name")
@property
@pulumi.getter(name="policyId")
def policy_id(self) -> str:
return pulumi.get(self, "policy_id")
@property
@pulumi.getter
def rules(self) -> Optional[Sequence['outputs.ObjectReplicationPolicyRuleResponse']]:
return pulumi.get(self, "rules")
@property
@pulumi.getter(name="sourceAccount")
def source_account(self) -> str:
return pulumi.get(self, "source_account")
@property
@pulumi.getter
def type(self) -> str:
return pulumi.get(self, "type")
class AwaitableGetObjectReplicationPolicyResult(GetObjectReplicationPolicyResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetObjectReplicationPolicyResult(
destination_account=self.destination_account,
enabled_time=self.enabled_time,
id=self.id,
name=self.name,
policy_id=self.policy_id,
rules=self.rules,
source_account=self.source_account,
type=self.type)
def get_object_replication_policy(account_name: Optional[str] = None,
object_replication_policy_id: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetObjectReplicationPolicyResult:
__args__ = dict()
__args__['accountName'] = account_name
__args__['objectReplicationPolicyId'] = object_replication_policy_id
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:storage:getObjectReplicationPolicy', __args__, opts=opts, typ=GetObjectReplicationPolicyResult).value
return AwaitableGetObjectReplicationPolicyResult(
destination_account=__ret__.destination_account,
enabled_time=__ret__.enabled_time,
id=__ret__.id,
name=__ret__.name,
policy_id=__ret__.policy_id,
rules=__ret__.rules,
source_account=__ret__.source_account,
type=__ret__.type)
| true
| true
|
1c420085b055ce7cdac960f6e45563c43bc3b205
| 5,881
|
py
|
Python
|
nemo_cmd/deflate.py
|
SalishSeaCast/NEMO-Cmd
|
a1fb05c4430e152a7dae57296bce364f73752129
|
[
"Apache-2.0"
] | 1
|
2020-03-26T16:42:26.000Z
|
2020-03-26T16:42:26.000Z
|
nemo_cmd/deflate.py
|
SalishSeaCast/NEMO-Cmd
|
a1fb05c4430e152a7dae57296bce364f73752129
|
[
"Apache-2.0"
] | 10
|
2020-03-23T21:19:25.000Z
|
2021-11-01T22:12:17.000Z
|
nemo_cmd/deflate.py
|
SalishSeaCast/NEMO-Cmd
|
a1fb05c4430e152a7dae57296bce364f73752129
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2013-2021 The Salish Sea MEOPAR Contributors
# and The University of British Columbia
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""NEMO-Cmd command plug-in for deflate sub-command.
Deflate variables in netCDF files using Lempel-Ziv compression.
"""
import logging
import math
import multiprocessing
from pathlib import Path
import shlex
import subprocess
import time
import attr
import cliff.command
logger = logging.getLogger(__name__)
class Deflate(cliff.command.Command):
"""Deflate variables in netCDF files using Lempel-Ziv compression."""
def get_parser(self, prog_name):
parser = super(Deflate, self).get_parser(prog_name)
parser.description = """
Deflate variables in netCDF files using Lempel-Ziv compression.
Converts files to netCDF-4 format.
The deflated file replaces the original file.
This command is effectively the same as running
ncks -4 -L -O FILEPATH FILEPATH
for each FILEPATH.
"""
parser.add_argument(
"filepaths",
nargs="+",
type=Path,
metavar="FILEPATH",
help="Path/name of file to be deflated.",
)
parser.add_argument(
"-j",
"--jobs",
type=int,
default=math.floor(multiprocessing.cpu_count() / 2),
help=(
"Maximum number of concurrent deflation processes allowed. "
"Defaults to 1/2 the number of cores detected."
),
)
return parser
def take_action(self, parsed_args):
"""Execute the :command:`nemo deflate` sub-command.
Deflate variables in netCDF files using Lempel-Ziv compression.
Converts files to netCDF-4 format.
The deflated file replaces the original file.
This command is effectively the same as
:command:`ncks -4 -L -O filename filename`.
"""
deflate(parsed_args.filepaths, parsed_args.jobs)
@attr.s
class DeflateJob(object):
"""netCDF file deflation job."""
#: Path/name of the netCDF file to deflate.
filepath = attr.ib()
#: Lempel-Ziv compression level to use.
dfl_lvl = attr.ib(default=4)
#: Deflation job subprocess object.
process = attr.ib(default=None)
#: Deflation job process PID.
pid = attr.ib(default=None)
#: Deflation job process return code.
returncode = attr.ib(default=None)
def start(self):
"""Start the deflation job in a subprocess.
Cache the subprocess object and its process id as job attributes.
"""
cmd = "nccopy -s -4 -d{0.dfl_lvl} {0.filepath} {0.filepath}.nccopy.tmp".format(
self
)
self.process = subprocess.Popen(
shlex.split(cmd),
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True,
)
self.pid = self.process.pid
logger.debug("deflating {0.filepath} in process {0.pid}".format(self))
@property
def done(self):
"""Return a boolean indicating whether or not the job has finished.
Cache the subprocess return code as a job attribute.
"""
finished = False
self.returncode = self.process.poll()
if self.returncode is not None:
if self.returncode == 0:
Path("{0.filepath}.nccopy.tmp".format(self)).rename(self.filepath)
finished = True
logger.debug(
"deflating {0.filepath} finished "
"with return code {0.returncode}".format(self)
)
return finished
def deflate(filepaths, max_concurrent_jobs):
"""Deflate variables in each of the netCDF files in filepaths using
Lempel-Ziv compression.
Converts files to netCDF-4 format.
The deflated file replaces the original file.
:param sequence filepaths: Paths/names of files to be deflated.
:param int max_concurrent_jobs: Maximum number of concurrent deflation
processes allowed.
"""
logger.info(
"Deflating in up to {} concurrent sub-processes".format(
int(max_concurrent_jobs)
)
)
jobs = [DeflateJob(fp) for fp in filepaths if fp.exists()]
jobs_in_progress = _launch_initial_jobs(jobs, max_concurrent_jobs)
while jobs or jobs_in_progress:
time.sleep(1)
_poll_and_launch(jobs, jobs_in_progress)
def _launch_initial_jobs(jobs, max_concurrent_jobs):
jobs_in_progress = {}
for process in range(int(max_concurrent_jobs)):
try:
job = jobs.pop(0)
except IndexError:
break
else:
job.start()
jobs_in_progress[job.pid] = job
return jobs_in_progress
def _poll_and_launch(jobs, jobs_in_progress):
for running_job in jobs_in_progress.copy().values():
if running_job.done:
result, _ = running_job.process.communicate()
logger.error(result) if result else logger.info(
"netCDF4 deflated {.filepath}".format(running_job)
)
jobs_in_progress.pop(running_job.pid)
try:
job = jobs.pop(0)
except IndexError:
continue
else:
job.start()
jobs_in_progress[job.pid] = job
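# --- Illustrative usage sketch (not part of the original module). ---
# The result file paths are placeholders; deflation shells out to the netCDF
# `nccopy` tool, so it must be available on the PATH for the jobs to succeed.
if __name__ == "__main__":
    example_files = [Path("results/SalishSea_1h_grid_T.nc"), Path("results/SalishSea_1h_grid_U.nc")]
    deflate(example_files, max_concurrent_jobs=2)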
| 32.672222
| 87
| 0.631185
|
import logging
import math
import multiprocessing
from pathlib import Path
import shlex
import subprocess
import time
import attr
import cliff.command
logger = logging.getLogger(__name__)
class Deflate(cliff.command.Command):
def get_parser(self, prog_name):
parser = super(Deflate, self).get_parser(prog_name)
parser.description = """
Deflate variables in netCDF files using Lempel-Ziv compression.
Converts files to netCDF-4 format.
The deflated file replaces the original file.
This command is effectively the same as running
ncks -4 -L -O FILEPATH FILEPATH
for each FILEPATH.
"""
parser.add_argument(
"filepaths",
nargs="+",
type=Path,
metavar="FILEPATH",
help="Path/name of file to be deflated.",
)
parser.add_argument(
"-j",
"--jobs",
type=int,
default=math.floor(multiprocessing.cpu_count() / 2),
help=(
"Maximum number of concurrent deflation processes allowed. "
"Defaults to 1/2 the number of cores detected."
),
)
return parser
def take_action(self, parsed_args):
deflate(parsed_args.filepaths, parsed_args.jobs)
@attr.s
class DeflateJob(object):
filepath = attr.ib()
dfl_lvl = attr.ib(default=4)
process = attr.ib(default=None)
pid = attr.ib(default=None)
returncode = attr.ib(default=None)
def start(self):
cmd = "nccopy -s -4 -d{0.dfl_lvl} {0.filepath} {0.filepath}.nccopy.tmp".format(
self
)
self.process = subprocess.Popen(
shlex.split(cmd),
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True,
)
self.pid = self.process.pid
logger.debug("deflating {0.filepath} in process {0.pid}".format(self))
@property
def done(self):
finished = False
self.returncode = self.process.poll()
if self.returncode is not None:
if self.returncode == 0:
Path("{0.filepath}.nccopy.tmp".format(self)).rename(self.filepath)
finished = True
logger.debug(
"deflating {0.filepath} finished "
"with return code {0.returncode}".format(self)
)
return finished
def deflate(filepaths, max_concurrent_jobs):
logger.info(
"Deflating in up to {} concurrent sub-processes".format(
int(max_concurrent_jobs)
)
)
jobs = [DeflateJob(fp) for fp in filepaths if fp.exists()]
jobs_in_progress = _launch_initial_jobs(jobs, max_concurrent_jobs)
while jobs or jobs_in_progress:
time.sleep(1)
_poll_and_launch(jobs, jobs_in_progress)
def _launch_initial_jobs(jobs, max_concurrent_jobs):
jobs_in_progress = {}
for process in range(int(max_concurrent_jobs)):
try:
job = jobs.pop(0)
except IndexError:
break
else:
job.start()
jobs_in_progress[job.pid] = job
return jobs_in_progress
def _poll_and_launch(jobs, jobs_in_progress):
for running_job in jobs_in_progress.copy().values():
if running_job.done:
result, _ = running_job.process.communicate()
logger.error(result) if result else logger.info(
"netCDF4 deflated {.filepath}".format(running_job)
)
jobs_in_progress.pop(running_job.pid)
try:
job = jobs.pop(0)
except IndexError:
continue
else:
job.start()
jobs_in_progress[job.pid] = job
| true
| true
|
1c42009c0a71aced9a6872f98bae003b20e2d1ac
| 10,039
|
py
|
Python
|
hive/db/adapter.py
|
abitmore/hivemind
|
a68d8dd49d5d79caccf988a1ff6cba4703adae49
|
[
"MIT"
] | 45
|
2021-05-23T21:06:49.000Z
|
2022-03-22T23:04:25.000Z
|
hive/db/adapter.py
|
abitmore/hivemind
|
a68d8dd49d5d79caccf988a1ff6cba4703adae49
|
[
"MIT"
] | 1
|
2021-08-03T10:57:23.000Z
|
2021-08-03T10:58:05.000Z
|
hive/db/adapter.py
|
abitmore/hivemind
|
a68d8dd49d5d79caccf988a1ff6cba4703adae49
|
[
"MIT"
] | 30
|
2021-05-28T16:23:53.000Z
|
2021-09-09T00:28:34.000Z
|
"""Wrapper for sqlalchemy, providing a simple interface."""
import logging
from time import perf_counter as perf
from collections import OrderedDict
from funcy.seqs import first
import sqlalchemy
import os
from hive.utils.stats import Stats
from hive.db.autoexplain_controller import AutoExplainWrapper
logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)
log = logging.getLogger(__name__)
class Db:
"""RDBMS adapter for hive. Handles connecting and querying."""
_instance = None
    # number of connections required to be able to execute some tasks concurrently
necessary_connections = 15
max_connections = 1
@classmethod
def instance(cls):
"""Get the shared instance."""
assert cls._instance, 'set_shared_instance was never called'
return cls._instance
@classmethod
def set_shared_instance(cls, db):
"""Set the global/shared db instance. Do not use."""
cls._instance = db
@classmethod
def set_max_connections(cls, db):
"""Remember maximum connections offered by postgres database."""
assert db is not None, "Database has to be initialized"
cls.max_connections = db.query_one("SELECT setting::int FROM pg_settings WHERE name = 'max_connections'")
if cls.necessary_connections > cls.max_connections:
log.info("A database offers only {} connections, but it's required {} connections".format(cls.max_connections, cls.necessary_connections))
else:
log.info("A database offers maximum connections: {}. Required {} connections.".format(cls.max_connections, cls.necessary_connections))
def __init__(self, url, name, enable_autoexplain = False):
"""Initialize an instance.
No work is performed here. Some modues might initialize an
instance before config is loaded.
"""
assert url, ('--database-url (or DATABASE_URL env) not specified; '
'e.g. postgresql://user:pass@localhost:5432/hive')
self._url = url
self._conn = []
self._engine = None
self._trx_active = False
self._prep_sql = {}
self.name = name
self._conn.append( { "connection" : self.engine().connect(), "name" : name } )
# Since we need to manage transactions ourselves, yet the
# core behavior of DBAPI (per PEP-0249) is that a transaction
# is always in progress, this COMMIT is a workaround to get
# back control (and used with autocommit=False query exec).
self._basic_connection = self.get_connection(0)
self._basic_connection.execute(sqlalchemy.text("COMMIT"))
        self.__autoexplain = None
if enable_autoexplain:
self.__autoexplain = AutoExplainWrapper( self )
def clone(self, name):
cloned = Db(self._url, name, self.__autoexplain)
cloned._engine = self._engine
return cloned
def close(self):
"""Close connection."""
try:
for item in self._conn:
if item is not None:
log.info("Closing database connection: '{}'".format(item['name']))
item['connection'].close()
item = None
self._conn = []
except Exception as ex:
log.exception("Error during connections closing: {}".format(ex))
raise ex
def close_engine(self):
"""Dispose db instance."""
try:
if self._engine is not None:
log.info("Disposing SQL engine")
self._engine.dispose()
self._engine = None
else:
log.info("SQL engine was already disposed")
except Exception as ex:
log.exception("Error during database closing: {}".format(ex))
raise ex
def get_connection(self, number):
assert len(self._conn) > number, "Incorrect number of connection. total: {} number: {}".format(len(self._conn), number)
assert 'connection' in self._conn[number], 'Incorrect construction of db connection'
return self._conn[number]['connection']
def engine(self):
"""Lazy-loaded SQLAlchemy engine."""
if self._engine is None:
self._engine = sqlalchemy.create_engine(
self._url,
isolation_level="READ UNCOMMITTED", # only supported in mysql
pool_size=self.max_connections,
pool_recycle=3600,
echo=False)
return self._engine
def get_new_connection(self, name):
self._conn.append( { "connection" : self.engine().connect(), "name" : name } )
return self.get_connection(len(self._conn) - 1)
def get_dialect(self):
return self.get_connection(0).dialect
def is_trx_active(self):
"""Check if a transaction is in progress."""
return self._trx_active
def explain(self):
if self.__autoexplain:
            return self.__autoexplain
        return self
def query(self, sql, **kwargs):
"""Perform a (*non-`SELECT`*) write query."""
# if prepared tuple, unpack
if isinstance(sql, tuple):
assert not kwargs
assert isinstance(sql[0], str)
assert isinstance(sql[1], dict)
sql, kwargs = sql
# this method is reserved for anything but SELECT
assert self._is_write_query(sql), sql
return self._query(sql, False, **kwargs)
def query_prepared(self, sql, **kwargs):
self._query(sql, True, **kwargs)
def query_no_return(self, sql, **kwargs):
self._query(sql, False, **kwargs)
def query_all(self, sql, **kwargs):
"""Perform a `SELECT n*m`"""
res = self._query(sql, False, **kwargs)
return res.fetchall()
def query_row(self, sql, **kwargs):
"""Perform a `SELECT 1*m`"""
res = self._query(sql, False, **kwargs)
return first(res)
def query_col(self, sql, **kwargs):
"""Perform a `SELECT n*1`"""
res = self._query(sql, False, **kwargs).fetchall()
return [r[0] for r in res]
def query_one(self, sql, **kwargs):
"""Perform a `SELECT 1*1`"""
row = first(self._query(sql, False, **kwargs))
return first(row) if row else None
def engine_name(self):
"""Get the name of the engine (e.g. `postgresql`, `mysql`)."""
_engine_name = self.get_dialect().name
if _engine_name not in ['postgresql', 'mysql']:
raise Exception("db engine %s not supported" % _engine_name)
return _engine_name
def batch_queries(self, queries, trx):
"""Process batches of prepared SQL tuples.
If `trx` is true, the queries will be wrapped in a transaction.
The format of queries is `[(sql, {params*}), ...]`
"""
if trx:
self.query("START TRANSACTION")
for (sql, params) in queries:
self.query(sql, **params)
if trx:
self.query("COMMIT")
@staticmethod
def build_insert(table, values, pk=None):
"""Generates an INSERT statement w/ bindings."""
values = OrderedDict(values)
# Delete PK field if blank
if pk:
pks = [pk] if isinstance(pk, str) else pk
for key in pks:
if not values[key]:
del values[key]
fields = list(values.keys())
cols = ', '.join([k for k in fields])
params = ', '.join([':'+k for k in fields])
sql = "INSERT INTO %s (%s) VALUES (%s)"
sql = sql % (table, cols, params)
return (sql, values)
@staticmethod
def build_update(table, values, pk):
"""Generates an UPDATE statement w/ bindings."""
assert pk and isinstance(pk, (str, list))
pks = [pk] if isinstance(pk, str) else pk
values = OrderedDict(values)
fields = list(values.keys())
update = ', '.join([k+" = :"+k for k in fields if k not in pks])
where = ' AND '.join([k+" = :"+k for k in fields if k in pks])
sql = "UPDATE %s SET %s WHERE %s"
sql = sql % (table, update, where)
return (sql, values)
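    # --- Illustrative example (not part of the original module); table and column
    # --- names are placeholders. build_insert/build_update return (sql, bindings):
    #   Db.build_insert('hive_posts', {'id': 1, 'author': 'alice'})
    #     -> ("INSERT INTO hive_posts (id, author) VALUES (:id, :author)", {...})
    #   Db.build_update('hive_posts', {'id': 1, 'author': 'bob'}, pk='id')
    #     -> ("UPDATE hive_posts SET author = :author WHERE id = :id", {...})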
def _sql_text(self, sql, is_prepared):
# if sql in self._prep_sql:
# query = self._prep_sql[sql]
# else:
# query = sqlalchemy.text(sql).execution_options(autocommit=False)
# self._prep_sql[sql] = query
if is_prepared:
query = sql
else:
query = sqlalchemy.text(sql)
return query
def _query(self, sql, is_prepared, **kwargs):
"""Send a query off to SQLAlchemy."""
if sql == 'START TRANSACTION':
assert not self._trx_active
self._trx_active = True
elif sql == 'COMMIT':
assert self._trx_active
self._trx_active = False
try:
start = perf()
query = self._sql_text(sql, is_prepared)
if 'log_query' in kwargs and kwargs['log_query']:
log.info("QUERY: {}".format(query))
result = self._basic_connection.execution_options(autocommit=False).execute(query, **kwargs)
if 'log_result' in kwargs and kwargs['log_result']:
log.info("RESULT: {}".format(result))
Stats.log_db(sql, perf() - start)
return result
except Exception as e:
log.warning("[SQL-ERR] %s in query %s (%s)",
e.__class__.__name__, sql, kwargs)
raise e
@staticmethod
def _is_write_query(sql):
"""Check if `sql` is a DELETE, UPDATE, COMMIT, ALTER, etc."""
action = sql.strip()[0:6].strip()
if action == 'SELECT':
return False
if action in ['DELETE', 'UPDATE', 'INSERT', 'COMMIT', 'START',
'ALTER', 'TRUNCA', 'CREATE', 'DROP I', 'DROP T']:
return True
raise Exception("unknown action: {}".format(sql))
| 35.725979
| 148
| 0.589302
|
import logging
from time import perf_counter as perf
from collections import OrderedDict
from funcy.seqs import first
import sqlalchemy
import os
from hive.utils.stats import Stats
from hive.db.autoexplain_controller import AutoExplainWrapper
logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)
log = logging.getLogger(__name__)
class Db:
_instance = None
necessary_connections = 15
max_connections = 1
@classmethod
def instance(cls):
assert cls._instance, 'set_shared_instance was never called'
return cls._instance
@classmethod
def set_shared_instance(cls, db):
cls._instance = db
@classmethod
def set_max_connections(cls, db):
assert db is not None, "Database has to be initialized"
cls.max_connections = db.query_one("SELECT setting::int FROM pg_settings WHERE name = 'max_connections'")
if cls.necessary_connections > cls.max_connections:
log.info("A database offers only {} connections, but it's required {} connections".format(cls.max_connections, cls.necessary_connections))
else:
log.info("A database offers maximum connections: {}. Required {} connections.".format(cls.max_connections, cls.necessary_connections))
def __init__(self, url, name, enable_autoexplain = False):
assert url, ('--database-url (or DATABASE_URL env) not specified; '
'e.g. postgresql://user:pass@localhost:5432/hive')
self._url = url
self._conn = []
self._engine = None
self._trx_active = False
self._prep_sql = {}
self.name = name
self._conn.append( { "connection" : self.engine().connect(), "name" : name } )
# Since we need to manage transactions ourselves, yet the
# core behavior of DBAPI (per PEP-0249) is that a transaction
# is always in progress, this COMMIT is a workaround to get
# back control (and used with autocommit=False query exec).
self._basic_connection = self.get_connection(0)
self._basic_connection.execute(sqlalchemy.text("COMMIT"))
        self.__autoexplain = None
if enable_autoexplain:
self.__autoexplain = AutoExplainWrapper( self )
def clone(self, name):
cloned = Db(self._url, name, self.__autoexplain)
cloned._engine = self._engine
return cloned
def close(self):
try:
for item in self._conn:
if item is not None:
log.info("Closing database connection: '{}'".format(item['name']))
item['connection'].close()
item = None
self._conn = []
except Exception as ex:
log.exception("Error during connections closing: {}".format(ex))
raise ex
def close_engine(self):
try:
if self._engine is not None:
log.info("Disposing SQL engine")
self._engine.dispose()
self._engine = None
else:
log.info("SQL engine was already disposed")
except Exception as ex:
log.exception("Error during database closing: {}".format(ex))
raise ex
def get_connection(self, number):
assert len(self._conn) > number, "Incorrect number of connection. total: {} number: {}".format(len(self._conn), number)
assert 'connection' in self._conn[number], 'Incorrect construction of db connection'
return self._conn[number]['connection']
def engine(self):
if self._engine is None:
self._engine = sqlalchemy.create_engine(
self._url,
isolation_level="READ UNCOMMITTED", # only supported in mysql
pool_size=self.max_connections,
pool_recycle=3600,
echo=False)
return self._engine
def get_new_connection(self, name):
self._conn.append( { "connection" : self.engine().connect(), "name" : name } )
return self.get_connection(len(self._conn) - 1)
def get_dialect(self):
return self.get_connection(0).dialect
def is_trx_active(self):
return self._trx_active
def explain(self):
if self.__autoexplain:
            return self.__autoexplain
        return self
def query(self, sql, **kwargs):
# if prepared tuple, unpack
if isinstance(sql, tuple):
assert not kwargs
assert isinstance(sql[0], str)
assert isinstance(sql[1], dict)
sql, kwargs = sql
# this method is reserved for anything but SELECT
assert self._is_write_query(sql), sql
return self._query(sql, False, **kwargs)
def query_prepared(self, sql, **kwargs):
self._query(sql, True, **kwargs)
def query_no_return(self, sql, **kwargs):
self._query(sql, False, **kwargs)
def query_all(self, sql, **kwargs):
res = self._query(sql, False, **kwargs)
return res.fetchall()
def query_row(self, sql, **kwargs):
res = self._query(sql, False, **kwargs)
return first(res)
def query_col(self, sql, **kwargs):
res = self._query(sql, False, **kwargs).fetchall()
return [r[0] for r in res]
def query_one(self, sql, **kwargs):
row = first(self._query(sql, False, **kwargs))
return first(row) if row else None
def engine_name(self):
_engine_name = self.get_dialect().name
if _engine_name not in ['postgresql', 'mysql']:
raise Exception("db engine %s not supported" % _engine_name)
return _engine_name
def batch_queries(self, queries, trx):
if trx:
self.query("START TRANSACTION")
for (sql, params) in queries:
self.query(sql, **params)
if trx:
self.query("COMMIT")
@staticmethod
def build_insert(table, values, pk=None):
values = OrderedDict(values)
# Delete PK field if blank
if pk:
pks = [pk] if isinstance(pk, str) else pk
for key in pks:
if not values[key]:
del values[key]
fields = list(values.keys())
cols = ', '.join([k for k in fields])
params = ', '.join([':'+k for k in fields])
sql = "INSERT INTO %s (%s) VALUES (%s)"
sql = sql % (table, cols, params)
return (sql, values)
@staticmethod
def build_update(table, values, pk):
assert pk and isinstance(pk, (str, list))
pks = [pk] if isinstance(pk, str) else pk
values = OrderedDict(values)
fields = list(values.keys())
update = ', '.join([k+" = :"+k for k in fields if k not in pks])
where = ' AND '.join([k+" = :"+k for k in fields if k in pks])
sql = "UPDATE %s SET %s WHERE %s"
sql = sql % (table, update, where)
return (sql, values)
def _sql_text(self, sql, is_prepared):
# if sql in self._prep_sql:
# query = self._prep_sql[sql]
# else:
# query = sqlalchemy.text(sql).execution_options(autocommit=False)
# self._prep_sql[sql] = query
if is_prepared:
query = sql
else:
query = sqlalchemy.text(sql)
return query
def _query(self, sql, is_prepared, **kwargs):
if sql == 'START TRANSACTION':
assert not self._trx_active
self._trx_active = True
elif sql == 'COMMIT':
assert self._trx_active
self._trx_active = False
try:
start = perf()
query = self._sql_text(sql, is_prepared)
if 'log_query' in kwargs and kwargs['log_query']:
log.info("QUERY: {}".format(query))
result = self._basic_connection.execution_options(autocommit=False).execute(query, **kwargs)
if 'log_result' in kwargs and kwargs['log_result']:
log.info("RESULT: {}".format(result))
Stats.log_db(sql, perf() - start)
return result
except Exception as e:
log.warning("[SQL-ERR] %s in query %s (%s)",
e.__class__.__name__, sql, kwargs)
raise e
@staticmethod
def _is_write_query(sql):
action = sql.strip()[0:6].strip()
if action == 'SELECT':
return False
if action in ['DELETE', 'UPDATE', 'INSERT', 'COMMIT', 'START',
'ALTER', 'TRUNCA', 'CREATE', 'DROP I', 'DROP T']:
return True
raise Exception("unknown action: {}".format(sql))
| true
| true
|
1c4200fb9ca8139d5608caf502d469c5d182aee3
| 12,081
|
py
|
Python
|
hydra/_internal/instantiate/_instantiate2.py
|
dmitryvinn/hydra-1
|
4d22c5628787d6b3a8d2303a99f906a06a4bbb28
|
[
"MIT"
] | null | null | null |
hydra/_internal/instantiate/_instantiate2.py
|
dmitryvinn/hydra-1
|
4d22c5628787d6b3a8d2303a99f906a06a4bbb28
|
[
"MIT"
] | null | null | null |
hydra/_internal/instantiate/_instantiate2.py
|
dmitryvinn/hydra-1
|
4d22c5628787d6b3a8d2303a99f906a06a4bbb28
|
[
"MIT"
] | null | null | null |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import copy
import functools
import sys
from enum import Enum
from typing import Any, Callable, Dict, List, Sequence, Tuple, Union
from omegaconf import OmegaConf, SCMode
from omegaconf._utils import is_structured_config
from hydra._internal.utils import _locate
from hydra.errors import InstantiationException
from hydra.types import ConvertMode, TargetConf
class _Keys(str, Enum):
"""Special keys in configs used by instantiate."""
TARGET = "_target_"
CONVERT = "_convert_"
RECURSIVE = "_recursive_"
ARGS = "_args_"
PARTIAL = "_partial_"
def _is_target(x: Any) -> bool:
if isinstance(x, dict):
return "_target_" in x
if OmegaConf.is_dict(x):
return "_target_" in x
return False
def _extract_pos_args(*input_args: Any, **kwargs: Any) -> Tuple[Any, Any]:
config_args = kwargs.pop(_Keys.ARGS, ())
output_args = config_args
if isinstance(config_args, Sequence):
if len(input_args) > 0:
output_args = input_args
else:
raise InstantiationException(
f"Unsupported _args_ type: {type(config_args).__name__}. value: {config_args}"
)
return output_args, kwargs
def _call_target(_target_: Callable, _partial_: bool, *args, **kwargs) -> Any: # type: ignore
"""Call target (type) with args and kwargs."""
try:
args, kwargs = _extract_pos_args(*args, **kwargs)
# detaching configs from parent.
# At this time, everything is resolved and the parent link can cause
# issues when serializing objects in some scenarios.
for arg in args:
if OmegaConf.is_config(arg):
arg._set_parent(None)
for v in kwargs.values():
if OmegaConf.is_config(v):
v._set_parent(None)
except Exception as e:
raise type(e)(
f"Error instantiating '{_convert_target_to_string(_target_)}' : {e}"
).with_traceback(sys.exc_info()[2])
try:
if _partial_:
return functools.partial(_target_, *args, **kwargs)
return _target_(*args, **kwargs)
except Exception as e:
raise InstantiationException(
f"Error instantiating '{_convert_target_to_string(_target_)}' : {repr(e)}"
) from e
def _convert_target_to_string(t: Any) -> Any:
if callable(t):
return f"{t.__module__}.{t.__qualname__}"
else:
return t
def _prepare_input_dict_or_list(d: Union[Dict[Any, Any], List[Any]]) -> Any:
res: Any
if isinstance(d, dict):
res = {}
for k, v in d.items():
if k == "_target_":
v = _convert_target_to_string(d["_target_"])
elif isinstance(v, (dict, list)):
v = _prepare_input_dict_or_list(v)
res[k] = v
elif isinstance(d, list):
res = []
for v in d:
if isinstance(v, (list, dict)):
v = _prepare_input_dict_or_list(v)
res.append(v)
else:
assert False
return res
def _resolve_target(
target: Union[str, type, Callable[..., Any]]
) -> Union[type, Callable[..., Any]]:
"""Resolve target string, type or callable into type or callable."""
if isinstance(target, str):
return _locate(target)
if isinstance(target, type):
return target
if callable(target):
return target
raise InstantiationException(
f"Unsupported target type: {type(target).__name__}. value: {target}"
)
def instantiate(config: Any, *args: Any, **kwargs: Any) -> Any:
"""
    :param config: A config object describing what to call and what params to use.
In addition to the parameters, the config must contain:
_target_ : target class or callable name (str)
And may contain:
_args_: List-like of positional arguments to pass to the target
_recursive_: Construct nested objects as well (bool).
True by default.
may be overridden via a _recursive_ key in
the kwargs
_convert_: Conversion strategy
none : Passed objects are DictConfig and ListConfig, default
partial : Passed objects are converted to dict and list, with
the exception of Structured Configs (and their fields).
all : Passed objects are dicts, lists and primitives without
a trace of OmegaConf containers
_partial_: If True, return functools.partial wrapped method or object
False by default. Configure per target.
:param args: Optional positional parameters pass-through
:param kwargs: Optional named parameters to override
parameters in the config object. Parameters not present
in the config objects are being passed as is to the target.
IMPORTANT: dataclasses instances in kwargs are interpreted as config
and cannot be used as passthrough
:return: if _target_ is a class name: the instantiated object
if _target_ is a callable: the return value of the call
"""
# Return None if config is None
if config is None:
return None
# TargetConf edge case
if isinstance(config, TargetConf) and config._target_ == "???":
# Specific check to give a good warning about failure to annotate _target_ as a string.
raise InstantiationException(
f"Missing value for {type(config).__name__}._target_. Check that it's properly annotated and overridden."
f"\nA common problem is forgetting to annotate _target_ as a string : '_target_: str = ...'"
)
if isinstance(config, (dict, list)):
config = _prepare_input_dict_or_list(config)
kwargs = _prepare_input_dict_or_list(kwargs)
# Structured Config always converted first to OmegaConf
if is_structured_config(config) or isinstance(config, (dict, list)):
config = OmegaConf.structured(config, flags={"allow_objects": True})
if OmegaConf.is_dict(config):
# Finalize config (convert targets to strings, merge with kwargs)
config_copy = copy.deepcopy(config)
config_copy._set_flag(
flags=["allow_objects", "struct", "readonly"], values=[True, False, False]
)
config_copy._set_parent(config._get_parent())
config = config_copy
if kwargs:
config = OmegaConf.merge(config, kwargs)
OmegaConf.resolve(config)
_recursive_ = config.pop(_Keys.RECURSIVE, True)
_convert_ = config.pop(_Keys.CONVERT, ConvertMode.NONE)
_partial_ = config.pop(_Keys.PARTIAL, False)
return instantiate_node(
config, *args, recursive=_recursive_, convert=_convert_, partial=_partial_
)
elif OmegaConf.is_list(config):
# Finalize config (convert targets to strings, merge with kwargs)
config_copy = copy.deepcopy(config)
config_copy._set_flag(
flags=["allow_objects", "struct", "readonly"], values=[True, False, False]
)
config_copy._set_parent(config._get_parent())
config = config_copy
OmegaConf.resolve(config)
_recursive_ = kwargs.pop(_Keys.RECURSIVE, True)
_convert_ = kwargs.pop(_Keys.CONVERT, ConvertMode.NONE)
_partial_ = kwargs.pop(_Keys.PARTIAL, False)
if _partial_:
raise InstantiationException(
"The _partial_ keyword is not compatible with top-level list instantiation"
)
return instantiate_node(
config, *args, recursive=_recursive_, convert=_convert_, partial=_partial_
)
else:
raise InstantiationException(
"Top level config has to be OmegaConf DictConfig/ListConfig, "
+ "plain dict/list, or a Structured Config class or instance."
)
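# --- Illustrative usage sketch (not part of the original module). ---
# `my_app.Optimizer` is a hypothetical class with (algo, lr) parameters; the lines
# are commented out because that class does not exist in this code base.
#
#   cfg = {"_target_": "my_app.Optimizer", "algo": "SGD", "lr": 0.01}
#   opt = instantiate(cfg)           # calls my_app.Optimizer(algo="SGD", lr=0.01)
#   opt = instantiate(cfg, lr=0.2)   # keyword overrides take precedence over the config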
def _convert_node(node: Any, convert: Union[ConvertMode, str]) -> Any:
if OmegaConf.is_config(node):
if convert == ConvertMode.ALL:
node = OmegaConf.to_container(node, resolve=True)
elif convert == ConvertMode.PARTIAL:
node = OmegaConf.to_container(
node, resolve=True, structured_config_mode=SCMode.DICT_CONFIG
)
return node
def instantiate_node(
node: Any,
*args: Any,
convert: Union[str, ConvertMode] = ConvertMode.NONE,
recursive: bool = True,
partial: bool = False,
) -> Any:
# Return None if config is None
if node is None or (OmegaConf.is_config(node) and node._is_none()):
return None
if not OmegaConf.is_config(node):
return node
# Override parent modes from config if specified
if OmegaConf.is_dict(node):
# using getitem instead of get(key, default) because OmegaConf will raise an exception
# if the key type is incompatible on get.
convert = node[_Keys.CONVERT] if _Keys.CONVERT in node else convert
recursive = node[_Keys.RECURSIVE] if _Keys.RECURSIVE in node else recursive
partial = node[_Keys.PARTIAL] if _Keys.PARTIAL in node else partial
if not isinstance(recursive, bool):
raise TypeError(f"_recursive_ flag must be a bool, got {type(recursive)}")
if not isinstance(partial, bool):
raise TypeError(f"_partial_ flag must be a bool, got {type( partial )}")
# If OmegaConf list, create new list of instances if recursive
if OmegaConf.is_list(node):
items = [
instantiate_node(item, convert=convert, recursive=recursive)
for item in node._iter_ex(resolve=True)
]
if convert in (ConvertMode.ALL, ConvertMode.PARTIAL):
# If ALL or PARTIAL, use plain list as container
return items
else:
# Otherwise, use ListConfig as container
lst = OmegaConf.create(items, flags={"allow_objects": True})
lst._set_parent(node)
return lst
elif OmegaConf.is_dict(node):
exclude_keys = set({"_target_", "_convert_", "_recursive_", "_partial_"})
if _is_target(node):
_target_ = _resolve_target(node.get(_Keys.TARGET))
kwargs = {}
for key, value in node.items():
if key not in exclude_keys:
if recursive:
value = instantiate_node(
value, convert=convert, recursive=recursive
)
kwargs[key] = _convert_node(value, convert)
return _call_target(_target_, partial, *args, **kwargs)
else:
# If ALL or PARTIAL non structured, instantiate in dict and resolve interpolations eagerly.
if convert == ConvertMode.ALL or (
convert == ConvertMode.PARTIAL and node._metadata.object_type is None
):
dict_items = {}
for key, value in node.items():
# list items inherits recursive flag from the containing dict.
dict_items[key] = instantiate_node(
value, convert=convert, recursive=recursive
)
return dict_items
else:
# Otherwise use DictConfig and resolve interpolations lazily.
cfg = OmegaConf.create({}, flags={"allow_objects": True})
for key, value in node.items():
cfg[key] = instantiate_node(
value, convert=convert, recursive=recursive
)
cfg._set_parent(node)
cfg._metadata.object_type = node._metadata.object_type
return cfg
else:
assert False, f"Unexpected config type : {type(node).__name__}"
| 38.231013
| 117
| 0.612201
|
import copy
import functools
import sys
from enum import Enum
from typing import Any, Callable, Dict, List, Sequence, Tuple, Union
from omegaconf import OmegaConf, SCMode
from omegaconf._utils import is_structured_config
from hydra._internal.utils import _locate
from hydra.errors import InstantiationException
from hydra.types import ConvertMode, TargetConf
class _Keys(str, Enum):
TARGET = "_target_"
CONVERT = "_convert_"
RECURSIVE = "_recursive_"
ARGS = "_args_"
PARTIAL = "_partial_"
def _is_target(x: Any) -> bool:
if isinstance(x, dict):
return "_target_" in x
if OmegaConf.is_dict(x):
return "_target_" in x
return False
def _extract_pos_args(*input_args: Any, **kwargs: Any) -> Tuple[Any, Any]:
config_args = kwargs.pop(_Keys.ARGS, ())
output_args = config_args
if isinstance(config_args, Sequence):
if len(input_args) > 0:
output_args = input_args
else:
raise InstantiationException(
f"Unsupported _args_ type: {type(config_args).__name__}. value: {config_args}"
)
return output_args, kwargs
def _call_target(_target_: Callable, _partial_: bool, *args, **kwargs) -> Any:
try:
args, kwargs = _extract_pos_args(*args, **kwargs)
for arg in args:
if OmegaConf.is_config(arg):
arg._set_parent(None)
for v in kwargs.values():
if OmegaConf.is_config(v):
v._set_parent(None)
except Exception as e:
raise type(e)(
f"Error instantiating '{_convert_target_to_string(_target_)}' : {e}"
).with_traceback(sys.exc_info()[2])
try:
if _partial_:
return functools.partial(_target_, *args, **kwargs)
return _target_(*args, **kwargs)
except Exception as e:
raise InstantiationException(
f"Error instantiating '{_convert_target_to_string(_target_)}' : {repr(e)}"
) from e
def _convert_target_to_string(t: Any) -> Any:
if callable(t):
return f"{t.__module__}.{t.__qualname__}"
else:
return t
def _prepare_input_dict_or_list(d: Union[Dict[Any, Any], List[Any]]) -> Any:
res: Any
if isinstance(d, dict):
res = {}
for k, v in d.items():
if k == "_target_":
v = _convert_target_to_string(d["_target_"])
elif isinstance(v, (dict, list)):
v = _prepare_input_dict_or_list(v)
res[k] = v
elif isinstance(d, list):
res = []
for v in d:
if isinstance(v, (list, dict)):
v = _prepare_input_dict_or_list(v)
res.append(v)
else:
assert False
return res
def _resolve_target(
target: Union[str, type, Callable[..., Any]]
) -> Union[type, Callable[..., Any]]:
if isinstance(target, str):
return _locate(target)
if isinstance(target, type):
return target
if callable(target):
return target
raise InstantiationException(
f"Unsupported target type: {type(target).__name__}. value: {target}"
)
def instantiate(config: Any, *args: Any, **kwargs: Any) -> Any:
if config is None:
return None
if isinstance(config, TargetConf) and config._target_ == "???":
raise InstantiationException(
f"Missing value for {type(config).__name__}._target_. Check that it's properly annotated and overridden."
f"\nA common problem is forgetting to annotate _target_ as a string : '_target_: str = ...'"
)
if isinstance(config, (dict, list)):
config = _prepare_input_dict_or_list(config)
kwargs = _prepare_input_dict_or_list(kwargs)
# Structured Config always converted first to OmegaConf
if is_structured_config(config) or isinstance(config, (dict, list)):
config = OmegaConf.structured(config, flags={"allow_objects": True})
if OmegaConf.is_dict(config):
# Finalize config (convert targets to strings, merge with kwargs)
config_copy = copy.deepcopy(config)
config_copy._set_flag(
flags=["allow_objects", "struct", "readonly"], values=[True, False, False]
)
config_copy._set_parent(config._get_parent())
config = config_copy
if kwargs:
config = OmegaConf.merge(config, kwargs)
OmegaConf.resolve(config)
_recursive_ = config.pop(_Keys.RECURSIVE, True)
_convert_ = config.pop(_Keys.CONVERT, ConvertMode.NONE)
_partial_ = config.pop(_Keys.PARTIAL, False)
return instantiate_node(
config, *args, recursive=_recursive_, convert=_convert_, partial=_partial_
)
elif OmegaConf.is_list(config):
# Finalize config (convert targets to strings, merge with kwargs)
config_copy = copy.deepcopy(config)
config_copy._set_flag(
flags=["allow_objects", "struct", "readonly"], values=[True, False, False]
)
config_copy._set_parent(config._get_parent())
config = config_copy
OmegaConf.resolve(config)
_recursive_ = kwargs.pop(_Keys.RECURSIVE, True)
_convert_ = kwargs.pop(_Keys.CONVERT, ConvertMode.NONE)
_partial_ = kwargs.pop(_Keys.PARTIAL, False)
if _partial_:
raise InstantiationException(
"The _partial_ keyword is not compatible with top-level list instantiation"
)
return instantiate_node(
config, *args, recursive=_recursive_, convert=_convert_, partial=_partial_
)
else:
raise InstantiationException(
"Top level config has to be OmegaConf DictConfig/ListConfig, "
+ "plain dict/list, or a Structured Config class or instance."
)
def _convert_node(node: Any, convert: Union[ConvertMode, str]) -> Any:
if OmegaConf.is_config(node):
if convert == ConvertMode.ALL:
node = OmegaConf.to_container(node, resolve=True)
elif convert == ConvertMode.PARTIAL:
node = OmegaConf.to_container(
node, resolve=True, structured_config_mode=SCMode.DICT_CONFIG
)
return node
def instantiate_node(
node: Any,
*args: Any,
convert: Union[str, ConvertMode] = ConvertMode.NONE,
recursive: bool = True,
partial: bool = False,
) -> Any:
# Return None if config is None
if node is None or (OmegaConf.is_config(node) and node._is_none()):
return None
if not OmegaConf.is_config(node):
return node
# Override parent modes from config if specified
if OmegaConf.is_dict(node):
# using getitem instead of get(key, default) because OmegaConf will raise an exception
# if the key type is incompatible on get.
convert = node[_Keys.CONVERT] if _Keys.CONVERT in node else convert
recursive = node[_Keys.RECURSIVE] if _Keys.RECURSIVE in node else recursive
partial = node[_Keys.PARTIAL] if _Keys.PARTIAL in node else partial
if not isinstance(recursive, bool):
raise TypeError(f"_recursive_ flag must be a bool, got {type(recursive)}")
if not isinstance(partial, bool):
raise TypeError(f"_partial_ flag must be a bool, got {type( partial )}")
# If OmegaConf list, create new list of instances if recursive
if OmegaConf.is_list(node):
items = [
instantiate_node(item, convert=convert, recursive=recursive)
for item in node._iter_ex(resolve=True)
]
if convert in (ConvertMode.ALL, ConvertMode.PARTIAL):
# If ALL or PARTIAL, use plain list as container
return items
else:
# Otherwise, use ListConfig as container
lst = OmegaConf.create(items, flags={"allow_objects": True})
lst._set_parent(node)
return lst
elif OmegaConf.is_dict(node):
exclude_keys = set({"_target_", "_convert_", "_recursive_", "_partial_"})
if _is_target(node):
_target_ = _resolve_target(node.get(_Keys.TARGET))
kwargs = {}
for key, value in node.items():
if key not in exclude_keys:
if recursive:
value = instantiate_node(
value, convert=convert, recursive=recursive
)
kwargs[key] = _convert_node(value, convert)
return _call_target(_target_, partial, *args, **kwargs)
else:
# If ALL or PARTIAL non structured, instantiate in dict and resolve interpolations eagerly.
if convert == ConvertMode.ALL or (
convert == ConvertMode.PARTIAL and node._metadata.object_type is None
):
dict_items = {}
for key, value in node.items():
# list items inherits recursive flag from the containing dict.
dict_items[key] = instantiate_node(
value, convert=convert, recursive=recursive
)
return dict_items
else:
# Otherwise use DictConfig and resolve interpolations lazily.
cfg = OmegaConf.create({}, flags={"allow_objects": True})
for key, value in node.items():
cfg[key] = instantiate_node(
value, convert=convert, recursive=recursive
)
cfg._set_parent(node)
cfg._metadata.object_type = node._metadata.object_type
return cfg
else:
assert False, f"Unexpected config type : {type(node).__name__}"
| true
| true
|
1c42012066391dbb8a299e980528cd00c950bf18
| 762
|
py
|
Python
|
metagym/__init__.py
|
WorldEditors/MetaGym
|
ad7263fcc80abd6831965ab6b556d54f75e17315
|
[
"Apache-2.0"
] | null | null | null |
metagym/__init__.py
|
WorldEditors/MetaGym
|
ad7263fcc80abd6831965ab6b556d54f75e17315
|
[
"Apache-2.0"
] | null | null | null |
metagym/__init__.py
|
WorldEditors/MetaGym
|
ad7263fcc80abd6831965ab6b556d54f75e17315
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def make_env(env_id, **kwargs):
raise Exception("metagym.make_env is deprecated. Please do \"import metagym.xxxenv\" and use gym.make instead")
| 42.333333
| 115
| 0.757218
|
def make_env(env_id, **kwargs):
raise Exception("metagym.make_env is deprecated. Please do \"import metagym.xxxenv\" and use gym.make instead")
| true
| true
|
1c4202d16eb62f826aa2cd0e3997cb5cf90f9be5
| 23,112
|
py
|
Python
|
research/cognitive_mapping_and_planning/tfcode/vision_baseline_lstm.py
|
caoxingchao/tensorflow_models
|
48fd7cde3d3492a8c67ec9eec95211fbee341bc7
|
[
"Apache-2.0"
] | null | null | null |
research/cognitive_mapping_and_planning/tfcode/vision_baseline_lstm.py
|
caoxingchao/tensorflow_models
|
48fd7cde3d3492a8c67ec9eec95211fbee341bc7
|
[
"Apache-2.0"
] | null | null | null |
research/cognitive_mapping_and_planning/tfcode/vision_baseline_lstm.py
|
caoxingchao/tensorflow_models
|
48fd7cde3d3492a8c67ec9eec95211fbee341bc7
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import numpy as np
import tensorflow as tf
from tensorflow.contrib import slim
import logging
from tensorflow.python.platform import app
from tensorflow.python.platform import flags
from src import utils
import src.file_utils as fu
import tfcode.nav_utils as nu
from tfcode import tf_utils
setup_train_step_kwargs = nu.default_train_step_kwargs
compute_losses_multi_or = nu.compute_losses_multi_or
get_repr_from_image = nu.get_repr_from_image
_save_d_at_t = nu.save_d_at_t
_save_all = nu.save_all
_eval_ap = nu.eval_ap
_eval_dist = nu.eval_dist
_plot_trajectories = nu.plot_trajectories
def lstm_online(cell_fn, num_steps, inputs, state, varscope):
# inputs is B x num_steps x C, C channels.
# state is 2 tuple with B x 1 x C1, B x 1 x C2
# Output state is always B x 1 x C
inputs = tf.unstack(inputs, axis=1, num=num_steps)
state = tf.unstack(state, axis=1, num=1)[0]
outputs = []
if num_steps > 1:
varscope.reuse_variables()
for s in range(num_steps):
output, state = cell_fn(inputs[s], state)
outputs.append(output)
outputs = tf.stack(outputs, axis=1)
state = tf.stack([state], axis=1)
return outputs, state
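# Shape walk-through (B, C, C1, C2 are illustrative sizes): because the callers
# build the cell with state_is_tuple=False, `state` arrives as a single
# B x 1 x (C1 + C2) tensor; it is squeezed to B x (C1 + C2), threaded through
# `cell_fn` for num_steps steps, and re-stacked to B x 1 x (C1 + C2), while
# `outputs` stacks the per-step cell outputs into B x num_steps x C_out.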
def _inputs(problem, lstm_states, lstm_state_dims):
# Set up inputs.
with tf.name_scope('inputs'):
n_views = problem.n_views
inputs = []
inputs.append(('orig_maps', tf.float32,
(problem.batch_size, 1, None, None, 1)))
inputs.append(('goal_loc', tf.float32,
(problem.batch_size, problem.num_goals, 2)))
# For initing LSTM.
inputs.append(('rel_goal_loc_at_start', tf.float32,
(problem.batch_size, problem.num_goals,
problem.rel_goal_loc_dim)))
common_input_data, _ = tf_utils.setup_inputs(inputs)
inputs = []
inputs.append(('imgs', tf.float32, (problem.batch_size, None, n_views,
problem.img_height, problem.img_width,
problem.img_channels)))
# Goal location as a tuple of delta location and delta theta.
inputs.append(('rel_goal_loc', tf.float32, (problem.batch_size, None,
problem.rel_goal_loc_dim)))
if problem.outputs.visit_count:
inputs.append(('visit_count', tf.int32, (problem.batch_size, None, 1)))
inputs.append(('last_visit', tf.int32, (problem.batch_size, None, 1)))
for i, (state, dim) in enumerate(zip(lstm_states, lstm_state_dims)):
inputs.append((state, tf.float32, (problem.batch_size, 1, dim)))
if problem.outputs.egomotion:
inputs.append(('incremental_locs', tf.float32,
(problem.batch_size, None, 2)))
inputs.append(('incremental_thetas', tf.float32,
(problem.batch_size, None, 1)))
inputs.append(('step_number', tf.int32, (1, None, 1)))
inputs.append(('node_ids', tf.int32, (problem.batch_size, None,
problem.node_ids_dim)))
inputs.append(('perturbs', tf.float32, (problem.batch_size, None,
problem.perturbs_dim)))
# For plotting result plots
inputs.append(('loc_on_map', tf.float32, (problem.batch_size, None, 2)))
inputs.append(('gt_dist_to_goal', tf.float32, (problem.batch_size, None, 1)))
step_input_data, _ = tf_utils.setup_inputs(inputs)
inputs = []
inputs.append(('executed_actions', tf.int32, (problem.batch_size, None)))
inputs.append(('rewards', tf.float32, (problem.batch_size, None)))
inputs.append(('action_sample_wts', tf.float32, (problem.batch_size, None)))
inputs.append(('action', tf.int32, (problem.batch_size, None,
problem.num_actions)))
train_data, _ = tf_utils.setup_inputs(inputs)
train_data.update(step_input_data)
train_data.update(common_input_data)
return common_input_data, step_input_data, train_data
def _add_summaries(m, summary_mode, arop_full_summary_iters):
summarize_ops = [m.lr_op, m.global_step_op, m.sample_gt_prob_op,
m.total_loss_op, m.data_loss_op, m.reg_loss_op] + m.acc_ops
summarize_names = ['lr', 'global_step', 'sample_gt_prob_op', 'total_loss',
'data_loss', 'reg_loss'] + \
['acc_{:d}'.format(i) for i in range(len(m.acc_ops))]
to_aggregate = [0, 0, 0, 1, 1, 1] + [1]*len(m.acc_ops)
scope_name = 'summary'
with tf.name_scope(scope_name):
s_ops = nu.add_default_summaries(summary_mode, arop_full_summary_iters,
summarize_ops, summarize_names,
to_aggregate, m.action_prob_op,
m.input_tensors, scope_name=scope_name)
m.summary_ops = {summary_mode: s_ops}
def visit_count_fc(visit_count, last_visit, embed_neurons, wt_decay, fc_dropout, is_training):
with tf.variable_scope('embed_visit_count'):
visit_count = tf.reshape(visit_count, shape=[-1])
last_visit = tf.reshape(last_visit, shape=[-1])
visit_count = tf.clip_by_value(visit_count, clip_value_min=-1,
clip_value_max=15)
last_visit = tf.clip_by_value(last_visit, clip_value_min=-1,
clip_value_max=15)
visit_count = tf.one_hot(visit_count, depth=16, axis=1, dtype=tf.float32,
on_value=10., off_value=0.)
last_visit = tf.one_hot(last_visit, depth=16, axis=1, dtype=tf.float32,
on_value=10., off_value=0.)
f = tf.concat([visit_count, last_visit], 1)
x, _ = tf_utils.fc_network(
f, neurons=embed_neurons, wt_decay=wt_decay, name='visit_count_embed',
offset=0, batch_norm_param=None, dropout_ratio=fc_dropout,
is_training=is_training)
return x
def lstm_setup(name, x, batch_size, is_single_step, lstm_dim, lstm_out,
num_steps, state_input_op):
# returns state_name, state_init_op, updated_state_op, out_op
with tf.name_scope('reshape_'+name):
sh = x.get_shape().as_list()
x = tf.reshape(x, shape=[batch_size, -1, sh[-1]])
with tf.variable_scope(name) as varscope:
cell = tf.contrib.rnn.LSTMCell(
num_units=lstm_dim, forget_bias=1.0, state_is_tuple=False,
num_proj=lstm_out, use_peepholes=True,
initializer=tf.random_uniform_initializer(-0.01, 0.01, seed=0),
cell_clip=None, proj_clip=None)
sh = [batch_size, 1, lstm_dim+lstm_out]
state_init_op = tf.constant(0., dtype=tf.float32, shape=sh)
fn = lambda ns: lstm_online(cell, ns, x, state_input_op, varscope)
out_op, updated_state_op = tf.cond(is_single_step, lambda: fn(1), lambda:
fn(num_steps))
return name, state_init_op, updated_state_op, out_op
def combine_setup(name, combine_type, embed_img, embed_goal, num_img_neurons=None,
num_goal_neurons=None):
with tf.name_scope(name + '_' + combine_type):
if combine_type == 'add':
# Simple concat features from goal and image
out = embed_img + embed_goal
elif combine_type == 'multiply':
# Multiply things together
re_embed_img = tf.reshape(
embed_img, shape=[-1, num_img_neurons / num_goal_neurons,
num_goal_neurons])
re_embed_goal = tf.reshape(embed_goal, shape=[-1, num_goal_neurons, 1])
x = tf.matmul(re_embed_img, re_embed_goal, transpose_a=False, transpose_b=False)
out = slim.flatten(x)
elif combine_type == 'none' or combine_type == 'imgonly':
out = embed_img
elif combine_type == 'goalonly':
out = embed_goal
else:
logging.fatal('Undefined combine_type: %s', combine_type)
return out
def preprocess_egomotion(locs, thetas):
with tf.name_scope('pre_ego'):
pre_ego = tf.concat([locs, tf.sin(thetas), tf.cos(thetas)], 2)
sh = pre_ego.get_shape().as_list()
pre_ego = tf.reshape(pre_ego, [-1, sh[-1]])
return pre_ego
def setup_to_run(m, args, is_training, batch_norm_is_training, summary_mode):
# Set up the model.
tf.set_random_seed(args.solver.seed)
task_params = args.navtask.task_params
num_steps = task_params.num_steps
num_goals = task_params.num_goals
num_actions = task_params.num_actions
num_actions_ = num_actions
n_views = task_params.n_views
batch_norm_is_training_op = \
tf.placeholder_with_default(batch_norm_is_training, shape=[],
name='batch_norm_is_training_op')
# Setup the inputs
m.input_tensors = {}
lstm_states = []; lstm_state_dims = [];
state_names = []; updated_state_ops = []; init_state_ops = [];
if args.arch.lstm_output:
lstm_states += ['lstm_output']
lstm_state_dims += [args.arch.lstm_output_dim+task_params.num_actions]
if args.arch.lstm_ego:
lstm_states += ['lstm_ego']
lstm_state_dims += [args.arch.lstm_ego_dim + args.arch.lstm_ego_out]
lstm_states += ['lstm_img']
lstm_state_dims += [args.arch.lstm_img_dim + args.arch.lstm_img_out]
elif args.arch.lstm_img:
# An LSTM only on the image
lstm_states += ['lstm_img']
lstm_state_dims += [args.arch.lstm_img_dim + args.arch.lstm_img_out]
else:
# No LSTMs involved here.
None
m.input_tensors['common'], m.input_tensors['step'], m.input_tensors['train'] = \
_inputs(task_params, lstm_states, lstm_state_dims)
with tf.name_scope('check_size'):
is_single_step = tf.equal(tf.unstack(tf.shape(m.input_tensors['step']['imgs']),
num=6)[1], 1)
images_reshaped = tf.reshape(m.input_tensors['step']['imgs'],
shape=[-1, task_params.img_height, task_params.img_width,
task_params.img_channels], name='re_image')
rel_goal_loc_reshaped = tf.reshape(m.input_tensors['step']['rel_goal_loc'],
shape=[-1, task_params.rel_goal_loc_dim], name='re_rel_goal_loc')
x, vars_ = get_repr_from_image(
images_reshaped, task_params.modalities, task_params.data_augment,
args.arch.encoder, args.solver.freeze_conv, args.solver.wt_decay,
is_training)
# Reshape into nice things so that these can be accumulated over time steps
# for faster backprop.
sh_before = x.get_shape().as_list()
m.encoder_output = tf.reshape(
x, shape=[task_params.batch_size, -1, n_views] + sh_before[1:])
x = tf.reshape(m.encoder_output, shape=[-1] + sh_before[1:])
# Add a layer to reduce dimensions for a fc layer.
if args.arch.dim_reduce_neurons > 0:
ks = 1; neurons = args.arch.dim_reduce_neurons;
init_var = np.sqrt(2.0/(ks**2)/neurons)
batch_norm_param = args.arch.batch_norm_param
batch_norm_param['is_training'] = batch_norm_is_training_op
m.conv_feat = slim.conv2d(
x, neurons, kernel_size=ks, stride=1, normalizer_fn=slim.batch_norm,
normalizer_params=batch_norm_param, padding='SAME', scope='dim_reduce',
weights_regularizer=slim.l2_regularizer(args.solver.wt_decay),
weights_initializer=tf.random_normal_initializer(stddev=init_var))
reshape_conv_feat = slim.flatten(m.conv_feat)
sh = reshape_conv_feat.get_shape().as_list()
m.reshape_conv_feat = tf.reshape(reshape_conv_feat,
shape=[-1, sh[1]*n_views])
# Restore these from a checkpoint.
if args.solver.pretrained_path is not None:
m.init_fn = slim.assign_from_checkpoint_fn(args.solver.pretrained_path,
vars_)
else:
m.init_fn = None
# Hit the goal_location with a bunch of fully connected layers, to embed it
# into some space.
with tf.variable_scope('embed_goal'):
batch_norm_param = args.arch.batch_norm_param
batch_norm_param['is_training'] = batch_norm_is_training_op
m.embed_goal, _ = tf_utils.fc_network(
rel_goal_loc_reshaped, neurons=args.arch.goal_embed_neurons,
wt_decay=args.solver.wt_decay, name='goal_embed', offset=0,
batch_norm_param=batch_norm_param, dropout_ratio=args.arch.fc_dropout,
is_training=is_training)
if args.arch.embed_goal_for_state:
with tf.variable_scope('embed_goal_for_state'):
batch_norm_param = args.arch.batch_norm_param
batch_norm_param['is_training'] = batch_norm_is_training_op
m.embed_goal_for_state, _ = tf_utils.fc_network(
m.input_tensors['common']['rel_goal_loc_at_start'][:,0,:],
neurons=args.arch.goal_embed_neurons, wt_decay=args.solver.wt_decay,
name='goal_embed', offset=0, batch_norm_param=batch_norm_param,
dropout_ratio=args.arch.fc_dropout, is_training=is_training)
# Hit the goal_location with a bunch of fully connected layers, to embed it
# into some space.
with tf.variable_scope('embed_img'):
batch_norm_param = args.arch.batch_norm_param
batch_norm_param['is_training'] = batch_norm_is_training_op
m.embed_img, _ = tf_utils.fc_network(
m.reshape_conv_feat, neurons=args.arch.img_embed_neurons,
wt_decay=args.solver.wt_decay, name='img_embed', offset=0,
batch_norm_param=batch_norm_param, dropout_ratio=args.arch.fc_dropout,
is_training=is_training)
# For lstm_ego, and lstm_image, embed the ego motion, accumulate it into an
# LSTM, combine with image features and accumulate those in an LSTM. Finally
# combine what you get from the image LSTM with the goal to output an action.
if args.arch.lstm_ego:
ego_reshaped = preprocess_egomotion(m.input_tensors['step']['incremental_locs'],
m.input_tensors['step']['incremental_thetas'])
with tf.variable_scope('embed_ego'):
batch_norm_param = args.arch.batch_norm_param
batch_norm_param['is_training'] = batch_norm_is_training_op
m.embed_ego, _ = tf_utils.fc_network(
ego_reshaped, neurons=args.arch.ego_embed_neurons,
wt_decay=args.solver.wt_decay, name='ego_embed', offset=0,
batch_norm_param=batch_norm_param, dropout_ratio=args.arch.fc_dropout,
is_training=is_training)
state_name, state_init_op, updated_state_op, out_op = lstm_setup(
'lstm_ego', m.embed_ego, task_params.batch_size, is_single_step,
args.arch.lstm_ego_dim, args.arch.lstm_ego_out, num_steps*num_goals,
m.input_tensors['step']['lstm_ego'])
state_names += [state_name]
init_state_ops += [state_init_op]
updated_state_ops += [updated_state_op]
# Combine the output with the vision features.
m.img_ego_op = combine_setup('img_ego', args.arch.combine_type_ego,
m.embed_img, out_op,
args.arch.img_embed_neurons[-1],
args.arch.lstm_ego_out)
# LSTM on these vision features.
state_name, state_init_op, updated_state_op, out_op = lstm_setup(
'lstm_img', m.img_ego_op, task_params.batch_size, is_single_step,
args.arch.lstm_img_dim, args.arch.lstm_img_out, num_steps*num_goals,
m.input_tensors['step']['lstm_img'])
state_names += [state_name]
init_state_ops += [state_init_op]
updated_state_ops += [updated_state_op]
m.img_for_goal = out_op
num_img_for_goal_neurons = args.arch.lstm_img_out
elif args.arch.lstm_img:
# LSTM on just the image features.
state_name, state_init_op, updated_state_op, out_op = lstm_setup(
'lstm_img', m.embed_img, task_params.batch_size, is_single_step,
args.arch.lstm_img_dim, args.arch.lstm_img_out, num_steps*num_goals,
m.input_tensors['step']['lstm_img'])
state_names += [state_name]
init_state_ops += [state_init_op]
updated_state_ops += [updated_state_op]
m.img_for_goal = out_op
num_img_for_goal_neurons = args.arch.lstm_img_out
else:
m.img_for_goal = m.embed_img
num_img_for_goal_neurons = args.arch.img_embed_neurons[-1]
if args.arch.use_visit_count:
m.embed_visit_count = visit_count_fc(
m.input_tensors['step']['visit_count'],
m.input_tensors['step']['last_visit'], args.arch.goal_embed_neurons,
args.solver.wt_decay, args.arch.fc_dropout, is_training=is_training)
m.embed_goal = m.embed_goal + m.embed_visit_count
m.combined_f = combine_setup('img_goal', args.arch.combine_type,
m.img_for_goal, m.embed_goal,
num_img_for_goal_neurons,
args.arch.goal_embed_neurons[-1])
# LSTM on the combined representation.
if args.arch.lstm_output:
name = 'lstm_output'
# A few fully connected layers here.
with tf.variable_scope('action_pred'):
batch_norm_param = args.arch.batch_norm_param
batch_norm_param['is_training'] = batch_norm_is_training_op
x, _ = tf_utils.fc_network(
m.combined_f, neurons=args.arch.pred_neurons,
wt_decay=args.solver.wt_decay, name='pred', offset=0,
batch_norm_param=batch_norm_param, dropout_ratio=args.arch.fc_dropout)
if args.arch.lstm_output_init_state_from_goal:
# Use the goal embedding to initialize the LSTM state.
# UGLY CLUGGY HACK: if this is doing computation for a single time step
# then this will not involve back prop, so we can use the state input from
# the feed dict, otherwise we compute the state representation from the
# goal and feed that in. Necessary for using goal location to generate the
# state representation.
m.embed_goal_for_state = tf.expand_dims(m.embed_goal_for_state, dim=1)
state_op = tf.cond(is_single_step, lambda: m.input_tensors['step'][name],
lambda: m.embed_goal_for_state)
state_name, state_init_op, updated_state_op, out_op = lstm_setup(
name, x, task_params.batch_size, is_single_step,
args.arch.lstm_output_dim,
num_actions_,
num_steps*num_goals, state_op)
init_state_ops += [m.embed_goal_for_state]
else:
state_op = m.input_tensors['step'][name]
state_name, state_init_op, updated_state_op, out_op = lstm_setup(
name, x, task_params.batch_size, is_single_step,
args.arch.lstm_output_dim,
num_actions_, num_steps*num_goals, state_op)
init_state_ops += [state_init_op]
state_names += [state_name]
updated_state_ops += [updated_state_op]
out_op = tf.reshape(out_op, shape=[-1, num_actions_])
if num_actions_ > num_actions:
m.action_logits_op = out_op[:,:num_actions]
m.baseline_op = out_op[:,num_actions:]
else:
m.action_logits_op = out_op
m.baseline_op = None
m.action_prob_op = tf.nn.softmax(m.action_logits_op)
else:
# A few fully connected layers here.
with tf.variable_scope('action_pred'):
batch_norm_param = args.arch.batch_norm_param
batch_norm_param['is_training'] = batch_norm_is_training_op
out_op, _ = tf_utils.fc_network(
m.combined_f, neurons=args.arch.pred_neurons,
wt_decay=args.solver.wt_decay, name='pred', offset=0,
num_pred=num_actions_,
batch_norm_param=batch_norm_param,
dropout_ratio=args.arch.fc_dropout, is_training=is_training)
if num_actions_ > num_actions:
m.action_logits_op = out_op[:,:num_actions]
m.baseline_op = out_op[:,num_actions:]
else:
m.action_logits_op = out_op
m.baseline_op = None
m.action_prob_op = tf.nn.softmax(m.action_logits_op)
m.train_ops = {}
m.train_ops['step'] = m.action_prob_op
m.train_ops['common'] = [m.input_tensors['common']['orig_maps'],
m.input_tensors['common']['goal_loc'],
m.input_tensors['common']['rel_goal_loc_at_start']]
m.train_ops['state_names'] = state_names
m.train_ops['init_state'] = init_state_ops
m.train_ops['updated_state'] = updated_state_ops
m.train_ops['batch_norm_is_training_op'] = batch_norm_is_training_op
# Flat list of ops which cache the step data.
m.train_ops['step_data_cache'] = [tf.no_op()]
if args.solver.freeze_conv:
m.train_ops['step_data_cache'] = [m.encoder_output]
else:
m.train_ops['step_data_cache'] = []
ewma_decay = 0.99 if is_training else 0.0
weight = tf.ones_like(m.input_tensors['train']['action'], dtype=tf.float32,
name='weight')
m.reg_loss_op, m.data_loss_op, m.total_loss_op, m.acc_ops = \
compute_losses_multi_or(
m.action_logits_op, m.input_tensors['train']['action'],
weights=weight, num_actions=num_actions,
data_loss_wt=args.solver.data_loss_wt,
reg_loss_wt=args.solver.reg_loss_wt, ewma_decay=ewma_decay)
if args.solver.freeze_conv:
vars_to_optimize = list(set(tf.trainable_variables()) - set(vars_))
else:
vars_to_optimize = None
m.lr_op, m.global_step_op, m.train_op, m.should_stop_op, m.optimizer, \
m.sync_optimizer = tf_utils.setup_training(
m.total_loss_op,
args.solver.initial_learning_rate,
args.solver.steps_per_decay,
args.solver.learning_rate_decay,
args.solver.momentum,
args.solver.max_steps,
args.solver.sync,
args.solver.adjust_lr_sync,
args.solver.num_workers,
args.solver.task,
vars_to_optimize=vars_to_optimize,
clip_gradient_norm=args.solver.clip_gradient_norm,
typ=args.solver.typ, momentum2=args.solver.momentum2,
adam_eps=args.solver.adam_eps)
if args.arch.sample_gt_prob_type == 'inverse_sigmoid_decay':
m.sample_gt_prob_op = tf_utils.inverse_sigmoid_decay(args.arch.isd_k,
m.global_step_op)
elif args.arch.sample_gt_prob_type == 'zero':
m.sample_gt_prob_op = tf.constant(-1.0, dtype=tf.float32)
elif args.arch.sample_gt_prob_type.split('_')[0] == 'step':
step = int(args.arch.sample_gt_prob_type.split('_')[1])
m.sample_gt_prob_op = tf_utils.step_gt_prob(
step, m.input_tensors['step']['step_number'][0,0,0])
m.sample_action_type = args.arch.action_sample_type
m.sample_action_combine_type = args.arch.action_sample_combine_type
_add_summaries(m, summary_mode, args.summary.arop_full_summary_iters)
m.init_op = tf.group(tf.global_variables_initializer(),
tf.local_variables_initializer())
m.saver_op = tf.train.Saver(keep_checkpoint_every_n_hours=4,
write_version=tf.train.SaverDef.V2)
return m
| 43.280899
| 86
| 0.678522
|
import numpy as np
import tensorflow as tf
from tensorflow.contrib import slim
import logging
from tensorflow.python.platform import app
from tensorflow.python.platform import flags
from src import utils
import src.file_utils as fu
import tfcode.nav_utils as nu
from tfcode import tf_utils
setup_train_step_kwargs = nu.default_train_step_kwargs
compute_losses_multi_or = nu.compute_losses_multi_or
get_repr_from_image = nu.get_repr_from_image
_save_d_at_t = nu.save_d_at_t
_save_all = nu.save_all
_eval_ap = nu.eval_ap
_eval_dist = nu.eval_dist
_plot_trajectories = nu.plot_trajectories
def lstm_online(cell_fn, num_steps, inputs, state, varscope):
inputs = tf.unstack(inputs, axis=1, num=num_steps)
state = tf.unstack(state, axis=1, num=1)[0]
outputs = []
if num_steps > 1:
varscope.reuse_variables()
for s in range(num_steps):
output, state = cell_fn(inputs[s], state)
outputs.append(output)
outputs = tf.stack(outputs, axis=1)
state = tf.stack([state], axis=1)
return outputs, state
def _inputs(problem, lstm_states, lstm_state_dims):
with tf.name_scope('inputs'):
n_views = problem.n_views
inputs = []
inputs.append(('orig_maps', tf.float32,
(problem.batch_size, 1, None, None, 1)))
inputs.append(('goal_loc', tf.float32,
(problem.batch_size, problem.num_goals, 2)))
inputs.append(('rel_goal_loc_at_start', tf.float32,
(problem.batch_size, problem.num_goals,
problem.rel_goal_loc_dim)))
common_input_data, _ = tf_utils.setup_inputs(inputs)
inputs = []
inputs.append(('imgs', tf.float32, (problem.batch_size, None, n_views,
problem.img_height, problem.img_width,
problem.img_channels)))
inputs.append(('rel_goal_loc', tf.float32, (problem.batch_size, None,
problem.rel_goal_loc_dim)))
if problem.outputs.visit_count:
inputs.append(('visit_count', tf.int32, (problem.batch_size, None, 1)))
inputs.append(('last_visit', tf.int32, (problem.batch_size, None, 1)))
for i, (state, dim) in enumerate(zip(lstm_states, lstm_state_dims)):
inputs.append((state, tf.float32, (problem.batch_size, 1, dim)))
if problem.outputs.egomotion:
inputs.append(('incremental_locs', tf.float32,
(problem.batch_size, None, 2)))
inputs.append(('incremental_thetas', tf.float32,
(problem.batch_size, None, 1)))
inputs.append(('step_number', tf.int32, (1, None, 1)))
inputs.append(('node_ids', tf.int32, (problem.batch_size, None,
problem.node_ids_dim)))
inputs.append(('perturbs', tf.float32, (problem.batch_size, None,
problem.perturbs_dim)))
inputs.append(('loc_on_map', tf.float32, (problem.batch_size, None, 2)))
inputs.append(('gt_dist_to_goal', tf.float32, (problem.batch_size, None, 1)))
step_input_data, _ = tf_utils.setup_inputs(inputs)
inputs = []
inputs.append(('executed_actions', tf.int32, (problem.batch_size, None)))
inputs.append(('rewards', tf.float32, (problem.batch_size, None)))
inputs.append(('action_sample_wts', tf.float32, (problem.batch_size, None)))
inputs.append(('action', tf.int32, (problem.batch_size, None,
problem.num_actions)))
train_data, _ = tf_utils.setup_inputs(inputs)
train_data.update(step_input_data)
train_data.update(common_input_data)
return common_input_data, step_input_data, train_data
def _add_summaries(m, summary_mode, arop_full_summary_iters):
summarize_ops = [m.lr_op, m.global_step_op, m.sample_gt_prob_op,
m.total_loss_op, m.data_loss_op, m.reg_loss_op] + m.acc_ops
summarize_names = ['lr', 'global_step', 'sample_gt_prob_op', 'total_loss',
'data_loss', 'reg_loss'] + \
['acc_{:d}'.format(i) for i in range(len(m.acc_ops))]
to_aggregate = [0, 0, 0, 1, 1, 1] + [1]*len(m.acc_ops)
scope_name = 'summary'
with tf.name_scope(scope_name):
s_ops = nu.add_default_summaries(summary_mode, arop_full_summary_iters,
summarize_ops, summarize_names,
to_aggregate, m.action_prob_op,
m.input_tensors, scope_name=scope_name)
m.summary_ops = {summary_mode: s_ops}
def visit_count_fc(visit_count, last_visit, embed_neurons, wt_decay, fc_dropout, is_training):
with tf.variable_scope('embed_visit_count'):
visit_count = tf.reshape(visit_count, shape=[-1])
last_visit = tf.reshape(last_visit, shape=[-1])
visit_count = tf.clip_by_value(visit_count, clip_value_min=-1,
clip_value_max=15)
last_visit = tf.clip_by_value(last_visit, clip_value_min=-1,
clip_value_max=15)
visit_count = tf.one_hot(visit_count, depth=16, axis=1, dtype=tf.float32,
on_value=10., off_value=0.)
last_visit = tf.one_hot(last_visit, depth=16, axis=1, dtype=tf.float32,
on_value=10., off_value=0.)
f = tf.concat([visit_count, last_visit], 1)
x, _ = tf_utils.fc_network(
f, neurons=embed_neurons, wt_decay=wt_decay, name='visit_count_embed',
offset=0, batch_norm_param=None, dropout_ratio=fc_dropout,
is_training=is_training)
return x
def lstm_setup(name, x, batch_size, is_single_step, lstm_dim, lstm_out,
num_steps, state_input_op):
with tf.name_scope('reshape_'+name):
sh = x.get_shape().as_list()
x = tf.reshape(x, shape=[batch_size, -1, sh[-1]])
with tf.variable_scope(name) as varscope:
cell = tf.contrib.rnn.LSTMCell(
num_units=lstm_dim, forget_bias=1.0, state_is_tuple=False,
num_proj=lstm_out, use_peepholes=True,
initializer=tf.random_uniform_initializer(-0.01, 0.01, seed=0),
cell_clip=None, proj_clip=None)
sh = [batch_size, 1, lstm_dim+lstm_out]
state_init_op = tf.constant(0., dtype=tf.float32, shape=sh)
fn = lambda ns: lstm_online(cell, ns, x, state_input_op, varscope)
out_op, updated_state_op = tf.cond(is_single_step, lambda: fn(1), lambda:
fn(num_steps))
return name, state_init_op, updated_state_op, out_op
def combine_setup(name, combine_type, embed_img, embed_goal, num_img_neurons=None,
num_goal_neurons=None):
with tf.name_scope(name + '_' + combine_type):
if combine_type == 'add':
out = embed_img + embed_goal
elif combine_type == 'multiply':
re_embed_img = tf.reshape(
embed_img, shape=[-1, num_img_neurons / num_goal_neurons,
num_goal_neurons])
re_embed_goal = tf.reshape(embed_goal, shape=[-1, num_goal_neurons, 1])
x = tf.matmul(re_embed_img, re_embed_goal, transpose_a=False, transpose_b=False)
out = slim.flatten(x)
elif combine_type == 'none' or combine_type == 'imgonly':
out = embed_img
elif combine_type == 'goalonly':
out = embed_goal
else:
logging.fatal('Undefined combine_type: %s', combine_type)
return out
def preprocess_egomotion(locs, thetas):
with tf.name_scope('pre_ego'):
pre_ego = tf.concat([locs, tf.sin(thetas), tf.cos(thetas)], 2)
sh = pre_ego.get_shape().as_list()
pre_ego = tf.reshape(pre_ego, [-1, sh[-1]])
return pre_ego
def setup_to_run(m, args, is_training, batch_norm_is_training, summary_mode):
tf.set_random_seed(args.solver.seed)
task_params = args.navtask.task_params
num_steps = task_params.num_steps
num_goals = task_params.num_goals
num_actions = task_params.num_actions
num_actions_ = num_actions
n_views = task_params.n_views
batch_norm_is_training_op = \
tf.placeholder_with_default(batch_norm_is_training, shape=[],
name='batch_norm_is_training_op')
m.input_tensors = {}
lstm_states = []; lstm_state_dims = [];
state_names = []; updated_state_ops = []; init_state_ops = [];
if args.arch.lstm_output:
lstm_states += ['lstm_output']
lstm_state_dims += [args.arch.lstm_output_dim+task_params.num_actions]
if args.arch.lstm_ego:
lstm_states += ['lstm_ego']
lstm_state_dims += [args.arch.lstm_ego_dim + args.arch.lstm_ego_out]
lstm_states += ['lstm_img']
lstm_state_dims += [args.arch.lstm_img_dim + args.arch.lstm_img_out]
elif args.arch.lstm_img:
lstm_states += ['lstm_img']
lstm_state_dims += [args.arch.lstm_img_dim + args.arch.lstm_img_out]
else:
None
m.input_tensors['common'], m.input_tensors['step'], m.input_tensors['train'] = \
_inputs(task_params, lstm_states, lstm_state_dims)
with tf.name_scope('check_size'):
is_single_step = tf.equal(tf.unstack(tf.shape(m.input_tensors['step']['imgs']),
num=6)[1], 1)
images_reshaped = tf.reshape(m.input_tensors['step']['imgs'],
shape=[-1, task_params.img_height, task_params.img_width,
task_params.img_channels], name='re_image')
rel_goal_loc_reshaped = tf.reshape(m.input_tensors['step']['rel_goal_loc'],
shape=[-1, task_params.rel_goal_loc_dim], name='re_rel_goal_loc')
x, vars_ = get_repr_from_image(
images_reshaped, task_params.modalities, task_params.data_augment,
args.arch.encoder, args.solver.freeze_conv, args.solver.wt_decay,
is_training)
sh_before = x.get_shape().as_list()
m.encoder_output = tf.reshape(
x, shape=[task_params.batch_size, -1, n_views] + sh_before[1:])
x = tf.reshape(m.encoder_output, shape=[-1] + sh_before[1:])
if args.arch.dim_reduce_neurons > 0:
ks = 1; neurons = args.arch.dim_reduce_neurons;
init_var = np.sqrt(2.0/(ks**2)/neurons)
batch_norm_param = args.arch.batch_norm_param
batch_norm_param['is_training'] = batch_norm_is_training_op
m.conv_feat = slim.conv2d(
x, neurons, kernel_size=ks, stride=1, normalizer_fn=slim.batch_norm,
normalizer_params=batch_norm_param, padding='SAME', scope='dim_reduce',
weights_regularizer=slim.l2_regularizer(args.solver.wt_decay),
weights_initializer=tf.random_normal_initializer(stddev=init_var))
reshape_conv_feat = slim.flatten(m.conv_feat)
sh = reshape_conv_feat.get_shape().as_list()
m.reshape_conv_feat = tf.reshape(reshape_conv_feat,
shape=[-1, sh[1]*n_views])
if args.solver.pretrained_path is not None:
m.init_fn = slim.assign_from_checkpoint_fn(args.solver.pretrained_path,
vars_)
else:
m.init_fn = None
with tf.variable_scope('embed_goal'):
batch_norm_param = args.arch.batch_norm_param
batch_norm_param['is_training'] = batch_norm_is_training_op
m.embed_goal, _ = tf_utils.fc_network(
rel_goal_loc_reshaped, neurons=args.arch.goal_embed_neurons,
wt_decay=args.solver.wt_decay, name='goal_embed', offset=0,
batch_norm_param=batch_norm_param, dropout_ratio=args.arch.fc_dropout,
is_training=is_training)
if args.arch.embed_goal_for_state:
with tf.variable_scope('embed_goal_for_state'):
batch_norm_param = args.arch.batch_norm_param
batch_norm_param['is_training'] = batch_norm_is_training_op
m.embed_goal_for_state, _ = tf_utils.fc_network(
m.input_tensors['common']['rel_goal_loc_at_start'][:,0,:],
neurons=args.arch.goal_embed_neurons, wt_decay=args.solver.wt_decay,
name='goal_embed', offset=0, batch_norm_param=batch_norm_param,
dropout_ratio=args.arch.fc_dropout, is_training=is_training)
with tf.variable_scope('embed_img'):
batch_norm_param = args.arch.batch_norm_param
batch_norm_param['is_training'] = batch_norm_is_training_op
m.embed_img, _ = tf_utils.fc_network(
m.reshape_conv_feat, neurons=args.arch.img_embed_neurons,
wt_decay=args.solver.wt_decay, name='img_embed', offset=0,
batch_norm_param=batch_norm_param, dropout_ratio=args.arch.fc_dropout,
is_training=is_training)
if args.arch.lstm_ego:
ego_reshaped = preprocess_egomotion(m.input_tensors['step']['incremental_locs'],
m.input_tensors['step']['incremental_thetas'])
with tf.variable_scope('embed_ego'):
batch_norm_param = args.arch.batch_norm_param
batch_norm_param['is_training'] = batch_norm_is_training_op
m.embed_ego, _ = tf_utils.fc_network(
ego_reshaped, neurons=args.arch.ego_embed_neurons,
wt_decay=args.solver.wt_decay, name='ego_embed', offset=0,
batch_norm_param=batch_norm_param, dropout_ratio=args.arch.fc_dropout,
is_training=is_training)
state_name, state_init_op, updated_state_op, out_op = lstm_setup(
'lstm_ego', m.embed_ego, task_params.batch_size, is_single_step,
args.arch.lstm_ego_dim, args.arch.lstm_ego_out, num_steps*num_goals,
m.input_tensors['step']['lstm_ego'])
state_names += [state_name]
init_state_ops += [state_init_op]
updated_state_ops += [updated_state_op]
m.img_ego_op = combine_setup('img_ego', args.arch.combine_type_ego,
m.embed_img, out_op,
args.arch.img_embed_neurons[-1],
args.arch.lstm_ego_out)
state_name, state_init_op, updated_state_op, out_op = lstm_setup(
'lstm_img', m.img_ego_op, task_params.batch_size, is_single_step,
args.arch.lstm_img_dim, args.arch.lstm_img_out, num_steps*num_goals,
m.input_tensors['step']['lstm_img'])
state_names += [state_name]
init_state_ops += [state_init_op]
updated_state_ops += [updated_state_op]
m.img_for_goal = out_op
num_img_for_goal_neurons = args.arch.lstm_img_out
elif args.arch.lstm_img:
state_name, state_init_op, updated_state_op, out_op = lstm_setup(
'lstm_img', m.embed_img, task_params.batch_size, is_single_step,
args.arch.lstm_img_dim, args.arch.lstm_img_out, num_steps*num_goals,
m.input_tensors['step']['lstm_img'])
state_names += [state_name]
init_state_ops += [state_init_op]
updated_state_ops += [updated_state_op]
m.img_for_goal = out_op
num_img_for_goal_neurons = args.arch.lstm_img_out
else:
m.img_for_goal = m.embed_img
num_img_for_goal_neurons = args.arch.img_embed_neurons[-1]
if args.arch.use_visit_count:
m.embed_visit_count = visit_count_fc(
m.input_tensors['step']['visit_count'],
m.input_tensors['step']['last_visit'], args.arch.goal_embed_neurons,
args.solver.wt_decay, args.arch.fc_dropout, is_training=is_training)
m.embed_goal = m.embed_goal + m.embed_visit_count
m.combined_f = combine_setup('img_goal', args.arch.combine_type,
m.img_for_goal, m.embed_goal,
num_img_for_goal_neurons,
args.arch.goal_embed_neurons[-1])
if args.arch.lstm_output:
name = 'lstm_output'
with tf.variable_scope('action_pred'):
batch_norm_param = args.arch.batch_norm_param
batch_norm_param['is_training'] = batch_norm_is_training_op
x, _ = tf_utils.fc_network(
m.combined_f, neurons=args.arch.pred_neurons,
wt_decay=args.solver.wt_decay, name='pred', offset=0,
batch_norm_param=batch_norm_param, dropout_ratio=args.arch.fc_dropout)
if args.arch.lstm_output_init_state_from_goal:
m.embed_goal_for_state = tf.expand_dims(m.embed_goal_for_state, dim=1)
state_op = tf.cond(is_single_step, lambda: m.input_tensors['step'][name],
lambda: m.embed_goal_for_state)
state_name, state_init_op, updated_state_op, out_op = lstm_setup(
name, x, task_params.batch_size, is_single_step,
args.arch.lstm_output_dim,
num_actions_,
num_steps*num_goals, state_op)
init_state_ops += [m.embed_goal_for_state]
else:
state_op = m.input_tensors['step'][name]
state_name, state_init_op, updated_state_op, out_op = lstm_setup(
name, x, task_params.batch_size, is_single_step,
args.arch.lstm_output_dim,
num_actions_, num_steps*num_goals, state_op)
init_state_ops += [state_init_op]
state_names += [state_name]
updated_state_ops += [updated_state_op]
out_op = tf.reshape(out_op, shape=[-1, num_actions_])
if num_actions_ > num_actions:
m.action_logits_op = out_op[:,:num_actions]
m.baseline_op = out_op[:,num_actions:]
else:
m.action_logits_op = out_op
m.baseline_op = None
m.action_prob_op = tf.nn.softmax(m.action_logits_op)
else:
with tf.variable_scope('action_pred'):
batch_norm_param = args.arch.batch_norm_param
batch_norm_param['is_training'] = batch_norm_is_training_op
out_op, _ = tf_utils.fc_network(
m.combined_f, neurons=args.arch.pred_neurons,
wt_decay=args.solver.wt_decay, name='pred', offset=0,
num_pred=num_actions_,
batch_norm_param=batch_norm_param,
dropout_ratio=args.arch.fc_dropout, is_training=is_training)
if num_actions_ > num_actions:
m.action_logits_op = out_op[:,:num_actions]
m.baseline_op = out_op[:,num_actions:]
else:
m.action_logits_op = out_op
m.baseline_op = None
m.action_prob_op = tf.nn.softmax(m.action_logits_op)
m.train_ops = {}
m.train_ops['step'] = m.action_prob_op
m.train_ops['common'] = [m.input_tensors['common']['orig_maps'],
m.input_tensors['common']['goal_loc'],
m.input_tensors['common']['rel_goal_loc_at_start']]
m.train_ops['state_names'] = state_names
m.train_ops['init_state'] = init_state_ops
m.train_ops['updated_state'] = updated_state_ops
m.train_ops['batch_norm_is_training_op'] = batch_norm_is_training_op
m.train_ops['step_data_cache'] = [tf.no_op()]
if args.solver.freeze_conv:
m.train_ops['step_data_cache'] = [m.encoder_output]
else:
m.train_ops['step_data_cache'] = []
ewma_decay = 0.99 if is_training else 0.0
weight = tf.ones_like(m.input_tensors['train']['action'], dtype=tf.float32,
name='weight')
m.reg_loss_op, m.data_loss_op, m.total_loss_op, m.acc_ops = \
compute_losses_multi_or(
m.action_logits_op, m.input_tensors['train']['action'],
weights=weight, num_actions=num_actions,
data_loss_wt=args.solver.data_loss_wt,
reg_loss_wt=args.solver.reg_loss_wt, ewma_decay=ewma_decay)
if args.solver.freeze_conv:
vars_to_optimize = list(set(tf.trainable_variables()) - set(vars_))
else:
vars_to_optimize = None
m.lr_op, m.global_step_op, m.train_op, m.should_stop_op, m.optimizer, \
m.sync_optimizer = tf_utils.setup_training(
m.total_loss_op,
args.solver.initial_learning_rate,
args.solver.steps_per_decay,
args.solver.learning_rate_decay,
args.solver.momentum,
args.solver.max_steps,
args.solver.sync,
args.solver.adjust_lr_sync,
args.solver.num_workers,
args.solver.task,
vars_to_optimize=vars_to_optimize,
clip_gradient_norm=args.solver.clip_gradient_norm,
typ=args.solver.typ, momentum2=args.solver.momentum2,
adam_eps=args.solver.adam_eps)
if args.arch.sample_gt_prob_type == 'inverse_sigmoid_decay':
m.sample_gt_prob_op = tf_utils.inverse_sigmoid_decay(args.arch.isd_k,
m.global_step_op)
elif args.arch.sample_gt_prob_type == 'zero':
m.sample_gt_prob_op = tf.constant(-1.0, dtype=tf.float32)
elif args.arch.sample_gt_prob_type.split('_')[0] == 'step':
step = int(args.arch.sample_gt_prob_type.split('_')[1])
m.sample_gt_prob_op = tf_utils.step_gt_prob(
step, m.input_tensors['step']['step_number'][0,0,0])
m.sample_action_type = args.arch.action_sample_type
m.sample_action_combine_type = args.arch.action_sample_combine_type
_add_summaries(m, summary_mode, args.summary.arop_full_summary_iters)
m.init_op = tf.group(tf.global_variables_initializer(),
tf.local_variables_initializer())
m.saver_op = tf.train.Saver(keep_checkpoint_every_n_hours=4,
write_version=tf.train.SaverDef.V2)
return m
| true
| true
|
1c42031629c254017278935e32527500eb389956
| 567
|
py
|
Python
|
PythonLearnig/Algorithms_Part_2/Ex19DirectedEdge.py
|
dhivadeekshi/Algorithms-Samples
|
cadd159d835ce64d51d786eafc25c40e35540353
|
[
"MIT"
] | null | null | null |
PythonLearnig/Algorithms_Part_2/Ex19DirectedEdge.py
|
dhivadeekshi/Algorithms-Samples
|
cadd159d835ce64d51d786eafc25c40e35540353
|
[
"MIT"
] | null | null | null |
PythonLearnig/Algorithms_Part_2/Ex19DirectedEdge.py
|
dhivadeekshi/Algorithms-Samples
|
cadd159d835ce64d51d786eafc25c40e35540353
|
[
"MIT"
] | null | null | null |
class DirectedEdge:
def __init__(self, v, w, weight):
self.v = v
self.w = w
self.weight = weight
def from_v(self):
return self.v
def to_v(self):
return self.w
def get_weight(self):
return self.weight
def compare_to(self, that):
return self.get_weight() - that.get_weight()
def __str__(self):
return f'{self.from_v()} -- {self.weight} --> {self.to_v()}'
def test_directed_graph():
print(DirectedEdge(3, 7, 10.7))
if __name__ == '__main__':
test_directed_graph()
| 18.9
| 68
| 0.585538
|
class DirectedEdge:
def __init__(self, v, w, weight):
self.v = v
self.w = w
self.weight = weight
def from_v(self):
return self.v
def to_v(self):
return self.w
def get_weight(self):
return self.weight
def compare_to(self, that):
return self.get_weight() - that.get_weight()
def __str__(self):
return f'{self.from_v()} -- {self.weight} --> {self.to_v()}'
def test_directed_graph():
print(DirectedEdge(3, 7, 10.7))
if __name__ == '__main__':
test_directed_graph()
| true
| true
|
1c42034168d392e077e4eb52a9036e0bda7c218c
| 2,743
|
py
|
Python
|
barcode/barcode/base.py
|
bharat-rajani/Project-barcode-passes
|
9e151aed5f8124350b757c6a8788f117d7f9c570
|
[
"MIT"
] | 1
|
2020-12-21T20:04:45.000Z
|
2020-12-21T20:04:45.000Z
|
barcode/base.py
|
trobertsca/reBarcode
|
eb5929ffa20e6dc80ccad8d351c4ac8cb3ee07ac
|
[
"MIT"
] | null | null | null |
barcode/base.py
|
trobertsca/reBarcode
|
eb5929ffa20e6dc80ccad8d351c4ac8cb3ee07ac
|
[
"MIT"
] | 1
|
2019-05-28T15:54:55.000Z
|
2019-05-28T15:54:55.000Z
|
# -*- coding: utf-8 -*-
"""barcode.base
"""
from __future__ import unicode_literals
from barcode.writer import SVGWriter
class Barcode(object):
name = ''
raw = None
digits = 0
default_writer = SVGWriter
default_writer_options = {
'module_width': 0.2,
'module_height': 15.0,
'quiet_zone': 6.5,
'font_size': 10,
'text_distance': 5.0,
'background': 'white',
'foreground': 'black',
'write_text': True,
'text': '',
}
def to_ascii(self):
code = self.build()
for i, line in enumerate(code):
code[i] = line.replace('1', 'X').replace('0', ' ')
return '\n'.join(code)
def __repr__(self):
return '<{0}({1!r})>'.format(self.__class__.__name__,
self.get_fullcode())
def build(self):
raise NotImplementedError
def get_fullcode(self):
"""Returns the full code, encoded in the barcode.
:returns: Full human readable code.
:rtype: String
"""
raise NotImplementedError
def save(self, filename, options=None):
"""Renders the barcode and saves it in `filename`.
:parameters:
filename : String
Filename to save the barcode in (without filename
extension).
options : Dict
The same as in `self.render`.
:returns: The full filename with extension.
:rtype: String
"""
output = self.render(options)
_filename = self.writer.save(filename, output)
return _filename
def write(self, fp, options=None):
"""Renders the barcode and writes it to the file like object
`fp`.
:parameters:
fp : File like object
Object to write the raw data in.
options : Dict
The same as in `self.render`.
"""
output = self.render(options)
if hasattr(output, 'tostring'):
output.save(fp, format=self.writer.format)
else:
fp.write(output)
def render(self, writer_options=None):
"""Renders the barcode using `self.writer`.
:parameters:
writer_options : Dict
Options for `self.writer`, see writer docs for details.
:returns: Output of the writers render method.
"""
options = Barcode.default_writer_options.copy()
options.update(writer_options or {})
if options['write_text']:
options['text'] = self.get_fullcode()
self.writer.set_options(options)
code = self.build()
raw = Barcode.raw = self.writer.render(code)
return raw
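# Hedged usage sketch: callers normally work through a concrete Barcode subclass.
# The EAN13 and ImageWriter names below come from the upstream python-barcode
# project and are assumptions here, not symbols defined in this module:
#   from barcode import EAN13
#   from barcode.writer import ImageWriter
#   ean = EAN13('5901234123457', writer=ImageWriter())
#   ean.save('ean13_example')   # renders the code and returns the saved filename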
| 26.375
| 71
| 0.553044
|
from __future__ import unicode_literals
from barcode.writer import SVGWriter
class Barcode(object):
name = ''
raw = None
digits = 0
default_writer = SVGWriter
default_writer_options = {
'module_width': 0.2,
'module_height': 15.0,
'quiet_zone': 6.5,
'font_size': 10,
'text_distance': 5.0,
'background': 'white',
'foreground': 'black',
'write_text': True,
'text': '',
}
def to_ascii(self):
code = self.build()
for i, line in enumerate(code):
code[i] = line.replace('1', 'X').replace('0', ' ')
return '\n'.join(code)
def __repr__(self):
return '<{0}({1!r})>'.format(self.__class__.__name__,
self.get_fullcode())
def build(self):
raise NotImplementedError
def get_fullcode(self):
raise NotImplementedError
def save(self, filename, options=None):
output = self.render(options)
_filename = self.writer.save(filename, output)
return _filename
def write(self, fp, options=None):
output = self.render(options)
if hasattr(output, 'tostring'):
output.save(fp, format=self.writer.format)
else:
fp.write(output)
def render(self, writer_options=None):
options = Barcode.default_writer_options.copy()
options.update(writer_options or {})
if options['write_text']:
options['text'] = self.get_fullcode()
self.writer.set_options(options)
code = self.build()
raw = Barcode.raw = self.writer.render(code)
return raw
| true
| true
|
1c4203425fb36f3395c1ea4a98381de0a194c6d6
| 120
|
py
|
Python
|
flag_engine/environments/integrations/models.py
|
Flagsmith/flagsmith-engine
|
a33097a772f30a275a9ebcba88627552877be0b5
|
[
"BSD-3-Clause"
] | 4
|
2021-09-01T10:16:49.000Z
|
2022-02-15T04:23:07.000Z
|
flag_engine/environments/integrations/models.py
|
Flagsmith/flagsmith-engine
|
a33097a772f30a275a9ebcba88627552877be0b5
|
[
"BSD-3-Clause"
] | 21
|
2021-10-01T13:37:17.000Z
|
2022-03-24T10:47:07.000Z
|
flag_engine/environments/integrations/models.py
|
Flagsmith/flagsmith-engine
|
a33097a772f30a275a9ebcba88627552877be0b5
|
[
"BSD-3-Clause"
] | 1
|
2022-02-28T19:19:37.000Z
|
2022-02-28T19:19:37.000Z
|
from dataclasses import dataclass
@dataclass
class IntegrationModel:
api_key: str = None
base_url: str = None
| 15
| 33
| 0.741667
|
from dataclasses import dataclass
@dataclass
class IntegrationModel:
api_key: str = None
base_url: str = None
| true
| true
|
1c42038064106b4b0454c2d0791750ce4155985f
| 23,416
|
py
|
Python
|
splinext/pokedex/controllers/pokedex_conquest.py
|
hugopeixoto/spline-pokedex
|
17b8d22118c9d4b02a01c2271120c162b8dd41da
|
[
"MIT"
] | 7
|
2015-05-28T22:37:26.000Z
|
2020-10-26T17:28:32.000Z
|
splinext/pokedex/controllers/pokedex_conquest.py
|
hugopeixoto/spline-pokedex
|
17b8d22118c9d4b02a01c2271120c162b8dd41da
|
[
"MIT"
] | 28
|
2015-02-28T04:58:47.000Z
|
2021-03-19T03:32:43.000Z
|
splinext/pokedex/controllers/pokedex_conquest.py
|
hugopeixoto/spline-pokedex
|
17b8d22118c9d4b02a01c2271120c162b8dd41da
|
[
"MIT"
] | 3
|
2015-11-25T17:02:32.000Z
|
2020-08-07T09:52:31.000Z
|
# encoding: utf8
from __future__ import absolute_import, division
from collections import defaultdict
import colorsys
from itertools import izip
from random import randint
import pokedex.db
import pokedex.db.tables as t
from pylons import request, tmpl_context as c
from pylons.controllers.util import abort
import sqlalchemy as sqla
from sqlalchemy.orm.exc import NoResultFound
import wtforms
from spline.lib.base import render
import spline.lib.helpers as h
from splinext.pokedex import PokedexBaseController
import splinext.pokedex.db as db
def bar_color(hue, pastelness):
"""Returns a color in the form #rrggbb that has the provided hue and
lightness/saturation equal to the given "pastelness".
"""
r, g, b = colorsys.hls_to_rgb(hue, pastelness, pastelness)
return "#%02x%02x%02x" % (r * 256, g * 256, b * 256)
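# Illustrative note (not part of the original module): the percentiles computed
# in the controller below are passed straight in as the hue, so low percentiles
# render reddish and high ones bluish, wrapping back toward red near 1.0, e.g.
#
#     bar_color(0.0, 0.9)      # a very pale red, roughly rgb(253, 207, 207)
#     bar_color(1 / 3.0, 0.9)  # a very pale green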
class LinkThresholdForm(wtforms.Form):
"""A form for specifying the threshold for max link tables on warrior and
Pokémon pages.
"""
link = wtforms.DecimalField(places=0, rounding='ROUND_UP')
class PokedexConquestController(PokedexBaseController):
def _not_found(self):
# XXX make this do fuzzy search or whatever
abort(404)
def _prev_next_id(self, thing, table, column_name):
"""Returns a 2-tuple of the previous and next thing by their IDs."""
column = getattr(table, column_name)
thing_id = getattr(thing, column_name)
max_id = (db.pokedex_session.query(table)
.filter(column != None)
.count())
prev_thing = db.pokedex_session.query(table).filter(
column == (thing_id - 1 - 1) % max_id + 1).one()
next_thing = db.pokedex_session.query(table).filter(
column == (thing_id - 1 + 1) % max_id + 1).one()
return prev_thing, next_thing
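    # Worked example (illustrative, not part of the original): with max_id ==
    # 151 and thing_id == 1, the modular arithmetic in _prev_next_id above
    # wraps around:
    #     prev: (1 - 1 - 1) % 151 + 1  ->  (-1) % 151 + 1  ->  151
    #     next: (1 - 1 + 1) % 151 + 1  ->    1 % 151 + 1  ->    2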
def _prev_next_name(self, table, current, filters=[]):
"""Figure out the previous/next thing for the navigation bar
table: the table to select from
current: list of the current values
filters: a list of filter expressions for the table
"""
name_table = table.__mapper__.get_property('names').argument
query = (db.pokedex_session.query(table)
.join(name_table)
.filter(name_table.local_language == c.game_language)
)
for filter in filters:
query = query.filter(filter)
name_col = name_table.name
name_current = current.name_map[c.game_language]
lt = name_col < name_current
gt = name_col > name_current
asc = name_col.asc()
desc = name_col.desc()
        # The previous thing is the biggest one smaller than this; wrap around
        # if nothing comes before
prev = query.filter(lt).order_by(desc).first()
if prev is None:
prev = query.order_by(desc).first()
# Similarly for next
next = query.filter(gt).order_by(asc).first()
if next is None:
next = query.order_by(asc).first()
return prev, next
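    # Illustrative note (not part of the original): the fallbacks in
    # _prev_next_name above give alphabetical wrap-around; e.g. with names
    # ['Aipom', 'Beedrill', 'Zubat'] and current == 'Aipom', filter(lt) matches
    # nothing, so prev falls back to 'Zubat' while next is 'Beedrill'.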
def abilities(self, name):
try:
c.ability = db.get_by_name_query(t.Ability, name).one()
except NoResultFound:
return self._not_found()
# XXX The ability might exist, but not in Conquest
if not c.ability.conquest_pokemon:
return self._not_found()
c.prev_ability, c.next_ability = self._prev_next_name(
t.Ability, c.ability,
filters=[t.Ability.conquest_pokemon.any()])
return render('/pokedex/conquest/ability.mako')
def abilities_list(self):
c.abilities = (db.pokedex_session.query(t.Ability)
.join(t.Ability.names_local)
.filter(t.Ability.conquest_pokemon.any())
.order_by(t.Ability.names_table.name.asc())
.all()
)
return render('/pokedex/conquest/ability_list.mako')
def kingdoms(self, name):
try:
c.kingdom = db.get_by_name_query(t.ConquestKingdom, name).one()
except NoResultFound:
return self._not_found()
# We have pretty much nothing for kingdoms. Yet.
c.prev_kingdom, c.next_kingdom = self._prev_next_id(
c.kingdom, t.ConquestKingdom, 'id')
return render('/pokedex/conquest/kingdom.mako')
def kingdoms_list(self):
c.kingdoms = (db.pokedex_session.query(t.ConquestKingdom)
.options(
sqla.orm.joinedload('type')
)
.order_by(t.ConquestKingdom.id)
.all()
)
return render('/pokedex/conquest/kingdom_list.mako')
def moves(self, name):
try:
c.move = (db.get_by_name_query(t.Move, name)
.options(
sqla.orm.joinedload('conquest_data'),
sqla.orm.joinedload('conquest_pokemon'),
sqla.orm.subqueryload('conquest_pokemon.conquest_abilities'),
sqla.orm.subqueryload('conquest_pokemon.conquest_stats'),
)
.one())
except NoResultFound:
return self._not_found()
if not c.move.conquest_pokemon:
return self._not_found()
### Prev/next for header
c.prev_move, c.next_move = self._prev_next_name(t.Move, c.move,
filters=[t.Move.conquest_pokemon.any()])
return render('/pokedex/conquest/move.mako')
def moves_list(self):
c.moves = (db.pokedex_session.query(t.Move)
.filter(t.Move.conquest_data.has())
.options(
sqla.orm.joinedload('conquest_data'),
sqla.orm.joinedload('conquest_data.move_displacement'),
)
.join(t.Move.names_local)
.order_by(t.Move.names_table.name.asc())
.all()
)
return render('/pokedex/conquest/move_list.mako')
def pokemon(self, name=None):
try:
c.pokemon = db.pokemon_query(name, None).one()
except NoResultFound:
return self._not_found()
c.semiform_pokemon = c.pokemon
c.pokemon = c.pokemon.species
# This Pokémon might exist, but not appear in Conquest
if c.pokemon.conquest_order is None:
return self._not_found()
### Previous and next for the header
c.prev_pokemon, c.next_pokemon = self._prev_next_id(
c.pokemon, t.PokemonSpecies, 'conquest_order')
### Type efficacy
c.type_efficacies = defaultdict(lambda: 100)
for target_type in c.semiform_pokemon.types:
for type_efficacy in target_type.target_efficacies:
c.type_efficacies[type_efficacy.damage_type] *= \
type_efficacy.damage_factor
# The defaultdict starts at 100, and every damage factor is
# a percentage. Dividing by 100 with every iteration turns the
# damage factor into a decimal percentage taken of the starting
# 100, without using floats and regardless of number of types
c.type_efficacies[type_efficacy.damage_type] //= 100
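        # Worked example (illustrative, not part of the original): for a dual
        # type taking 200% and 50% from the attacking type, the value stays an
        # integer percentage throughout:
        #     100 * 200 // 100  ->  200   (after the first type)
        #     200 *  50 // 100  ->  100   (after the second type: neutral)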
### Evolution
# Shamelessly lifted from the main controller and tweaked.
#
# Format is a matrix as follows:
# [
# [ None, Eevee, Vaporeon, None ]
# [ None, None, Jolteon, None ]
# [ None, None, Flareon, None ]
# ... etc ...
# ]
# That is, each row is a physical row in the resulting table, and each
        # contains four elements, one per column: Baby, Base, Stage 1, Stage 2.
        # The Pokémon are actually dictionaries with 'species' and 'span' keys,
# where the span is used as the HTML cell's rowspan -- e.g., Eevee has a
        # total of seven descendants, so it would need to span 7 rows.
c.evolution_table = []
# Prefetch the evolution details
family = (db.pokedex_session.query(t.PokemonSpecies)
.filter(t.PokemonSpecies.evolution_chain_id ==
c.pokemon.evolution_chain_id)
.options(
sqla.orm.subqueryload('conquest_evolution'),
sqla.orm.joinedload('conquest_evolution.stat'),
sqla.orm.joinedload('conquest_evolution.kingdom'),
sqla.orm.joinedload('conquest_evolution.gender'),
sqla.orm.joinedload('conquest_evolution.item'),
)
.all())
# Strategy: build this table going backwards.
# Find a leaf, build the path going back up to its root. Remember all
# of the nodes seen along the way. Find another leaf not seen so far.
# Build its path backwards, sticking it to a seen node if one exists.
# Repeat until there are no unseen nodes.
seen_nodes = {}
while True:
# First, find some unseen nodes
unseen_leaves = []
for species in family:
if species in seen_nodes:
continue
children = []
# A Pokémon is a leaf if it has no evolutionary children, so...
for possible_child in family:
if possible_child in seen_nodes:
continue
if possible_child.parent_species == species:
children.append(possible_child)
if len(children) == 0:
unseen_leaves.append(species)
# If there are none, we're done! Bail.
# Note that it is impossible to have any unseen non-leaves if there
# are no unseen leaves; every leaf's ancestors become seen when we
# build a path to it.
if len(unseen_leaves) == 0:
break
unseen_leaves.sort(key=lambda x: x.id)
leaf = unseen_leaves[0]
# root, parent_n, ... parent2, parent1, leaf
current_path = []
# Finally, go back up the tree to the root
current_species = leaf
while current_species:
# The loop bails just after current_species is no longer the
# root, so this will give us the root after the loop ends;
# we need to know if it's a baby to see whether to indent the
# entire table below
root_pokemon = current_species
if current_species in seen_nodes:
current_node = seen_nodes[current_species]
# Don't need to repeat this node; the first instance will
# have a rowspan
current_path.insert(0, None)
else:
current_node = {
'species': current_species,
'span': 0,
}
current_path.insert(0, current_node)
seen_nodes[current_species] = current_node
# This node has one more row to span: our current leaf
current_node['span'] += 1
current_species = current_species.parent_species
# We want every path to have four nodes: baby, basic, stage 1 and 2.
# Every root node is basic, unless it's defined as being a baby.
# So first, add an empty baby node at the beginning if this is not
# a baby.
# We use an empty string to indicate an empty cell, as opposed to a
# complete lack of cell due to a tall cell from an earlier row.
if not root_pokemon.is_baby:
current_path.insert(0, '')
# Now pad to four if necessary.
while len(current_path) < 4:
current_path.append('')
c.evolution_table.append(current_path)
### Stats
# Conquest has a nonstandard stat, Range, which shouldn't be included
# in the total, so we have to do things a bit differently.
# XXX actually do things differently instead of just fudging the same
# thing to work
c.stats = {} # stat => { border, background, percentile }
stat_total = 0
total_stat_rows = db.pokedex_session.query(t.ConquestPokemonStat) \
.filter_by(stat=c.pokemon.conquest_stats[0].stat) \
.count()
for pokemon_stat in c.pokemon.conquest_stats:
stat_info = c.stats[pokemon_stat.stat.identifier] = {}
stat_info['value'] = pokemon_stat.base_stat
if pokemon_stat.stat.is_base:
stat_total += pokemon_stat.base_stat
q = db.pokedex_session.query(t.ConquestPokemonStat) \
.filter_by(stat=pokemon_stat.stat)
less = q.filter(t.ConquestPokemonStat.base_stat <
pokemon_stat.base_stat).count()
equal = q.filter(t.ConquestPokemonStat.base_stat ==
pokemon_stat.base_stat).count()
percentile = (less + equal * 0.5) / total_stat_rows
stat_info['percentile'] = percentile
# Colors for the stat bars, based on percentile
stat_info['background'] = bar_color(percentile, 0.9)
stat_info['border'] = bar_color(percentile, 0.8)
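        # Worked example (illustrative, not part of the original): if 120 of
        # 200 rows have a strictly lower base stat and 10 are equal, then
        #     (120 + 10 * 0.5) / 200  ->  0.625, i.e. the 62.5th percentile.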
# Percentile for the total
# Need to make a derived table that fakes pokemon_id, total_stats
stat_sum_tbl = db.pokedex_session.query(
sqla.sql.func.sum(t.ConquestPokemonStat.base_stat)
.label('stat_total')
) \
.filter(t.ConquestPokemonStat.conquest_stat_id <= 4) \
.group_by(t.ConquestPokemonStat.pokemon_species_id) \
.subquery()
q = db.pokedex_session.query(stat_sum_tbl)
less = q.filter(stat_sum_tbl.c.stat_total < stat_total).count()
equal = q.filter(stat_sum_tbl.c.stat_total == stat_total).count()
percentile = (less + equal * 0.5) / total_stat_rows
c.stats['total'] = {
'percentile': percentile,
'value': stat_total,
'background': bar_color(percentile, 0.9),
'border': bar_color(percentile, 0.8),
}
### Max links
# We only want to show warriors who have a max link above a certain
# threshold, because there are 200 warriors and most of them won't
# have very good links.
default_link = 70
c.link_form = LinkThresholdForm(request.params, link=default_link)
if request.params and c.link_form.validate():
link_threshold = c.link_form.link.data
else:
link_threshold = default_link
# However, some warriors will only be above this threshold at later
# ranks. In these cases, we may as well show all ranks' links.
# No link ever goes down when a warrior ranks up, so we just need to
# check their final rank.
# First, craft a clause to filter out non-final warrior ranks.
ranks_sub = sqla.orm.aliased(t.ConquestWarriorRank)
higher_ranks_exist = (sqla.sql.exists([1])
.where(sqla.and_(
ranks_sub.warrior_id == t.ConquestWarriorRank.warrior_id,
ranks_sub.rank > t.ConquestWarriorRank.rank))
)
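        # Illustrative note (not part of the original): ~higher_ranks_exist is
        # roughly "NOT EXISTS (SELECT 1 FROM <warrior-rank table> r2 WHERE
        # r2.warrior_id = ranks.warrior_id AND r2.rank > ranks.rank)", i.e. it
        # keeps only each warrior's final (highest) rank.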
# Next, find final-rank warriors with a max link high enough.
worthy_warriors = (db.pokedex_session.query(t.ConquestWarrior.id)
.join(t.ConquestWarriorRank)
.filter(~higher_ranks_exist)
.join(t.ConquestMaxLink)
.filter(t.ConquestMaxLink.pokemon_species_id == c.pokemon.id)
.filter(t.ConquestMaxLink.max_link >= link_threshold))
# For Froslass and Gallade, we want to filter out male and female
# warriors, respectively.
# XXX Eventually we want to figure out all impossible evolutions, and
# show them, but sort them to the bottom and grey them out.
if (c.pokemon.conquest_evolution is not None and
c.pokemon.conquest_evolution.warrior_gender_id is not None):
worthy_warriors = worthy_warriors.filter(
t.ConquestWarrior.gender_id ==
c.pokemon.conquest_evolution.warrior_gender_id)
# Finally, find ALL the max links for these warriors!
links_q = (c.pokemon.conquest_max_links
.join(ranks_sub)
.filter(ranks_sub.warrior_id.in_(worthy_warriors))
.options(
sqla.orm.joinedload('warrior_rank'),
sqla.orm.subqueryload('warrior_rank.stats'),
sqla.orm.joinedload('warrior_rank.warrior'),
sqla.orm.joinedload('warrior_rank.warrior.archetype'),
sqla.orm.subqueryload('warrior_rank.warrior.types'),
))
c.max_links = links_q.all()
return render('/pokedex/conquest/pokemon.mako')
def pokemon_list(self):
c.pokemon = (db.pokedex_session.query(t.PokemonSpecies)
.filter(t.PokemonSpecies.conquest_order != None)
.options(
sqla.orm.subqueryload('conquest_abilities'),
sqla.orm.joinedload('conquest_move'),
sqla.orm.subqueryload('conquest_stats'),
sqla.orm.subqueryload('default_pokemon.types')
)
.order_by(t.PokemonSpecies.conquest_order)
.all()
)
return render('/pokedex/conquest/pokemon_list.mako')
def skills(self, name):
try:
c.skill = (db.get_by_name_query(t.ConquestWarriorSkill, name)
.one())
except NoResultFound:
return self._not_found()
### Prev/next for header
c.prev_skill, c.next_skill = self._prev_next_name(
t.ConquestWarriorSkill, c.skill)
return render('/pokedex/conquest/skill.mako')
def skills_list(self):
skills = (db.pokedex_session.query(t.ConquestWarriorSkill)
.join(t.ConquestWarriorSkill.names_local)
.order_by(t.ConquestWarriorSkill.names_table.name.asc()))
# We want to split the list up between generic skills anyone can get
# and the unique skills a specific warlord gets at a specific rank.
# The two player characters throw a wrench in that though so we just
# assume any skill known only by warlords is unique, which happens to
# work.
warriors_and_ranks = sqla.orm.join(t.ConquestWarrior,
t.ConquestWarriorRank)
generic_clause = (sqla.sql.exists(warriors_and_ranks.select())
.where(sqla.and_(
t.ConquestWarrior.archetype_id != None,
t.ConquestWarriorRank.skill_id ==
t.ConquestWarriorSkill.id))
)
c.generic_skills = skills.filter(generic_clause).all()
c.unique_skills = (skills.filter(~generic_clause)
.options(
sqla.orm.joinedload('warrior_ranks'),
sqla.orm.joinedload('warrior_ranks.warrior')
)
.all())
# Decide randomly which player gets displayed
c.player_index = randint(0, 1)
return render('/pokedex/conquest/skill_list.mako')
def warriors(self, name):
try:
c.warrior = db.get_by_name_query(t.ConquestWarrior, name).one()
except NoResultFound:
return self._not_found()
c.prev_warrior, c.next_warrior = self._prev_next_id(
c.warrior, t.ConquestWarrior, 'id')
c.rank_count = len(c.warrior.ranks)
c.perfect_links = (c.warrior.ranks[-1].max_links
.filter_by(max_link=100)
.join(t.PokemonSpecies)
.order_by(t.PokemonSpecies.conquest_order)
.all())
### Stats
# Percentiles! Percentiles are hard.
stats = t.ConquestWarriorRankStatMap
all_stats = sqla.orm.aliased(t.ConquestWarriorRankStatMap)
# We need this to be a float so the percentile equation can divide by it
stat_count = sqla.cast(sqla.func.count(all_stats.base_stat),
sqla.types.FLOAT)
# Grab all of a rank's stats, and also get percentiles
stat_q = (db.pokedex_session.query(stats.warrior_stat_id, stats.base_stat)
.join(all_stats, stats.warrior_stat_id == all_stats.warrior_stat_id)
.group_by(stats.warrior_rank_id, stats.warrior_stat_id,
stats.base_stat)
.order_by(stats.warrior_stat_id)
.add_columns(
sqla.func.sum(sqla.cast(stats.base_stat > all_stats.base_stat,
sqla.types.INT)) / stat_count +
sqla.func.sum(sqla.cast(stats.base_stat == all_stats.base_stat,
sqla.types.INT)) / stat_count / 2
)
)
# XXX There's probably a better way to query all the names
stat_names = [stat.name for stat in
db.pokedex_session.query(t.ConquestWarriorStat)
.order_by(t.ConquestWarriorStat.id)
.all()]
# Go through the query for each rank
c.stats = []
for rank in c.warrior.ranks:
c.stats.append([])
info = stat_q.filter(stats.warrior_rank_id == rank.id).all()
# We need a bit more info than what the query directly provides
for stat, value, percentile in info:
percentile = float(percentile)
c.stats[-1].append((
stat_names[stat - 1], value, percentile,
bar_color(percentile, 0.9), bar_color(percentile, 0.8)
))
### Max links
default_link = 70 if c.warrior.archetype else 90
c.link_form = LinkThresholdForm(request.params, link=default_link)
if request.params and c.link_form.validate():
link_threshold = c.link_form.link.data
else:
link_threshold = default_link
link_pokemon = (
db.pokedex_session.query(t.ConquestMaxLink.pokemon_species_id)
.filter(t.ConquestMaxLink.warrior_rank_id ==
c.warrior.ranks[-1].id)
.filter(t.ConquestMaxLink.max_link >= link_threshold)
)
max_links = []
for rank in c.warrior.ranks:
max_links.append(rank.max_links
.filter(t.ConquestMaxLink.pokemon_species_id
.in_(link_pokemon))
.join(t.PokemonSpecies)
.order_by(t.PokemonSpecies.conquest_order)
.options(
sqla.orm.joinedload('pokemon'),
sqla.orm.subqueryload('pokemon.conquest_abilities'),
sqla.orm.subqueryload('pokemon.conquest_stats'),
)
.all())
c.max_links = izip(*max_links)
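        # Illustrative note (not part of the original): izip(*max_links)
        # transposes the per-rank lists into per-species rows, e.g.
        #     izip(*[[a1, b1], [a2, b2]])  ->  (a1, a2), (b1, b2)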
return render('/pokedex/conquest/warrior.mako')
def warriors_list(self):
c.warriors = (db.pokedex_session.query(t.ConquestWarrior)
.options(
sqla.orm.subqueryload('ranks'),
sqla.orm.subqueryload('ranks.stats'),
sqla.orm.subqueryload('types')
)
.order_by(t.ConquestWarrior.id)
.all()
)
return render('/pokedex/conquest/warrior_list.mako')
| 38.89701
| 82
| 0.592885
|
from __future__ import absolute_import, division
from collections import defaultdict
import colorsys
from itertools import izip
from random import randint
import pokedex.db
import pokedex.db.tables as t
from pylons import request, tmpl_context as c
from pylons.controllers.util import abort
import sqlalchemy as sqla
from sqlalchemy.orm.exc import NoResultFound
import wtforms
from spline.lib.base import render
import spline.lib.helpers as h
from splinext.pokedex import PokedexBaseController
import splinext.pokedex.db as db
def bar_color(hue, pastelness):
r, g, b = colorsys.hls_to_rgb(hue, pastelness, pastelness)
return "#%02x%02x%02x" % (r * 256, g * 256, b * 256)
class LinkThresholdForm(wtforms.Form):
link = wtforms.DecimalField(places=0, rounding='ROUND_UP')
class PokedexConquestController(PokedexBaseController):
def _not_found(self):
abort(404)
def _prev_next_id(self, thing, table, column_name):
column = getattr(table, column_name)
thing_id = getattr(thing, column_name)
max_id = (db.pokedex_session.query(table)
.filter(column != None)
.count())
prev_thing = db.pokedex_session.query(table).filter(
column == (thing_id - 1 - 1) % max_id + 1).one()
next_thing = db.pokedex_session.query(table).filter(
column == (thing_id - 1 + 1) % max_id + 1).one()
return prev_thing, next_thing
def _prev_next_name(self, table, current, filters=[]):
name_table = table.__mapper__.get_property('names').argument
query = (db.pokedex_session.query(table)
.join(name_table)
.filter(name_table.local_language == c.game_language)
)
for filter in filters:
query = query.filter(filter)
name_col = name_table.name
name_current = current.name_map[c.game_language]
lt = name_col < name_current
gt = name_col > name_current
asc = name_col.asc()
desc = name_col.desc()
prev = query.filter(lt).order_by(desc).first()
if prev is None:
prev = query.order_by(desc).first()
next = query.filter(gt).order_by(asc).first()
if next is None:
next = query.order_by(asc).first()
return prev, next
def abilities(self, name):
try:
c.ability = db.get_by_name_query(t.Ability, name).one()
except NoResultFound:
return self._not_found()
if not c.ability.conquest_pokemon:
return self._not_found()
c.prev_ability, c.next_ability = self._prev_next_name(
t.Ability, c.ability,
filters=[t.Ability.conquest_pokemon.any()])
return render('/pokedex/conquest/ability.mako')
def abilities_list(self):
c.abilities = (db.pokedex_session.query(t.Ability)
.join(t.Ability.names_local)
.filter(t.Ability.conquest_pokemon.any())
.order_by(t.Ability.names_table.name.asc())
.all()
)
return render('/pokedex/conquest/ability_list.mako')
def kingdoms(self, name):
try:
c.kingdom = db.get_by_name_query(t.ConquestKingdom, name).one()
except NoResultFound:
return self._not_found()
c.prev_kingdom, c.next_kingdom = self._prev_next_id(
c.kingdom, t.ConquestKingdom, 'id')
return render('/pokedex/conquest/kingdom.mako')
def kingdoms_list(self):
c.kingdoms = (db.pokedex_session.query(t.ConquestKingdom)
.options(
sqla.orm.joinedload('type')
)
.order_by(t.ConquestKingdom.id)
.all()
)
return render('/pokedex/conquest/kingdom_list.mako')
def moves(self, name):
try:
c.move = (db.get_by_name_query(t.Move, name)
.options(
sqla.orm.joinedload('conquest_data'),
sqla.orm.joinedload('conquest_pokemon'),
sqla.orm.subqueryload('conquest_pokemon.conquest_abilities'),
sqla.orm.subqueryload('conquest_pokemon.conquest_stats'),
)
.one())
except NoResultFound:
return self._not_found()
if not c.move.conquest_pokemon:
return self._not_found()
        c.prev_move, c.next_move = self._prev_next_name(t.Move, c.move,
filters=[t.Move.conquest_pokemon.any()])
return render('/pokedex/conquest/move.mako')
def moves_list(self):
c.moves = (db.pokedex_session.query(t.Move)
.filter(t.Move.conquest_data.has())
.options(
sqla.orm.joinedload('conquest_data'),
sqla.orm.joinedload('conquest_data.move_displacement'),
)
.join(t.Move.names_local)
.order_by(t.Move.names_table.name.asc())
.all()
)
return render('/pokedex/conquest/move_list.mako')
def pokemon(self, name=None):
try:
c.pokemon = db.pokemon_query(name, None).one()
except NoResultFound:
return self._not_found()
c.semiform_pokemon = c.pokemon
c.pokemon = c.pokemon.species
if c.pokemon.conquest_order is None:
return self._not_found()
        c.prev_pokemon, c.next_pokemon = self._prev_next_id(
            c.pokemon, t.PokemonSpecies, 'conquest_order')
        c.type_efficacies = defaultdict(lambda: 100)
for target_type in c.semiform_pokemon.types:
for type_efficacy in target_type.target_efficacies:
c.type_efficacies[type_efficacy.damage_type] *= \
type_efficacy.damage_factor
c.type_efficacies[type_efficacy.damage_type] //= 100
        # e.g., Eevee has a total of seven descendants, so it would need to span 7 rows.
c.evolution_table = []
# Prefetch the evolution details
family = (db.pokedex_session.query(t.PokemonSpecies)
.filter(t.PokemonSpecies.evolution_chain_id ==
c.pokemon.evolution_chain_id)
.options(
sqla.orm.subqueryload('conquest_evolution'),
sqla.orm.joinedload('conquest_evolution.stat'),
sqla.orm.joinedload('conquest_evolution.kingdom'),
sqla.orm.joinedload('conquest_evolution.gender'),
sqla.orm.joinedload('conquest_evolution.item'),
)
.all())
# Strategy: build this table going backwards.
# Find a leaf, build the path going back up to its root. Remember all
# of the nodes seen along the way. Find another leaf not seen so far.
# Build its path backwards, sticking it to a seen node if one exists.
# Repeat until there are no unseen nodes.
seen_nodes = {}
while True:
# First, find some unseen nodes
unseen_leaves = []
for species in family:
if species in seen_nodes:
continue
children = []
# A Pokémon is a leaf if it has no evolutionary children, so...
for possible_child in family:
if possible_child in seen_nodes:
continue
if possible_child.parent_species == species:
children.append(possible_child)
if len(children) == 0:
unseen_leaves.append(species)
# If there are none, we're done! Bail.
            # Note that it is impossible to have any unseen non-leaves if there
            # are no unseen leaves; every leaf's ancestors become seen when we
            # build a path to it.
if len(unseen_leaves) == 0:
break
unseen_leaves.sort(key=lambda x: x.id)
leaf = unseen_leaves[0]
# root, parent_n, ... parent2, parent1, leaf
current_path = []
# Finally, go back up the tree to the root
current_species = leaf
while current_species:
# The loop bails just after current_species is no longer the
# root, so this will give us the root after the loop ends;
                # we need to know if it's a baby to see whether to indent the
                # entire table below
root_pokemon = current_species
if current_species in seen_nodes:
current_node = seen_nodes[current_species]
                    # Don't need to repeat this node; the first instance will
                    # have a rowspan
current_path.insert(0, None)
else:
current_node = {
'species': current_species,
'span': 0,
}
current_path.insert(0, current_node)
seen_nodes[current_species] = current_node
# This node has one more row to span: our current leaf
current_node['span'] += 1
current_species = current_species.parent_species
# We want every path to have four nodes: baby, basic, stage 1 and 2.
# Every root node is basic, unless it's defined as being a baby.
if not root_pokemon.is_baby:
current_path.insert(0, '')
while len(current_path) < 4:
current_path.append('')
c.evolution_table.append(current_path)
        # Conquest has a nonstandard stat, Range, which shouldn't be included
        # in the total, so we have to do things a bit differently.
# XXX actually do things differently instead of just fudging the same
# thing to work
c.stats = {} # stat => { border, background, percentile }
stat_total = 0
total_stat_rows = db.pokedex_session.query(t.ConquestPokemonStat) \
.filter_by(stat=c.pokemon.conquest_stats[0].stat) \
.count()
for pokemon_stat in c.pokemon.conquest_stats:
stat_info = c.stats[pokemon_stat.stat.identifier] = {}
stat_info['value'] = pokemon_stat.base_stat
if pokemon_stat.stat.is_base:
stat_total += pokemon_stat.base_stat
q = db.pokedex_session.query(t.ConquestPokemonStat) \
.filter_by(stat=pokemon_stat.stat)
less = q.filter(t.ConquestPokemonStat.base_stat <
pokemon_stat.base_stat).count()
equal = q.filter(t.ConquestPokemonStat.base_stat ==
pokemon_stat.base_stat).count()
percentile = (less + equal * 0.5) / total_stat_rows
stat_info['percentile'] = percentile
# Colors for the stat bars, based on percentile
stat_info['background'] = bar_color(percentile, 0.9)
stat_info['border'] = bar_color(percentile, 0.8)
# Percentile for the total
# Need to make a derived table that fakes pokemon_id, total_stats
stat_sum_tbl = db.pokedex_session.query(
sqla.sql.func.sum(t.ConquestPokemonStat.base_stat)
.label('stat_total')
) \
.filter(t.ConquestPokemonStat.conquest_stat_id <= 4) \
.group_by(t.ConquestPokemonStat.pokemon_species_id) \
.subquery()
q = db.pokedex_session.query(stat_sum_tbl)
less = q.filter(stat_sum_tbl.c.stat_total < stat_total).count()
equal = q.filter(stat_sum_tbl.c.stat_total == stat_total).count()
percentile = (less + equal * 0.5) / total_stat_rows
c.stats['total'] = {
'percentile': percentile,
'value': stat_total,
'background': bar_color(percentile, 0.9),
'border': bar_color(percentile, 0.8),
}
### Max links
# We only want to show warriors who have a max link above a certain
        # threshold, because there are 200 warriors and most of them won't
        # have very good links.
default_link = 70
c.link_form = LinkThresholdForm(request.params, link=default_link)
if request.params and c.link_form.validate():
link_threshold = c.link_form.link.data
else:
link_threshold = default_link
# No link ever goes down when a warrior ranks up, so we just need to
# check their final rank.
# First, craft a clause to filter out non-final warrior ranks.
ranks_sub = sqla.orm.aliased(t.ConquestWarriorRank)
higher_ranks_exist = (sqla.sql.exists([1])
.where(sqla.and_(
ranks_sub.warrior_id == t.ConquestWarriorRank.warrior_id,
ranks_sub.rank > t.ConquestWarriorRank.rank))
)
# Next, find final-rank warriors with a max link high enough.
worthy_warriors = (db.pokedex_session.query(t.ConquestWarrior.id)
.join(t.ConquestWarriorRank)
.filter(~higher_ranks_exist)
.join(t.ConquestMaxLink)
.filter(t.ConquestMaxLink.pokemon_species_id == c.pokemon.id)
.filter(t.ConquestMaxLink.max_link >= link_threshold))
# For Froslass and Gallade, we want to filter out male and female
# warriors, respectively.
# XXX Eventually we want to figure out all impossible evolutions, and
# show them, but sort them to the bottom and grey them out.
if (c.pokemon.conquest_evolution is not None and
c.pokemon.conquest_evolution.warrior_gender_id is not None):
worthy_warriors = worthy_warriors.filter(
t.ConquestWarrior.gender_id ==
c.pokemon.conquest_evolution.warrior_gender_id)
# Finally, find ALL the max links for these warriors!
links_q = (c.pokemon.conquest_max_links
.join(ranks_sub)
.filter(ranks_sub.warrior_id.in_(worthy_warriors))
.options(
sqla.orm.joinedload('warrior_rank'),
sqla.orm.subqueryload('warrior_rank.stats'),
sqla.orm.joinedload('warrior_rank.warrior'),
sqla.orm.joinedload('warrior_rank.warrior.archetype'),
sqla.orm.subqueryload('warrior_rank.warrior.types'),
))
c.max_links = links_q.all()
return render('/pokedex/conquest/pokemon.mako')
def pokemon_list(self):
c.pokemon = (db.pokedex_session.query(t.PokemonSpecies)
.filter(t.PokemonSpecies.conquest_order != None)
.options(
sqla.orm.subqueryload('conquest_abilities'),
sqla.orm.joinedload('conquest_move'),
sqla.orm.subqueryload('conquest_stats'),
sqla.orm.subqueryload('default_pokemon.types')
)
.order_by(t.PokemonSpecies.conquest_order)
.all()
)
return render('/pokedex/conquest/pokemon_list.mako')
def skills(self, name):
try:
c.skill = (db.get_by_name_query(t.ConquestWarriorSkill, name)
.one())
except NoResultFound:
return self._not_found()
### Prev/next for header
c.prev_skill, c.next_skill = self._prev_next_name(
t.ConquestWarriorSkill, c.skill)
return render('/pokedex/conquest/skill.mako')
def skills_list(self):
skills = (db.pokedex_session.query(t.ConquestWarriorSkill)
.join(t.ConquestWarriorSkill.names_local)
.order_by(t.ConquestWarriorSkill.names_table.name.asc()))
# We want to split the list up between generic skills anyone can get
# and the unique skills a specific warlord gets at a specific rank.
# The two player characters throw a wrench in that though so we just
# assume any skill known only by warlords is unique, which happens to
# work.
warriors_and_ranks = sqla.orm.join(t.ConquestWarrior,
t.ConquestWarriorRank)
generic_clause = (sqla.sql.exists(warriors_and_ranks.select())
.where(sqla.and_(
t.ConquestWarrior.archetype_id != None,
t.ConquestWarriorRank.skill_id ==
t.ConquestWarriorSkill.id))
)
c.generic_skills = skills.filter(generic_clause).all()
c.unique_skills = (skills.filter(~generic_clause)
.options(
sqla.orm.joinedload('warrior_ranks'),
sqla.orm.joinedload('warrior_ranks.warrior')
)
.all())
# Decide randomly which player gets displayed
c.player_index = randint(0, 1)
return render('/pokedex/conquest/skill_list.mako')
def warriors(self, name):
try:
c.warrior = db.get_by_name_query(t.ConquestWarrior, name).one()
except NoResultFound:
return self._not_found()
c.prev_warrior, c.next_warrior = self._prev_next_id(
c.warrior, t.ConquestWarrior, 'id')
c.rank_count = len(c.warrior.ranks)
c.perfect_links = (c.warrior.ranks[-1].max_links
.filter_by(max_link=100)
.join(t.PokemonSpecies)
.order_by(t.PokemonSpecies.conquest_order)
.all())
### Stats
# Percentiles! Percentiles are hard.
stats = t.ConquestWarriorRankStatMap
all_stats = sqla.orm.aliased(t.ConquestWarriorRankStatMap)
# We need this to be a float so the percentile equation can divide by it
stat_count = sqla.cast(sqla.func.count(all_stats.base_stat),
sqla.types.FLOAT)
# Grab all of a rank's stats, and also get percentiles
stat_q = (db.pokedex_session.query(stats.warrior_stat_id, stats.base_stat)
.join(all_stats, stats.warrior_stat_id == all_stats.warrior_stat_id)
.group_by(stats.warrior_rank_id, stats.warrior_stat_id,
stats.base_stat)
.order_by(stats.warrior_stat_id)
.add_columns(
sqla.func.sum(sqla.cast(stats.base_stat > all_stats.base_stat,
sqla.types.INT)) / stat_count +
sqla.func.sum(sqla.cast(stats.base_stat == all_stats.base_stat,
sqla.types.INT)) / stat_count / 2
)
)
stat_names = [stat.name for stat in
db.pokedex_session.query(t.ConquestWarriorStat)
.order_by(t.ConquestWarriorStat.id)
.all()]
# Go through the query for each rank
c.stats = []
for rank in c.warrior.ranks:
c.stats.append([])
info = stat_q.filter(stats.warrior_rank_id == rank.id).all()
# We need a bit more info than what the query directly provides
for stat, value, percentile in info:
percentile = float(percentile)
c.stats[-1].append((
stat_names[stat - 1], value, percentile,
bar_color(percentile, 0.9), bar_color(percentile, 0.8)
))
### Max links
default_link = 70 if c.warrior.archetype else 90
c.link_form = LinkThresholdForm(request.params, link=default_link)
if request.params and c.link_form.validate():
link_threshold = c.link_form.link.data
else:
link_threshold = default_link
link_pokemon = (
db.pokedex_session.query(t.ConquestMaxLink.pokemon_species_id)
.filter(t.ConquestMaxLink.warrior_rank_id ==
c.warrior.ranks[-1].id)
.filter(t.ConquestMaxLink.max_link >= link_threshold)
)
max_links = []
for rank in c.warrior.ranks:
max_links.append(rank.max_links
.filter(t.ConquestMaxLink.pokemon_species_id
.in_(link_pokemon))
.join(t.PokemonSpecies)
.order_by(t.PokemonSpecies.conquest_order)
.options(
sqla.orm.joinedload('pokemon'),
sqla.orm.subqueryload('pokemon.conquest_abilities'),
sqla.orm.subqueryload('pokemon.conquest_stats'),
)
.all())
c.max_links = izip(*max_links)
return render('/pokedex/conquest/warrior.mako')
def warriors_list(self):
c.warriors = (db.pokedex_session.query(t.ConquestWarrior)
.options(
sqla.orm.subqueryload('ranks'),
sqla.orm.subqueryload('ranks.stats'),
sqla.orm.subqueryload('types')
)
.order_by(t.ConquestWarrior.id)
.all()
)
return render('/pokedex/conquest/warrior_list.mako')
| true
| true
|
1c4203b03f80648133ec496159fe09432bce6089
| 227
|
py
|
Python
|
Files/soundcloud.py
|
Li-Evelyn/vanilla-cake-bot
|
a166c897b15d21b82b3b9c838c017bd1679a2063
|
[
"MIT"
] | null | null | null |
Files/soundcloud.py
|
Li-Evelyn/vanilla-cake-bot
|
a166c897b15d21b82b3b9c838c017bd1679a2063
|
[
"MIT"
] | null | null | null |
Files/soundcloud.py
|
Li-Evelyn/vanilla-cake-bot
|
a166c897b15d21b82b3b9c838c017bd1679a2063
|
[
"MIT"
] | null | null | null |
import soundcloud
# TODO: add soundcloud link processing once https://soundcloud.com/you/apps/new opens up :(
client = soundcloud.Client(client_id="")
track_url = ""
track = client.get('/resolve', url=track_url)
print(track)
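# Illustrative note (assumption, not part of the original file): once a real
# client_id is available, /resolve follows the track URL and the client wraps
# the returned JSON, so individual fields could then be read, e.g.
#     print(track.title)    # hypothetical attribute access on the resource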
| 25.222222
| 91
| 0.744493
|
import soundcloud
client = soundcloud.Client(client_id="")
track_url = ""
track = client.get('/resolve', url=track_url)
print(track)
| true
| true
|
1c420479abefc7128f12f426e45827f1228ff734
| 17,260
|
py
|
Python
|
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2017_03_30/aio/operations/_virtual_machine_images_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 2
|
2021-03-24T06:26:11.000Z
|
2021-04-18T15:55:59.000Z
|
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2017_03_30/aio/operations/_virtual_machine_images_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 4
|
2019-04-17T17:57:49.000Z
|
2020-04-24T21:11:22.000Z
|
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2017_03_30/aio/operations/_virtual_machine_images_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 2
|
2021-05-23T16:46:31.000Z
|
2021-05-26T23:51:09.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualMachineImagesOperations:
"""VirtualMachineImagesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2017_03_30.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def get(
self,
location: str,
publisher_name: str,
offer: str,
skus: str,
version: str,
**kwargs
) -> "_models.VirtualMachineImage":
"""Gets a virtual machine image.
:param location: The name of a supported Azure region.
:type location: str
:param publisher_name: A valid image publisher.
:type publisher_name: str
:param offer: A valid image publisher offer.
:type offer: str
:param skus: A valid image SKU.
:type skus: str
:param version: A valid image SKU version.
:type version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualMachineImage, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2017_03_30.models.VirtualMachineImage
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineImage"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'publisherName': self._serialize.url("publisher_name", publisher_name, 'str'),
'offer': self._serialize.url("offer", offer, 'str'),
'skus': self._serialize.url("skus", skus, 'str'),
'version': self._serialize.url("version", version, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualMachineImage', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus/{skus}/versions/{version}'} # type: ignore
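    # Hypothetical usage sketch (not part of the generated file; the client and
    # credential shown are assumptions). The get() operation above is normally
    # reached through the versioned management client, e.g.:
    #     from azure.identity.aio import DefaultAzureCredential
    #     from azure.mgmt.compute.v2017_03_30.aio import ComputeManagementClient
    #     client = ComputeManagementClient(DefaultAzureCredential(), "<subscription-id>")
    #     image = await client.virtual_machine_images.get(
    #         "westus2", "Canonical", "UbuntuServer", "16.04-LTS", "<exact-version>")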
async def list(
self,
location: str,
publisher_name: str,
offer: str,
skus: str,
expand: Optional[str] = None,
top: Optional[int] = None,
orderby: Optional[str] = None,
**kwargs
) -> List["_models.VirtualMachineImageResource"]:
"""Gets a list of all virtual machine image versions for the specified location, publisher, offer,
and SKU.
:param location: The name of a supported Azure region.
:type location: str
:param publisher_name: A valid image publisher.
:type publisher_name: str
:param offer: A valid image publisher offer.
:type offer: str
:param skus: A valid image SKU.
:type skus: str
:param expand: The expand expression to apply on the operation.
:type expand: str
        :param top: The maximum number of results to return.
        :type top: int
        :param orderby: The OData $orderby expression to apply to the operation.
        :type orderby: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of VirtualMachineImageResource, or the result of cls(response)
:rtype: list[~azure.mgmt.compute.v2017_03_30.models.VirtualMachineImageResource]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.VirtualMachineImageResource"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'publisherName': self._serialize.url("publisher_name", publisher_name, 'str'),
'offer': self._serialize.url("offer", offer, 'str'),
'skus': self._serialize.url("skus", skus, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query("top", top, 'int')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('[VirtualMachineImageResource]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus/{skus}/versions'} # type: ignore
async def list_offers(
self,
location: str,
publisher_name: str,
**kwargs
) -> List["_models.VirtualMachineImageResource"]:
"""Gets a list of virtual machine image offers for the specified location and publisher.
:param location: The name of a supported Azure region.
:type location: str
:param publisher_name: A valid image publisher.
:type publisher_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of VirtualMachineImageResource, or the result of cls(response)
:rtype: list[~azure.mgmt.compute.v2017_03_30.models.VirtualMachineImageResource]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.VirtualMachineImageResource"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = self.list_offers.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'publisherName': self._serialize.url("publisher_name", publisher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('[VirtualMachineImageResource]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_offers.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers'} # type: ignore
async def list_publishers(
self,
location: str,
**kwargs
) -> List["_models.VirtualMachineImageResource"]:
"""Gets a list of virtual machine image publishers for the specified Azure location.
:param location: The name of a supported Azure region.
:type location: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of VirtualMachineImageResource, or the result of cls(response)
:rtype: list[~azure.mgmt.compute.v2017_03_30.models.VirtualMachineImageResource]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.VirtualMachineImageResource"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = self.list_publishers.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('[VirtualMachineImageResource]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_publishers.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers'} # type: ignore
async def list_skus(
self,
location: str,
publisher_name: str,
offer: str,
**kwargs
) -> List["_models.VirtualMachineImageResource"]:
"""Gets a list of virtual machine image SKUs for the specified location, publisher, and offer.
:param location: The name of a supported Azure region.
:type location: str
:param publisher_name: A valid image publisher.
:type publisher_name: str
:param offer: A valid image publisher offer.
:type offer: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of VirtualMachineImageResource, or the result of cls(response)
:rtype: list[~azure.mgmt.compute.v2017_03_30.models.VirtualMachineImageResource]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.VirtualMachineImageResource"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = self.list_skus.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'publisherName': self._serialize.url("publisher_name", publisher_name, 'str'),
'offer': self._serialize.url("offer", offer, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('[VirtualMachineImageResource]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus'} # type: ignore
| 46.902174
| 221
| 0.664484
|
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualMachineImagesOperations:
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def get(
self,
location: str,
publisher_name: str,
offer: str,
skus: str,
version: str,
**kwargs
) -> "_models.VirtualMachineImage":
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
url = self.get.metadata['url']
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'publisherName': self._serialize.url("publisher_name", publisher_name, 'str'),
'offer': self._serialize.url("offer", offer, 'str'),
'skus': self._serialize.url("skus", skus, 'str'),
'version': self._serialize.url("version", version, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualMachineImage', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus/{skus}/versions/{version}'}
async def list(
self,
location: str,
publisher_name: str,
offer: str,
skus: str,
expand: Optional[str] = None,
top: Optional[int] = None,
orderby: Optional[str] = None,
**kwargs
) -> List["_models.VirtualMachineImageResource"]:
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
url = self.list.metadata['url']
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'publisherName': self._serialize.url("publisher_name", publisher_name, 'str'),
'offer': self._serialize.url("offer", offer, 'str'),
'skus': self._serialize.url("skus", skus, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query("top", top, 'int')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('[VirtualMachineImageResource]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus/{skus}/versions'}
async def list_offers(
self,
location: str,
publisher_name: str,
**kwargs
) -> List["_models.VirtualMachineImageResource"]:
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
url = self.list_offers.metadata['url']
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'publisherName': self._serialize.url("publisher_name", publisher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('[VirtualMachineImageResource]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_offers.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers'}
async def list_publishers(
self,
location: str,
**kwargs
) -> List["_models.VirtualMachineImageResource"]:
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
url = self.list_publishers.metadata['url']
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('[VirtualMachineImageResource]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_publishers.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers'}
async def list_skus(
self,
location: str,
publisher_name: str,
offer: str,
**kwargs
) -> List["_models.VirtualMachineImageResource"]:
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
url = self.list_skus.metadata['url']
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'publisherName': self._serialize.url("publisher_name", publisher_name, 'str'),
'offer': self._serialize.url("offer", offer, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('[VirtualMachineImageResource]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus'}
| true
| true
|
1c4204b8cfaf8042c511acc4a89440dfa012d28b
| 480
|
py
|
Python
|
setup.py
|
SystematIC-Design/cocotbext-ral
|
c3dbd35727b1474700a5970c13d6325ea155e0c6
|
[
"MIT"
] | null | null | null |
setup.py
|
SystematIC-Design/cocotbext-ral
|
c3dbd35727b1474700a5970c13d6325ea155e0c6
|
[
"MIT"
] | null | null | null |
setup.py
|
SystematIC-Design/cocotbext-ral
|
c3dbd35727b1474700a5970c13d6325ea155e0c6
|
[
"MIT"
] | null | null | null |
# Minimal setup.py. Extend as needed.
from setuptools import setup, find_namespace_packages
setup(name = 'cocotbext-ral',
version = '0.1',
packages = find_namespace_packages(include=['cocotbext.*']),
install_requires = ['cocotb'],
python_requires = '>=3.5',
classifiers = [
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)"])
| 36.923077
| 81
| 0.65
|
from setuptools import setup, find_namespace_packages
setup(name = 'cocotbext-ral',
version = '0.1',
packages = find_namespace_packages(include=['cocotbext.*']),
install_requires = ['cocotb'],
python_requires = '>=3.5',
classifiers = [
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)"])
| true
| true
|
1c42067f56138dc2c2ea75e25e3849b0dfaa31fc
| 3,161
|
py
|
Python
|
test/lint/check-doc.py
|
PenTesting/bitcoin-abc
|
28fa7e2db7d986e63d70e41b8f1d29d5fdf724d2
|
[
"MIT"
] | 1
|
2019-06-11T13:40:52.000Z
|
2019-06-11T13:40:52.000Z
|
test/lint/check-doc.py
|
PenTesting/bitcoin-abc
|
28fa7e2db7d986e63d70e41b8f1d29d5fdf724d2
|
[
"MIT"
] | null | null | null |
test/lint/check-doc.py
|
PenTesting/bitcoin-abc
|
28fa7e2db7d986e63d70e41b8f1d29d5fdf724d2
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Copyright (c) 2019 The Bitcoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
This checks if all command line args are documented.
Return value is 0 to indicate no error.
Author: @MarcoFalke
'''
from subprocess import check_output
from pprint import PrettyPrinter
import glob
import re
TOP_LEVEL = 'git rev-parse --show-toplevel'
FOLDER_SRC = '/src/**/'
FOLDER_TEST = '/src/**/test/'
EXTENSIONS = ["*.c", "*.h", "*.cpp", "*.cc", "*.hpp"]
REGEX_ARG = '(?:ForceSet|SoftSet|Get|Is)(?:Bool)?Args?(?:Set)?\(\s*"(-[^"]+)"'
REGEX_DOC = 'AddArg\(\s*"(-[^"=]+?)(?:=|")'
# list unsupported, deprecated and duplicate args as they need no documentation
SET_DOC_OPTIONAL = set(['-benchmark',
'-blockminsize',
'-dbcrashratio',
'-debugnet',
'-forcecompactdb',
# TODO remove after the Nov 2019 upgrade
'-gravitonactivationtime',
# TODO remove after the may 2019 fork
'-greatwallactivationtime',
'-h',
'-help',
'-parkdeepreorg',
'-replayprotectionactivationtime',
'-rpcssl',
'-socks',
'-tor',
'-usehd',
'-whitelistalwaysrelay'])
# list false positive unknown arguments
SET_FALSE_POSITIVE_UNKNOWNS = set(['-zmqpubhashblock',
'-zmqpubhashtx',
'-zmqpubrawblock',
'-zmqpubrawtx'])
def main():
top_level = check_output(TOP_LEVEL, shell=True).decode().strip()
source_files = []
test_files = []
for extension in EXTENSIONS:
source_files += glob.glob(top_level +
FOLDER_SRC + extension, recursive=True)
test_files += glob.glob(top_level + FOLDER_TEST +
extension, recursive=True)
files = set(source_files) - set(test_files)
args_used = set()
args_docd = set()
for file in files:
with open(file, 'r') as f:
content = f.read()
args_used |= set(re.findall(re.compile(REGEX_ARG), content))
args_docd |= set(re.findall(re.compile(REGEX_DOC), content))
args_used |= SET_FALSE_POSITIVE_UNKNOWNS
args_need_doc = args_used - args_docd - SET_DOC_OPTIONAL
args_unknown = args_docd - args_used
pp = PrettyPrinter()
print("Args used : {}".format(len(args_used)))
print("Args documented : {}".format(len(args_docd)))
print("Args undocumented: {} ({} don't need documentation)".format(
len(args_need_doc), len(SET_DOC_OPTIONAL)))
pp.pprint(args_need_doc)
print("Args unknown : {}".format(len(args_unknown)))
pp.pprint(args_unknown)
if __name__ == "__main__":
main()
| 34.736264
| 79
| 0.554255
|
from subprocess import check_output
from pprint import PrettyPrinter
import glob
import re
TOP_LEVEL = 'git rev-parse --show-toplevel'
FOLDER_SRC = '/src/**/'
FOLDER_TEST = '/src/**/test/'
EXTENSIONS = ["*.c", "*.h", "*.cpp", "*.cc", "*.hpp"]
REGEX_ARG = '(?:ForceSet|SoftSet|Get|Is)(?:Bool)?Args?(?:Set)?\(\s*"(-[^"]+)"'
REGEX_DOC = 'AddArg\(\s*"(-[^"=]+?)(?:=|")'
SET_DOC_OPTIONAL = set(['-benchmark',
'-blockminsize',
'-dbcrashratio',
'-debugnet',
'-forcecompactdb',
'-gravitonactivationtime',
'-greatwallactivationtime',
'-h',
'-help',
'-parkdeepreorg',
'-replayprotectionactivationtime',
'-rpcssl',
'-socks',
'-tor',
'-usehd',
'-whitelistalwaysrelay'])
SET_FALSE_POSITIVE_UNKNOWNS = set(['-zmqpubhashblock',
'-zmqpubhashtx',
'-zmqpubrawblock',
'-zmqpubrawtx'])
def main():
top_level = check_output(TOP_LEVEL, shell=True).decode().strip()
source_files = []
test_files = []
for extension in EXTENSIONS:
source_files += glob.glob(top_level +
FOLDER_SRC + extension, recursive=True)
test_files += glob.glob(top_level + FOLDER_TEST +
extension, recursive=True)
files = set(source_files) - set(test_files)
args_used = set()
args_docd = set()
for file in files:
with open(file, 'r') as f:
content = f.read()
args_used |= set(re.findall(re.compile(REGEX_ARG), content))
args_docd |= set(re.findall(re.compile(REGEX_DOC), content))
args_used |= SET_FALSE_POSITIVE_UNKNOWNS
args_need_doc = args_used - args_docd - SET_DOC_OPTIONAL
args_unknown = args_docd - args_used
pp = PrettyPrinter()
print("Args used : {}".format(len(args_used)))
print("Args documented : {}".format(len(args_docd)))
print("Args undocumented: {} ({} don't need documentation)".format(
len(args_need_doc), len(SET_DOC_OPTIONAL)))
pp.pprint(args_need_doc)
print("Args unknown : {}".format(len(args_unknown)))
pp.pprint(args_unknown)
if __name__ == "__main__":
main()
| true
| true
|