id stringlengths 1 265 | text stringlengths 6 5.19M | dataset_id stringclasses 7
values |
|---|---|---|
3261527 | from typing import Union
def submit_event(key: str, value: Union[int, float, bool]):
    """Record one event under *key* with the numeric *value*.

    :param key: Identifier of the event.
    :param value: Value of the event.
    :return: None
    """
    # Intentionally a no-op stub: callers may invoke it unconditionally.
    pass
| StarcoderdataPython |
3352411 | from django.conf.urls import url, include
from tastypie import fields
from tastypie.api import NamespacedApi
try:
from pieguard.authorization import GuardianAuthorization as AuthorizationClass
except ImportError:
from tastypie.authorization import DjangoAuthorization as AuthorizationClass
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from tastypie.resources import NamespacedModelResource
from djangoautoconf.model_utils.model_attr_utils import model_enumerator, enum_model_fields
from djangoautoconf.req_with_auth import DjangoUserAuthentication
from ufs_tools.string_tools import class_name_to_low_case
def create_tastypie_resource_class(class_inst, resource_name=None):
    """Dynamically build a NamespacedModelResource subclass for a Django model.

    :param class_inst: the Django model class to expose through tastypie.
    :param resource_name: optional explicit resource name; defaults to the
        model's class name converted to lower/snake case.
    :return: a new resource class (not an instance).
    """
    if resource_name is None:
        resource_name = class_name_to_low_case(class_inst.__name__)
    # Attributes for the inner Meta class of the generated resource.
    meta_attributes = {"queryset": class_inst.objects.all(), "resource_name": resource_name,
                       "authentication": DjangoUserAuthentication(), "authorization": AuthorizationClass(),
                       "filtering": {}, "always_return_data": True}
    additional_resource_fields = {}
    for field in enum_model_fields(class_inst):
        if field.is_relation:
            if field.related_model is class_inst:
                # Self-referential FK: tastypie supports the 'self' shortcut.
                additional_resource_fields[field.name] = fields.ForeignKey('self', field.name, null=True, blank=True)
            else:
                # Do not add filtering if it is foreign key, because we can not find the foreign key's resource
                continue
        meta_attributes["filtering"].update({field.name: ALL_WITH_RELATIONS})
    # The NamespacedModelResource used with NamespacedApi will ensure the namespace is added when calling reverse to
    # get the resource uri
    resource_attributes = {"Meta": type("Meta", (), meta_attributes)}
    resource_attributes.update(additional_resource_fields)
    # Build the resource class itself via type(); name mirrors the model name.
    resource_class = type(class_inst.__name__ + "AutoResource", (NamespacedModelResource,), resource_attributes)
    return resource_class
def create_tastypie_resource(class_inst):
    """
    Usage: url(r'^api/', include(create_tastypie_resource(UfsObjFileMapping).urls)),
    Access url: api/ufs_obj_file_mapping/?format=json
    :param class_inst: Django model class to expose.
    :return: an instance of the generated resource class.
    """
    resource_class = create_tastypie_resource_class(class_inst)
    return resource_class()
def add_tastypie_for(urlpatterns, models, excluded_model_name=('MPTTModel',)):
    """Append tastypie API url patterns for every model in *models* (in place)."""
    urlpatterns += get_tastypie_urls(models, excluded_model_name)
def get_tastypie_urls(models, excluded_model_name=('MPTTModel',)):
    """Build the list of url patterns exposing the app's models through tastypie v1."""
    app_name = models.__name__.split(".")[0]
    # The urlconf_namespace and the above NamespacedModelResource will ensure the name space is added when
    # calling reverse to get the resource uri
    v1_api = NamespacedApi(api_name='v1', urlconf_namespace=app_name)
    resource_urls = []
    for model in model_enumerator(models, excluded_model_name):
        if hasattr(model, "objects"):
            add_model_resource(model, v1_api)
    # Swagger documentation endpoint for the generated API.
    resource_urls.append(
        url(r'api/doc/',
            include('tastypie_swagger.urls'),
            kwargs={"tastypie_api_module": v1_api,
                    "namespace": app_name,
                    "version": "1.0"}))
    resource_urls.append(url(r'^api/', include(v1_api.urls)))
    resource_urls.append(url(r'^api_domain_needed_signature/', None))
    return resource_urls
def add_model_resource(model, v1_api):
    """Create a resource for *model* and register it on the given v1 api."""
    v1_api.register(create_tastypie_resource(model))
| StarcoderdataPython |
3289003 | import lldb
from lldbsuite.test.lldbtest import *
from lldbsuite.test.decorators import *
import lldbsuite.test.lldbutil as lldbutil
import unittest2
class TestSwiftHealthCheck(TestBase):
    """Exercise the `swift-healthcheck` command and inspect its log output."""

    NO_DEBUG_INFO_TESTCASE = True
    mydir = TestBase.compute_mydir(__file__)

    @swiftTest
    @skipIfDarwinEmbedded
    def test(self):
        """Test that an underspecified triple is upgraded with a version number.
        """
        self.build()
        target, process, thread, bkpt = lldbutil.run_to_name_breakpoint(
            self, 'main')
        self.expect("p 1")
        result = lldb.SBCommandReturnObject()
        ret_val = self.dbg.GetCommandInterpreter().HandleCommand("swift-healthcheck", result)
        # The command's last output token is the path of the health-check log.
        log = result.GetOutput()[:-1].split(" ")[-1]
        self.assertEqual(log[-4:], ".log")
        import io, re
        good = 0
        bad = 0
        # Use a context manager so the log file handle is always closed
        # (the original leaked it).
        with io.open(log, "r", encoding='utf-8') as logfile:
            for line in logfile:
                if re.search('swift-healthcheck', line):
                    good += 1
                    continue
                # BUG FIX: the original pattern was 'Unsupported mixing"' with a
                # stray trailing quote, so this branch could never match and the
                # `bad` counter was trivially 0.
                if re.search('Unsupported mixing', line):
                    bad += 1
                    break
        self.assertGreater(good, 1)
        self.assertEqual(bad, 0)
| StarcoderdataPython |
1691318 | # coding: utf-8
import pprint
import six
from enum import Enum
class AbstractApplicationUserUpdate:
    """Swagger-generated update payload for an application user.

    Holds three optional attributes (name, request_limit, state) behind
    validating properties; presumably generated by swagger-codegen — the
    to_dict/to_str/__eq__ helpers follow its standard template.
    """
    # Declared attribute name -> swagger type string.
    swagger_types = {
        'name': 'str',
        'request_limit': 'int',
        'state': 'CreationEntityState',
    }
    # Python attribute name -> JSON field name.
    attribute_map = {
        'name': 'name','request_limit': 'requestLimit','state': 'state',
    }
    # Backing fields for the properties below; None means "not set".
    _name = None
    _request_limit = None
    _state = None
    def __init__(self, **kwargs):
        # Assigning via the properties runs their validation.
        self.discriminator = None
        self.name = kwargs.get('name', None)
        self.request_limit = kwargs.get('request_limit', None)
        self.state = kwargs.get('state', None)
    @property
    def name(self):
        """Gets the name of this AbstractApplicationUserUpdate.

        The user name is used to identify the application user in administrative interfaces.

        :return: The name of this AbstractApplicationUserUpdate.
        :rtype: str
        """
        return self._name
    @name.setter
    def name(self, name):
        """Sets the name of this AbstractApplicationUserUpdate.

        The user name is used to identify the application user in administrative interfaces.

        :param name: The name of this AbstractApplicationUserUpdate.
        :type: str
        :raises ValueError: if the name is longer than 256 characters.
        """
        if name is not None and len(name) > 256:
            raise ValueError("Invalid value for `name`, length must be less than or equal to `256`")
        self._name = name
    @property
    def request_limit(self):
        """Gets the request_limit of this AbstractApplicationUserUpdate.

        The request limit defines the maximum number of API request accepted within 2 minutes. This limit can only be changed with special privileges.

        :return: The request_limit of this AbstractApplicationUserUpdate.
        :rtype: int
        """
        return self._request_limit
    @request_limit.setter
    def request_limit(self, request_limit):
        """Sets the request_limit of this AbstractApplicationUserUpdate.

        The request limit defines the maximum number of API request accepted within 2 minutes. This limit can only be changed with special privileges.

        :param request_limit: The request_limit of this AbstractApplicationUserUpdate.
        :type: int
        """
        self._request_limit = request_limit
    @property
    def state(self):
        """Gets the state of this AbstractApplicationUserUpdate.

        :return: The state of this AbstractApplicationUserUpdate.
        :rtype: CreationEntityState
        """
        return self._state
    @state.setter
    def state(self, state):
        """Sets the state of this AbstractApplicationUserUpdate.

        :param state: The state of this AbstractApplicationUserUpdate.
        :type: CreationEntityState
        """
        self._state = state
    def to_dict(self):
        """Return the model's properties as a dict, recursing into nested models."""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            elif isinstance(value, Enum):
                # Serialize enums by their underlying value.
                result[attr] = value.value
            else:
                result[attr] = value
        # Template artifact: only relevant if the class ever subclasses dict.
        if issubclass(AbstractApplicationUserUpdate, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Return a pretty-printed string of the model's dict form."""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()
    def __eq__(self, other):
        """Equality by full attribute dict; False for foreign types."""
        if not isinstance(other, AbstractApplicationUserUpdate):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| StarcoderdataPython |
113863 | from __future__ import annotations
import abc
from typing import Any, Iterable
import jsonpath_ng
from storage.var import BaseVar
class JsonPath(BaseVar[Any, Any]):
    """Variable that extracts values from an item via a compiled JSONPath."""

    json_path: Any  # compiled jsonpath_ng expression

    def __init__(self, json_path):
        self.json_path = json_path

    @classmethod
    def from_str(cls, json_path: str):
        """Construct from a raw JSONPath string by compiling it."""
        return cls(jsonpath_ng.parse(json_path))

    def find(self, item):
        """Return every value the expression matches inside *item*."""
        matches = self.json_path.find(item)
        return [m.value for m in matches]

    @abc.abstractmethod
    def __call__(self, item: Any) -> Any:
        raise NotImplementedError()

    @staticmethod
    def array(json_path: str) -> JsonPath:
        """Factory: a path that yields all matches."""
        return ArrayJsonPath.from_str(json_path)

    @staticmethod
    def single(json_path: str) -> JsonPath:
        """Factory: a path that yields only the first match."""
        return SingleJsonPath.from_str(json_path)
class ArrayJsonPath(JsonPath):
    """JSONPath variable yielding every matched value."""

    def __call__(self, item: Any) -> Iterable[Any]:
        matches = self.find(item)
        return matches
class SingleJsonPath(JsonPath):
    """JSONPath variable yielding only the first matched value, or None."""

    def __call__(self, item: Any) -> Any:
        matches = self.find(item)
        return matches[0] if matches else None
| StarcoderdataPython |
3382824 | <reponame>mindspore-ai/models
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""get predictions for squad"""
import math
import collections
def get_prelim_predictions(features, unique_id_to_result, n_best_size, max_answer_length):
    """Collect candidate (start, end) answer spans across all features.

    Filters out invalid spans and returns the survivors sorted by
    start_logit + end_logit, best first.

    :param features: tokenized input features (one per sliding window).
    :param unique_id_to_result: map of feature unique_id -> model result.
    :param n_best_size: how many top start/end logits to consider per feature.
    :param max_answer_length: longest allowed span, in tokens.
    :return: list of PrelimPrediction namedtuples, sorted best-first.
    """
    _PrelimPrediction = collections.namedtuple(
        "PrelimPrediction",
        ["feature_index", "start_index", "end_index", "start_logit", "end_logit"])
    prelim_predictions = []
    # keep track of the minimum score of null start+end of position 0
    for (feature_index, feature) in enumerate(features):
        # Features with no model result are skipped entirely.
        if feature.unique_id not in unique_id_to_result:
            continue
        result = unique_id_to_result[feature.unique_id]
        start_indexes = _get_best_indexes(result.start_logits, n_best_size)
        end_indexes = _get_best_indexes(result.end_logits, n_best_size)
        # if we could have irrelevant answers, get the min score of irrelevant
        for start_index in start_indexes:
            for end_index in end_indexes:
                # We could hypothetically create invalid predictions, e.g., predict
                # that the start of the span is in the question. We throw out all
                # invalid predictions.
                if start_index >= len(feature.tokens):
                    continue
                if end_index >= len(feature.tokens):
                    continue
                if start_index not in feature.token_to_orig_map:
                    continue
                if end_index not in feature.token_to_orig_map:
                    continue
                if not feature.token_is_max_context.get(start_index, False):
                    continue
                if end_index < start_index:
                    continue
                length = end_index - start_index + 1
                if length > max_answer_length:
                    continue
                prelim_predictions.append(
                    _PrelimPrediction(
                        feature_index=feature_index,
                        start_index=start_index,
                        end_index=end_index,
                        start_logit=result.start_logits[start_index],
                        end_logit=result.end_logits[end_index]))
    # Rank candidates by combined start/end logit, highest first.
    prelim_predictions = sorted(
        prelim_predictions,
        key=lambda x: (x.start_logit + x.end_logit),
        reverse=True)
    return prelim_predictions
def get_nbest(args, prelim_predictions, features, example, n_best_size, do_lower_case):
    """Convert ranked preliminary spans into up to n_best_size unique text answers.

    :param args: object carrying the tokenizer (args.tokenizer is used below).
    :param prelim_predictions: spans from get_prelim_predictions, best-first.
    :param features: tokenized input features, indexed by pred.feature_index.
    :param example: the source example (unused here; kept for interface parity).
    :param n_best_size: maximum number of answers to keep.
    :param do_lower_case: unused here; kept for interface parity.
    :return: non-empty list of NbestPrediction namedtuples.
    """
    _NbestPrediction = collections.namedtuple(
        "NbestPrediction", ["text", "start_logit", "end_logit"])
    seen_predictions = {}
    nbest = []
    for pred in prelim_predictions:
        if len(nbest) >= n_best_size:
            break
        feature = features[pred.feature_index]
        if pred.start_index > 0:  # this is a non-null prediction
            tok_tokens = feature.tokens[pred.start_index:(pred.end_index + 1)]
            final_text = args.tokenizer.convert_tokens_to_string(tok_tokens).strip()
            # Deduplicate: only the highest-scoring span per answer text survives.
            if final_text in seen_predictions:
                continue
            seen_predictions[final_text] = True
        else:
            final_text = ""
            seen_predictions[final_text] = True
        nbest.append(
            _NbestPrediction(
                text=final_text,
                start_logit=pred.start_logit,
                end_logit=pred.end_logit))
    # In very rare edge cases we could have no valid predictions. So we
    # just create a nonce prediction in this case to avoid failure.
    if not nbest:
        nbest.append(_NbestPrediction(text="empty", start_logit=0.0, end_logit=0.0))
    assert len(nbest) >= 1
    return nbest
def get_predictions(args, all_examples, all_features, all_results, n_best_size, max_answer_length, do_lower_case):
    """Produce the best answer text for every example.

    :return: OrderedDict mapping example.qas_id -> best answer text.
    """
    print("start to get predictions")
    # Group features by their source example index.
    example_index_to_features = collections.defaultdict(list)
    for feature in all_features:
        example_index_to_features[feature.example_index].append(feature)
    # Index model results by feature unique_id for O(1) lookup.
    unique_id_to_result = {}
    for result in all_results:
        unique_id_to_result[result.unique_id] = result
    all_predictions = collections.OrderedDict()
    for (example_index, example) in enumerate(all_examples):
        features = example_index_to_features[example_index]
        prelim_predictions = get_prelim_predictions(features, unique_id_to_result, n_best_size, max_answer_length)
        nbest = get_nbest(args, prelim_predictions, features, example, n_best_size, do_lower_case)
        total_scores = []
        best_non_null_entry = None
        for entry in nbest:
            total_scores.append(entry.start_logit + entry.end_logit)
            # Remember the first (highest-ranked) entry with non-empty text.
            if not best_non_null_entry:
                if entry.text:
                    best_non_null_entry = entry
        probs = _compute_softmax(total_scores)
        # Build a JSON-friendly ranking (kept locally; only the top text is returned).
        nbest_json = []
        for (i, entry) in enumerate(nbest):
            output = collections.OrderedDict()
            output["text"] = entry.text
            output["probability"] = probs[i]
            output["start_logit"] = entry.start_logit
            output["end_logit"] = entry.end_logit
            nbest_json.append(output)
        assert len(nbest_json) >= 1
        all_predictions[example.qas_id] = nbest_json[0]["text"]
    return all_predictions
def write_predictions(args, all_examples, all_features, all_results, n_best_size,
                      max_answer_length, do_lower_case):
    """Compute and return the final predictions mapping (despite the name, no file is written here)."""
    return get_predictions(args, all_examples, all_features, all_results,
                           n_best_size, max_answer_length, do_lower_case)
def _get_best_indexes(logits, n_best_size):
"""Get the n-best logits from a list."""
index_and_score = sorted(enumerate(logits), key=lambda x: x[1], reverse=True)
best_indexes = []
for (i, score) in enumerate(index_and_score):
if i >= n_best_size:
break
best_indexes.append(score[0])
return best_indexes
def _compute_softmax(scores):
"""Compute softmax probability over raw logits."""
if not scores:
return []
max_score = None
for score in scores:
if max_score is None or score > max_score:
max_score = score
exp_scores = []
total_sum = 0.0
for score in scores:
x = math.exp(score - max_score)
exp_scores.append(x)
total_sum += x
probs = []
for score in exp_scores:
probs.append(score / total_sum)
return probs
| StarcoderdataPython |
73031 | from configHandler import loadConfigData
from clientClass import Client
def main():
    """Read connection settings, send one user-typed message, print the reply,
    then send an empty message (type 0) to disconnect."""
    mainConfig = loadConfigData("../../config.json")
    PORT = mainConfig["PORT"]
    SERVER_IP = mainConfig["SERVER_IP"]
    SERVER_ADDRESS = (SERVER_IP, PORT)
    client = Client(PORT, SERVER_IP, SERVER_ADDRESS)
    client.send(input("Your message:\n"))
    responseType, response = client.receive(1024)
    print("[SERVER] " + str(response))
    client.send("", 0) # disconnect
    #responseType, response = client.receive(1024)
    #print(f"[SERVER] {response}")
if __name__ == "__main__":
    main() | StarcoderdataPython |
149856 | <filename>ch09/modadmin.py
from moduser import User
class Admin(User):
    """This is a special user with special rights."""
    def __init__(self, first_name, last_name, zip, age):
        """Initialize attributes of the parent class, then attach privileges."""
        super().__init__(first_name, last_name, zip, age)
        # Composition: rights live on a separate Privileges object.
        self.privs = Privileges()
class Privileges():
    """Holds an administrator's privilege list."""

    def __init__(self):
        """Start with the default privilege set."""
        self.privs = ['read', 'write', 'delete']

    def show_privs(self):
        """Print the current privilege list."""
        print(self.privs)
1675174 | <filename>01/aoc_d01p1.py
"""
--- Day 1: Inverse Captcha ---
The night before Christmas, one of Santa's Elves calls you in a panic. "The printer's broken! We can't print the Naughty or Nice List!" By the time you make it to sub-basement 17, there are only a few minutes until midnight. "We have a big problem," she says; "there must be almost fifty bugs in this system, but nothing else can print The List. Stand in this square, quick! There's no time to explain; if you can convince them to pay you in stars, you'll be able to--" She pulls a lever and the world goes blurry.
When your eyes can focus again, everything seems a lot more pixelated than before. She must have sent you inside the computer! You check the system clock: 25 milliseconds until midnight. With that much time, you should be able to collect all fifty stars by December 25th.
Collect stars by solving puzzles. Two puzzles will be made available on each day millisecond in the advent calendar; the second puzzle is unlocked when you complete the first. Each puzzle grants one star. Good luck!
You're standing in a room with "digitization quarantine" written in LEDs along one wall. The only door is locked, but it includes a small interface. "Restricted Area - Strictly No Digitized Users Allowed."
It goes on to explain that you may only leave by solving a captcha to prove you're not a human. Apparently, you only get one millisecond to solve the captcha: too fast for a normal human, but it feels like hours to you.
The captcha requires you to review a sequence of digits (your puzzle input) and find the sum of all digits that match the next digit in the list. The list is circular, so the digit after the last digit is the first digit in the list.
For example:
1122 produces a sum of 3 (1 + 2) because the first digit (1) matches the second digit and the third digit (2) matches the fourth digit.
1111 produces 4 because each digit (all 1) matches the next.
1234 produces 0 because no digit matches the next.
91212129 produces 9 because the only digit that matches the next one is the last digit, 9.
What is the solution to your captcha?
Your puzzle answer was 1069.
--- Part Two ---
You notice a progress bar that jumps to 50% completion. Apparently, the door isn't yet satisfied, but it did emit a star as encouragement. The instructions change:
Now, instead of considering the next digit, it wants you to consider the digit halfway around the circular list. That is, if your list contains 10 items, only include a digit in your sum if the digit 10/2 = 5 steps forward matches it. Fortunately, your list has an even number of elements.
For example:
1212 produces 6: the list contains 4 items, and all four digits match the digit 2 items ahead.
1221 produces 0, because every comparison is between a 1 and a 2.
123425 produces 4, because both 2s match each other, but no other digit has a match.
123123 produces 12.
12131415 produces 4.
What is the solution to your new captcha?
Your puzzle answer was 1268.
Both parts of this puzzle are complete! They provide two gold stars: **
"""
from collections import deque
def part1(contents):
    """Sum every digit that equals the digit after it, treating the string as circular."""
    # Pair each digit with its successor; the wrap-around pair is (last, first).
    wrapped = contents[1:] + contents[0]
    return sum(int(cur) for cur, nxt in zip(contents, wrapped) if cur == nxt)
def part2(contents):
    """Sum every digit that equals the digit halfway around the circular list."""
    half = len(contents) // 2
    # Right-rotate by half (same pairing as deque.rotate(half) in the original).
    rotated = contents[-half:] + contents[:-half]
    return sum(int(cur) for cur, other in zip(contents, rotated) if cur == other)
def test_part2():
    """Spot-check part2 against the puzzle's worked examples."""
    cases = [('1212', 6), ('1221', 0), ('123425', 4), ('123123', 12), ('12131415', 4)]
    for captcha, expected in cases:
        assert expected == part2(captcha)
if __name__ == '__main__':
    # Read the puzzle input file and print the answers for both parts.
    with open('input_d01p1.txt') as f:
        contents = f.read().strip()
    print('part 1', part1(contents))
    print('part 2', part2(contents))
| StarcoderdataPython |
132378 | # Non-dependent modules
import data
# Dependent modules
# pymc is an optional dependency: without it the model module is skipped,
# but the rest of the package stays importable.
try:
    import pymc
except ImportError:
    print('-----------------------------------------------------------')
    print('-----------------------------------------------------------')
    print('WARNING: Not loading model in xastropy.fN \n Install pymc if you want it')
    print('-----------------------------------------------------------')
else:
    # Only pull in the model module when pymc imported successfully.
    import model
text = "oi apareceu 2x batata 1x e oi 2x"
# Count how many times each distinct word appears in the text.
words = text.split()
summary = {}
for word in set(words):
    summary[word] = words.count(word)
print(summary)
| StarcoderdataPython |
3226574 | <gh_stars>0
import json
import pytest
CONFIG = {
'api_endpoint': 'https://my.nsone.net',
# The api authentication key.
'api_key': 'testkey',
'metrics': {'qps': [{"test.com": None}], 'usage': [{"test.com": None}], 'pulsar': None, 'ddi': None},
}
CONFIG_NOMETRICS = {
'api_endpoint': 'https://test.com',
# The api authentication key.
'api_key': 'testkey',
'metrics': None,
}
CONFIG_NOKEY = {
'api_endpoint': 'https://test.com',
# The api authentication key.
'api_key': None,
'metrics': None,
}
CONFIG_2 = """{
"api_endpoint": "https://my.nsone.net",
"api_key": "testkey",
"metrics": {
"qps": [
{
"test.com": [
{
"www": "A"
},
{
"mail": "A"
}
]
}
],
"usage": [
{
"test.com": null
}
],
"pulsar": null,
"ddi": null,
"account":[
{"billing": null},
{"ttl": ["dloc.com", "dloc1.com", "dloc2.com"]}
]
}
}"""
CONFIG_DDI = """
{
"api_endpoint": "https://localhost",
"api_key": "testkey",
"min_collection_interval": 15,
"metrics": {
"ddi": [
2
]
}
}
"""
CONFIG_1 = """
{
"api_endpoint": "https://my.nsone.net",
"api_key": "testkey",
"min_collection_interval": 15,
"query_params": {
"usage_networks": "*",
"pulsar_period": "1m",
"pulsar_geo": "*",
"pulsar_asn": "*",
"pulsar_agg": "avg"
},
"metrics": {
"pulsar": null,
"pulsar_by_app": [
{
"1xy4sn3": "1xtvhvx"
}
],
"pulsar_by_record": [
{
"www.dloc1.com": "A"
},
{
"www.dloc2.com": "A"
}
],
"qps": [
{
"dloc.com": [
{
"www": "A"
},
{
"email": "A"
}
]
},
{
"dloc1.com": [
{
"www": "A"
},
{
"email": "A"
}
]
},
{
"dloc2.com": [
{
"www": "A"
},
{
"email": "A"
}
]
}
],
"usage": [
{
"dloc.com": [
{
"www": "A"
},
{
"email": "A"
}
]
},
{
"dloc1.com": [
{
"www": "A"
},
{
"email": "A"
}
]
},
{
"dloc2.com": [
{
"www": "A"
},
{
"email": "A"
}
]
}
],
"account": [
{
"billing": null
},
{
"ttl": [
"dloc.com",
"dloc1.com",
"dloc2.com"
]
}
]
}
}
"""
@pytest.fixture
def instance():
    # Full config dict with qps/usage metrics for test.com.
    return CONFIG
@pytest.fixture
def instance_nokey():
    # Config with api_key set to None (auth-failure path).
    return CONFIG_NOKEY
@pytest.fixture
def instance_nometrics():
    # Config with metrics disabled.
    return CONFIG_NOMETRICS
@pytest.fixture
def instance_empty():
    # Completely empty config.
    return {}
@pytest.fixture
def instance_1():
    # Rich multi-zone config parsed from its JSON literal.
    return json.loads(CONFIG_1)
@pytest.fixture
def instance_ddi():
    # DDI-only metrics config parsed from its JSON literal.
    return json.loads(CONFIG_DDI)
| StarcoderdataPython |
1751752 | from .end_model import EndModel
from .label_model import (
LabelModel,
MajorityClassVoter,
MajorityLabelVoter,
RandomVoter,
)
from .tuners import RandomSearchTuner
__all__ = [
"EndModel",
"LabelModel",
"MajorityClassVoter",
"MajorityLabelVoter",
"RandomVoter",
"RandomSearchTuner",
]
__version__ = "0.3.1"
| StarcoderdataPython |
1785057 | <filename>Day 22 - Pong Game/my_global_constants.py
# Screen dimensions in pixels.
WIDTH, HEIGHT = 700, 480
# Wall positions: 90% of the half-width/half-height from the center.
X_WALL, Y_WALL = .9 * WIDTH / 2, .9 * HEIGHT / 2
ALIGN = 'center'
FONT = ('Arial', 20, 'normal')
# Delay between game-loop ticks, in seconds.
GAME_SPEED = .001
# Pixels the ball moves per tick.
BALL_SPEED = 5
# Collision distance between ball and paddle, in pixels.
BALL_PLAYER_DIST = 40
# Pixels a paddle moves per keypress.
PLAYER_SPEED = 50
| StarcoderdataPython |
3320354 | <reponame>soarlab/gandalv
import re
import os
import sys
def invert_single_assertion(file_string, match, replacement, assertion):
    """Rewrite the matched assertion with its operator replaced, and flip the
    '@expect verified' marker to '@expect error' in the whole text."""
    inverted = assertion + '(' + match.group(1) + replacement + match.group(3) + ')'
    updated = file_string[:match.start()] + inverted + file_string[match.end():]
    return re.sub('@expect verified', '@expect error', updated)
def fail_suffix(i):
    """Filename suffix for the ith failing file ('_fail', '_fail_2', ...)."""
    return '_fail' if i == 0 else '_fail_' + str(i + 1)

def fail_filename(orig_filename, i):
    """Filename for the ith failing regression: the stem gets the failure suffix."""
    pieces = orig_filename.split('.')
    pieces[-2] += fail_suffix(i)
    return '.'.join(pieces)
def invert_assertions(filename,original="==",replacement="!=",assertion="assert"):
    """Invert each assertion in a file, writing each variant separately to a new
    '<name>_fail*.py'-style file (one output file per matched assertion)."""
    orig_file = ""
    with open(filename, "r") as f:
        orig_file = f.read()
    # e.g. with defaults: assert\((.*)(==)(.*)\)
    regex = assertion + r'\((.*)(' + original + r')(.*)\)'
    matches = re.finditer(regex,orig_file)
    i = 0
    for match in matches:
        # Each output file has exactly one assertion inverted.
        new_file_str = invert_single_assertion(orig_file,match,replacement,assertion)
        with open(fail_filename(filename,i), "w") as f:
            f.write(new_file_str)
        i += 1
def main():
    """Walk the given folder and generate inverted-assertion variants for every
    recognized regression source file, picking operators per language."""
    if len(sys.argv) < 2:
        print("Usage: \npython invert_assertions.py [folder_name]")
        sys.exit()
    # Regression base names (filename stem before the extension) we process.
    reg_list = [
        'hello',
        'compute',
        'function',
        'forloop',
        'fib',
        'compound',
        'array',
        'pointer',
        'method',
        'dynamic',
        'inout',
        'overload',
    ]
    folder = sys.argv[1]
    os.chdir(folder)
    for src_file in os.listdir('.'): # current directory, which we just changed into
        if not os.path.isfile('./' + src_file):
            continue
        parts = src_file.split('.')
        reg_name = parts[-2]
        if reg_name in reg_list:
            # Defaults suit C-like languages; adjusted per extension below.
            orig = "=="
            repl = "!="
            sert = "assert"
            if parts[-1] in ['f', 'f90', 'f95', 'for', 'f03']: #fortran
                repl = "/="
            elif parts[-1] in ['rs']: #rust
                sert = r'assert!'
                if parts[-2] == 'hello':
                    # The Rust 'hello' regression asserts a boolean literal.
                    orig = "true"
                    repl = "false"
            invert_assertions(src_file,orig,repl,sert)
# Script entry point.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
3307778 | <filename>Probability Statistics Intermediate/Calculating probabilities-134.py
## 2. Probability of renting bikes ##
import pandas
bikes = pandas.read_csv("bike_rental_day.csv")
# Find the number of days the bikes rented exceeded the threshold.
days_over_threshold = bikes[bikes["cnt"] > 2000].shape[0]
# Find the total number of days we have data for.
total_days = bikes.shape[0]
# Get the probability that more than 2000 bikes were rented for any given day.
probability_over_2000 = days_over_threshold / total_days
print(probability_over_2000)
# Same empirical probability for the 4000-rental threshold.
probability_over_4000 =bikes[bikes["cnt"] > 4000].shape[0] / total_days
## 4. Calculating probabilities ##
# Enter your code here.
# NOTE(review): looks like P(exactly one head in 3 fair flips) = C(3,1) * 0.5^3
# = 0.375 — confirm against the exercise statement.
coin_1_prob = 0.5*3*.5*.5

## 6. Calculating the number of combinations ##

sunny_1_combinations = None
# There are 5 combinations in which one day can be sunny.
# SNNNN
# NSNNN
# NNSNN
# NNNSN
# NNNNS
sunny_1_combinations = 5
## 8. Finding the number of combinations ##
import math
def find_outcome_combinations(N, k):
    """Return N choose k (the binomial coefficient) as a float."""
    n_fact = math.factorial(N)
    k_fact = math.factorial(k)
    rest_fact = math.factorial(N - k)
    return n_fact / (k_fact * rest_fact)
# Number of ways to get exactly 7, 8 and 9 successes out of 10 trials.
combinations_7 = find_outcome_combinations(10, 7)
combinations_8 = find_outcome_combinations(10, 8)
combinations_9 = find_outcome_combinations(10, 9)
## 10. Calculating the probability of one combination ##

prob_combination_3 = None
# One specific sequence of 3 successes (p=0.7) and 2 failures (p=0.3).
prob_combination_3 = (0.7 ** 3) * (0.3 ** 2)

## 12. Function to calculate the probability of a single combination ##

# Success and failure probabilities used below.
p = .6
q = .4
def find_combination_probability(N, k, p, q):
    """Probability of one specific sequence of k successes (prob p) and N-k failures (prob q)."""
    successes = p ** k
    failures = q ** (N - k)
    return successes * failures
# Binomial probabilities: (N choose k) * p^k * q^(N-k) for k = 8 and 9.
prob_8 = find_outcome_combinations(10, 8) * find_combination_probability(10, 8, p, q)
prob_9 = find_outcome_combinations(10, 9) * find_combination_probability(10, 9, p, q)
prob_10 = find_outcome_combinations(10, 10) * find_combination_probability(10, 10, p, q) | StarcoderdataPython |
104930 | <reponame>brkronheim/BNNs-for-SUSY
"""datagroup.py
Written by Karbo in the summer of 2017 and modified by Braden in the spring of 2019
This code reads the data output of the individual susyhit and prospino datafiles
and writes them into one document. The program takes the following three arguments:
* the name of the datafile to create
* the number of the datafile to read
* whether or or not the file is in setup phase
This version is called after every data point so the data is written incrementally.
This code is made to function with the shell script makeData.sh, see the readme
for more information.
"""
import sys
import linecache
def main():
    """Append one data point (susyhit inputs + prospino outputs) to the shared file.

    argv: [1] output datafile name, [2] index of the sus/pro files to read,
    [3] 'True' when in setup phase (write the heading only).
    """
    name = sys.argv[1] #name of datafile to be used
    num = int(sys.argv[2]) #which file to read
    setup = sys.argv[3] #setup
    if(setup=="True"):
        outfile = open('../'+str(name), 'w') #create datafile
        row = linecache.getline('./heading.txt',1)
        outfile.write(row)
        outfile.close()
    else:
        try: #try and open the file
            outfile = open('../'+str(name), 'a')
        except FileNotFoundError: #the file was not initially setup, so do it here
            outfile = open('../'+str(name), 'w') #create datafile
            #get input names from a sus file and write to datafile
            for j in range(24):
                if not (j == 10 or j == 13 or j == 16 or j == 19 or j == 22):
                    row = linecache.getline('../susy/sus'+str(num)+'.dat', j+64)
                    outfile.write("{0:>16}".format(row.split()[3]))
            #get target names from a pro file and write to datafile
            row = linecache.getline('./pro'+str(num)+'.dat', 3)
            outfile.write("{0:>14}".format(row.split()[13]))
            outfile.write("{0:>14}".format(row.split()[14]))
            outfile.write("\n")
        #Combine the data from the sus and pro files
        # NOTE(review): 0.610 appears to be a sentinel marking an invalid
        # prospino result — confirm against the generator script.
        row = linecache.getline('./pro'+str(num)+'.dat',1)
        if not (float(row.split()[14]) == float(0.610)):
            #read and write the sus data
            for j in range(24):
                if not (j == 10 or j == 13 or j == 16 or j == 19 or j == 22):
                    row = linecache.getline('../susy/sus'+str(num)+'.dat', j+64)
                    outfile.write("{0:>16}".format(row.split()[1]))
            #read and write the pro data
            row = linecache.getline('./pro'+str(num)+'.dat', 1)
            outfile.write("{0:>14}".format(row.split()[14]))
            outfile.write("{0:>14}".format(row.split()[15]))
            outfile.write("\n")
        outfile.close()

main()
| StarcoderdataPython |
3394632 | # -*- coding: utf-8 -*-
import os, time, logging, urllib, socket
from sentry_sdk.integrations.logging import LoggingIntegration
from requests import Session, Response, exceptions
from requests.adapters import HTTPAdapter
from requests.structures import CaseInsensitiveDict
from requests.utils import get_encoding_from_headers
from requests.cookies import extract_cookies_to_jar
from requests.packages.urllib3.util.retry import Retry
from sentry_sdk import configure_scope
from kinoplex.const import config, tree
from kinoplex.agent import KinoPlex
from kinoplex.meta import prepare_meta
from collections import namedtuple
from datetime import datetime
from types import MethodType
TRACE_LEVEL_NUM = 15
logging.addLevelName(TRACE_LEVEL_NUM, "TRACE")
class PlexResponse(Response):
    """Response subclass whose str() yields the raw body."""
    # NOTE(review): returning self.content assumes Python 2 semantics (content
    # is a byte string); under Python 3 __str__ must return str — confirm.
    def __str__(self):
        return self.content
class PlexHTTPAdapter(HTTPAdapter):
    """HTTPAdapter that produces PlexResponse objects instead of plain Responses."""
    def build_response(self, req, resp):
        # Mirrors requests' HTTPAdapter.build_response, substituting PlexResponse.
        response = PlexResponse()
        response.status_code = getattr(resp, 'status', None)
        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
        response.encoding = get_encoding_from_headers(response.headers)
        response.raw = resp
        response.reason = response.raw.reason
        if isinstance(req.url, bytes):
            response.url = req.url.decode('utf-8')
        else:
            response.url = req.url
        # Populate the cookie jar from the raw urllib3 response.
        extract_cookies_to_jar(response.cookies, req, resp)
        response.request = req
        response.connection = self
        return response
def getVersionInfo(core):
    """Return (version string, mtime) for the plug-in.

    Prefers the bundled Contents/VERSION file; falls back to the packaged
    __version__ tuple with mtime 0.
    """
    from kinoplex import __version__
    branch_tag = str(__version__[-1]).upper()
    release = '.'.join(str(part) for part in __version__[:-1])
    current_version = '%s-v%s' % (branch_tag, release)
    current_mtime = 0
    version_path = core.storage.join_path(core.bundle_path, 'Contents', 'VERSION')
    if core.storage.file_exists(version_path):
        current_version = core.storage.load(version_path)
        current_mtime = core.storage.last_modified(version_path)
    return current_version, current_mtime
# implement http_request using requests
def requests_http_request(self, url, values=None, headers={}, cacheTime=None, encoding=None, errors=None, timeout=0, immediate=False, sleep=0, data=None, opener=None, sandbox=None, follow_redirects=True, basic_auth=None, method=None):
def _content_type_allowed(content_type):
for t in ['html', 'xml', 'json', 'javascript']:
if t in content_type:
return True
return False
if cacheTime == None: cacheTime = self.cache_time
pos = url.rfind('#')
if pos > 0:
url = url[:pos]
if values and not data:
data = urllib.urlencode(values)
if data:
cacheTime = 0
immediate = True
url_cache = None
if self._http_caching_enabled:
if cacheTime > 0:
cache_mgr = self._cache_mgr
if cache_mgr.item_count > self._core.config.http_cache_max_items + self._core.config.http_cache_max_items_grace:
cache_mgr.trim(self._core.config.http_cache_max_size, self._core.config.http_cache_max_items)
url_cache = cache_mgr[url]
url_cache.set_expiry_interval(cacheTime)
else:
del self._cache_mgr[url]
if url_cache != None and url_cache['content'] and not url_cache.expired:
content_type = url_cache.headers.get('Content-Type', '')
if self._core.plugin_class == 'Agent' and not _content_type_allowed(content_type):
self._core.log.debug("Removing cached data for '%s' (content type '%s' not cacheable in Agent plug-ins)", url, content_type)
manager = url_cache._manager
del manager[url]
else:
self._core.log.debug("Fetching '%s' from the HTTP cache", url)
res = PlexResponse()
res.content = url_cache['content']
res.headers = url_cache.headers
return res
h = dict(self.default_headers)
h.update({'connection': 'keep-alive'})
if sandbox:
h.update(sandbox.custom_headers)
h.update(headers)
self._core.log.debug("Requesting '%s'", url)
if 'PLEXTOKEN' in os.environ and len(os.environ['PLEXTOKEN']) > 0 and h is not None and url.find('http://127.0.0.1') == 0:
h['X-Plex-Token'] = os.environ['PLEXTOKEN']
if basic_auth != None:
h['Authorization'] = self.generate_basic_auth_header(*basic_auth)
if url.startswith(config.kinopoisk.api.base[:-2]):
h.update({'clientDate': datetime.now().strftime("%H:%M %d.%m.%Y"), 'x-timestamp': str(int(round(time.time() * 1000)))})
h.update({'x-signature': self._core.data.hashing.md5(url[len(config.kinopoisk.api.base[:-2]):]+h.get('x-timestamp')+config.kinopoisk.api.hash)})
req = None
try:
req = self.session.request(method or 'GET', url, headers=h, allow_redirects=follow_redirects, data=data)
except exceptions.RequestException as e:
self._core.log.error("Failed request %s: %s", url, e)
if url_cache != None:
content_type = req.headers.get('Content-Type', '')
if self._core.plugin_class == 'Agent' and not _content_type_allowed(content_type):
self._core.log.debug("Not caching '%s' (content type '%s' not cacheable in Agent plug-ins)", url, content_type)
else:
url_cache['content'] = req.data
url_cache.headers = dict(req.headers)
return req
def setup_network(core, prefs):
core.log.debug('requests install')
core.networking.session = Session()
retry = Retry(
total=3,
read=3,
connect=3,
backoff_factor=0.5,
status_forcelist=(500, 502, 504),
)
core.networking.session.mount('https://', PlexHTTPAdapter(max_retries=retry))
core.networking.session.mount('http://', PlexHTTPAdapter(max_retries=retry))
core.networking.http_request = MethodType(requests_http_request, core.networking)
def search_event(self, results, media, lang, manual=False, version=0, primary=True):
with configure_scope() as scope:
scope.set_extra("media", media.__dict__)
try:
self.quick_search(results, media, lang, manual, primary)
self.fire('search', results, media, lang, manual, primary)
except Exception, e:
self.api.Log.Error(e, exc_info=True)
def update_event(self, metadata, media, lang, force=False, version=0, periodic=False):
with configure_scope() as scope:
scope.set_extra("media", media.__dict__)
try:
ids = {}
if self.api.Data.Exists(media.id):
ids = self.api.Data.LoadObject(media.id)
if not ids.get('kp'):
ids['kp'] = metadata.id
metadict = dict(id=metadata.id, meta_ids=ids, ratings={}, reviews={}, covers={}, backdrops={}, clips={}, seasons=tree())
self.fire('update', metadict, media, lang, force, periodic)
prepare_meta(metadict, metadata, self)
self.api.Data.SaveObject(media.id, metadict['meta_ids'])
except Exception, e:
self.api.Log.Error(e, exc_info=True)
def log_trace(self, message, *args):
if self.api.Prefs['trace']:
self.api.Core.log.log(TRACE_LEVEL_NUM, message, *args)
def init_class(cls_name, cls_base, gl, version=0):
g = dict((k, v) for k, v in gl.items() if not k.startswith("_"))
d = {
'name': '<NAME>',
'api': namedtuple('Struct', g.keys())(*g.values()),
'agent_type': 'movie' if cls_base.__name__ == 'Movies' else 'tv',
'primary_provider': True,
'languages': ['ru', 'en'],
'accepts_from': ['com.plexapp.agents.localmedia'],
'contributes_to': config.get('contrib', {}).get(cls_base.__name__,[]),
'c': config,
'trace': log_trace,
'search': search_event,
'update': update_event,
'version': version
}
return d.get('__metaclass__', type)(cls_name, (KinoPlex, cls_base,), d)
| StarcoderdataPython |
88397 | """
Write a function that takes an integer as input, and returns the number of bits that are equal to one in the binary
representation of that number. You can guarantee that input is non-negative.
Example: The binary representation of 1234 is 10011010010, so the function should return 5 in this case
"""
def count_bits(n):
    """Return the number of 1-bits (population count) in the binary
    representation of the non-negative integer *n*.

    Example: 1234 is 0b10011010010, which contains five 1-bits.
    """
    # bin(n) yields e.g. '0b10011010010'; counting '1' characters avoids
    # building an intermediate list and fixes the original comprehension's
    # loop variable shadowing the parameter ``n``.
    return bin(n).count("1")
# Ad-hoc smoke checks; expected output: 0, 1, 3, 2, 2.
print(count_bits(0))
print(count_bits(4))
print(count_bits(7))
print(count_bits(9))
print(count_bits(10))
| StarcoderdataPython |
3237683 | import os
import signal
import subprocess
import time
from unittest import TestCase
from scripttest import TestFileEnvironment
from .cli import setup_user_dir
from .meter import Meter
from .utils import create_executable
d = os.path.dirname(__file__)
PROJECT_PATH = os.path.abspath(os.path.join(d, os.pardir))
def create_run_script():
    """Write an executable run.py shim (once) that launches the meter."""
    run_script = os.path.join(PROJECT_PATH, 'run.py')
    # Never clobber an existing shim.
    if os.path.exists(run_script):
        return
    script_body = '\n'.join([
        '#!/usr/bin/env python',
        'from soundmeter.meter import main',
        '',
        '',
        'main()',
    ])
    create_executable(run_script, script_body)
setup_user_dir()
create_run_script()
class TestMeter(TestCase):
    """Test Meter class programmatically"""
    def setUp(self):
        # Two-second capture window keeps the test quick.
        self.meter = Meter(seconds=2.0)
    def test_running(self):
        # Not running before start().
        self.assertFalse(self.meter.is_running)
        self.meter.start()
        # NOTE(review): asserting is_running is *False* right after start()
        # looks odd -- presumably start() blocks until the 2s run finishes,
        # so the meter has already stopped here; confirm against
        # Meter.start()'s semantics before "fixing" this to assertTrue.
        self.assertFalse(self.meter.is_running)
class TestBasicCommands(TestCase):
"""Test basic command-line invoke of the program"""
def setUp(self):
self.env = TestFileEnvironment('./test-output')
def test_default(self):
res = self.env.run('../run.py', '-s', '1',
expect_stderr=True)
assert 'Timeout' in res.stdout
self.assertEqual(res.returncode, 0)
def test_collect(self):
res = self.env.run('../run.py', '-s', '1', '-c',
expect_stderr=True)
assert 'Collecting' in res.stdout
self.assertEqual(res.returncode, 0)
def test_log(self):
res = self.env.run('../run.py', '-s', '1', '--log', 'log.txt',
expect_stderr=True)
assert 'Timeout' in res.stdout
assert 'log.txt' in res.files_created
self.assertEqual(res.returncode, 0)
def test_segment(self):
res = self.env.run('../run.py', '-s', '1', '--segment', '0.2',
expect_stderr=True)
assert 'Timeout' in res.stdout
self.assertEqual(res.returncode, 0)
def tearDown(self):
pass
class TestCommands(TestCase):
    """Exercise the CLI by spawning the helper run script and interrupting
    it with SIGINT, mimicking a user pressing Ctrl-C."""

    def test_sigint(self):
        """The meter should exit cleanly on SIGINT."""
        popen = subprocess.Popen(['./run.py'])
        time.sleep(2)
        os.kill(popen.pid, signal.SIGINT)

    def test_arguments(self):
        """SIGINT should also interrupt a long trigger/action run."""
        popen = subprocess.Popen(['./run.py', '-t', '10000', '-a', 'stop'])
        time.sleep(2)
        os.kill(popen.pid, signal.SIGINT)

    def test_daemon(self):
        """Daemon mode (-d) should still respond to SIGINT.

        Bug fixed: the original passed ``shell=True`` together with an
        argument list; on POSIX that hands only './run.py' to the shell
        and silently drops '-d' (extra list items become shell positional
        parameters), so daemon mode was never actually exercised.  Run the
        script directly, matching the sibling tests.
        """
        popen = subprocess.Popen(['./run.py', '-d'])
        time.sleep(2)
        popen.send_signal(signal.SIGINT)
class TestConfig(TestCase):
def test_config(self):
config = os.path.join(PROJECT_PATH, 'sample_config')
os.environ['SOUNDMETER_TEST_CONFIG'] = config
popen = subprocess.Popen(['./run.py'], env=os.environ.copy())
time.sleep(2)
popen.send_signal(signal.SIGINT)
| StarcoderdataPython |
4829054 | <gh_stars>1-10
import FWCore.ParameterSet.Config as cms
process = cms.Process("CALIB")
process.MessageLogger = cms.Service("MessageLogger",
debugModules = cms.untracked.vstring(''),
QualityReader = cms.untracked.PSet(
threshold = cms.untracked.string('INFO')
),
cout = cms.untracked.PSet(
threshold = cms.untracked.string('INFO')
),
destinations = cms.untracked.vstring('QualityReader.log')
)
process.source = cms.Source("EmptyIOVSource",
lastValue = cms.uint64(100),
timetype = cms.string('runnumber'),
firstValue = cms.uint64(1),
interval = cms.uint64(90)
)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(100)
)
process.SiStripDetInfoFileReader = cms.Service("SiStripDetInfoFileReader")
process.load("CalibTracker.SiStripESProducers.fake.SiStripBadModuleConfigurableFakeESSource_cfi")
process.load("CalibTracker.SiStripESProducers.SiStripQualityESProducer_cfi")
process.siStripQualityESProducer.ListOfRecordToMerge = cms.VPSet(
cms.PSet( record = cms.string("SiStripBadModuleRcd"), tag = cms.string("") )
)
from DQMServices.Core.DQMEDAnalyzer import DQMEDAnalyzer
process.reader = DQMEDAnalyzer("SiStripQualityStatistics",
dataLabel = cms.untracked.string(""),
TkMapFileName = cms.untracked.string("")
)
process.p = cms.Path(process.reader)
| StarcoderdataPython |
3377828 | <reponame>steemfans/steem-lightdb<filename>transfer/user_relation.py
#!/usr/bin/python3
#encoding:UTF-8
import json, os, sys, time
import utils.TransferTasks as tasks
import utils.utils as utils
from utils.BlockProcess import BlockProcess as BlockProcess
import asyncio, aiomysql
from multiprocessing import Pool
from concurrent.futures import ThreadPoolExecutor,ProcessPoolExecutor
from contextlib import suppress
task_type = 'user_relation'
class UserRelationProcess(BlockProcess):
def __init__(self, loop, data_type):
super().__init__(loop, data_type)
async def process(self, block_num, block_time, trans_id, ops):
global task_type
db = self.db
# print('process %i blcok\'s ops' % block_num, ops)
self.processed_data = {
'data': [],
'undo': []}
for op_idx, op in enumerate(ops):
op_type = op[0]
op_detail = op[1]
if op_type == 'custom_json' and 'id' in op_detail and op_detail['id'] == 'follow':
if op_detail['json'] == '':
continue
try:
json_data = json.loads(op_detail['json'])
follower = None
following = None
what = None
if isinstance(json_data, dict):
if 'follower' in json_data:
follower = json_data['follower']
if 'following' in json_data:
following = json_data['following']
if 'what' in json_data and isinstance(json_data['what'], list) and len(json_data['what']) > 0:
what = json_data['what'][0]
#elif isinstance(json_data, list):
# if len(json_data) >= 2 and json_data[0] == 'follow':
# if 'follower' in json_data[1]:
# follower = json_data[1]['follower']
# if 'following' in json_data[1]:
# following = json_data[1]['following']
# if 'what' in json_data[1] and len(json_data[1]['what']) > 0:
# what = json_data[1]['what'][0]
# else:
# continue
else:
continue
if follower == None and following == None and (what == None or what == ''):
print('follow_data_error', block_num, trans_id, follower, following, what, op)
continue
sql = '''
select id, username from users
where username = %s or username = %s'''
cur = await db.cursor()
await cur.execute(sql, (follower, following))
user_data = await cur.fetchall()
await cur.close()
if len(user_data) == 2:
for user in user_data:
if user[1] == follower:
follower_id = user[0]
if user[1] == following:
following_id = user[0]
self.processed_data['data'].append((follower_id, following_id, what, block_time, ))
else:
self.processed_data['undo'].append((block_num, trans_id, op_idx, json.dumps(op), tasks.getTypeId(task_type), block_time))
except Exception as e:
self.processed_data['undo'].append((block_num, trans_id, op_idx, json.dumps(op), tasks.getTypeId(task_type), block_time))
utils.PrintException([block_num, trans_id, op_idx])
else:
# print('unknown type:', op_type, block_num, trans_id, ops, op_idx)
continue
# print('processed:', self.processed_data)
return self.processed_data
async def insertData(self):
db = self.db
try:
cur = await db.cursor()
if self.prepared_data['data'] != []:
sql_main_data = '''
insert ignore into user_relations
(follower_id, following_id, what, created_at)
values
(%s, %s, %s, %s)'''
await cur.executemany(sql_main_data, self.prepared_data['data'])
if self.prepared_data['undo'] != []:
sql_undo_data = '''
insert ignore into undo_op
(block_num, transaction_id, op_index, op, task_type, block_time)
values
(%s, %s, %s, %s, %s, %s)'''
await cur.executemany(sql_undo_data, self.prepared_data['undo'])
sql_update_task = '''
update multi_tasks set is_finished = 1
where id = %s'''
await cur.execute(sql_update_task, (self.task_id))
await db.commit()
await cur.close()
except Exception as e:
await db.rollback()
await cur.close()
print('insert_data_failed', 'task_id:', self.task_id, e)
def processor(all_tasks):
    """Worker entry point: run one batch of user-relation task slices.

    Each element of *all_tasks* gets its own UserRelationProcess
    coroutine; all of them are awaited together on one event loop.
    """
    global task_type
    if all_tasks != []:
        loop = asyncio.get_event_loop()
        loop_tasks = []
        try:
            for one_task in all_tasks:
                user_task = UserRelationProcess(loop, task_type)
                loop_tasks.append(asyncio.ensure_future(user_task.doMultiTasks(one_task)))
            loop.run_until_complete(asyncio.wait(loop_tasks))
        except KeyboardInterrupt as e:
            # Ctrl-C: cancel everything still pending before stopping.
            # NOTE(review): asyncio.Task.all_tasks() is deprecated since
            # Python 3.7 and removed in 3.9 -- confirm the target runtime.
            for task in asyncio.Task.all_tasks():
                task.cancel()
            loop.stop()
        finally:
            # NOTE(review): closing the loop here means a second call to
            # processor() in the same process would fail with a closed
            # loop; verify each call runs in a fresh pool worker process.
            loop.close()
def mainMultiProcess():
    """Dispatch loop: poll for pending 'user_relation' tasks forever,
    slice each batch, and fan the slices out to a process pool."""
    global task_type
    config = utils.get_config()
    while True:
        # splitTasks chunks the pending work into slice_step-sized pieces.
        all_tasks = tasks.splitTasks(tasks.get(task_type), config['slice_step'])
        if all_tasks != []:
            p = ProcessPoolExecutor(config['worker'])
            for t in all_tasks:
                p.submit(processor, t)
            # shutdown() blocks until every submitted slice completes,
            # so batches never overlap.
            p.shutdown()
        time.sleep(3)
if __name__ == '__main__':
with suppress(KeyboardInterrupt):
mainMultiProcess()
| StarcoderdataPython |
1656166 | import json
from aws_cdk import (
aws_apigateway,
aws_lambda,
aws_lambda_python,
aws_logs,
aws_s3,
aws_secretsmanager,
aws_ssm,
core,
)
class IntegrationStack(core.Stack):
def __init__(
self,
scope: core.Construct,
construct_id: str,
identifier: str,
**kwargs,
) -> None:
super().__init__(scope, construct_id, **kwargs)
aws_secretsmanager.Secret(
self,
id=f"{identifier}-integration-scihub-credentials",
secret_name=f"hls-s2-downloader-serverless/{identifier}/scihub-credentials",
description="Dummy values for the Mock SciHub API credentials",
generate_secret_string=aws_secretsmanager.SecretStringGenerator(
secret_string_template=json.dumps({"username": "test-user"}),
generate_string_key="password",
),
)
mock_scihub_search_api_lambda = aws_lambda_python.PythonFunction(
self,
id=f"{identifier}-mock-scihub-api-lambda",
entry="lambdas/mock_scihub_search_api",
index="handler.py",
handler="handler",
runtime=aws_lambda.Runtime.PYTHON_3_8,
timeout=core.Duration.minutes(1),
memory_size=128,
)
aws_logs.LogGroup(
self,
id=f"{identifier}-mock-scihub-search-api-log-group",
log_group_name=f"/aws/lambda/{mock_scihub_search_api_lambda.function_name}",
removal_policy=core.RemovalPolicy.DESTROY,
retention=aws_logs.RetentionDays.ONE_DAY,
)
mock_scihub_product_api_lambda = aws_lambda_python.PythonFunction(
self,
id=f"{identifier}-mock-scihub-product-lambda",
entry="lambdas/mock_scihub_product_api",
index="handler.py",
handler="handler",
runtime=aws_lambda.Runtime.PYTHON_3_8,
timeout=core.Duration.minutes(1),
memory_size=128,
)
aws_logs.LogGroup(
self,
id=f"{identifier}-mock-scihub-product-api-log-group",
log_group_name=(
f"/aws/lambda/{mock_scihub_product_api_lambda.function_name}"
),
removal_policy=core.RemovalPolicy.DESTROY,
retention=aws_logs.RetentionDays.ONE_DAY,
)
mock_scihub_api = aws_apigateway.RestApi(
self, id=f"{identifier}-mock-scihub-api", binary_media_types=["*/*"]
)
self.scihub_url = mock_scihub_api.url.rsplit("/", 1)[0]
aws_ssm.StringParameter(
self,
id=f"{identifier}-mock-scihub-url",
string_value=self.scihub_url,
parameter_name=f"/integration_tests/{identifier}/mock_scihub_url",
)
dhus_resource = aws_apigateway.Resource(
self,
id=f"{identifier}-mock-scihub-api-dhus-search",
parent=mock_scihub_api.root,
path_part="dhus",
)
dhus_resource.add_resource("search").add_method(
http_method="GET",
method_responses=[
aws_apigateway.MethodResponse(
status_code="200",
response_models={
"application/json": aws_apigateway.Model.EMPTY_MODEL
},
)
],
integration=aws_apigateway.LambdaIntegration(
handler=mock_scihub_search_api_lambda,
integration_responses=[
aws_apigateway.IntegrationResponse(status_code="200")
],
),
)
dhus_resource.add_resource("odata").add_resource("v1").add_resource(
"{product+}"
).add_method(
http_method="GET",
method_responses=[
aws_apigateway.MethodResponse(
status_code="200",
response_models={
"application/octect-stream": aws_apigateway.Model.EMPTY_MODEL,
"application/json": aws_apigateway.Model.EMPTY_MODEL,
},
)
],
integration=aws_apigateway.LambdaIntegration(
handler=mock_scihub_product_api_lambda,
integration_responses=[
aws_apigateway.IntegrationResponse(status_code="200")
],
content_handling=aws_apigateway.ContentHandling.CONVERT_TO_BINARY,
),
)
self.upload_bucket = aws_s3.Bucket(
self,
id=f"{identifier}-upload-bucket",
access_control=aws_s3.BucketAccessControl.PRIVATE,
removal_policy=core.RemovalPolicy.DESTROY,
)
aws_ssm.StringParameter(
self,
id=f"{identifier}-upload-bucket-name",
string_value=self.upload_bucket.bucket_name,
parameter_name=f"/integration_tests/{identifier}/upload_bucket_name",
)
| StarcoderdataPython |
1683743 | #!/usr/bin/env python
import ari
import logging
import threading
logging.basicConfig(level=logging.ERROR)
client = ari.connect('http://localhost:8088', 'asterisk', 'asterisk')
# Note: this uses the 'extra' sounds package
sounds = ['press-1', 'or', 'press-2']
channel_timers = {}
class MenuState(object):
    """Tracks a channel's progress through the intro menu.

    Attributes:
        current_sound -- index into ``sounds`` of the prompt being played
        complete -- True once the caller pressed a key or hung up
    """

    def __init__(self, current_sound, complete):
        # Plain mutable record; both fields are reassigned by the menu
        # callbacks as playback advances.
        self.current_sound = current_sound
        self.complete = complete
def play_intro_menu(channel):
"""Play our intro menu to the specified channel
Since we want to interrupt the playback of the menu when the user presses
a DTMF key, we maintain the state of the menu via the MenuState object.
A menu completes in one of two ways:
(1) The user hits a key
(2) The menu finishes to completion
In the case of (2), a timer is started for the channel. If the timer pops,
a prompt is played back and the menu restarted.
Keyword Arguments:
channel The channel in the IVR
"""
menu_state = MenuState(0, False)
def play_next_sound(menu_state):
"""Play the next sound, if we should
Keyword Arguments:
menu_state The current state of the IVR
Returns:
None if no playback should occur
A playback object if a playback was started
"""
if menu_state.current_sound == len(sounds) or menu_state.complete:
return None
try:
current_playback = channel.play(media='sound:%s' %
sounds[menu_state.current_sound])
except:
current_playback = None
return current_playback
def queue_up_sound(channel, menu_state):
"""Start up the next sound and handle whatever happens
Keywords Arguments:
channel The channel in the IVR
menu_state The current state of the menu
"""
def on_playback_finished(playback, ev, menu_state):
"""Callback handler for when a playback is finished
Keyword Arguments:
playback The playback object that finished
ev The PlaybackFinished event
menu_state The current state of the menu
"""
unsubscribe_playback_event()
queue_up_sound(channel, menu_state)
def menu_timeout(channel, menu_state):
"""Callback called by a timer when the menu times out"""
print 'Channel %s stopped paying attention...' % \
channel.json.get('name')
channel.play(media='sound:are-you-still-there')
play_intro_menu(channel)
def cancel_menu(channel, ev, current_playback, menu_state):
"""Cancel the menu, as the user did something"""
menu_state.complete = True
try:
current_playback.stop()
except:
pass
unsubscribe_cancel_menu_events()
return
current_playback = play_next_sound(menu_state)
if not current_playback:
# only start timer if menu is not complete
if menu_state.current_sound == len(sounds) and \
menu_state.complete == False:
# Menu played, start a timer!
timer = threading.Timer(10, menu_timeout, [channel, menu_state])
channel_timers[channel.id] = timer
timer.start()
return
menu_state.current_sound += 1
playback_event = current_playback.on_event('PlaybackFinished',
on_playback_finished,
menu_state)
# If the user hits a key or hangs up, cancel the menu operations
dtmf_event = channel.on_event('ChannelDtmfReceived', cancel_menu,
current_playback, menu_state)
stasis_end_event = channel.on_event('StasisEnd', cancel_menu,
current_playback, menu_state)
def unsubscribe_cancel_menu_events():
"""Unsubscribe to the ChannelDtmfReceived and StasisEnd events"""
dtmf_event.close()
stasis_end_event.close()
def unsubscribe_playback_event():
"""Unsubscribe to the PlaybackFinished event"""
playback_event.close()
queue_up_sound(channel, menu_state)
def handle_extension_one(channel):
"""Handler for a channel pressing '1'
Keyword Arguments:
channel The channel in the IVR
"""
channel.play(media='sound:you-entered')
channel.play(media='digits:1')
play_intro_menu(channel)
def handle_extension_two(channel):
"""Handler for a channel pressing '2'
Keyword Arguments:
channel The channel in the IVR
"""
channel.play(media='sound:you-entered')
channel.play(media='digits:2')
play_intro_menu(channel)
def cancel_timeout(channel):
    """Stop and discard the pending menu-timeout timer for a channel.

    Keyword Arguments:
    channel The channel in the IVR
    """
    # pop() looks up and removes the entry in one step; None means no
    # timer was ever armed for this channel (or it already fired).
    pending = channel_timers.pop(channel.id, None)
    if pending:
        pending.cancel()
def on_dtmf_received(channel, ev):
"""Our main DTMF handler for a channel in the IVR
Keyword Arguments:
channel The channel in the IVR
digit The DTMF digit that was pressed
"""
# Since they pressed something, cancel the timeout timer
cancel_timeout(channel)
digit = int(ev.get('digit'))
print 'Channel %s entered %d' % (channel.json.get('name'), digit)
if digit == 1:
handle_extension_one(channel)
elif digit == 2:
handle_extension_two(channel)
else:
print 'Channel %s entered an invalid option!' % channel.json.get('name')
channel.play(media='sound:option-is-invalid')
play_intro_menu(channel)
def stasis_start_cb(channel_obj, ev):
"""Handler for StasisStart event"""
channel = channel_obj.get('channel')
print "Channel %s has entered the application" % channel.json.get('name')
channel.answer()
channel.on_event('ChannelDtmfReceived', on_dtmf_received)
play_intro_menu(channel)
def stasis_end_cb(channel, ev):
"""Handler for StasisEnd event"""
print "%s has left the application" % channel.json.get('name')
cancel_timeout(channel)
client.on_channel_event('StasisStart', stasis_start_cb)
client.on_channel_event('StasisEnd', stasis_end_cb)
client.run(apps='channel-aa')
| StarcoderdataPython |
4838358 | <filename>extras/test.py
import datetime
x = 2
def main():
    """Scratch experiments with global rebinding and datetime arithmetic.

    Prints the current value of the module-level ``x``, rebinds it to 3,
    then prints a far-future date, the current time, and the (negative)
    timedelta between them.
    """
    # experimenting with scope: rebind the module-level x
    global x
    print(x)
    x = 3
    # experimenting with datetimes and time deltas.  The original assigned
    # startdate twice; the first assignment was a dead store and is removed.
    startdate = datetime.datetime(9999, 1, 1, 0, 0, 0)
    enddate = datetime.datetime.now()
    timedelt = enddate - startdate
    print(startdate)
    print(enddate)
    # enddate precedes startdate, so this delta (and its seconds) is negative.
    print(timedelt)
    print(timedelt.total_seconds())
    return
main()
print(x)
x=4
main()
| StarcoderdataPython |
1790721 | <gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pickle
import time
import discord
from discord import Game
from discord.ext.commands import Bot
from lstm_network import create
NEURAL_NET = create()
BOT_PREFIX = '!'
# Get at https://discordapp.com/developers/applications/me
TOKEN = open('../Bot/token.txt', 'r').readline().rstrip()
client = Bot(command_prefix=BOT_PREFIX)
MAX_SCORE = 100
WARNING_SCORE = 20
BAN_SCORE = 0
def get_sentiment(sentence):
    """Run *sentence* through the LSTM sentiment network.

    Returns a tuple ``(composite, report)``: composite is the
    non-negative score minus the negative score; report is a printable
    three-line breakdown of both scores and the composite.
    """
    prediction = NEURAL_NET.predict(sentence)
    negative_score = prediction[0]
    non_negative_score = prediction[1]
    composite = non_negative_score - negative_score
    report = (f'Positive: {non_negative_score}\n'
              f'Negative: {negative_score}\n'
              f'Composite: {composite}')
    return composite, report
# Class for user info
class DiscordMember:
    """Per-user positivity record kept in the module-level member_list."""

    def __init__(self, uid, last_message_time):
        self.id = uid
        # Every member starts with a full positivity score.
        self.score = MAX_SCORE
        self.last_message_time = last_message_time

    def __eq__(self, other):
        # Two records refer to the same member iff the Discord ids match;
        # score and timestamp are deliberately ignored so membership tests
        # against member_list find the existing record for a user.
        return self.id == other.id

    def __str__(self):
        return f'ID: {self.id}\nScore: {self.score}\n\n'
# Loads data from previous session of bot
try:
    member_list = pickle.load(open('users.pickle', 'rb'))
except (OSError, IOError) as e:
    # First run (or unreadable pickle): start with an empty roster and
    # create the file so subsequent loads succeed.
    member_list = []
    pickle.dump(member_list, open('users.pickle', 'wb'), protocol=pickle.HIGHEST_PROTOCOL)
@client.event
async def on_ready():
await client.change_presence(game=Game(name='positively'))
print(f'Logged in as {client.user.name}\n')
servers = list(client.servers)
for server in servers:
for member in server.members:
temp = DiscordMember(member.id, time.time())
if temp not in member_list:
member_list.append(temp)
for member in member_list:
print(member)
async def list_servers():
await client.wait_until_ready()
print('Current servers:')
for server in client.servers:
print(server.name)
print()
@client.event
async def on_message(message):
    """Score every incoming message and warn/ban users accordingly.

    Sentiment with composite below -1 subtracts from the author's score;
    time since their previous message earns points back (1 point per 600s),
    capped at MAX_SCORE.  Dropping to WARNING_SCORE triggers a warning,
    reaching BAN_SCORE gets the member banned.
    """
    await client.process_commands(message)
    # Ignore empty messages, the !score command itself, and our own output.
    if message.content and message.content != '!score' and message.author.id != client.user.id:
        score_change, string_format = get_sentiment(message.content)
        # Only penalise clearly negative messages (composite < -1).
        score_change = score_change if score_change + 1 < 0 else 0
        # print(string_format) # For testing

        # Update score
        current_time = time.time()
        temp = DiscordMember(message.author.id, time.time())
        if temp not in member_list:
            member_list.append(temp)
        for user in member_list:
            if user.id == message.author.id:
                prev_score = user.score
                old_time = user.last_message_time
                # One point regained per 600 seconds of silence.
                time_points = (current_time - old_time) / 600
                new_score = min(prev_score + time_points, MAX_SCORE) + score_change
                user.score = max(new_score, 0)
                user.last_message_time = current_time
                pickle.dump(member_list, open('users.pickle', 'wb'), protocol=pickle.HIGHEST_PROTOCOL)
                if new_score <= BAN_SCORE:
                    try:
                        await client.ban(message.server.get_member(message.author.id), delete_message_days=0)
                    except discord.errors.Forbidden:
                        print('Privilege too low')
                    else:
                        member_list.remove(temp)
                elif new_score <= WARNING_SCORE:
                    # Bug fixed: the mention placeholder here had been
                    # mangled to '{<EMAIL>}' (a syntax error) by source
                    # anonymisation.  '<@{id}>' is the standard Discord user
                    # mention, so message.author.id is the most plausible
                    # original target -- confirm against project history.
                    await client.send_message(message.channel,
                                              f'**WARNING <@{message.author.id}> your positivity score is very low '
                                              f'({"{0:0.1f}".format(new_score)}/{MAX_SCORE})**'
                                              f'\nYou will be banned if your score reaches {BAN_SCORE}.')
                break
@client.command(pass_context=True)
async def score(ctx):
temp = DiscordMember(ctx.message.author.id, time.time())
if temp not in member_list:
member_list.append(temp)
current_time = time.time()
for user in member_list:
if user.id == ctx.message.author.id:
prev_score = user.score
old_time = user.last_message_time
time_points = (current_time - old_time) / 600
user.score = min(prev_score + time_points, MAX_SCORE)
user.last_message_time = current_time
pickle.dump(member_list, open('users.pickle', 'wb'), protocol=pickle.HIGHEST_PROTOCOL)
await client.send_message(ctx.message.channel,
f'{ctx.message.author}\'s score is '
f'{"{0:0.1f}".format(min(prev_score + time_points, MAX_SCORE))}/{MAX_SCORE}')
if __name__ == '__main__':
client.loop.create_task(list_servers())
client.run(TOKEN)
| StarcoderdataPython |
18659 | <reponame>anconaesselmann/LiveUnit
import unittest
import os
if __name__ == '__main__' and __package__ is None:
from os import sys, path
sys.path.append(path.abspath(path.join(__file__, "..", "..")))
sys.path.append(path.abspath(path.join(__file__, "..", "..", "..", "classes_and_tests")))
from php.functions import *
from src.mocking.MockFileSystem import MockFileSystem
class PhpFunctionsTest(unittest.TestCase):
def test_get_doc_block_tag(self):
settings = "{\"author\": \"Axel\"}"
args = {"settings" : settings}
expected = "@author Axel"
fc = FunctionCollection()
result = fc.get_doc_block_tag(args)
self.assertEqual(expected, result)
def test_get_doc_block_tag_with_empty_value(self):
settings = "{\"author\": None}"
args = {"settings" : settings}
expected = None
fc = FunctionCollection()
result = fc.get_doc_block_tag(args)
self.assertEqual(expected, result)
def test_get_class_name(self):
args = {"dir" : path.join("Folder1", "Folder2", "FileName.php")}
expected = "FileName"
fc = FunctionCollection()
result = fc.get_class_name(args)
self.assertEqual(expected, result)
def test_get_py_package_name(self):
args = {"dir" : path.join(os.sep, "MyProject", "library", "aae", "mvc", "Controller.php")}
expected = path.join("aae\\mvc")
mockFileSystem = MockFileSystem()
mockFileSystem.createFile(path.join(os.sep, "MyProject", "libraryTest", "SomeFileTest.php"))
fc = FunctionCollection()
fc.fileSystem = mockFileSystem
result = fc.get_php_namespace(args)
self.assertEqual(expected, result)
"""def test_get_relative_autoloader_path(self):
settings = "{\"php_autoloader_dir\": \"relative/path/to/Autoloader.php\"}"
args = {"settings" : settings}
expected = "require_once strstr(__FILE__, 'Test', true).'/relative/path/to/Autoloader.php';"
result = FunctionCollection.get_php_autoloader(args)
self.assertEqual(expected, result)
def test_get_absolute_autoloader_path(self):
settings = "{\"php_autoloader_dir\": \"/absolute/path/to/Autoloader.php\"}"
args = {"settings" : settings}
expected = "require_once \"/absolute/path/to/Autoloader.php\";"
result = FunctionCollection.get_php_autoloader(args)
self.assertEqual(expected, result)
def test_getautoloader_path_with_no_value(self):
settings = "{\"php_autoloader_dir\": None}"
args = {"settings" : settings}
expected = None
result = FunctionCollection.get_php_autoloader(args)
self.assertEqual(expected, result)
def test_get_php_namespace(self):
settings = "{\"base_dir\": \"/MyProject/library\"}"
args = {"settings" : settings, "dir": "/MyProject/library/aae/mvc/Controller.php"}
expected = "aae\\mvc"
result = FunctionCollection.get_php_namespace(args)
self.assertEqual(expected, result)"""
if __name__ == '__main__':
unittest.main() | StarcoderdataPython |
3399663 | # ===-- toCSV.py - CSV converter tool --------------------------*- Python -*-===
#
# Part of the LOMP project, under the Apache License v2.0 with LLVM Exceptions.
# See https:llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
# ===------------------------------------------------------------------------===
#
# Convert a file to CSV format, expanding units
import sys
def convertSI(s):
    """Convert a measurement with an SI range suffix into a scaled value.

    *s* is expected to look like ``"1.23 k"``: a number, whitespace, then
    a unit string whose first character is an SI prefix.  The prefix is
    replaced by the matching power-of-ten exponent, yielding ``"1.23e3"``.
    Strings that do not split into exactly two fields, or whose prefix is
    not a known SI prefix, are returned unchanged -- the original raised
    a bare KeyError on unknown prefixes such as "Hz".
    """
    parts = s.split()
    if len(parts) != 2:
        return s
    value, units = parts
    # http://physics.nist.gov/cuu/Units/prefixes.html
    # (the original table's " " entry was unreachable: split() never
    # yields a token starting with a space, so it is dropped here)
    factors = {
        "Y": "e24", "Z": "e21", "E": "e18", "P": "e15", "T": "e12",
        "G": "e9", "M": "e6", "k": "e3",
        "m": "e-3", "u": "e-6", "n": "e-9", "p": "e-12", "f": "e-15",
        "a": "e-18", "z": "e-21", "y": "e-24",
    }
    factor = factors.get(units[0])
    if factor is None:
        # Unknown prefix (e.g. a plain unit like "Hz"): pass through.
        return s
    return value + factor
# Stream stdin -> stdout: comma-separated lines get every field except the
# first (assumed to be a label) run through convertSI; everything else is
# passed through untouched.
for line in sys.stdin:
    if "," in line:
        items = line.split(",")
        # Don't do anything with the first column
        print(", ".join([items[0]] + [convertSI(s) for s in items[1:]]))
    else:
        print(line, end="")
| StarcoderdataPython |
3275408 | <filename>chapter1/hello-world.py
# -*- coding: utf-8 -*-
# Exercise 1.2
# Author: <NAME>
print("Hello, World!")
"""
Trial run
python3 hello-world.py
Hello, World!
"""
| StarcoderdataPython |
1709933 | <gh_stars>0
import grpc
import random
import time
from absl import app
from absl import flags
from concurrent import futures
from lib import telemetry
FLAGS = flags.FLAGS
flags.DEFINE_integer('grpc_port', 50090, 'Port for gRPC services.')
def main(argv):
    # Spin up a gRPC server whose only registered service is telemetry.
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    telemetry.init('__test__', server)
    server.add_insecure_port('[::]:{0}'.format(FLAGS.grpc_port))
    server.start()
    print 'Running...'
    try:
        # Feed a random counter increment once per second, forever.
        while True:
            telemetry.add_counter('counter1', random.randint(1, 10))
            time.sleep(1)
    except:
        # NOTE(review): bare except also catches KeyboardInterrupt, which
        # is presumably the intended shutdown path here; the server is
        # stopped and the exception re-raised either way.
        server.stop(0)
        raise
if __name__ == '__main__':
app.run(main) | StarcoderdataPython |
16555 | # -*- coding: utf-8 -*-
"""
1556. Thousand Separator
Given an integer n, add a dot (".") as the thousands separator and return it in string format.
Constraints:
0 <= n < 2^31
"""
class Solution:
    def thousandSeparator(self, n: int) -> str:
        """Render *n* with '.' between every group of three digits."""
        # The ',' option of the format mini-language inserts thousands
        # grouping at the right places; swapping commas for dots then
        # yields the required separator.
        return "{:,}".format(n).replace(",", ".")
| StarcoderdataPython |
3306439 | import unittest
import logging
import tempfile
import os
import docker
from .context import WDL
class TestTaskRunner(unittest.TestCase):
    def setUp(self):
        # Verbose logging for debugging runs; a fresh scratch directory
        # isolates each test's task run artifacts.
        logging.basicConfig(level=logging.DEBUG, format='%(name)s %(levelname)s %(message)s')
        self._dir = tempfile.mkdtemp(prefix="miniwdl_test_taskrun_")
    def _test_task(self, wdl:str, inputs = None, expected_exception: Exception = None):
        """Parse, typecheck, and locally run the single task in *wdl*.

        inputs may be a JSON-style dict (converted to WDL values against the
        task's declared inputs) or an already-prepared value list.  When
        expected_exception is given, the run must fail with a TaskFailure
        whose direct cause (__context__) is that exception type, and the
        cause is returned; otherwise the task's outputs are returned as
        JSON-style values.
        """
        doc = WDL.parse_document(wdl)
        assert len(doc.tasks) == 1
        doc.typecheck()
        if isinstance(inputs, dict):
            inputs = WDL.values_from_json(inputs, doc.tasks[0].available_inputs, doc.tasks[0].required_inputs)
        if expected_exception:
            try:
                WDL.runtime.run_local_task(doc.tasks[0], (inputs or []), parent_dir=self._dir)
            except WDL.runtime.task.TaskFailure as exn:
                self.assertIsInstance(exn.__context__, expected_exception)
                return exn.__context__
            # Reached only when run_local_task did not raise at all.
            self.assertFalse(str(expected_exception) + " not raised")
        rundir, outputs = WDL.runtime.run_local_task(doc.tasks[0], (inputs or []), parent_dir=self._dir)
        return WDL.values_to_json(outputs)
def test_docker(self):
outputs = self._test_task(R"""
version 1.0
task hello {
command <<<
cat /etc/issue
>>>
output {
String issue = read_string(stdout())
}
}
""")
self.assertTrue("18.04" in outputs["issue"])
outputs = self._test_task(R"""
version 1.0
task hello {
command <<<
cat /etc/issue
>>>
runtime {
docker: "ubuntu:18.10"
}
output {
String issue = read_string(stdout())
}
}
""")
self.assertTrue("18.10" in outputs["issue"])
outputs = self._test_task(R"""
version 1.0
task hello {
String version
command <<<
cat /etc/issue
>>>
runtime {
docker: "ubuntu:" + version
}
output {
String issue = read_string(stdout())
}
}
""", {"version": "18.10"})
self.assertTrue("18.10" in outputs["issue"])
self._test_task(R"""
version 1.0
task hello {
command <<<
cat /etc/issue
>>>
runtime {
docker: "nonexistent:202407"
}
}
""", expected_exception=docker.errors.ImageNotFound)
def test_hello_blank(self):
self._test_task(R"""
version 1.0
task hello_blank {
input {
String who
}
command <<<
echo "Hello, ~{who}!"
>>>
}
""", {"who": "Alyssa"})
def test_hello_file(self):
with open(os.path.join(self._dir, "alyssa.txt"), "w") as outfile:
outfile.write("Alyssa")
outputs = self._test_task(R"""
version 1.0
task hello_file {
input {
File who
}
command <<<
echo -n "Hello, $(cat ~{who})!" > message.txt
>>>
output {
File message = "message.txt"
}
}
""",
{"who": os.path.join(self._dir, "alyssa.txt")})
with open(outputs["message"]) as infile:
self.assertEqual(infile.read(), "Hello, Alyssa!")
# output an input file
outputs = self._test_task(R"""
version 1.0
task hello_file {
input {
File who
}
command <<<
echo -n "Hello, $(cat ~{who})!"
>>>
output {
File who2 = who
}
}
""",
{"who": os.path.join(self._dir, "alyssa.txt")})
self.assertEqual(outputs["who2"], os.path.join(self._dir, "alyssa.txt"))
# stdout()
outputs = self._test_task(R"""
version 1.0
task hello_file {
input {
File who
}
command <<<
echo -n "Hello, $(cat ~{who})!"
>>>
output {
File message = stdout()
}
}
""",
{"who": os.path.join(self._dir, "alyssa.txt")})
self.assertEqual(os.path.basename(outputs["message"]), "stdout.txt")
with open(outputs["message"]) as infile:
self.assertEqual(infile.read(), "Hello, Alyssa!")
def test_weird_output_files(self):
# nonexistent output file
self._test_task(R"""
version 1.0
task hello {
command {}
output {
File issue = "bogus.txt"
}
}
""", expected_exception=WDL.runtime.task.OutputError)
# attempt to output file which exists but we're not allowed to output
self._test_task(R"""
version 1.0
task hello {
command {}
output {
File issue = "/etc/issue"
}
}
""", expected_exception=WDL.runtime.task.OutputError)
self._test_task(R"""
version 1.0
task hello {
String trick = "/etc"
command {}
output {
File issue = trick + "/issue"
}
}
""", expected_exception=WDL.runtime.task.OutputError)
self._test_task(R"""
version 1.0
task hello {
command {
touch ../nono
}
output {
File issue = "../nono"
}
}
""", expected_exception=WDL.runtime.task.OutputError)
# circuitously output a file using an absolute path
outputs = self._test_task(R"""
version 1.0
task hello {
command {
echo -n $(pwd) > my_pwd
}
output {
File issue = read_string("my_pwd") + "/my_pwd"
}
}
""")
with open(outputs["issue"]) as infile:
pass
def test_command_error(self):
self._test_task(R"""
version 1.0
task hello {
command {
exit 1
}
}
""", expected_exception=WDL.runtime.task.CommandError)
def test_write_lines(self):
outputs = self._test_task(R"""
version 1.0
task hello_friends {
input {
Array[String] friends
}
command <<<
awk '{printf(" Hello, %s!",$0)}' ~{write_lines(friends)}
>>>
output {
String messages = read_string(stdout())
}
}
""",
{"friends": ["Alyssa", "Ben"]})
self.assertEqual(outputs["messages"], " Hello, Alyssa! Hello, Ben!")
outputs = self._test_task(R"""
version 1.0
task hello_friends2 {
input {
Array[String] friends
}
File friends_txt = write_lines(friends)
command <<<
awk '{printf(" Hello, %s!",$0)}' ~{friends_txt}
>>>
output {
String messages = read_string(stdout())
}
}
""",
{"friends": ["Alyssa", "Ben"]})
self.assertEqual(outputs["messages"], " Hello, Alyssa! Hello, Ben!")
def test_compound_files(self):
# tests filename mappings when Files are embedded in compound types
with open(os.path.join(self._dir, "alyssa.txt"), "w") as outfile:
outfile.write("Alyssa\n")
with open(os.path.join(self._dir, "ben.txt"), "w") as outfile:
outfile.write("Ben\n")
outputs = self._test_task(R"""
version 1.0
task hello {
Array[File] files
command {
while read fn; do
cat "$fn"
done < ~{write_lines(files)}
echo -n Alyssa, > alyssa.csv
echo -n Ben, > ben.csv
}
output {
File stdout = stdout()
Array[File] friends = ["alyssa.csv", "ben.csv"]
}
}
""", {"files": [ os.path.join(self._dir, "alyssa.txt"),
os.path.join(self._dir, "ben.txt") ]})
with open(outputs["stdout"]) as infile:
self.assertEqual(infile.read(), "Alyssa\nBen\n")
self.assertEqual(len(outputs["friends"]), 2)
with open(outputs["friends"][0]) as infile:
self.assertEqual(infile.read(), "Alyssa,")
with open(outputs["friends"][1]) as infile:
self.assertEqual(infile.read(), "Ben,")
def test_optional_inputs(self):
code = R"""
version 1.0
task defaults {
input {
String s0
String s1 = "ben"
String? s2
}
command {
echo "~{s0}"
echo "~{s1}"
echo "~{if (defined(s2)) then s2 else 'None'}"
}
output {
String out = read_string(stdout())
}
}
"""
outputs = self._test_task(code, {"s0": "alyssa"})
self.assertEqual(outputs["out"], "alyssa\nben\nNone\n")
outputs = self._test_task(code, {"s0": "alyssa", "s1": "cy"})
self.assertEqual(outputs["out"], "alyssa\ncy\nNone\n")
outputs = self._test_task(code, {"s0": "alyssa", "s2": "mallory"})
self.assertEqual(outputs["out"], "alyssa\nben\nmallory\n")
# FIXME: need some restrictions on what File inputs can default to
self._test_task(R"""
version 1.0
task hacker {
File host_passwords = <PASSWORD>"
command {
>&2 cat "~{host_passwords}"
}
output {
String owned = read_string(stderr())
}
}
""")
| StarcoderdataPython |
92754 | from __future__ import annotations
from typing import TYPE_CHECKING, Dict, List, Optional, Union
import datetime
from ..utils import Snowflake
from .embed import Embed
from .threads import Thread
from .attachments import Attachment
from .components import ActionRow
if TYPE_CHECKING:
from ..state import State
from .channel import DMChannel, TextChannel
from .guild import Guild
from .member import Member
from .user import User
Channels = Union[TextChannel, DMChannel]
__all__ = ("Message", "DeletedMessage")
class DeletedMessage:
    """
    Represents a deleted message.

    Attributes:
        id (int): The ID of the message.
        channel_id (int): The ID of the channel which the message was in.
        guild_id (Optional[int]): The ID of the guild which the message was in.
    """

    def __init__(self, data: Dict) -> None:
        self.id: int = int(data["id"])
        self.channel_id: int = int(data["channel_id"])
        # guild_id is absent for DM channels.
        if "guild_id" in data:
            self.guild_id: Optional[int] = int(data["guild_id"])
        else:
            self.guild_id = None
class Message:
"""
Represents a message.
"""
def __init__(self, state: State, data: Dict, channel: Channels) -> None:
"""
Creates a Message object.
Parameters:
state (State): The [State](./state.md) of the client.
data (Dict): The data of the message.
channel (Channels): The [Channel](./channel.md) the message was sent in.
"""
self._channel = channel
self._state = state
self._data = data
self._pinned = data.get("pinned", False)
def __repr__(self) -> str:
return f"<Message id={self.id}>"
async def edit(
self,
content: Optional[str] = None,
*,
embeds: Optional[List[Embed]] = None,
rows: Optional[List[ActionRow]] = None,
**kwargs,
) -> Message:
...
"""
Edits the message.
Parameters:
content (Optional[str]): The content of the message.
embeds (Optional[List[lefi.Embed]]): The list of embeds.
rows (Optional[List[ActionRow]]): The rows to send with the message.
kwargs (Any): The options to pass to [lefi.HTTPClient.edit_message](./http.md#lefi.HTTPClient.edit_message).
Returns:
The message after being editted.
"""
embeds = [] if embeds is None else embeds
data = await self._state.client.http.edit_message(
channel_id=self.channel.id,
message_id=self.id,
content=content,
embeds=[embed.to_dict() for embed in embeds],
components=[row.to_dict() for row in rows] if rows is not None else None,
)
if rows is not None and data.get("components"):
for row in rows:
for component in row.components:
self._state._components[component.custom_id] = (
component.callback,
component,
)
self._data = data
return self
    async def crosspost(self) -> Message:
        """
        Crossposts the message.

        Returns:
            The message being crossposted.
        """
        # NOTE(review): this uses self._state.http while edit() goes through
        # self._state.client.http — confirm which attribute is canonical.
        data = await self._state.http.crosspost_message(self.channel.id, self.id)
        return self._state.create_message(data, self.channel)
async def add_reaction(self, reaction: str) -> None:
"""
Adds a reaction to the message.
Parameters:
reaction (str): The reaction to add.
"""
await self._state.http.create_reaction(
channel_id=self.channel.id, message_id=self.id, emoji=reaction
)
async def remove_reaction(
self, reaction: str, user: Optional[Snowflake] = None
) -> None:
"""
Removes a reaction from the message.
Parameters:
reaction (str): The reaction to remove.
user (Optional[Snowflake]): The message to remove the reaction from.
"""
await self._state.http.delete_reaction(
channel_id=self.channel.id,
message_id=self.id,
emoji=reaction,
user_id=user.id if user is not None else user,
)
async def pin(self) -> None:
"""
Pins the message.
"""
await self._state.http.pin_message(self.channel.id, self.id)
self._pinned = True
async def unpin(self) -> None:
"""
Unpins the message.
"""
await self._state.http.unpin_message(self.channel.id, self.id)
self._pinned = False
async def delete(self) -> None:
"""
Deletes the message.
"""
await self._state.http.delete_message(self.channel.id, self.id)
self._state._messages.pop(self.id, None)
async def create_thread(
self, *, name: str, auto_archive_duration: Optional[int] = None
) -> Thread:
"""
Creates a thread from the message.
Parameters:
name (str): The name of the thread.
auto_archive_duration (Optional[int]): The amount of time to archive the thread.
Returns:
The created thread.
"""
if not self.guild:
raise TypeError("Cannot a create thread in a DM channel.")
if auto_archive_duration is not None:
if auto_archive_duration not in (60, 1440, 4320, 10080):
raise ValueError(
"auto_archive_duration must be 60, 1440, 4320 or 10080"
)
data = await self._state.http.start_thread_with_message(
channel_id=self.channel.id,
message_id=self.id,
name=name,
auto_archive_duration=auto_archive_duration,
)
return Thread(self._state, self.guild, data)
def to_reference(self) -> Dict:
payload = {"message_id": self.id, "channel_id": self.channel.id}
if self.guild:
payload["guild_id"] = self.guild.id
return payload
@property
def id(self) -> int:
"""
The ID of the message.
"""
return int(self._data["id"])
@property
def created_at(self) -> datetime.datetime:
"""
The time the message was created at.
"""
return datetime.datetime.fromisoformat(self._data["timestamp"])
@property
def channel(self) -> Channels:
"""
The [lefi.Channel](./channel.md) which the message is in.
"""
return self._channel
@property
def guild(self) -> Optional[Guild]:
"""
The [lefi.Guild](./guild.md) which the message is in.
"""
return self._channel.guild
@property
def content(self) -> str:
"""
The content of the message.
"""
return self._data["content"]
@property
def author(self) -> Union[User, Member]:
"""
The author of the message.
"""
if self.guild is None:
return self._state.get_user(int(self._data["author"]["id"])) # type: ignore
if author := self.guild.get_member(int(self._data["author"]["id"])): # type: ignore
return author
else:
return self._state.add_user(self._data["author"])
@property
def embeds(self) -> List[Embed]:
return [Embed.from_dict(embed) for embed in self._data["embeds"]]
@property
def attachments(self) -> List[Attachment]:
return [
Attachment(self._state, attachment)
for attachment in self._data["attachments"]
]
@property
def pinned(self) -> bool:
"""
Whether the message is pinned.
"""
return self._pinned
| StarcoderdataPython |
1758048 | # Generated by Django 3.2.6 on 2021-09-20 19:47
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration for digiapp: renames Good.title to
    # Good.name, drops Good.tags, adds a required Review.book FK (default 0
    # used only to backfill existing rows), and adjusts null/on_delete
    # settings on several related FKs.  Generated code — do not hand-edit
    # the operations beyond what the schema requires.

    dependencies = [
        ('digiapp', '0002_auto_20210918_0949'),
    ]

    operations = [
        migrations.RenameField(
            model_name='good',
            old_name='title',
            new_name='name',
        ),
        migrations.RemoveField(
            model_name='good',
            name='tags',
        ),
        migrations.AddField(
            model_name='review',
            name='book',
            field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.DO_NOTHING, related_name='book', to='digiapp.good'),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='good',
            name='subcategory',
            field=models.ForeignKey(max_length=200, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='subcatagory', to='digiapp.subcat'),
        ),
        migrations.AlterField(
            model_name='shoppingcart',
            name='shopping_list',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='shopping_list', to='digiapp.good'),
        ),
        migrations.AlterField(
            model_name='user',
            name='shopping_cart',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='shopping_cart', to='digiapp.shoppingcart'),
        ),
    ]
| StarcoderdataPython |
84208 | import argparse
import sys
import mdtraj as md
import numpy as np
import matplotlib.pyplot as plt
from numpy import linalg as LA
from sklearn import cluster, datasets
from scipy.optimize import minimize
from sklearn.neighbors import kneighbors_graph
from sklearn.manifold import SpectralEmbedding
from scipy.spatial.distance import squareform, pdist
from scipy.sparse.linalg import eigsh, lobpcg
from mpl_toolkits.mplot3d import Axes3D
from spherical_kmeans import SphericalKMeans
from sklearn.utils.graph import graph_shortest_path
from sklearn.metrics import silhouette_score
import networkx as nx
import os
class SpectralClustering:
file_end_name = '';
save_folder = '';
distances = 0;
validationDistances = 0;
nDimensions = 1;
def initialization(self, parser, input_args=None):
parser.add_argument('-d', '--distance_array', type=str, help='Input distances or input coordinates.',
default='');
parser.add_argument('-id', '--in_directory', type=str, help='Directory with input data (optional)', default='');
parser.add_argument('-fe', '--file_end_name', type=str, help='Output file end name (optional)', default='');
parser.add_argument('-od', '--out_directory', type=str,
help='The directory where data should be saved (optional)', default='');
parser.add_argument('-cdist', '--compressed_distances', help='Flag for using precomputed distances (optional)',
action='store_true');
parser.add_argument('-nDims', '--number_dimensions', type=float,
help='Setting the dimension of the data (optional)', default=1);
args = parser.parse_args() if input_args is None else parser.parse_args(input_args)
self.file_end_name = args.file_end_name;
self.save_folder = args.out_directory;
print('Spectral clustering.');
print('Reading distance matrix.');
self.distances = np.loadtxt(args.in_directory + args.distance_array);
if args.compressed_distances:
self.distances = squareform(self.distances);
else:
self.distances = squareform(pdist(self.distances));
print(self.distances.shape);
self.nDimensions = args.number_dimensions;
return;
def getKNNgraph(self, k):
print('Construct graph with k = ' + str(k) + ' neighbors');
nDist = self.distances.shape[0];
# Construct kNN network
adjacencyMatrix = np.zeros((nDist, nDist));
distance_sort_ind = np.argsort(self.distances);
for i in range(0, nDist):
iConnected = distance_sort_ind[i, 0:k];
adjacencyMatrix[i, iConnected] = self.distances[i, iConnected];
adjacencyMatrix[iConnected, i] = self.distances[i, iConnected];
# Symmetric adjacency matrix
adjacencyMatrix = 0.5 * (adjacencyMatrix + adjacencyMatrix.T);
return adjacencyMatrix;
def computeClusterPropensity(self, adjacencyMatrix):
nNodes = len(adjacencyMatrix[::, 0]);
degree = float(np.count_nonzero(adjacencyMatrix)) / float(nNodes);
print('Actual graph degree: ' + str(degree));
# Use networkx
graph = nx.Graph(adjacencyMatrix);
cluster_coeff = nx.average_clustering(graph);
cluster_coeff_random = float(degree) / (float(nNodes) - 1.0);
print('Clustering coefficient: ' + str(cluster_coeff));
print('dC = ' + str(cluster_coeff - cluster_coeff_random));
return cluster_coeff - cluster_coeff_random;
def getGraphMatrix(self, k_list):
c_diff = -1;
adjMat = [];
k_opt = -1;
for k in k_list:
A = self.getKNNgraph(k);
# cTmp = self.computeClusterPropensity(A);
# if c_diff < cTmp:
# c_diff = cTmp;
adjMat = A;
k_opt = k;
affinityMatrix = graph_shortest_path(adjMat, directed=False);
# Make similarity matrix
# affinityMatrix = np.exp(-affinityMatrix**2/2);
# for i in range(0,affinityMatrix.shape[0]):
# affinityMatrix[i,i] = 0;
# affinityMatrix = -adjMat**2/2;
print('Optimal k = ' + str(k_opt));
return affinityMatrix;
def computeSpectralEigenvectors(self, degree, affinity, n_components):
print('Compute spectral eigenvectors');
nDims = affinity.shape[0];
D = 1 / np.sqrt(degree) * np.eye(nDims);
L = np.eye(nDims) - np.dot(np.dot(D, affinity), D);
eigenvalues, eigenvectors = LA.eig(L);
sort_ind = np.argsort(eigenvalues);
X = eigenvectors[::, sort_ind[0:n_components]];
for i in range(0, len(X[::, 0])):
X[i, ::] /= np.linalg.norm(X[i, ::]);
return X;
def getGraphFromAdjacencyMatrix(self, adjacencyMatrix):
G = nx.Graph();
for i in range(0, len(adjacencyMatrix[::, 0])):
for j in range(i + 1, len(adjacencyMatrix[0, ::])):
if adjacencyMatrix[i, j] > 0:
G.add_edge(str(i), str(j), weight=adjacencyMatrix[i, j]);
return G;
def drawNetwork(self, adjacencyMatrix):
# Add shortest paths/geodesic distances + isomap projection before plotting
graph = nx.Graph(adjacencyMatrix);
pos = nx.spring_layout(graph);
nx.draw_networkx_nodes(graph, pos, node_size=100);
nx.draw_networkx_edges(graph, pos);
return;
def sphericalDistance(self, x1, x2):
# Distance along arc: r*theta (with r = 1 on unit n-sphere).
# Get theta from theta=arccos(u'*v/(norm(u)*norm(v))).
argument = np.dot(x1, x2) / (LA.norm(x1) * LA.norm(x2));
if argument > 1:
argument = 1;
elif argument < -1:
argument = -1;
return np.arccos(argument);
def sampleSphericalPoints(self, nPoints, nDimensions):
points = np.random.randn(nPoints, nDimensions);
for i in range(0, points.shape[0]):
points[i, ::] /= LA.norm(points[i, ::]);
# points[nDimensions-1,::] = np.abs(points[nDimensions-1,::]);
return points;
def qualityScore(self, x, centers):
nDimensions = len(centers[::, 0]);
tmpDist = np.zeros(nDimensions);
dist_fraction = np.zeros(len(x[::, 0]));
for i in range(0, len(x[::, 0])):
x1 = x[i, ::];
for j in range(0, nDimensions):
tmpDist[j] = self.sphericalDistance(x1, centers[j, ::]);
tmpDist = np.sort(tmpDist);
if tmpDist[0] < 1e-5:
tmpDist[0] = 1e-5;
dist_fraction[i] = tmpDist[1] / tmpDist[0];
qualityScore = np.median(dist_fraction);
return qualityScore;
def normalizedQualityScore(self, x, centers):
nDimensions = len(centers[::, 0]);
nPoints = len(x[::, 0]);
# Un-normalized Q score
Q = self.qualityScore(x, centers);
## Compute the normalization factor based on randomly sampled points
tmpQ = np.zeros(50);
for i in range(0, len(tmpQ)):
rand_points = self.sampleSphericalPoints(nPoints, nDimensions);
# Cluster with spherical k-means
kmeansObj = SphericalKMeans(cluster.KMeans());
kmeansObj.__init__(n_clusters=nDimensions, init='random');
kmeansObj.fit(rand_points);
# Get cluster centers
rand_centers = kmeansObj.cluster_centers_;
tmpQ[i] = self.qualityScore(rand_points, rand_centers);
# Compute quality score for random points
Q_rand = np.mean(tmpQ);
print('Q = ' + str(Q));
print('Q_rand = ' + str(Q_rand));
return Q / Q_rand;
def silhouetteScore(self, x, cluster_indices):
nPoints = x.shape[0];
allDistances = np.zeros((nPoints, nPoints));
dist_fraction = np.zeros(len(x[::, 0]));
for i in range(0, nPoints):
x1 = x[i, ::];
for j in range(i + 1, nPoints):
allDistances[i, j] = self.sphericalDistance(x1, x[j, ::]);
allDistances += allDistances.T;
qualityScore = silhouette_score(allDistances, labels=cluster_indices, metric='precomputed');
print('Silhouette score: ' + str(qualityScore));
return qualityScore;
def saveData(self, points, cluster_indices, index):
if self.save_folder[-1] != '/':
self.save_folder += '/';
if not os.path.exists(self.save_folder):
os.makedirs(self.save_folder);
np.savetxt(self.save_folder + 'cluster_indices_' + self.file_end_name + '.txt', cluster_indices[::, index]);
np.savetxt(self.save_folder + 'sphere_points_' + self.file_end_name + '.txt', points);
print('Data saved to files');
return;
def evaluateKDE(self, sigma, distances):
variance = sigma ** 2;
nTrainingPoints = float(self.validationDistances.shape[1]);
# Dimension of self.validationDistances: [nValPoints x nTrainPoints]
# print(nTrainingPoints*sigma**self.nDimensions);
scalingFactor = -self.nDimensions * np.log(sigma * np.sqrt(2 * np.pi));
allBasisFunctions = np.exp(-self.validationDistances ** 2 / (2 * variance));
tmpKDE = np.sum(allBasisFunctions, axis=1);
logLikelihood = np.sum(np.log(tmpKDE) + scalingFactor);
return logLikelihood, scalingFactor;
def optimalSigma(self):
print('Inferring Gaussian standard deviation.');
maxSigma = np.sqrt(np.mean(self.distances ** 2));
minSigma = 0.71 * np.min(self.distances[self.distances > 0]);
print('Min sigma = ' + str(minSigma));
# Let 60 % of points (randomly chosen) be in training set
nPoints = int(self.distances.shape[0]);
nTrainingPoints = int(np.floor(0.8 * nPoints));
nValidationPoints = nPoints - nTrainingPoints;
permutedPoints = np.random.permutation(nPoints);
trainingIndices = permutedPoints[0:nTrainingPoints];
validationIndices = permutedPoints[nTrainingPoints::];
self.validationDistances = np.zeros((nValidationPoints, nTrainingPoints));
for i in range(0, nTrainingPoints):
for j in range(0, nValidationPoints):
self.validationDistances[j, i] = self.distances[trainingIndices[i], validationIndices[j]];
constr = ({'type': 'ineq', 'fun': lambda sigma: sigma - minSigma},
{'type': 'ineq', 'fun': lambda sigma: maxSigma - sigma})
bestSigma = 1;
bestLogLikelihood = 0;
optResult = 0;
for sigma in np.arange(minSigma, maxSigma, (maxSigma - minSigma) / 100):
initialGuess = np.random.rand(1) * maxSigma;
tmpLoglikelihood, scalingFactor = self.evaluateKDE(
sigma); # minimize(self.evaluateKDE,initialGuess,constraints=constr);
# print tmpResult
if bestLogLikelihood == 0 and not (np.isnan(tmpLoglikelihood)):
# bestOpt = optResult.fun;
bestLogLikelihood = tmpLoglikelihood;
bestSigma = sigma;
elif tmpLoglikelihood > bestLogLikelihood:
print('Found better solution: ');
print(tmpLoglikelihood);
print(scalingFactor);
# print tmpResult.fun
# print bestOpt;
# print tmpResult.x;
# print initialGuess;
# bestOpt = optResult.fun;
bestLogLikelihood = tmpLoglikelihood;
bestSigma = sigma;
print('Inferred sigma: ' + str(bestSigma));
return bestSigma;
def KDE_density(self, sigma, distances):
variance = sigma ** 2;
nPoints = float(distances.shape[1]);
scalingFactor = -self.nDimensions * np.log(sigma * np.sqrt(2 * np.pi));
allBasisFunctions = np.exp(-distances ** 2 / (2 * variance));
tmpKDE = np.sum(allBasisFunctions, axis=1);
logLikelihood = np.sum(log(tmpKDE) + scalingFactor);
return logLikelihood, scalingFactor;
def leaveOneOutSigma(self):
maxSigma = np.sqrt(np.mean(self.distances ** 2));
minSigma = np.min(self.distances[self.distances > 0]);
print minSigma;
print maxSigma;
nPoints = self.distances.shape[0];
minDiff = 1000000000;
bestSigma = minSigma;
for sigma in np.arange(minSigma, maxSigma, (maxSigma - minSigma) / 100):
variance = sigma ** 2;
projections = np.exp(-self.distances ** 2 / (2 * variance));
total = 0;
for i in range(nPoints):
tmpDistances = self.distances[i, 0:i - 1];
tmpDistances = np.append(tmpDistances, self.distances[i, i + 1::]);
tmpProj = projections[i, 0:i - 1];
tmpProj = np.append(tmpProj, projections[i, i + 1::]);
prob_i = 1 / (nPoints - 1) * np.sum(tmpProj) + 1e-5;
K_i = np.sum(tmpProj * tmpDistances ** 2);
total += 1 / prob_i * K_i;
total = np.log(total) - self.nDimensions * np.log(sigma * np.sqrt(2 * np.pi))
diff = (np.log(variance * (self.nDimensions * (nPoints - 1) * nPoints)) - total) ** 2;
if diff < minDiff:
minDiff = diff;
bestSigma = sigma;
return bestSigma;
    def graphLaplacian(self, adjecencyMatrix):
        # Symmetric normalization D^(-1/2) A D^(-1/2) of the affinity matrix.
        # NOTE(review): despite the name, this is the normalized adjacency,
        # not I - D^(-1/2) A D^(-1/2); callers (cluster/clusterFindSigma)
        # sort its eigenvalues in *descending* order accordingly.
        D = np.sum(adjecencyMatrix, axis=1);
        D_inv = np.diag(1 / np.sqrt(D));
        laplacian = np.dot(D_inv, np.dot(adjecencyMatrix, D_inv));
        return laplacian;
def cluster(self):
k_list = [4];
nCompList = np.array([2, 3, 4, 5, 6, 7, 8]);
# Construct affinity matrix from interframe distances
# A = self.getGraphMatrix(k_list);
sigma = 1; # self.optimalSigma();
distSort = np.sort(self.distances, axis=1);
sigma = 0.7 * np.mean(distSort[::, 1]); # self.leaveOneOutSigma(); #
print('Sigma = ' + str(sigma));
dist_squared = np.multiply(self.distances, self.distances);
variance = np.mean(dist_squared);
A = np.exp(-dist_squared / (2 * sigma ** 2));
for i in range(0, A.shape[0]):
A[i, i] = 0;
# First project onto a bunch of dimensions, then pick optimal dim/clusters
print('Compute Laplacian');
laplacian = self.graphLaplacian(A);
print('Spectral embedding');
eigenValues, eigenVectors = eigsh(laplacian, k=(nCompList[-1] + 1));
# Sort in descending order
eigSortedInds = np.argsort(-eigenValues);
eigenValues = eigenValues[eigSortedInds];
proj = eigenVectors[::, eigSortedInds];
# proj = SpectralEmbedding(n_components=20, eigen_solver='arpack',
# affinity='precomputed').fit(A).embedding_;
print('Clustering in all dimensions');
counter = 1;
qualityScores = np.zeros(len(nCompList));
silhouetteScores = np.zeros(len(nCompList));
eigenGaps = np.zeros(len(nCompList));
BICs = np.zeros(len(nCompList));
cluster_indices = np.zeros((A.shape[0], len(nCompList)));
# Do clustering and vary the number of clusters
for nComponents in nCompList:
print('Number of clusters: ' + str(nComponents));
X = np.copy(proj[::, 0:nComponents]);
for i in range(0, len(X[::, 0])):
X[i, ::] /= LA.norm(X[i, ::]);
# Cluster with spherical k-means
kmeansObj = SphericalKMeans(cluster.KMeans());
kmeansObj.__init__(n_clusters=nComponents, init='random');
kmeansObj.fit(X);
# Get cluster indices and centers
cl_ind = kmeansObj.labels_;
cluster_indices[::, counter - 1] = cl_ind + 1;
centers = kmeansObj.cluster_centers_;
# Compute clustering quality score
qualityScores[counter - 1] = self.normalizedQualityScore(X, centers);
# silhouetteScores[counter-1] = self.silhouetteScore(X, cl_ind);
eigenGaps[counter - 1] = eigenValues[nComponents - 1] - eigenValues[nComponents];
print('Eigen gap: ' + str(eigenGaps[counter - 1]));
# BICs[counter-1] = 0.5*nComponents*X.shape[1]*np.log(X.shape[0]);
# print('BIC: ' + str(BICs[counter-1]));
colors = np.array([x for x in 'bgrcmykbgrcmykbgrcmykbgrcmyk'])
colors = np.hstack([colors] * 20)
if X.shape[1] > 2 and X.shape[1] < 4:
fig = plt.figure(counter);
ax = fig.add_subplot(111, projection='3d', aspect='equal');
ax.scatter(X[::, 0], X[::, 1], X[::, 2], color=colors[cl_ind].tolist());
elif X.shape[1] == 2:
fig = plt.figure(counter);
ax = fig.add_subplot(111, aspect='equal');
ax.scatter(X[::, 0], X[::, 1], color=colors[cl_ind].tolist());
plt.axis([-1, 1, -1, 1])
counter += 1;
silhouetteScores -= np.min(silhouetteScores);
silhouetteScores /= np.max(silhouetteScores) if np.max(silhouetteScores) != 0 else 1;
fig = plt.figure(counter);
ax = fig.add_subplot(111);
ax.plot(nCompList, qualityScores / np.max(qualityScores), marker='o', color='b', linewidth=2.0,
label='SPECTRUS Q');
# ax.plot(nCompList,silhouetteScores,marker='o',color='r',linewidth=2.0,label='Silhouette');
ax.plot(nCompList, eigenGaps / np.max(eigenGaps), marker='o', color='g', linewidth=2.0, label='Eigen gap');
# ax.plot(nCompList,BICs/np.max(BICs),marker='o',color='k',linewidth=2.0,label='BIC');
plt.title('Normalized quality scores: Pick number of clusters');
plt.xlabel('# clusters');
plt.legend()
plt.ylabel('Score');
# Decide on clusters
# nClusters = int(np.round(plt.ginput(1)[0][0]));
plt.show();
print("Ready to read input?")
nClusters = int(raw_input("Pick number of clusters:"))
print("Number of clusters: " + str(nClusters));
index = np.where(nCompList == nClusters)[0][0];
X = np.copy(proj[::, 0:nClusters]);
for i in range(0, len(X[::, 0])):
X[i, ::] /= np.linalg.norm(X[i, ::]);
self.saveData(X, cluster_indices, index);
return;
def clusterFindSigma(self):
k_list = [4];
nCompList = np.array([2, 3, 4, 5, 6, 7, 8]);
# Construct affinity matrix from interframe distances
# A = self.getGraphMatrix(k_list);
sigma = 1; # self.optimalSigma(); #self.leaveOneOutSigma();
distSort = np.sort(self.distances, axis=1);
sigma_min = 0.5 * np.min(distSort[::, 1]); # self.leaveOneOutSigma(); #
print('Sigma min = ' + str(sigma_min));
dist_squared = np.multiply(self.distances, self.distances);
sigma_max = 0.5 * np.max(distSort[::, 1]);
print('sigma max: ' + str(sigma_max));
sigma_best = sigma_min;
minDistortion = 100000;
bestX = 0;
for nComponents in nCompList:
print('Clustering in all dimensions');
for sigma in np.arange(sigma_min, sigma_max, (sigma_max - sigma_min) / 20):
A = np.exp(-dist_squared / (2 * sigma ** 2));
for i in range(0, A.shape[0]):
A[i, i] = 0;
# First project onto a bunch of dimensions, then pick optimal dim/clusters
print('Spectral embedding with sigma = ' + str(sigma));
laplacian = self.graphLaplacian(A);
eigenValues, eigenVectors = LA.eig(laplacian);
# Sort in descending order
eigSortedInds = np.argsort(-eigenValues);
eigenValues = eigenValues[eigSortedInds];
proj = eigenVectors[:, eigSortedInds];
# proj = SpectralEmbedding(n_components=20, eigen_solver='arpack',
# affinity='precomputed').fit(A).embedding_;
counter = 1;
qualityScores = np.zeros(len(nCompList));
silhouetteScores = np.zeros(len(nCompList));
eigenGaps = np.zeros(len(nCompList));
cluster_indices = np.zeros((A.shape[0], len(nCompList)));
# Do clustering and vary the number of clusters
X = np.copy(proj[::, 0:nComponents]);
print('Number of clusters: ' + str(nComponents));
if sigma == sigma_min:
bestX = X;
for i in range(0, len(X[::, 0])):
X[i, ::] /= LA.norm(X[i, ::]);
# Cluster with spherical k-means
kmeansObj = SphericalKMeans(cluster.KMeans());
kmeansObj.__init__(n_clusters=nComponents, init='random');
kmeansObj.fit(X);
# Get cluster indices and centers
cl_ind = kmeansObj.labels_;
clusterDistortion = kmeansObj.inertia_;
if clusterDistortion < minDistortion:
minDistortion = clusterDistortion;
bestX = X;
sigma_best = sigma;
cluster_indices[::, counter - 1] = cl_ind + 1;
centers = kmeansObj.cluster_centers_;
else:
break;
print('Chosen sigma: ' + str(sigma_best));
# Compute clustering quality score
qualityScores[counter - 1] = self.normalizedQualityScore(X, centers);
silhouetteScores[counter - 1] = self.silhouetteScore(X, cl_ind);
eigenGaps[counter - 1] = eigenValues[nComponents - 1] - eigenValues[nComponents];
print('Eigen gap: ' + str(eigenGaps[counter - 1]));
colors = np.array([x for x in 'bgrcmykbgrcmykbgrcmykbgrcmyk'])
colors = np.hstack([colors] * 20)
if X.shape[1] > 2 and X.shape[1] < 4:
fig = plt.figure(counter);
ax = fig.add_subplot(111, projection='3d', aspect='equal');
ax.scatter(X[::, 0], X[::, 1], X[::, 2], color=colors[cl_ind].tolist());
elif X.shape[1] == 2:
fig = plt.figure(counter);
ax = fig.add_subplot(111, aspect='equal');
ax.scatter(X[::, 0], X[::, 1], color=colors[cl_ind].tolist());
plt.axis([-1, 1, -1, 1])
counter += 1;
silhouetteScores -= np.min(silhouetteScores);
silhouetteScores /= np.max(silhouetteScores);
fig = plt.figure(counter);
ax = fig.add_subplot(111);
ax.plot(nCompList, qualityScores / np.max(qualityScores), marker='o', color='b', linewidth=2.0,
label='SPECTRUS Q');
ax.plot(nCompList, silhouetteScores, marker='o', color='r', linewidth=2.0, label='Silhouette');
ax.plot(nCompList, eigenGaps / np.max(eigenGaps), marker='o', color='g', linewidth=2.0, label='Eigen gap');
plt.title('Normalized quality scores: Pick number of clusters');
plt.xlabel('# clusters');
plt.ylabel('Score');
# Decide on clusters
nClusters = int(np.round(plt.ginput(1)[0][0]));
print("Number of clusters: " + str(nClusters));
index = np.where(nCompList == nClusters)[0][0];
X = np.copy(proj[::, 0:nClusters]);
for i in range(0, len(X[::, 0])):
X[i, ::] /= np.linalg.norm(X[i, ::]);
self.saveData(X, cluster_indices, index);
plt.show();
return;
# parser = argparse.ArgumentParser(epilog='Perform spectral clustering with spherical k-means on given data. <NAME> 2017.');
# clustering = SpectralClustering();
# clustering.initialization(parser);
# clustering.cluster();
| StarcoderdataPython |
1782901 | <filename>Python/sniffer.py
from scapy.all import *
# Task 1A: A simple sniffing program that was given to us.
def got_packet(pkt):
    """Per-packet callback for scapy's sniff(): pretty-print the packet layers."""
    pkt.show()
# Entry point: start a blocking capture of ICMP traffic on interface enp0s3,
# invoking got_packet for every captured packet. Requires scapy and (on
# Linux) root privileges; sniff() only returns when interrupted.
print('Sniffing...')
pkt = sniff(iface='enp0s3', filter='icmp', prn=got_packet)
| StarcoderdataPython |
1660938 | <gh_stars>100-1000
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayMerchantWeikeBillModifyModel(object):
    """Request payload model for the Alipay merchant Weike bill-modify API.

    Every business field is a read/write property backed by an
    underscore-prefixed attribute initialised to None.  Falsy field values
    (None, 0, '') are omitted from the dict produced by to_alipay_dict(),
    matching the behaviour of the generated Alipay SDK models.
    """

    # All business fields, in the order the SDK declares them.
    _FIELDS = (
        'actual_service_charge',
        'alipay_trans_serial_no',
        'bill_month',
        'bill_no',
        'bill_version',
        'current_actual_service_charge',
        'current_expect_service_charge',
        'current_user_task_count',
        'expect_service_charge',
        'expect_tax',
        'gmt_modified',
        'gmt_pay',
        'out_biz_no',
        'paid_charge_tax_include',
        'paid_service_charge',
        'service_charge_serial_no',
        'to_pay_service_charge',
        'weike_user_id',
    )

    def __init__(self):
        # Every backing slot starts as None so the truthiness checks in
        # to_alipay_dict() skip unset fields.
        for field in self._FIELDS:
            setattr(self, '_' + field, None)

    def to_alipay_dict(self):
        """Serialize the model to a plain dict, omitting falsy fields.

        Values that themselves expose to_alipay_dict() (nested SDK models)
        are serialized recursively.
        """
        params = dict()
        for field in self._FIELDS:
            value = getattr(self, field)
            if not value:
                continue
            if hasattr(value, 'to_alipay_dict'):
                params[field] = value.to_alipay_dict()
            else:
                params[field] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model from a dict; returns None for a falsy input."""
        if not d:
            return None
        o = AlipayMerchantWeikeBillModifyModel()
        for field in AlipayMerchantWeikeBillModifyModel._FIELDS:
            if field in d:
                setattr(o, field, d[field])
        return o


def _install_bill_model_accessors(cls):
    """Attach one read/write property per business field to *cls*.

    Each property simply proxies the matching underscore-prefixed backing
    attribute, exactly like the hand-written @property pairs it replaces.
    """
    def make_property(name):
        backing = '_' + name
        def getter(self):
            return getattr(self, backing)
        def setter(self, value):
            setattr(self, backing, value)
        return property(getter, setter)
    for name in cls._FIELDS:
        setattr(cls, name, make_property(name))


_install_bill_model_accessors(AlipayMerchantWeikeBillModifyModel)
| StarcoderdataPython |
1714819 | <filename>PercorsoDati/Lab3-4/etl/utils.py<gh_stars>10-100
import numpy as np
@np.vectorize
def remove_dollar(label: str):
    """Strip the dollar sign and thousands separators from a price label
    and convert the remainder to float (vectorized over array inputs)."""
    return float(label.translate(str.maketrans("", "", "$,")))
| StarcoderdataPython |
133838 | """
Convert tabular data from
Tabular Benchmarks for Joint Architecture and Hyperparameter Optimization
<NAME> <NAME>
https://arxiv.org/pdf/1905.04970.pdf.
"""
import urllib
import tarfile
from pathlib import Path
from typing import Optional
import pandas as pd
import numpy as np
import ast
import h5py
from syne_tune.blackbox_repository.blackbox_tabular import serialize, BlackboxTabular
from syne_tune.blackbox_repository.conversion_scripts.utils import repository_path
from syne_tune.util import catchtime
from syne_tune.config_space import choice, logfinrange, finrange, randint
# Name under which the converted blackbox is serialized/uploaded.
BLACKBOX_NAME = 'fcnet'
# Objective-column names exposed by the converted blackbox; convert_dataset
# asserts that index 0 is the validation loss and index 4 the elapsed time.
METRIC_VALID_LOSS = 'metric_valid_loss'
METRIC_ELAPSED_TIME = 'metric_elapsed_time'
# Fidelity (resource) attribute name and its maximum value (training epochs).
RESOURCE_ATTR = 'hp_epoch'
MAX_RESOURCE_LEVEL = 100
# Hyperparameter names for the two hidden-layer widths.
NUM_UNITS_1 = 'hp_n_units_1'
NUM_UNITS_2 = 'hp_n_units_2'
# Search space of the original FCNet tabular benchmark (Klein & Hutter,
# arXiv:1905.04970), expressed with syne_tune config-space primitives.
CONFIGURATION_SPACE = {
    "hp_activation_fn_1": choice(["tanh", "relu"]),
    "hp_activation_fn_2": choice(["tanh", "relu"]),
    "hp_batch_size": logfinrange(8, 64, 4, cast_int=True),
    "hp_dropout_1": finrange(0.0, 0.6, 3),
    "hp_dropout_2": finrange(0.0, 0.6, 3),
    "hp_init_lr": choice([0.0005, 0.001, 0.005, 0.01, 0.05, 0.1]),
    'hp_lr_schedule': choice(["cosine", "const"]),
    NUM_UNITS_1: logfinrange(16, 512, 6, cast_int=True),
    NUM_UNITS_2: logfinrange(16, 512, 6, cast_int=True),
}
def convert_dataset(dataset_path: Path, max_rows: int = None) -> BlackboxTabular:
    """Convert one FCNet HDF5 benchmark file into a BlackboxTabular.

    Each top-level HDF5 key is the string repr of a hyperparameter dict;
    under it live per-seed metric arrays (4 seeds, 100 epochs).

    :param dataset_path: path to an fcnet_*_data.hdf5 file.
    :param max_rows: if given, only convert the first `max_rows` configs.
    :return: tabular blackbox with objectives
        (valid_loss, train_loss, final_test_error, n_params, elapsed_time).
    """
    data = h5py.File(dataset_path, "r")
    keys = data.keys()
    if max_rows is not None:
        keys = list(keys)[:max_rows]
    # Keys are literal dict strings, e.g. "{'n_units_1': 16, ...}"; parse them
    # into a dataframe and prefix every column with "hp_".
    hyperparameters = pd.DataFrame(ast.literal_eval(key) for key in keys)
    hyperparameters.rename(columns={col: "hp_" + col for col in hyperparameters.columns}, inplace=True)
    objective_names = [
        'valid_loss',
        'train_loss',
        'final_test_error',
        'n_params',
        'elapsed_time',
    ]
    # todo for now only full metrics
    fidelity_values = np.arange(1, MAX_RESOURCE_LEVEL + 1)
    n_fidelities = len(fidelity_values)
    n_objectives = len(objective_names)
    # Each configuration was trained with 4 independent seeds in the raw data.
    n_seeds = 4
    n_hps = len(keys)
    # Evaluation tensor filled below, one slice per objective.
    objective_evaluations = np.empty((n_hps, n_seeds, n_fidelities, n_objectives)).astype('float32')
    def save_objective_values_helper(name, values):
        # Write `values` into the objective slice identified by `name`.
        assert values.shape == (n_hps, n_seeds, n_fidelities)
        name_index = dict(zip(
            objective_names,
            range(len(objective_names)))
        )
        objective_evaluations[..., name_index[name]] = values
    # Scalar-per-seed metrics are broadcast along the fidelity axis.
    # (n_hps, n_seeds,)
    final_test_error = np.stack([data[key]['final_test_error'][:].astype('float32') for key in keys])
    # (n_hps, n_seeds, n_fidelities)
    final_test_error = np.repeat(np.expand_dims(final_test_error, axis=-1), n_fidelities, axis=-1)
    save_objective_values_helper('final_test_error', final_test_error)
    # (n_hps, n_seeds,)
    n_params = np.stack([data[key]['n_params'][:].astype('float32') for key in keys])
    # (n_hps, n_seeds, n_fidelities)
    n_params = np.repeat(np.expand_dims(n_params, axis=-1), n_fidelities, axis=-1)
    save_objective_values_helper('n_params', n_params)
    # (n_hps, n_seeds,)
    runtime = np.stack([data[key]['runtime'][:].astype('float32') for key in keys])
    # linear interpolation to go from total training time to training time per epoch as in fcnet code
    # (n_hps, n_seeds, n_epochs)
    # todo utilize expand dim instead of reshape
    epochs = np.repeat(fidelity_values.reshape(1, -1),
                       n_hps * n_seeds, axis=0).reshape(n_hps, n_seeds, -1)
    elapsed_time = (epochs / MAX_RESOURCE_LEVEL) * runtime.reshape(
        (n_hps, n_seeds, 1))
    save_objective_values_helper('elapsed_time', elapsed_time)
    # metrics that are fully observed, only use train/valid loss as mse are the same numbers
    # for m in ['train_loss', 'train_mse', 'valid_loss', 'valid_mse']:
    for m in ['train_loss', 'valid_loss']:
        save_objective_values_helper(
            m,
            np.stack([data[key][m][:].astype('float32') for key in keys])
        )
    fidelity_space = {
        RESOURCE_ATTR: randint(lower=1, upper=MAX_RESOURCE_LEVEL)
    }
    # Expose objectives under the "metric_" prefix used throughout syne_tune.
    objective_names = [f"metric_{m}" for m in objective_names]
    # Sanity checks:
    assert objective_names[0] == METRIC_VALID_LOSS
    assert objective_names[4] == METRIC_ELAPSED_TIME
    return BlackboxTabular(
        hyperparameters=hyperparameters,
        configuration_space=CONFIGURATION_SPACE,
        fidelity_space=fidelity_space,
        objectives_evaluations=objective_evaluations,
        fidelity_values=fidelity_values,
        objectives_names=objective_names,
    )
def generate_fcnet(s3_root: Optional[str] = None):
    """Download, convert and publish the FCNet tabular benchmark.

    Steps: fetch the raw archive if it is missing, extract it, convert each
    of the four datasets with convert_dataset, serialize the resulting
    blackboxes to disk, and upload them to S3.

    :param s3_root: optional S3 root forwarded to upload().
    """
    # Bug fix: the module only does `import urllib`, which does not load the
    # `urllib.request` submodule; import it explicitly before using it.
    import urllib.request
    blackbox_name = BLACKBOX_NAME
    fcnet_file = repository_path / "fcnet_tabular_benchmarks.tar.gz"
    if not fcnet_file.exists():
        src = "http://ml4aad.org/wp-content/uploads/2019/01/fcnet_tabular_benchmarks.tar.gz"
        print(f"did not find {fcnet_file}, downloading {src}")
        urllib.request.urlretrieve(src, fcnet_file)
    # NOTE(review): extractall() trusts member paths inside the archive; this
    # is acceptable for the known upstream tarball but unsafe for untrusted
    # archives (path traversal).
    with tarfile.open(fcnet_file) as f:
        f.extractall(path=repository_path)
    with catchtime("converting"):
        bb_dict = {}
        for dataset in ['protein_structure', 'naval_propulsion', 'parkinsons_telemonitoring', 'slice_localization']:
            print(f"converting {dataset}")
            dataset_path = repository_path / "fcnet_tabular_benchmarks" / f"fcnet_{dataset}_data.hdf5"
            bb_dict[dataset] = convert_dataset(dataset_path=dataset_path)
    with catchtime("saving to disk"):
        serialize(bb_dict=bb_dict, path=repository_path / blackbox_name)
    with catchtime("uploading to s3"):
        # Imported lazily to avoid a hard dependency at module import time.
        from syne_tune.blackbox_repository.conversion_scripts.utils import upload
        upload(blackbox_name, s3_root=s3_root)
def plot_learning_curves():
    """Sanity check: sample one configuration from the converted
    'naval_propulsion' blackbox and plot its validation-loss curve over
    all epochs with matplotlib."""
    import matplotlib.pyplot as plt
    from syne_tune.blackbox_repository.repository import load
    # plot one learning-curve for sanity-check
    bb_dict = load(BLACKBOX_NAME)
    b = bb_dict['naval_propulsion']
    # Draw one random value per hyperparameter of the search space.
    configuration = {k: v.sample() for k, v in b.configuration_space.items()}
    print(configuration)
    errors = []
    for i in range(1, MAX_RESOURCE_LEVEL + 1):
        # NOTE(review): the fidelity key used here is 'epochs', while the
        # blackbox was built with RESOURCE_ATTR == 'hp_epoch' — confirm that
        # objective_function accepts this key.
        res = b.objective_function(configuration=configuration, fidelity={'epochs': i})
        errors.append(res[METRIC_VALID_LOSS])
    plt.plot(errors)
if __name__ == '__main__':
    # Build and publish the blackbox; uncomment the call below for a quick
    # visual sanity check of one learning curve.
    generate_fcnet()
    # plot_learning_curves()
| StarcoderdataPython |
1607786 | import torch
from torch.utils.data.dataset import Dataset
import numpy as np
import pandas as pd
import cv2
from albumentations import Compose, Flip, RandomScale, ShiftScaleRotate, RandomBrightnessContrast, Rotate, RandomCrop, CenterCrop, Resize, Blur, CLAHE, Equalize, Normalize, OneOf, IAASharpen, IAAEmboss
from sklearn.model_selection import train_test_split
from . import config
class PlantPathology(Dataset):
    '''Class that represents the images dataset. It allows to feed DataLoader with transformed images and corresponding labels to a model for training.
    Inherits from Dataset class.
    '''
    def __init__(self, df, label_cols=None, is_test=False, apply_transforms=True, to_tensor=True):
        # df: metadata frame; assumed to contain an 'image_id' column and,
        # when is_test is False, one-hot label columns named by label_cols
        # — TODO confirm against the caller.
        self.is_test = is_test
        self.apply_transforms = apply_transforms
        self.to_tensor = to_tensor
        # load metadata
        self.df_metadata = df
        self.image_ids = self.df_metadata['image_id'].values
        if not self.is_test:
            self.label_cols = label_cols
            self.labels = self.df_metadata[self.label_cols].values
            # class weights
            # log((N - n_c) / n_c) per class; NOTE(review): assumes every
            # class has at least one positive sample (no division by zero).
            self.label_weights = np.log(self.labels.shape[0] / self.labels.sum(axis=0) - 1)
        # Augmentation pipeline (albumentations): geometric + photometric
        # augmentations, a 1024x1024 crop, downscale to 64x64, then CLAHE.
        self.transforms = Compose([
            Flip(p=0.8),
            ShiftScaleRotate(shift_limit=0.05,
                             scale_limit=0.2,
                             rotate_limit=90,
                             p=1.),
            RandomBrightnessContrast(p=1.),
            OneOf([
                IAASharpen(),
                IAAEmboss(),
            ], p=0.5),
            RandomCrop(1024, 1024, p=1.),
            Resize(64, 64),
            CLAHE(clip_limit=(1, 4), tile_grid_size=(8, 8), p=1.),
        ])
    def __len__(self):
        # Number of samples == number of image ids in the metadata.
        return self.image_ids.shape[0]
    def __getitem__(self, index):
        # read file
        if torch.is_tensor(index):
            index = index.tolist()
        image_path = config.get_image_filename(self.image_ids[index])
        image = cv2.imread(image_path)
        if self.apply_transforms:
            image = self._transform(image)
        if self.to_tensor:
            image = self._to_tensor(image)
        if self.is_test:
            return image
        else:
            label = self.labels[index]
            return (image, label)
    def _transform(self, image):
        # Apply the albumentations pipeline; it returns a dict keyed 'image'.
        return self.transforms(image=image)['image']
    def _to_tensor(self, image):
        # HWC -> CHW, then wrap as a float tensor for the model.
        image = image.transpose((2, 0, 1))
        image = torch.from_numpy(image).float()
        return image
    def label_from_vect(self, label_vector):
        # Map a one-hot / score vector back to its column (class) name.
        return self.label_cols[np.argmax(label_vector)]
def stratified_split(df, label_cols, test_size=.2, shuffle=True):
    '''Split a dataframe into a training and validation set while preserving class distributions.

    Args:
        df: input DataFrame.
        label_cols: label column(s) used for stratification.
        test_size: fraction of rows assigned to the validation split.
        shuffle: whether to shuffle before splitting. Note scikit-learn
            requires shuffle=True whenever stratify is given.

    Returns:
        (train_df, validation_df) tuple.
    '''
    # Bug fix: the shuffle argument was previously ignored (hard-coded True).
    train, test, _, _ = train_test_split(
        df, df[label_cols], test_size=test_size,
        stratify=df[label_cols], shuffle=shuffle)
    return train, test
def oversample(df, label_cols, factor, balance_classes=True):
    '''Duplicate samples in a dataframe according to the classes distributions and a multiplying factor.

    For every label column k, the rows where df[k] == 1 are appended
    factor * v additional times, where v is 1 when balance_classes is False
    and otherwise the rounded ratio between the most frequent class and k.

    Args:
        df: input DataFrame with one-hot label columns.
        label_cols: names of the one-hot label columns.
        factor: base duplication multiplier (0 leaves df unchanged).
        balance_classes: when True, rarer classes are duplicated more.

    Returns:
        A new DataFrame with duplicated rows and a reset integer index.
    '''
    if balance_classes:
        # Rounded ratio of the most frequent class to each class.
        # NOTE(review): assumes every class occurs at least once (no division
        # by zero) and the ratio fits in int8 — confirm for very skewed data.
        class_balance = df[label_cols].sum(axis=0) / df[label_cols].shape[0]
        class_balance = np.round(class_balance.max() / class_balance).astype('int8').to_dict()
    else:
        class_balance = {k: 1 for k in label_cols}
    for k, v in class_balance.items():
        # Fix: DataFrame.append was removed in pandas 2.0; pd.concat is the
        # supported equivalent and produces the same result.
        df = pd.concat([df] + [df[df[k] == 1]] * (factor * v), ignore_index=True)
    return df
1767453 | #!/usr/bin/env python3
import json
import datetime
import os.path
from franken import crane, trivy
# Load the scan configuration: maps a short repository key to its settings
# (at minimum a "repo" entry naming the container-image repository).
with open("conf/scan.json") as config_file:
    config_data = json.load(config_file)
result = {}
# Hour-resolution timestamped output directory, e.g. output/scan-2021103114.
date_str = datetime.datetime.now().strftime("%Y%m%d%H")
output_home = os.path.join("output", "scan-" + date_str)
if not os.path.exists(output_home):
    os.mkdir(output_home)
# For every configured repository, scan each of its tags with trivy, writing
# one JSON report per tag under output/scan-<ts>/<key>/<tag>.json.
for key, value in config_data.items():
    tag_list = crane.get_tag_list(value["repo"])
    for tag in tag_list:
        repo_path = os.path.join(output_home, key)
        if not os.path.exists(repo_path):
            os.mkdir(repo_path)
        # NOTE(review): `result` is overwritten every iteration, so only the
        # last scan's return value survives the loop — confirm intended.
        result = trivy.scan(value["repo"], tag, os.path.join(repo_path, tag + ".json"))
3376588 | #!/bin/python3
import math
import os
import random
import re
import sys
from collections import Counter
# Complete the migratoryBirds function below.
def migratoryBirds(arr):
    """Return the id of the most frequently sighted bird type.

    Ties are broken by the smallest type id.  Raises ValueError on an empty
    list (as the original code did, via min() of an empty sequence).

    Fixes over the previous version: removed leftover debug print() calls
    that polluted stdout, and replaced the hand-rolled counting with
    collections.Counter.

    Args:
        arr: list of bird-type ids (ints).

    Returns:
        The smallest id among the types with the maximal sighting count.
    """
    counts = Counter(arr)           # O(n) frequency table
    top = max(counts.values())      # highest sighting count
    # Smallest id among all types that reach the top count (tie-break rule).
    return min(t for t, c in counts.items() if c == top)
if __name__ == '__main__':
    # HackerRank harness: reads the input from stdin and writes the answer
    # to the file named by the OUTPUT_PATH environment variable.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    arr_count = int(input().strip())  # declared length; not used beyond parsing
    arr = list(map(int, input().rstrip().split()))
    result = migratoryBirds(arr)
    fptr.write(str(result) + '\n')
    fptr.close()
| StarcoderdataPython |
99986 | <gh_stars>10-100
from astrometry.util.fits import fits_table
from glob import glob
import os
import numpy as np
'''After DR5 we moved a bunch of CP input files that were in a
NonDECaLS-DR5 directory into NonDECaLS, and also rationalized some
duplicate files, including deleting some "v1" CP files in favor of
"v2" versions. This script patches up the CCDs tables to point to the
new files.
Before running this script, create a symlink to the 'decam' directory:
ln -s ~/cosmo/staging/decam/ .
'''
def update_paths(T):
    """Rewrite T.image_filename entries in place to point at the moved CP files.

    For each filename: keep it if it exists on disk; otherwise retry with the
    NonDECaLS-DR5 directory renamed to NonDECaLS and a wildcard subdirectory;
    failing that, retry with the trailing version suffix (e.g. _v1) replaced
    by a wildcard to pick up the v2 file.  Exactly one glob match is required;
    anything else trips an assert.  Successful mappings are memoized.

    NOTE(review): T.image_filename may be a fixed-width string column; the
    final print of the maximum replacement length exists so the caller can
    check for truncation — confirm the column is wide enough.
    """
    replacements = dict()
    for i,fn in enumerate(T.image_filename):
        #fn = t.image_filename.strip()
        #fn = os.path.join(basedir, fn)
        #print(fn)
        fn = fn.strip()
        # Reuse an earlier resolution of the same path, if any.
        rfn = replacements.get(fn)
        if rfn is not None:
            T.image_filename[i] = rfn
            continue
        # Case 1: the file still exists where the table says it is.
        if os.path.exists(fn):
            print('Found', fn)
            replacements[fn] = fn
            continue
        #print('Not found:', fn)
        # Split off <...>/<nddir>/<cpdir>/<filename> components.
        dirnm,filename = os.path.split(fn)
        dirnm,cpdir = os.path.split(dirnm)
        dirnm,nddir = os.path.split(dirnm)
        #print('components', dirnm, nddir, cpdir, filename)
        # Case 2: NonDECaLS-DR5 was merged into NonDECaLS after DR5.
        if nddir == 'NonDECaLS-DR5':
            nddir = 'NonDECaLS'
        pat = os.path.join(dirnm, nddir, '*', filename)
        #print('Pattern', pat)
        fns = glob(pat)
        #print('-> ', fns)
        if len(fns) == 1:
            rfn = fns[0]
            T.image_filename[i] = rfn
            replacements[fn] = rfn
            #print('Found', len(fns))
            print('Nondecals', fn, '->', rfn)
            continue
        # More than one match would be ambiguous — refuse to guess.
        assert(len(fns) == 0)
        # v1 -> v2, etc
        # Case 3: drop the version suffix and glob for any version.
        components = filename.split('.')
        fn2 = '_'.join(components[0].split('_')[:-1]) + '*'
        #c4d_140626_015021_ooi_g_v2.fits.fz
        pat = os.path.join(dirnm, nddir, '*', fn2)
        #print('Pattern', pat)
        fns = glob(pat)
        #print('-> ', fns)
        if len(fns) == 1:
            rfn = fns[0]
            T.image_filename[i] = rfn
            replacements[fn] = rfn
            #print('Found', len(fns))
            print('Version', fn, '->', rfn)
            continue
        #print('Found', len(fns))
        #break
        # Nothing matched: the file is genuinely missing — stop loudly.
        assert(False)
    print('Maximum length of replacement filenames:', max([len(r) for r in replacements.values()]))
# Patch the annotated-CCDs table and write the result to the working dir.
if True:
    T = fits_table('/project/projectdirs/cosmo/data/legacysurvey/dr5/ccds-annotated-dr5.fits.gz')
    update_paths(T)
    # convert from 32A to float
    T.temp = np.array([float(t) for t in T.temp])
    # gratuitous
    T.delete_column('expid')
    T.writeto('ccds-annotated-dr5-patched.fits.gz')
# Disabled variant: patch the survey-CCDs kd-tree table the same way.
if False:
    T = fits_table('/project/projectdirs/cosmo/data/legacysurvey/dr5/survey-ccds-dr5.kd.fits')
    update_paths(T)
    T.writeto('survey-ccds-dr5-patched.fits')
| StarcoderdataPython |
1777298 | <gh_stars>0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Apr 28 16:38:27 2018
@author: song
"""
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 12 20:49:19 2018
@author: song
"""
# -*- coding: utf-8 -*-
"""
Created on Tue April 3 10:56:53 2018
Convolutional VAriational Autoencode
@author: <NAME>
"""
import sys, os
#sys.path.append(os.pardir) # parent directory
import numpy as np
import time
import glob
from xml.etree.ElementTree import Element, SubElement, dump, ElementTree
import torch
#import torch.nn as nn
#import torch.optim as optim
#import torch.nn.init as init
#import torch.nn.functional as F
#import torchvision.datasets as dset
#import torchvision.transforms as transforms
#from torch.utils.data import TensorDataset
#from torch.utils.data import DataLoader
from torch.autograd import Variable
#from mpl_toolkits.mplot3d import Axes3D
#from mpl_toolkits.mplot3d import proj3d
#import matplotlib.pyplot as plt
import cv2
import visdom
sys.path.append("../../ML_utils") # parent directory
import JK_image
#import matplotlib.animation as animation
#from sklearn.feature_extraction import image
#from PIL import Image
sys.path.append("networks") # parent directory
def numpyToTorchVariable(array, isGPU=True):
    """Wrap a numpy array as a torch Variable on the GPU or CPU.

    Args:
        array: numpy array to convert (shares memory via torch.from_numpy).
        isGPU: when True, move the Variable to CUDA; otherwise keep it on CPU.

    Returns:
        The wrapped Variable on the requested device.
    """
    variable = Variable(torch.from_numpy(array))
    return variable.cuda() if isGPU else variable.cpu()
def detectDefectViaVisdom( args, result_type=1, isResultSave=True):
    """Run the saved conv/deconv autoencoder over the evaluation images and
    display reconstructions in visdom.

    Args:
        args: dotdict with isGPU, load_folder_file (folder, prefix) and
            folderPathForEval.
        result_type: display mode — 0: one image per window; 1: input/output
            grids; 2: per-image input/gray/heatmap triplets; 3: stacked
            triplet grid.
        isResultSave: currently unused — TODO confirm whether saving to
            ./results/ was meant to be implemented.
    """
    resultFolderPath = "./results/"
    if not os.path.exists(resultFolderPath):
        os.mkdir(resultFolderPath)
    # Checkpoint paths derived from (folder, prefix) in load_folder_file.
    modelPath = "models/"+ args.load_folder_file[0] + args.load_folder_file[1] + '_all.pkl'
    convParamsPath = "models/"+ args.load_folder_file[0] + args.load_folder_file[1] + 'convParams.pkl'
    deconvParamsPath = "models/"+ args.load_folder_file[0] + args.load_folder_file[1] + 'deconvParams.pkl'
    convNet = ""
    deconvNet = ""
    try:
        print("Model path :", modelPath)
        # map_location keeps tensors on CPU regardless of where they were saved.
        convNet, deconvNet = torch.load(modelPath, map_location=lambda storage, location: storage)
        print("\n--------" + modelPath + " is restored--------\n")
# print("Conv parameters path :", convParamsPath)
# print("Deconv parameters path :", deconvParamsPath)
# convNet.load_state_dict(torch.load(convParamsPath)) # it loads only the model parameters (recommended)
# deconvNet.load_state_dict(torch.load(deconvParamsPath)) # it loads only the model parameters (recommended)
# print("\n--------" + convParamsPath + " is restored--------\n")
# print("\n--------" + deconvParamsPath + " is restored--------\n")
        if args.isGPU:
            convNet.cuda()
            deconvNet.cuda()
        else :
            convNet.cpu()
            deconvNet.cpu()
    except:
        # NOTE(review): bare except swallows every load error and leaves
        # convNet/deconvNet as strings, so .eval() below will raise — the
        # failure mode is confusing; consider re-raising instead.
        print("\n--------There are no models.--------\n")
        pass
    convNet.eval()
    deconvNet.eval()
# images = getImagesNumberOrder(args.folderPathForEval)
    images = JK_image.getImages(args.folderPathForEval)
    nImages = len(images)
    # All images are assumed to share the first image's height/width — TODO confirm.
    height = images[0].shape[0]
    width = images[0].shape[1]
    # Normalize to [0, 1] and reshape to NCHW with a single channel.
    X = np.array(images, dtype=np.float32)/255.
    X = X.reshape([-1,1,height,width])
# XX = np.hstack([X[0], X[1]]).reshape([-1,1,512,2*512])
# X = XX
    X_tensor = torch.from_numpy(X)
    if args.isGPU:
        X = Variable(X_tensor).cuda()
    else :
        X = Variable(X_tensor).cpu()
# X = numpyToTorchVariable(X, args.isGPU)
    # Forward pass: encode then decode (reconstruction).
    output = convNet(X)
    output = deconvNet(output)
    if args.isGPU:
        output = output.cpu()
    Y_tensor = output.data
    vis = visdom.Visdom()
    if result_type==0:
        # One visdom window per reconstructed image.
        for i in range(nImages):
            vis.image(Y_tensor[i])
    elif result_type==1:
        # Grids of inputs, outputs, and both concatenated.
        results_tensor = torch.cat((X_tensor, Y_tensor), 0)
        vis.images(X_tensor, nrow=6)
        vis.images(Y_tensor, nrow=6)
        vis.images(results_tensor, nrow=6)
# kk = out_images.numpy().reshape(-1,512,512)
# heat = np.hstack([kk[0], kk[1]]).reshape([512,2*512])
# vis.heatmap(heat)
    elif result_type==2:
        # Per image: RGB input, gray reconstruction, heatmap — one grid each.
        inputImages_np = X_tensor.numpy().reshape([-1, height, width])
        outputImages_np = Y_tensor.numpy().reshape([-1, height, width])
        for i in range(nImages):
            inputImg = inputImages_np[i]
# tempInput = JK_image.GRAY2CRGB(inputImg)
            tempInput = cv2.cvtColor(inputImg, cv2.COLOR_GRAY2BGR)
            tempInput = JK_image.HWC3toC3HW(tempInput)
            outputImg = outputImages_np[i]
            tempOutput = cv2.cvtColor(outputImg, cv2.COLOR_GRAY2BGR)
            tempOutput = JK_image.HWC3toC3HW(tempOutput)
            tempHeatmap = JK_image.GRAY2HeatMap(outputImg)
            tempInput = np.array(tempInput).reshape(-1, 3, height, width)
            tempOutput = np.array(tempOutput).reshape(-1, 3, height, width)
            tempHeatmap = np.array(tempHeatmap).reshape(-1, 3, height, width)
            result = np.concatenate((tempInput, tempOutput), axis=0)
            result = np.concatenate((result, tempHeatmap), axis=0)
            vis.images(result, nrow=6)
    elif result_type==3:
        # Same triplets as type 2, but stacked into a single grid window.
        inputImages_np = X_tensor.numpy().reshape([-1, height, width])
        outputImages_np = Y_tensor.numpy().reshape([-1, height, width])
# inputImages = []
# outputImages = []
# heatmapImages = []
        resultImages = []
        for i in range(nImages):
            inputImg = JK_image.HWC3toC3HW( cv2.cvtColor(inputImages_np[i], cv2.COLOR_GRAY2RGB) )
            outputGray = JK_image.HWC3toC3HW( cv2.cvtColor(outputImages_np[i] , cv2.COLOR_GRAY2RGB) )
            outputHeat = JK_image.GRAY2HeatMap(outputImages_np[i] )
            resultImg = np.concatenate((inputImg, outputGray), axis=0)
            resultImg = np.concatenate((resultImg, outputHeat), axis=0)
            resultImages.append(resultImg)
# inputImages.append( util_image.HWC3toC3HW( cv2.cvtColor(inputImg, cv2.COLOR_GRAY2RGB) ) )
# outputImages.append( util_image.HWC3toC3HW( cv2.cvtColor(outputImg, cv2.COLOR_GRAY2RGB) ) )
# heatmapImages.append( util_image.GRAY2HeatMap(outputImg) )
# inputImages = np.array(inputImages).reshape(-1, 3, height, width)
# outputImages = np.array(outputImages).reshape(-1, 3, height, width)
# heatmapImages = np.array(heatmapImages).reshape(-1, 3, height, width)
# results = np.concatenate((inputImages, outputImages), axis=0)
# results = np.concatenate((results, heatmapImages), axis=0)
# vis.images(results, nrow=6)
        resultImages = np.array(resultImages).reshape(-1, 3, height, width)
        vis.images(resultImages, nrow=3)
    print("Input size :", X_tensor.size())
    print("Output size :", Y_tensor.size())
    print("========================================================")
class dotdict(dict):
    """dict subclass allowing attribute-style read access (d.key == d['key']).

    Fix: missing names now raise AttributeError instead of leaking KeyError,
    so hasattr()/getattr() with a default — and tools such as copy and
    pickle, which probe for optional dunder attributes — behave correctly.
    """
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name) from None
if __name__ == '__main__':
    # Report the torch build, probe for CUDA, then run defect detection on
    # the evaluation folder with the configured model.
    print("Torch version :", torch.__version__)
    # GPU check
    useGPU = torch.cuda.is_available()
    if useGPU :
        deviceNo = torch.cuda.current_device()
        print("GPU_is_available.")
        print("DeviceNo :", deviceNo)
        print(torch.cuda.device(deviceNo))
        print("Device_count :", torch.cuda.device_count())
# print(torch.cuda.get_device_name(0))
# print("Device_capability :", torch.cuda.get_device_capability(deviceNo))
# print("Device_max_memory :", torch.cuda.max_memory_allocated(deviceNo))
# print("Device_max_memory_cached :", torch.cuda.max_memory_cached(deviceNo))
    else :
        print("There are no GPU.")
    args = dotdict({
        'isGPU' : False, #False, # True,
        'load_folder_file': ("DAGM_jkfcn3/",'jkfcn3'), #("ForClass4_jkfcn3/",'ForClass4_jkfcn3'), #
        'folderPathForEval': "../DataForEvaluation",
    })
    # Fall back to CPU when CUDA was requested but is unavailable.
    # Fix: this fallback block was accidentally duplicated verbatim; the
    # second copy has been removed.
    if useGPU==False and args.isGPU==True:
        args.isGPU = False
        print("GPU is not availabe.")
    if args.isGPU==False:
        print("Runing by CPU")
    detectDefectViaVisdom(args, result_type=0)
| StarcoderdataPython |
3236864 | #!/usr/bin/python2.7
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Initialize Django and its internationalization machinery.
In any file that uses django modules, always import django_setup first.
To localize strings in Python files, import either gettext_lazy or ugettext
from this module as _, then use _('foo') to mark the strings to be translated.
Use gettext_lazy for strings that are declared before a language has been
selected; ugettext for those after (ugettext is safe to use in all Handlers)."""
__author__ = '<EMAIL> (<NAME>)'
import django
import django.conf
import django.template
import django.template.loaders.base
import django.utils.translation
import os
from django.utils.translation import activate, gettext_lazy, ugettext
# Default UI language and the set of right-to-left language codes.
LANGUAGE_CODE = 'en'
LANGUAGES_BIDI = ['ar', 'he', 'fa', 'iw', 'ur']

# Work around a dev_appserver hang on macOS caused by proxy autodetection.
if os.environ.get('SERVER_SOFTWARE', '').startswith('Development'):
    # See https://web.archive.org/web/20160916120959/http://code.google.com/p/googleappengine/issues/detail?id=985
    import urllib
    urllib.getproxies_macosx_sysconf = lambda: {}

# Configure Django settings manually since we run outside a Django project.
django.conf.settings.configure()
django.conf.settings.LANGUAGE_CODE = LANGUAGE_CODE
# Enables Django translation system e.g. {% trans %} tag
django.conf.settings.USE_I18N = True
# Enables localized formatting
# e.g. localizing date/time format for {% my_date|date:"DATETIME_FORMAT" %}
django.conf.settings.USE_L10N = True
django.conf.settings.LOCALE_PATHS = ('locale',)
django.conf.settings.LANGUAGES_BIDI = LANGUAGES_BIDI
# https://docs.djangoproject.com/en/1.9/ref/templates/upgrading/#the-templates-settings
django.conf.settings.TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
        ],
        'OPTIONS': {
            'context_processors': [
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
            # All templates are served by our resource-backed loader below.
            'loaders': [
                'django_setup.TemplateLoader',
            ]
        },
    },
]

# It's required to call this function when we use Django features outside
# Django framework.
django.setup()
class TemplateLoader(django.template.loaders.base.Loader):
    """Our custom template loader, which loads templates from Resources,
    localized to the currently active Django language."""

    def get_template(self, name, template_dirs=None, skip=None):
        """Return the localized Template for `name`, or raise
        TemplateDoesNotExist when no matching resource exists."""
        # Imported lazily to avoid a circular import at module load time
        # — TODO confirm; resources is a sibling application module.
        import resources
        lang = django.utils.translation.get_language() # currently active lang
        resource = resources.get_localized(name, lang)
        template = resource and resource.get_template()
        if template:
            return template
        else:
            raise django.template.TemplateDoesNotExist(name)

    def get_contents(self, origin):
        # Defining this method is necessary so that Django recognizes that
        # this loader is in the new format (using get_template() instead of
        # load_template()). But this method is actually not called when
        # get_template() is overridden.
        raise Exception('Not expected to be called')
| StarcoderdataPython |
4804203 | from gqa_dataset import *
def calc_statistics():
    """Print a running histogram of executed-program lengths for the GQA
    testdev_pred split.

    A program row counts as one active step when any of its entries is
    positive; the histogram maps program length -> number of samples seen
    so far, and is printed after every sample.
    """
    # Token vocabulary (word -> id) used when the dataset was built.
    with open('{}/full_vocab.json'.format('meta_info/'), 'r') as f:
        vocab = json.load(f)
    # Answer vocabulary (answer -> id).
    with open('{}/answer_vocab.json'.format('meta_info/'), 'r') as f:
        answer = json.load(f)
    split = 'testdev_pred'
    # NOTE(review): constructor values are hard-coded for this diagnostic
    # run; confirm they match the dataset's training configuration.
    diagnose = GQA(split=split, mode='val', contained_weight=0.1, threshold=0.0, folder='gqa_bottom_up_features/', cutoff=0.5, vocab=vocab, answer=answer,
                   forbidden='', object_info='meta_info/gqa_objects_merged_info.json', num_tokens=30,
                   num_regions=48, length=9, max_layer=5, distribution=False)
    all_lengths = {}
    for idx, ele in enumerate(diagnose):
        prog = ele[2]
        # A program row with any positive entry is an active step.
        length = sum(1 for one_r in prog if max(one_r) > 0)
        all_lengths[length] = all_lengths.get(length, 0) + 1
        print(f'************************ idx: {idx} ************************')
        print(f"program stats:", all_lengths)


if __name__ == '__main__':
    calc_statistics()
| StarcoderdataPython |
1610444 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2014 eNovance Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from kazoo import client
from kazoo import exceptions
from kazoo import security
try:
from kazoo.handlers import eventlet as eventlet_handler
except ImportError:
eventlet_handler = None
from kazoo.handlers import threading as threading_handler
from kazoo.protocol import paths
from oslo_utils import encodeutils
from oslo_utils import strutils
import tooz
from tooz import coordination
from tooz import locking
from tooz import utils
class ZooKeeperLock(locking.Lock):
    """A tooz lock backed by a kazoo (ZooKeeper) lock recipe."""

    def __init__(self, name, lock):
        super(ZooKeeperLock, self).__init__(name)
        self._zk_lock = lock
        self._zk_client = lock.client

    def is_still_owner(self):
        """Return True only if the lock znode we created still holds our data."""
        if not self.acquired:
            return False
        try:
            data, _znode = self._zk_client.get(
                paths.join(self._zk_lock.path, self._zk_lock.node))
        except (self._zk_client.handler.timeout_exception,
                exceptions.ConnectionLoss,
                exceptions.ConnectionDropped,
                exceptions.NoNodeError):
            # Connection trouble or the znode vanished: we no longer own it.
            return False
        except exceptions.KazooException as e:
            utils.raise_with_cause(tooz.ToozError,
                                   "operation error: %s" % (e),
                                   cause=e)
        else:
            return data == self._zk_lock.data

    def acquire(self, blocking=True, shared=False):
        """Acquire the lock; shared (read) locks are not supported."""
        if shared:
            raise tooz.NotImplemented
        should_block, wait_timeout = utils.convert_blocking(blocking)
        return self._zk_lock.acquire(blocking=should_block,
                                     timeout=wait_timeout)

    def release(self):
        """Release the lock if held; return whether a release happened."""
        if not self.acquired:
            return False
        self._zk_lock.release()
        return True

    @property
    def acquired(self):
        return self._zk_lock.is_acquired
class KazooDriver(coordination.CoordinationDriverCachedRunWatchers):
    """This driver uses the `kazoo`_ client against real `zookeeper`_ servers.
    It **is** fully functional and implements all of the coordination
    driver API(s). It stores data into `zookeeper`_ using znodes
    and `msgpack`_ encoded values.
    To configure the client to your liking a subset of the options defined at
    http://kazoo.readthedocs.org/en/latest/api/client.html
    will be extracted from the coordinator url (or any provided options),
    so that a specific coordinator can be created that will work for you.
    The Zookeeper coordinator url should look like::
        zookeeper://[USERNAME:PASSWORD@][HOST[:PORT]][?OPTION1=VALUE1[&OPTION2=VALUE2[&...]]]
    Currently the following options will be proxied to the contained client:
    ================  ===============================  ====================
    Name              Source                           Default
    ================  ===============================  ====================
    hosts             url netloc + 'hosts' option key  localhost:2181
    timeout           'timeout' options key            10.0 (kazoo default)
    connection_retry  'connection_retry' options key   None
    command_retry     'command_retry' options key      None
    randomize_hosts   'randomize_hosts' options key    True
    ================  ===============================  ====================
    .. _kazoo: http://kazoo.readthedocs.org/
    .. _zookeeper: http://zookeeper.apache.org/
    .. _msgpack: http://msgpack.org/
    """

    #: Default namespace when none is provided.
    TOOZ_NAMESPACE = b"tooz"

    HANDLERS = {
        'threading': threading_handler.SequentialThreadingHandler,
    }
    if eventlet_handler:
        HANDLERS['eventlet'] = eventlet_handler.SequentialEventletHandler
    """
    Restricted immutable dict of handler 'kinds' -> handler classes that
    this driver can accept via 'handler' option key (the expected value for
    this option is one of the keys in this dictionary).
    """

    CHARACTERISTICS = (
        coordination.Characteristics.NON_TIMEOUT_BASED,
        coordination.Characteristics.DISTRIBUTED_ACROSS_THREADS,
        coordination.Characteristics.DISTRIBUTED_ACROSS_PROCESSES,
        coordination.Characteristics.DISTRIBUTED_ACROSS_HOSTS,
        # Writes *always* go through a single leader process, but it may
        # take a while for those writes to propagate to followers (and =
        # during this time clients can read older values)...
        coordination.Characteristics.SEQUENTIAL,
    )
    """
    Tuple of :py:class:`~tooz.coordination.Characteristics` introspectable
    enum member(s) that can be used to interogate how this driver works.
    """

    def __init__(self, member_id, parsed_url, options):
        super(KazooDriver, self).__init__(member_id, parsed_url, options)
        options = utils.collapse(options, exclude=['hosts'])
        self.timeout = int(options.get('timeout', '10'))
        self._namespace = options.get('namespace', self.TOOZ_NAMESPACE)
        self._coord = self._make_client(parsed_url, options)
        self._timeout_exception = self._coord.handler.timeout_exception

    def _start(self):
        """Start the kazoo client and ensure the tooz namespace znode exists."""
        try:
            self._coord.start(timeout=self.timeout)
        except self._coord.handler.timeout_exception as e:
            e_msg = encodeutils.exception_to_unicode(e)
            utils.raise_with_cause(coordination.ToozConnectionError,
                                   "Operational error: %s" % e_msg,
                                   cause=e)
        try:
            self._coord.ensure_path(self._paths_join("/", self._namespace))
        except exceptions.KazooException as e:
            e_msg = encodeutils.exception_to_unicode(e)
            utils.raise_with_cause(tooz.ToozError,
                                   "Operational error: %s" % e_msg,
                                   cause=e)
        self._leader_locks = {}

    def _stop(self):
        """Stop the underlying kazoo client."""
        self._coord.stop()

    @staticmethod
    def _dumps(data):
        """Serialize a value for storage in a znode."""
        return utils.dumps(data)

    @staticmethod
    def _loads(blob):
        """Deserialize a value previously stored in a znode."""
        return utils.loads(blob)

    def _create_group_handler(self, async_result, timeout,
                              timeout_exception, group_id):
        # Translate kazoo failures into the corresponding tooz exceptions.
        try:
            async_result.get(block=True, timeout=timeout)
        except timeout_exception as e:
            utils.raise_with_cause(coordination.OperationTimedOut,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)
        except exceptions.NodeExistsError:
            raise coordination.GroupAlreadyExist(group_id)
        except exceptions.NoNodeError as e:
            utils.raise_with_cause(tooz.ToozError,
                                   "Tooz namespace '%s' has not"
                                   " been created" % self._namespace,
                                   cause=e)
        except exceptions.ZookeeperError as e:
            utils.raise_with_cause(tooz.ToozError,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)

    def create_group(self, group_id):
        """Asynchronously create a group znode under the tooz namespace."""
        group_path = self._path_group(group_id)
        async_result = self._coord.create_async(group_path)
        return ZooAsyncResult(async_result, self._create_group_handler,
                              timeout_exception=self._timeout_exception,
                              group_id=group_id)

    @staticmethod
    def _delete_group_handler(async_result, timeout,
                              timeout_exception, group_id):
        try:
            async_result.get(block=True, timeout=timeout)
        except timeout_exception as e:
            utils.raise_with_cause(coordination.OperationTimedOut,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)
        except exceptions.NoNodeError:
            raise coordination.GroupNotCreated(group_id)
        except exceptions.NotEmptyError:
            raise coordination.GroupNotEmpty(group_id)
        except exceptions.ZookeeperError as e:
            utils.raise_with_cause(tooz.ToozError,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)

    def delete_group(self, group_id):
        """Asynchronously delete an (empty) group znode."""
        group_path = self._path_group(group_id)
        async_result = self._coord.delete_async(group_path)
        return ZooAsyncResult(async_result, self._delete_group_handler,
                              timeout_exception=self._timeout_exception,
                              group_id=group_id)

    @staticmethod
    def _join_group_handler(async_result, timeout,
                            timeout_exception, group_id, member_id):
        try:
            async_result.get(block=True, timeout=timeout)
        except timeout_exception as e:
            utils.raise_with_cause(coordination.OperationTimedOut,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)
        except exceptions.NodeExistsError:
            raise coordination.MemberAlreadyExist(group_id, member_id)
        except exceptions.NoNodeError:
            raise coordination.GroupNotCreated(group_id)
        except exceptions.ZookeeperError as e:
            utils.raise_with_cause(tooz.ToozError,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)

    def join_group(self, group_id, capabilities=b""):
        """Join a group as this driver's member (ephemeral znode)."""
        member_path = self._path_member(group_id, self._member_id)
        capabilities = self._dumps(capabilities)
        # Ephemeral: the membership disappears if this client's session dies.
        async_result = self._coord.create_async(member_path,
                                                value=capabilities,
                                                ephemeral=True)
        return ZooAsyncResult(async_result, self._join_group_handler,
                              timeout_exception=self._timeout_exception,
                              group_id=group_id, member_id=self._member_id)

    @staticmethod
    def _leave_group_handler(async_result, timeout,
                             timeout_exception, group_id, member_id):
        try:
            async_result.get(block=True, timeout=timeout)
        except timeout_exception as e:
            utils.raise_with_cause(coordination.OperationTimedOut,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)
        except exceptions.NoNodeError:
            raise coordination.MemberNotJoined(group_id, member_id)
        except exceptions.ZookeeperError as e:
            utils.raise_with_cause(tooz.ToozError,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)

    def heartbeat(self):
        # Just fetch the base path (and do nothing with it); this will
        # force any waiting heartbeat responses to be flushed, and also
        # ensures that the connection still works as expected...
        base_path = self._paths_join("/", self._namespace)
        try:
            self._coord.get(base_path)
        except self._timeout_exception as e:
            utils.raise_with_cause(coordination.OperationTimedOut,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)
        except exceptions.NoNodeError:
            pass
        except exceptions.ZookeeperError as e:
            utils.raise_with_cause(tooz.ToozError,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)
        return self.timeout

    def leave_group(self, group_id):
        """Remove this driver's member znode from a group."""
        member_path = self._path_member(group_id, self._member_id)
        async_result = self._coord.delete_async(member_path)
        return ZooAsyncResult(async_result, self._leave_group_handler,
                              timeout_exception=self._timeout_exception,
                              group_id=group_id, member_id=self._member_id)

    @staticmethod
    def _get_members_handler(async_result, timeout,
                             timeout_exception, group_id):
        try:
            members_ids = async_result.get(block=True, timeout=timeout)
        except timeout_exception as e:
            utils.raise_with_cause(coordination.OperationTimedOut,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)
        except exceptions.NoNodeError:
            raise coordination.GroupNotCreated(group_id)
        except exceptions.ZookeeperError as e:
            utils.raise_with_cause(tooz.ToozError,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)
        else:
            return set(m.encode('ascii') for m in members_ids)

    def get_members(self, group_id):
        """Fetch the set of member ids (bytes) currently in a group."""
        group_path = self._paths_join("/", self._namespace, group_id)
        async_result = self._coord.get_children_async(group_path)
        return ZooAsyncResult(async_result, self._get_members_handler,
                              timeout_exception=self._timeout_exception,
                              group_id=group_id)

    @staticmethod
    def _update_capabilities_handler(async_result, timeout,
                                     timeout_exception, group_id, member_id):
        try:
            async_result.get(block=True, timeout=timeout)
        except timeout_exception as e:
            utils.raise_with_cause(coordination.OperationTimedOut,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)
        except exceptions.NoNodeError:
            raise coordination.MemberNotJoined(group_id, member_id)
        except exceptions.ZookeeperError as e:
            utils.raise_with_cause(tooz.ToozError,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)

    def update_capabilities(self, group_id, capabilities):
        """Replace this driver's member capabilities within a group."""
        member_path = self._path_member(group_id, self._member_id)
        capabilities = self._dumps(capabilities)
        async_result = self._coord.set_async(member_path, capabilities)
        return ZooAsyncResult(async_result, self._update_capabilities_handler,
                              timeout_exception=self._timeout_exception,
                              group_id=group_id, member_id=self._member_id)

    @classmethod
    def _get_member_capabilities_handler(cls, async_result, timeout,
                                         timeout_exception, group_id,
                                         member_id):
        try:
            capabilities = async_result.get(block=True, timeout=timeout)[0]
        except timeout_exception as e:
            utils.raise_with_cause(coordination.OperationTimedOut,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)
        except exceptions.NoNodeError:
            raise coordination.MemberNotJoined(group_id, member_id)
        except exceptions.ZookeeperError as e:
            utils.raise_with_cause(tooz.ToozError,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)
        else:
            return cls._loads(capabilities)

    def get_member_capabilities(self, group_id, member_id):
        """Fetch the stored capabilities of any member of a group."""
        member_path = self._path_member(group_id, member_id)
        async_result = self._coord.get_async(member_path)
        # BUGFIX: report the *queried* member on failure, not this driver's
        # own member id (previously passed member_id=self._member_id, which
        # made MemberNotJoined name the wrong member).
        return ZooAsyncResult(async_result,
                              self._get_member_capabilities_handler,
                              timeout_exception=self._timeout_exception,
                              group_id=group_id, member_id=member_id)

    @classmethod
    def _get_member_info_handler(cls, async_result, timeout,
                                 timeout_exception, group_id,
                                 member_id):
        try:
            capabilities, znode_stats = async_result.get(block=True,
                                                         timeout=timeout)
        except timeout_exception as e:
            utils.raise_with_cause(coordination.OperationTimedOut,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)
        except exceptions.NoNodeError:
            raise coordination.MemberNotJoined(group_id, member_id)
        except exceptions.ZookeeperError as e:
            utils.raise_with_cause(tooz.ToozError,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)
        else:
            member_info = {
                'capabilities': cls._loads(capabilities),
                'created_at': utils.millis_to_datetime(znode_stats.ctime),
                'updated_at': utils.millis_to_datetime(znode_stats.mtime)
            }
            return member_info

    def get_member_info(self, group_id, member_id):
        """Fetch capabilities plus created/updated timestamps for a member."""
        member_path = self._path_member(group_id, member_id)
        async_result = self._coord.get_async(member_path)
        # BUGFIX: as in get_member_capabilities(), pass the queried member_id
        # (not self._member_id) so error reporting names the right member.
        return ZooAsyncResult(async_result,
                              self._get_member_info_handler,
                              timeout_exception=self._timeout_exception,
                              group_id=group_id, member_id=member_id)

    def _get_groups_handler(self, async_result, timeout, timeout_exception):
        try:
            group_ids = async_result.get(block=True, timeout=timeout)
        except timeout_exception as e:
            utils.raise_with_cause(coordination.OperationTimedOut,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)
        except exceptions.NoNodeError as e:
            utils.raise_with_cause(tooz.ToozError,
                                   "Tooz namespace '%s' has not"
                                   " been created" % self._namespace,
                                   cause=e)
        except exceptions.ZookeeperError as e:
            utils.raise_with_cause(tooz.ToozError,
                                   encodeutils.exception_to_unicode(e),
                                   cause=e)
        else:
            return set(g.encode('ascii') for g in group_ids)

    def get_groups(self):
        """Fetch the set of group ids (bytes) under the tooz namespace."""
        tooz_namespace = self._paths_join("/", self._namespace)
        async_result = self._coord.get_children_async(tooz_namespace)
        return ZooAsyncResult(async_result, self._get_groups_handler,
                              timeout_exception=self._timeout_exception)

    def _path_group(self, group_id):
        """Return the znode path for a group."""
        return self._paths_join("/", self._namespace, group_id)

    def _path_member(self, group_id, member_id):
        """Return the znode path for a member within a group."""
        return self._paths_join("/", self._namespace, group_id, member_id)

    @staticmethod
    def _paths_join(arg, *more_args):
        """Converts paths into a string (unicode)."""
        args = [arg]
        args.extend(more_args)
        cleaned_args = []
        for arg in args:
            if isinstance(arg, bytes):
                cleaned_args.append(
                    encodeutils.safe_decode(arg, incoming='ascii')
                )
            else:
                cleaned_args.append(arg)
        return paths.join(*cleaned_args)

    def _make_client(self, parsed_url, options):
        # Creates a kazoo client,
        # See: https://github.com/python-zk/kazoo/blob/2.2.1/kazoo/client.py
        # for what options a client takes...
        if parsed_url.username and parsed_url.password:
            username = parsed_url.username
            password = parsed_url.password
            digest_auth = "%s:%s" % (username, password)
            digest_acl = security.make_digest_acl(username, password, all=True)
            default_acl = (digest_acl,)
            auth_data = [('digest', digest_auth)]
        else:
            default_acl = None
            auth_data = None
        maybe_hosts = [parsed_url.netloc] + list(options.get('hosts', []))
        hosts = list(filter(None, maybe_hosts))
        if not hosts:
            hosts = ['localhost:2181']
        randomize_hosts = options.get('randomize_hosts', True)
        client_kwargs = {
            'hosts': ",".join(hosts),
            'timeout': float(options.get('timeout', self.timeout)),
            'connection_retry': options.get('connection_retry'),
            'command_retry': options.get('command_retry'),
            'randomize_hosts': strutils.bool_from_string(randomize_hosts),
            'auth_data': auth_data,
            'default_acl': default_acl,
        }
        handler_kind = options.get('handler')
        if handler_kind:
            try:
                handler_cls = self.HANDLERS[handler_kind]
            except KeyError:
                raise ValueError("Unknown handler '%s' requested"
                                 " valid handlers are %s"
                                 % (handler_kind,
                                    sorted(self.HANDLERS.keys())))
            client_kwargs['handler'] = handler_cls()
        return client.KazooClient(**client_kwargs)

    def stand_down_group_leader(self, group_id):
        """Release leadership of a group if this member currently holds it."""
        if group_id in self._leader_locks:
            self._leader_locks[group_id].release()
            return True
        return False

    def _get_group_leader_lock(self, group_id):
        """Return (creating if needed) the leader-election lock for a group."""
        if group_id not in self._leader_locks:
            self._leader_locks[group_id] = self._coord.Lock(
                self._path_group(group_id) + "/leader",
                encodeutils.safe_decode(self._member_id, incoming='ascii'))
        return self._leader_locks[group_id]

    def get_leader(self, group_id):
        """Return an async result whose value is the leader's member id."""
        contenders = self._get_group_leader_lock(group_id).contenders()
        if contenders and contenders[0]:
            # The first contender of the lock recipe is the current leader.
            leader = contenders[0].encode('ascii')
        else:
            leader = None
        return ZooAsyncResult(None, lambda *args: leader)

    def get_lock(self, name):
        """Return a distributed lock stored under the namespace's locks path."""
        z_lock = self._coord.Lock(
            self._paths_join(b"/", self._namespace, b"locks", name),
            encodeutils.safe_decode(self._member_id, incoming='ascii'))
        return ZooKeeperLock(name, z_lock)

    def run_elect_coordinator(self):
        """Attempt leadership of each watched group; fire hooks on success."""
        for group_id in self._hooks_elected_leader.keys():
            leader_lock = self._get_group_leader_lock(group_id)
            if leader_lock.is_acquired:
                # Previously acquired/still leader, leave it be...
                continue
            if leader_lock.acquire(blocking=False):
                # We are now leader for this group
                self._hooks_elected_leader[group_id].run(
                    coordination.LeaderElected(
                        group_id,
                        self._member_id))

    def run_watchers(self, timeout=None):
        """Run membership watchers, then attempt coordinator election."""
        results = super(KazooDriver, self).run_watchers(timeout)
        self.run_elect_coordinator()
        return results
class ZooAsyncResult(coordination.CoordAsyncResult):
    """Adapts a kazoo async result (plus an error-translating handler)
    to the tooz CoordAsyncResult interface."""

    def __init__(self, kazoo_async_result, handler, **kwargs):
        self._result = kazoo_async_result
        self._translate = handler
        self._translate_kwargs = kwargs

    def get(self, timeout=None):
        # The handler blocks on the kazoo result and converts kazoo
        # exceptions into tooz ones.
        return self._translate(self._result, timeout, **self._translate_kwargs)

    def done(self):
        return self._result.ready()
| StarcoderdataPython |
1707036 | <filename>instr_helpers.py<gh_stars>1-10
#!/usr/bin/python3
def isreg(d):
    """Return True if *d* is a dict describing a register operand.

    Uses isinstance (accepting dict subclasses) instead of an exact
    type comparison, and drops the redundant ``.keys()`` membership test.
    """
    return isinstance(d, dict) and 'register' in d
def is_half_width(r):
    """Return the 'w' suffix if operand dict *r* is flagged half-width.

    Returns '' when the flag is absent or falsy (same as the original
    nested-if version, expressed via ``dict.get``).
    """
    return 'w' if r.get('half_width') else ''
def safe_pullregs(operands):
    """Collect register names from operand dicts, skipping non-register
    operands (list comprehension replaces the manual append loop)."""
    return [o['register'] for o in operands if 'register' in o]
def pulltypes(operands):
    """Classify each operand as 'register', 'immediate', 'label', or None.

    When an operand carries several of those keys the first matching kind
    wins, preserving the register > immediate > label priority of the
    original if/elif chain.
    """
    kinds = ('register', 'immediate', 'label')
    return [next((kind for kind in kinds if kind in o), None)
            for o in operands]
# Map FP register width (bits) -> RISC-V format suffix for FP op mnemonics.
fpfmtmap = {
    16: 'h',
    32: 's',
    64: 'd',
    128: 'q'
}

# Map FP register width (bits) -> suffix used by FP load/store mnemonics.
float_load_fmt_map = {
    32: 'w',
    64: 'd',
    128: 'q'
}


def get_fl_flag(reg):
    """Return the appropriate RISC-V char suffix for the width.

    Returns None when *reg* is not a dict or lacks a 'width' key; raises
    KeyError for an unmapped width (same as the original lookup).
    """
    if isinstance(reg, dict) and 'width' in reg:
        return fpfmtmap[reg['width']]
    return None


def get_fl_ld_flag(reg):
    """Return the RISC-V load/store width suffix for a FP operand.

    Returns None when *reg* is not a dict or lacks a 'width' key; raises
    KeyError for an unmapped width.
    """
    if isinstance(reg, dict) and 'width' in reg:
        return float_load_fmt_map[reg['width']]
    return None
3393045 | <gh_stars>0
import os
import sys
import time
import json
import unittest
from jc.exceptions import ParseError
import jc.parsers.stat_s
# Directory containing this test module; fixture paths resolve relative to it.
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
# Set the timezone on POSIX systems. Need to manually set for Windows tests
if not sys.platform.startswith('win32'):
    os.environ['TZ'] = 'America/Los_Angeles'
    time.tzset()
# To create streaming output use:
# $ cat stat.out | jc --stat-s | jello -c > stat-streaming.json
class MyTests(unittest.TestCase):
    """Tests for the 'stat' streaming parser against recorded fixtures."""

    @staticmethod
    def _fixture(rel_path):
        """Read and return the text of a file below tests/fixtures."""
        full_path = os.path.join(THIS_DIR, os.pardir, 'tests/fixtures', rel_path)
        with open(full_path, 'r', encoding='utf-8') as f:
            return f.read()

    def setUp(self):
        # Raw `stat` command output captured on each platform.
        self.centos_7_7_stat = self._fixture('centos-7.7/stat.out')
        self.ubuntu_18_4_stat = self._fixture('ubuntu-18.04/stat.out')
        self.osx_10_14_6_stat = self._fixture('osx-10.14.6/stat.out')
        self.osx_10_14_6_stat_filename_with_spaces = self._fixture(
            'osx-10.14.6/stat-filename-with-spaces.out')
        self.freebsd12_stat = self._fixture('freebsd12/stat.out')
        # Expected streaming-parser JSON output for each capture.
        self.centos_7_7_stat_streaming_json = json.loads(
            self._fixture('centos-7.7/stat-streaming.json'))
        self.ubuntu_18_4_stat_streaming_json = json.loads(
            self._fixture('ubuntu-18.04/stat-streaming.json'))
        self.osx_10_14_6_stat_streaming_json = json.loads(
            self._fixture('osx-10.14.6/stat-streaming.json'))
        self.osx_10_14_6_stat_filename_with_spaces_streaming_json = json.loads(
            self._fixture('osx-10.14.6/stat-filename-with-spaces-streaming.json'))
        self.freebsd12_stat_streaming_json = json.loads(
            self._fixture('freebsd12/stat-streaming.json'))

    def _parsed(self, fixture_text):
        """Run the streaming parser over fixture text, collecting results."""
        return list(jc.parsers.stat_s.parse(fixture_text.splitlines(),
                                            quiet=True))

    def test_stat_s_nodata(self):
        """Test 'stat' with no data"""
        self.assertEqual(list(jc.parsers.stat_s.parse([], quiet=True)), [])

    def test_stat_s_unparsable(self):
        gen = jc.parsers.stat_s.parse('unparsable data'.splitlines(),
                                      quiet=True)
        with self.assertRaises(ParseError):
            list(gen)

    def test_stat_s_centos_7_7(self):
        """Test 'stat /bin/*' on Centos 7.7"""
        self.assertEqual(self._parsed(self.centos_7_7_stat),
                         self.centos_7_7_stat_streaming_json)

    def test_stat_s_ubuntu_18_4(self):
        """Test 'stat /bin/*' on Ubuntu 18.4"""
        self.assertEqual(self._parsed(self.ubuntu_18_4_stat),
                         self.ubuntu_18_4_stat_streaming_json)

    def test_stat_s_osx_10_14_6(self):
        """Test 'stat /bin/*' on OSX 10.14.6"""
        self.assertEqual(self._parsed(self.osx_10_14_6_stat),
                         self.osx_10_14_6_stat_streaming_json)

    def test_stat_s_filename_with_spaces_osx_10_14_6(self):
        """Test 'stat' filename with spaces on OSX 10.14.6"""
        self.assertEqual(
            self._parsed(self.osx_10_14_6_stat_filename_with_spaces),
            self.osx_10_14_6_stat_filename_with_spaces_streaming_json)

    def test_stat_s_freebsd12(self):
        """Test 'stat /foo/*' on FreeBSD12"""
        self.assertEqual(self._parsed(self.freebsd12_stat),
                         self.freebsd12_stat_streaming_json)
# Allow running this test module directly (outside pytest/unittest discovery).
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
98720 | from decimal import Decimal as D
from django.db.models import Sum
from django.test import TestCase, TransactionTestCase
from oscar.test.factories import UserFactory
import mock
from oscar_accounts import facade, exceptions
from oscar_accounts.models import Account, Transfer, Transaction
from oscar_accounts.test_factories import AccountFactory
class TestReversingATransfer(TestCase):
    """Checks that reversing a transfer restores balances and records
    the reversal's metadata correctly."""
    def setUp(self):
        # Authorising user plus a source account with no credit limit
        # (so the debit below is permitted) and a destination account.
        self.user = UserFactory()
        self.source = AccountFactory(primary_user=None, credit_limit=None)
        self.destination = AccountFactory(primary_user=None)
        # Perform a transfer and then immediately reverse it.
        self.transfer = facade.transfer(self.source, self.destination,
                                        D('100'), user=self.user,
                                        description="Give money to customer")
        self.reverse = facade.reverse(self.transfer, self.user,
                                      description="Oops! Return money")
    def test_creates_4_transactions(self):
        # Two transactions per transfer: the original plus its reversal.
        self.assertEqual(4, Transaction.objects.all().count())
    def test_creates_2_transfers(self):
        self.assertEqual(2, Transfer.objects.all().count())
    def test_leaves_both_balances_unchanged(self):
        # The reversal cancels the original movement exactly.
        self.assertEqual(D('0.00'), self.source.balance)
        self.assertEqual(D('0.00'), self.destination.balance)
    def test_records_the_authorising_user(self):
        self.assertEqual(self.user, self.reverse.user)
    def test_records_the_transfer_message(self):
        self.assertEqual("Oops! Return money", self.reverse.description)
    def test_records_the_correct_accounts(self):
        # The reversal flows in the opposite direction.
        self.assertEqual(self.source, self.reverse.destination)
        self.assertEqual(self.destination, self.reverse.source)
    def test_records_the_correct_amount(self):
        self.assertEqual(D('100'), self.reverse.amount)
class TestATransfer(TestCase):
    """Checks the bookkeeping performed by a single successful transfer."""
    def setUp(self):
        self.user = UserFactory()
        # Source has no credit limit, so the D('100') debit is permitted.
        self.source = AccountFactory(credit_limit=None, primary_user=None)
        self.destination = AccountFactory()
        self.transfer = facade.transfer(
            self.source, self.destination, D('100'),
            user=self.user, description="Give money to customer")
    def test_generates_an_unguessable_reference(self):
        self.assertTrue(len(self.transfer.reference) > 0)
    def test_records_the_authorising_user(self):
        self.assertEqual(self.user, self.transfer.user)
    def test_can_record_a_description(self):
        self.assertEqual("Give money to customer", self.transfer.description)
    def test_creates_two_transactions(self):
        # One debit plus one credit per transfer.
        self.assertEqual(2, self.transfer.transactions.all().count())
    def test_preserves_zero_sum_invariant(self):
        # The debit and credit transactions must cancel out exactly.
        aggregates = self.transfer.transactions.aggregate(sum=Sum('amount'))
        self.assertEqual(D('0.00'), aggregates['sum'])
    def test_debits_the_source_account(self):
        self.assertEqual(D('-100.00'), self.source.balance)
    def test_credits_the_destination_account(self):
        self.assertEqual(D('100.00'), self.destination.balance)
    def test_creates_a_credit_transaction(self):
        destination_txn = self.transfer.transactions.get(
            account=self.destination)
        self.assertEqual(D('100.00'), destination_txn.amount)
    def test_creates_a_debit_transaction(self):
        source_txn = self.transfer.transactions.get(account=self.source)
        self.assertEqual(D('-100.00'), source_txn.amount)
class TestAnAnonymousTransaction(TestCase):
    """A transfer without an authorising user should simply succeed."""

    def test_doesnt_explode(self):
        unlimited_source = AccountFactory(credit_limit=None)
        target = AccountFactory()
        facade.transfer(unlimited_source, target, D('1'))
class TestErrorHandling(TransactionTestCase):
    """Checks that failing transfers leave no partial rows behind and that
    both domain and unexpected runtime errors surface as AccountException."""
    def tearDown(self):
        # TransactionTestCase commits to the database, so clean up manually.
        Account.objects.all().delete()
    def test_no_transaction_created_when_exception_raised(self):
        user = UserFactory()
        source = AccountFactory(credit_limit=None)
        destination = AccountFactory()
        with mock.patch('oscar_accounts.abstract_models.PostingManager._wrap') as mock_method:
            mock_method.side_effect = RuntimeError()
            try:
                facade.transfer(source, destination, D('100'), user)
            except Exception:
                # The raised error is not under test here; we only care that
                # no partial Transfer/Transaction rows were committed.
                pass
        self.assertEqual(0, Transfer.objects.all().count())
        self.assertEqual(0, Transaction.objects.all().count())
    def test_account_exception_raised_for_invalid_transfer(self):
        user = UserFactory()
        # A zero credit limit makes the D('100') debit invalid.
        source = AccountFactory(credit_limit=D('0.00'))
        destination = AccountFactory()
        with self.assertRaises(exceptions.AccountException):
            facade.transfer(source, destination, D('100'), user)
    def test_account_exception_raised_for_runtime_error(self):
        user = UserFactory()
        source = AccountFactory(credit_limit=None)
        destination = AccountFactory()
        with mock.patch('oscar_accounts.abstract_models.PostingManager._wrap') as mock_method:
            # An unexpected low-level error should be wrapped, not leaked.
            mock_method.side_effect = RuntimeError()
            with self.assertRaises(exceptions.AccountException):
                facade.transfer(source, destination, D('100'), user)
| StarcoderdataPython |
1745351 | <gh_stars>1-10
import nose
import angr
import logging
l = logging.getLogger("angr_tests.managers")
import os
location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
# Per-architecture addresses within the fauxware test binary. The keys are
# used below as architecture directory names; the address values are not
# referenced by the visible tests -- NOTE(review): confirm whether they are
# still needed.
addresses_fauxware = {
    'armel': 0x8524,
    'armhf': 0x104c9, # addr+1 to force thumb
    #'i386': 0x8048524, # commenting out because of the freaking stack check
    'mips': 0x400710,
    'mipsel': 0x4006d0,
    'ppc': 0x1000054c,
    'ppc64': 0x10000698,
    'x86_64': 0x400664
}
def run_fauxware(arch, threads):
    """Drive a simulation manager through the fauxware binary for one arch.

    Exercises stepping, stash manipulation, exploration, merging and
    selective stepping; *threads* is forwarded to the simgr constructor.
    """
    p = angr.Project(os.path.join(location, arch, 'fauxware'), load_options={'auto_load_libs': False})
    pg = p.factory.simgr(threads=threads)
    nose.tools.assert_equal(len(pg.active), 1)
    nose.tools.assert_equal(pg.active[0].history.depth, 0)
    # step until the backdoor split occurs
    pg2 = pg.step(until=lambda lpg: len(lpg.active) > 1, step_func=lambda lpg: lpg.prune())
    nose.tools.assert_equal(len(pg2.active), 2)
    # Exactly one of the two paths should have consumed the backdoor input.
    nose.tools.assert_true(any("SOSNEAKY" in s for s in pg2.mp_active.posix.dumps(0).mp_items))
    nose.tools.assert_false(all("SOSNEAKY" in s for s in pg2.mp_active.posix.dumps(0).mp_items))
    # separate out the backdoor and normal paths
    pg3 = pg2.stash(lambda path: "SOSNEAKY" in path.posix.dumps(0), to_stash="backdoor").move('active', 'auth')
    nose.tools.assert_equal(len(pg3.active), 0)
    nose.tools.assert_equal(len(pg3.backdoor), 1)
    nose.tools.assert_equal(len(pg3.auth), 1)
    # step the backdoor path until it returns to main
    pg4 = pg3.step(until=lambda lpg: lpg.backdoor[0].history.jumpkinds[-1] == 'Ijk_Ret', stash='backdoor')
    main_addr = pg4.backdoor[0].addr
    nose.tools.assert_equal(len(pg4.active), 0)
    nose.tools.assert_equal(len(pg4.backdoor), 1)
    nose.tools.assert_equal(len(pg4.auth), 1)
    # now step the real path until the real authentication paths return to the same place
    pg5 = pg4.explore(find=main_addr, num_find=2, stash='auth').move('found', 'auth')
    nose.tools.assert_equal(len(pg5.active), 0)
    nose.tools.assert_equal(len(pg5.backdoor), 1)
    nose.tools.assert_equal(len(pg5.auth), 2)
    # now unstash everything
    pg6 = pg5.unstash(from_stash='backdoor').unstash(from_stash='auth')
    nose.tools.assert_equal(len(pg6.active), 3)
    nose.tools.assert_equal(len(pg6.backdoor), 0)
    nose.tools.assert_equal(len(pg6.auth), 0)
    # All three states should now sit at the same address (return to main).
    nose.tools.assert_equal(len(set(pg6.mp_active.addr.mp_items)), 1)
    # now merge them!
    pg7 = pg6.merge()
    nose.tools.assert_equal(len(pg7.active), 2)
    nose.tools.assert_equal(len(pg7.backdoor), 0)
    nose.tools.assert_equal(len(pg7.auth), 0)
    #import ipdb; ipdb.set_trace()
    #print pg2.mp_active.addr.mp_map(hex).mp_items
    # test selecting paths to step
    pg_a = p.factory.simgr(immutable=True)
    pg_b = pg_a.step(until=lambda lpg: len(lpg.active) > 1, step_func=lambda lpg: lpg.prune().drop(stash='pruned'))
    # Only the first active path is stepped; the second must be untouched.
    pg_c = pg_b.step(selector_func=lambda p: p is pg_b.active[0], step_func=lambda lpg: lpg.prune().drop(stash='pruned'))
    nose.tools.assert_is(pg_b.active[1], pg_c.active[1])
    nose.tools.assert_is_not(pg_b.active[0], pg_c.active[0])
    total_active = len(pg_c.active)
    # test special stashes
    nose.tools.assert_equals(len(pg_c.stashes['stashed']), 0)
    pg_d = pg_c.stash(filter_func=lambda p: p is pg_c.active[1], to_stash='asdf')
    nose.tools.assert_equals(len(pg_d.stashes['stashed']), 0)
    nose.tools.assert_equals(len(pg_d.asdf), 1)
    nose.tools.assert_equals(len(pg_d.active), total_active-1)
    # ALL moves every stash's contents; DROP discards them entirely.
    pg_e = pg_d.stash(from_stash=pg_d.ALL, to_stash='fdsa')
    nose.tools.assert_equals(len(pg_e.asdf), 0)
    nose.tools.assert_equals(len(pg_e.active), 0)
    nose.tools.assert_equals(len(pg_e.fdsa), total_active)
    pg_f = pg_e.stash(from_stash=pg_e.ALL, to_stash=pg_e.DROP)
    nose.tools.assert_true(all(len(s) == 0 for s in pg_f.stashes.values()))
def test_fauxware():
    """Nose test generator: run the fauxware check once per architecture."""
    # The multi-threaded variant (threads=2) is intentionally disabled.
    for march in addresses_fauxware:
        yield run_fauxware, march, None
def test_find_to_middle():
    """Check that exploration can stop at an address in the middle of a basic block."""
    target = 0x4006ee  # instruction address inside a block of fauxware
    binary = os.path.join(location, 'x86_64', 'fauxware')
    project = angr.Project(binary, load_options={'auto_load_libs': False})
    simgr = project.factory.simgr(immutable=False)
    simgr.explore(find=(target,))
    nose.tools.assert_equal(len(simgr.found), 1)
    nose.tools.assert_true(simgr.found[0].addr == target)
def test_explore_with_cfg():
    """Explore with a CFG-aware Explorer technique; check found/avoid stashes."""
    binary = os.path.join(location, 'x86_64', 'fauxware')
    project = angr.Project(binary, load_options={'auto_load_libs': False})
    cfg = project.analyses.CFGAccurate()
    simgr = project.factory.simgr()
    explorer = angr.exploration_techniques.Explorer(find=0x4006ED, cfg=cfg, num_find=3)
    simgr.use_technique(explorer)
    simgr.run()
    # Even though num_find=3, only two paths can reach the target.
    nose.tools.assert_equal(len(simgr.active), 0)
    nose.tools.assert_equal(len(simgr.avoid), 1)
    nose.tools.assert_equal(len(simgr.found), 2)
    for found in simgr.found:
        nose.tools.assert_equal(found.addr, 0x4006ED)
    nose.tools.assert_equal(simgr.avoid[0].addr, 0x4007C9)
if __name__ == "__main__":
    # Stand-alone entry point: run the suite with verbose simulation-manager logging.
    logging.getLogger('angr.sim_manager').setLevel('DEBUG')
    print('explore_with_cfg')
    test_explore_with_cfg()
    print('find_to_middle')
    test_find_to_middle()
    # Drive the nose-style generator manually.
    for func, march, threads in test_fauxware():
        print('testing ' + march)
        func(march, threads)
| StarcoderdataPython |
"""Train a depth-limited decision tree on the Forest dataset and report accuracy."""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn import tree
from sklearn import svm
from sklearn.metrics import confusion_matrix, accuracy_score
from sklearn.metrics import plot_confusion_matrix

forest = pd.read_excel(r'/Users/talen/Downloads/Other Datasets/Forest.xlsx')

# Report the number of missing cells (expected to be 0).
print(forest.isnull().sum().sum())

# First column holds the class label; every remaining column is a feature.
features = forest.iloc[:, 1:]
labels = forest.iloc[:, 0]

# Hold out 20% of the rows for evaluation.
x_train, x_test, y_train, y_test = train_test_split(features, labels, test_size=0.20, random_state=0)

# Fit the tree and predict on the held-out rows.
classifier = DecisionTreeClassifier(criterion='entropy', random_state=70, max_depth=4, min_samples_leaf=5)
classifier.fit(x_train, y_train)
predictions = classifier.predict(x_test)

# Visualize the fitted tree.
# NOTE(review): class_names receives the dataframe's column names (features +
# target), not the target's class labels — confirm this is intended.
tree.plot_tree(classifier, filled=True, class_names=forest.columns)

# Confusion matrix for the tree's predictions.
matrix = confusion_matrix(y_test, predictions)
display = plot_confusion_matrix(classifier, x_test, y_test)
display.ax_.set_title('Confusion Matrix for Decision Tree')
plt.show()

accuracy = accuracy_score(y_test, predictions)
print(accuracy)
| StarcoderdataPython |
1605961 | <filename>tax/python/antchain_sdk_tax/models.py
# -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.model import TeaModel
from typing import List
class Config(TeaModel):
    """
    Options used to initialize an SDK client: credentials, endpoint,
    proxy settings and HTTP connection-pool tuning.
    """

    # (attribute, wire key) pairs in serialization order; drives to_map/from_map.
    _FIELD_MAP = (
        ('access_key_id', 'accessKeyId'),
        ('access_key_secret', 'accessKeySecret'),
        ('security_token', 'securityToken'),
        ('protocol', 'protocol'),
        ('read_timeout', 'readTimeout'),
        ('connect_timeout', 'connectTimeout'),
        ('http_proxy', 'httpProxy'),
        ('https_proxy', 'httpsProxy'),
        ('endpoint', 'endpoint'),
        ('no_proxy', 'noProxy'),
        ('max_idle_conns', 'maxIdleConns'),
        ('user_agent', 'userAgent'),
        ('socks_5proxy', 'socks5Proxy'),
        ('socks_5net_work', 'socks5NetWork'),
        ('max_idle_time_millis', 'maxIdleTimeMillis'),
        ('keep_alive_duration_millis', 'keepAliveDurationMillis'),
        ('max_requests', 'maxRequests'),
        ('max_requests_per_host', 'maxRequestsPerHost'),
    )

    def __init__(
        self,
        access_key_id: str = None,
        access_key_secret: str = None,
        security_token: str = None,
        protocol: str = None,
        read_timeout: int = None,
        connect_timeout: int = None,
        http_proxy: str = None,
        https_proxy: str = None,
        endpoint: str = None,
        no_proxy: str = None,
        max_idle_conns: int = None,
        user_agent: str = None,
        socks_5proxy: str = None,
        socks_5net_work: str = None,
        max_idle_time_millis: int = None,
        keep_alive_duration_millis: int = None,
        max_requests: int = None,
        max_requests_per_host: int = None,
    ):
        # access credentials
        self.access_key_id = access_key_id
        self.access_key_secret = access_key_secret
        self.security_token = security_token
        # transport: protocol and timeouts
        self.protocol = protocol
        self.read_timeout = read_timeout
        self.connect_timeout = connect_timeout
        # proxy configuration (plus the no-proxy allow list)
        self.http_proxy = http_proxy
        self.https_proxy = https_proxy
        self.no_proxy = no_proxy
        self.socks_5proxy = socks_5proxy
        self.socks_5net_work = socks_5net_work
        # service endpoint and client identification
        self.endpoint = endpoint
        self.user_agent = user_agent
        # connection-pool tuning: idle limits (milliseconds), keep-alive
        # duration, and total / per-host connection caps
        self.max_idle_conns = max_idle_conns
        self.max_idle_time_millis = max_idle_time_millis
        self.keep_alive_duration_millis = keep_alive_duration_millis
        self.max_requests = max_requests
        self.max_requests_per_host = max_requests_per_host

    def validate(self):
        # Every option is optional; nothing to check.
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) options."""
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a dict, skipping missing/None keys; returns self."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class InvoiceItem(TeaModel):
    """A single detail line of an invoice."""

    # wire field names in serialization order (attribute name == wire key)
    _FIELDS = ('spbm', 'mc', 'jldw', 'shul', 'je', 'sl', 'se', 'mxxh', 'dj', 'ggxh')
    # fields that must be set for validate() to pass
    _REQUIRED = ('spbm', 'mc', 'je', 'mxxh', 'dj')

    def __init__(
        self,
        spbm: str = None,
        mc: str = None,
        jldw: str = None,
        shul: str = None,
        je: str = None,
        sl: str = None,
        se: str = None,
        mxxh: str = None,
        dj: str = None,
        ggxh: str = None,
    ):
        # tax classification code
        self.spbm = spbm
        # item name; for a discount line it must equal the discounted line's
        # name (multi-line discounts are not allowed)
        self.mc = mc
        # unit of measure
        self.jldw = jldw
        # quantity
        self.shul = shul
        # tax-inclusive amount, 2 decimal places
        self.je = je
        # tax rate, 3 decimals (e.g. 1% -> 0.010); required unless fplx is 51 or 53
        self.sl = sl
        # tax amount; required unless fplx is 51 or 53
        self.se = se
        # detail line number, increasing from 1
        self.mxxh = mxxh
        # unit price
        self.dj = dj
        # specification / model
        self.ggxh = ggxh

    def validate(self):
        for name in self._REQUIRED:
            self.validate_required(getattr(self, name), name)

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a dict, skipping missing/None keys; returns self."""
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class Invoice(TeaModel):
    """A full invoice record: parties, amounts, status flags and detail lines."""

    # ISO-8601-like datetime pattern used by the generated validators.
    _DATETIME_PATTERN = '\\d{4}[-]\\d{1,2}[-]\\d{1,2}[T]\\d{2}:\\d{2}:\\d{2}([Z]|([\\.]\\d{1,9})?[\\+]\\d{2}[\\:]?\\d{2})'
    # Scalar wire fields serialized before / after the fpxxmxs detail list
    # (attribute name == wire key; order matters for the emitted dict).
    _SCALARS_BEFORE = ('bz', 'fhr', 'fpdm', 'fphm', 'fplx')
    _SCALARS_AFTER = (
        'fpztdm', 'gfdzdh', 'gfmc', 'gfsh', 'gfyhzh', 'gmflx', 'je', 'jshj',
        'kplx', 'kpr', 'kprq', 'qdbz', 'rzdklbdjgdm', 'rzdklbdrq', 'se', 'skr',
        'slbz', 'ssdq', 'xfdzdh', 'xfmc', 'xfsh', 'xfyhzh', 'yfpdm', 'yfphm',
        'zfbz', 'zfsj', 'ext_filed',
    )

    def __init__(
        self,
        bz: str = None,
        fhr: str = None,
        fpdm: str = None,
        fphm: str = None,
        fplx: str = None,
        fpxxmxs: List[InvoiceItem] = None,
        fpztdm: str = None,
        gfdzdh: str = None,
        gfmc: str = None,
        gfsh: str = None,
        gfyhzh: str = None,
        gmflx: str = None,
        je: str = None,
        jshj: str = None,
        kplx: str = None,
        kpr: str = None,
        kprq: str = None,
        qdbz: str = None,
        rzdklbdjgdm: str = None,
        rzdklbdrq: str = None,
        se: str = None,
        skr: str = None,
        slbz: str = None,
        ssdq: str = None,
        xfdzdh: str = None,
        xfmc: str = None,
        xfsh: str = None,
        xfyhzh: str = None,
        yfpdm: str = None,
        yfphm: str = None,
        zfbz: str = None,
        zfsj: str = None,
        ext_filed: str = None,
    ):
        # remarks
        self.bz = bz
        # reviewer
        self.fhr = fhr
        # invoice code; code + number is unique
        self.fpdm = fpdm
        # invoice number; code + number is unique
        self.fphm = fphm
        # invoice type code; default 20 (Guangdong electronic general invoice);
        # 01 special VAT, 04 general VAT, 10 electronic general VAT,
        # 51/53/90 Chongqing machine-printed variants
        self.fplx = fplx
        # invoice detail lines
        self.fpxxmxs = fpxxmxs
        # invoice status code: 0 normal, 1 out of control, 2 voided,
        # 3 red-letter, 4 abnormal
        self.fpztdm = fpztdm
        # buyer address and phone
        self.gfdzdh = gfdzdh
        # buyer name
        self.gfmc = gfmc
        # buyer taxpayer identification number
        self.gfsh = gfsh
        # buyer bank account
        self.gfyhzh = gfyhzh
        # buyer type: 1 enterprise, 2 individual, 3 other
        self.gmflx = gmflx
        # amount
        self.je = je
        # total including tax, in yuan (2 decimals)
        self.jshj = jshj
        # issue type: 0 blue-letter invoice, 1 red-letter invoice
        self.kplx = kplx
        # issuer
        self.kpr = kpr
        # issue date
        self.kprq = kprq
        # detail-list flag: 00 no list, 01 has list
        self.qdbz = qdbz
        # authentication status
        self.rzdklbdjgdm = rzdklbdjgdm
        # authentication date
        self.rzdklbdrq = rzdklbdrq
        # tax amount; required unless fplx is 51 or 53
        self.se = se
        # payee
        self.skr = skr
        # rate flag: 0 tax-exclusive rate, 1 tax-inclusive rate
        self.slbz = slbz
        # tax authority code: Shandong 343, Chongqing 350, Guangdong 347
        self.ssdq = ssdq
        # seller address and phone
        self.xfdzdh = xfdzdh
        # seller name
        self.xfmc = xfmc
        # seller taxpayer identification number
        self.xfsh = xfsh
        # seller bank account
        self.xfyhzh = xfyhzh
        # original invoice code; required when kplx == 1
        self.yfpdm = yfpdm
        # original invoice number; required when kplx == 1
        self.yfphm = yfphm
        # void flag: 0 not voided, 1 voided
        self.zfbz = zfbz
        # void time
        self.zfsj = zfsj
        # extension field (wire key is spelled 'ext_filed')
        self.ext_filed = ext_filed

    def validate(self):
        # Order of checks mirrors the generated code: first failure wins.
        for name in ('fpdm', 'fphm', 'fplx'):
            self.validate_required(getattr(self, name), name)
        if self.fpxxmxs:
            for item in self.fpxxmxs:
                if item:
                    item.validate()
        for name in ('fpztdm', 'gfmc', 'je', 'jshj', 'kplx', 'kpr', 'kprq'):
            self.validate_required(getattr(self, name), name)
        if self.kprq is not None:
            self.validate_pattern(self.kprq, 'kprq', self._DATETIME_PATTERN)
        if self.rzdklbdrq is not None:
            self.validate_pattern(self.rzdklbdrq, 'rzdklbdrq', self._DATETIME_PATTERN)
        for name in ('slbz', 'ssdq', 'xfmc', 'xfsh', 'zfbz'):
            self.validate_required(getattr(self, name), name)
        if self.zfsj is not None:
            self.validate_pattern(self.zfsj, 'zfsj', self._DATETIME_PATTERN)
        self.validate_required(self.ext_filed, 'ext_filed')

    def to_map(self):
        """Serialize to a dict; 'fpxxmxs' is always emitted, even when empty."""
        result = dict()
        for name in self._SCALARS_BEFORE:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        result['fpxxmxs'] = []
        if self.fpxxmxs is not None:
            for item in self.fpxxmxs:
                result['fpxxmxs'].append(item.to_map() if item else None)
        for name in self._SCALARS_AFTER:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a dict; 'fpxxmxs' is always reset to a list. Returns self."""
        m = m or dict()
        for name in self._SCALARS_BEFORE:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        self.fpxxmxs = []
        if m.get('fpxxmxs') is not None:
            for raw in m.get('fpxxmxs'):
                self.fpxxmxs.append(InvoiceItem().from_map(raw))
        for name in self._SCALARS_AFTER:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class AuthCorpRequest(TeaModel):
    """Request to authorize a corporation: company identity plus its secret key."""

    # wire field names in serialization order (attribute name == wire key)
    _FIELDS = ('auth_token', 'product_instance_id', 'corp_address_phone_no',
               'corp_bank_no', 'corp_name', 'corp_private_key', 'corp_tax_id')
    _REQUIRED = ('corp_address_phone_no', 'corp_bank_no', 'corp_name',
                 'corp_private_key', 'corp_tax_id')

    def __init__(
        self,
        auth_token: str = None,
        product_instance_id: str = None,
        corp_address_phone_no: str = None,
        corp_bank_no: str = None,
        corp_name: str = None,
        corp_private_key: str = None,
        corp_tax_id: str = None,
    ):
        # OAuth-mode authorization token
        self.auth_token = auth_token
        self.product_instance_id = product_instance_id
        # registered company address and phone number
        self.corp_address_phone_no = corp_address_phone_no
        # company bank account number
        self.corp_bank_no = corp_bank_no
        # company name
        self.corp_name = corp_name
        # company authorization secret key
        self.corp_private_key = corp_private_key
        # company taxpayer identification number
        self.corp_tax_id = corp_tax_id

    def validate(self):
        for name in self._REQUIRED:
            self.validate_required(getattr(self, name), name)

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class AuthCorpResponse(TeaModel):
    """Gateway reply for the corp-authorization call."""

    _FIELDS = ('req_msg_id', 'result_code', 'result_msg')

    def __init__(
        self,
        req_msg_id: str = None,
        result_code: str = None,
        result_msg: str = None,
    ):
        # unique request id, for tracing and troubleshooting
        self.req_msg_id = req_msg_id
        # result code; 'OK' generally means the call succeeded
        self.result_code = result_code
        # human-readable error description
        self.result_msg = result_msg

    def validate(self):
        pass

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class PushChargeRequest(TeaModel):
    """Request pushing a charge/billing event for a tenant and customer."""

    _FIELDS = ('auth_token', 'product_instance_id', 'biz_tenant', 'biz_type',
               'customer_id', 'system_code', 'task_request_id')
    _REQUIRED = ('biz_tenant', 'biz_type', 'customer_id', 'system_code',
                 'task_request_id')

    def __init__(
        self,
        auth_token: str = None,
        product_instance_id: str = None,
        biz_tenant: str = None,
        biz_type: str = None,
        customer_id: str = None,
        system_code: str = None,
        task_request_id: str = None,
    ):
        # OAuth-mode authorization token
        self.auth_token = auth_token
        self.product_instance_id = product_instance_id
        # tenant
        self.biz_tenant = biz_tenant
        # platform-dispatched sub business type (formerly authType)
        self.biz_type = biz_type
        # customer id as defined by the business, e.g. the taxpayer id
        # for invoice business
        self.customer_id = customer_id
        # system code, a platform-wide enum
        self.system_code = system_code
        # request id
        self.task_request_id = task_request_id

    def validate(self):
        for name in self._REQUIRED:
            self.validate_required(getattr(self, name), name)

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class PushChargeResponse(TeaModel):
    """Gateway reply for the push-charge call."""

    _FIELDS = ('req_msg_id', 'result_code', 'result_msg')

    def __init__(
        self,
        req_msg_id: str = None,
        result_code: str = None,
        result_msg: str = None,
    ):
        # unique request id, for tracing and troubleshooting
        self.req_msg_id = req_msg_id
        # result code; 'OK' generally means the call succeeded
        self.result_code = result_code
        # human-readable error description
        self.result_msg = result_msg

    def validate(self):
        pass

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class QueryChargeAuthRequest(TeaModel):
    """Request querying the charging authorization state of a tenant."""

    _FIELDS = ('auth_token', 'product_instance_id', 'biz_tenant', 'biz_type')
    _REQUIRED = ('biz_tenant', 'biz_type')

    def __init__(
        self,
        auth_token: str = None,
        product_instance_id: str = None,
        biz_tenant: str = None,
        biz_type: str = None,
    ):
        # OAuth-mode authorization token
        self.auth_token = auth_token
        self.product_instance_id = product_instance_id
        # tenant
        self.biz_tenant = biz_tenant
        # authorization / business type
        self.biz_type = biz_type

    def validate(self):
        for name in self._REQUIRED:
            self.validate_required(getattr(self, name), name)

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class QueryChargeAuthResponse(TeaModel):
    """Gateway reply for the charge-authorization query."""

    _FIELDS = ('req_msg_id', 'result_code', 'result_msg', 'auth_type')

    def __init__(
        self,
        req_msg_id: str = None,
        result_code: str = None,
        result_msg: str = None,
        auth_type: str = None,
    ):
        # unique request id, for tracing and troubleshooting
        self.req_msg_id = req_msg_id
        # result code; 'OK' generally means the call succeeded
        self.result_code = result_code
        # human-readable error description
        self.result_msg = result_msg
        # authorization state: OPEN (enabled), STOP (stopped),
        # NOT_SALE (not authorized)
        self.auth_type = auth_type

    def validate(self):
        pass

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class PushIcmInvoiceRequest(TeaModel):
    """Request pushing a batch of invoice data (JSON payload)."""

    _FIELDS = ('auth_token', 'product_instance_id', 'pch', 'fpxx', 'fplx', 'islsp')
    _REQUIRED = ('pch', 'fpxx', 'fplx', 'islsp')

    def __init__(
        self,
        auth_token: str = None,
        product_instance_id: str = None,
        pch: str = None,
        fpxx: str = None,
        fplx: str = None,
        islsp: bool = None,
    ):
        # OAuth-mode authorization token
        self.auth_token = auth_token
        self.product_instance_id = product_instance_id
        # batch number
        self.pch = pch
        # invoice data (JSON string)
        self.fpxx = fpxx
        # invoice type: 01 special VAT, 04 general VAT,
        # 10 electronic general VAT
        self.fplx = fplx
        # whether this is a push of historical data
        self.islsp = islsp

    def validate(self):
        for name in self._REQUIRED:
            self.validate_required(getattr(self, name), name)

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class PushIcmInvoiceResponse(TeaModel):
    """Gateway reply for the push-invoice call."""

    _FIELDS = ('req_msg_id', 'result_code', 'result_msg')

    def __init__(
        self,
        req_msg_id: str = None,
        result_code: str = None,
        result_msg: str = None,
    ):
        # unique request id, for tracing and troubleshooting
        self.req_msg_id = req_msg_id
        # result code; 'OK' generally means the call succeeded
        self.result_code = result_code
        # human-readable error description
        self.result_msg = result_msg

    def validate(self):
        pass

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class QueryIcmInvoiceRequest(TeaModel):
    """Request querying collected invoices for a taxpayer within a time/amount window."""

    # ISO-8601-like datetime pattern used by the generated validators.
    _DATETIME_PATTERN = '\\d{4}[-]\\d{1,2}[-]\\d{1,2}[T]\\d{2}:\\d{2}:\\d{2}([Z]|([\\.]\\d{1,9})?[\\+]\\d{2}[\\:]?\\d{2})'
    # wire field names in serialization order (attribute name == wire key)
    _FIELDS = ('auth_token', 'product_instance_id', 'app_id', 'auth_type',
               'callback_url', 'end_amount', 'end_date', 'invoice_type',
               'nsrsbh', 'request_id', 'start_amount', 'start_date', 'city_code')

    def __init__(
        self,
        auth_token: str = None,
        product_instance_id: str = None,
        app_id: str = None,
        auth_type: str = None,
        callback_url: str = None,
        end_amount: int = None,
        end_date: str = None,
        invoice_type: str = None,
        nsrsbh: str = None,
        request_id: str = None,
        start_amount: int = None,
        start_date: str = None,
        city_code: str = None,
    ):
        # OAuth-mode authorization token
        self.auth_token = auth_token
        self.product_instance_id = product_instance_id
        # ISV name identifying the partner enterprise
        self.app_id = app_id
        # authorization type: 01 invoice collection, 02 bookkeeping,
        # 03 reimbursement, 11 invoice loan (0X invoice-related, 1X financial)
        self.auth_type = auth_type
        # callback URL notified once collection finishes; required when
        # auth_type is 02 or 03
        self.callback_url = callback_url
        # upper amount bound (Java long); required when auth_type == 03
        # (the original generated comment said "start amount" — copy/paste)
        self.end_amount = end_amount
        # query end time; required when auth_type is 02 or 03. For 02 the
        # start and end must fall in the same calendar month (max one month).
        self.end_date = end_date
        # optional invoice type filter; empty means all types:
        # 01 special VAT, 04 general VAT, 10 electronic general VAT
        self.invoice_type = invoice_type
        # taxpayer identification number being queried
        self.nsrsbh = nsrsbh
        # caller-unique request id; (app_id, request_id) is the idempotency key
        self.request_id = request_id
        # lower amount bound (Java long); required when auth_type == 03
        self.start_amount = start_amount
        # query start time; same month-window constraints as end_date
        self.start_date = start_date
        # region code
        self.city_code = city_code

    def validate(self):
        # Order of checks mirrors the generated code: first failure wins.
        self.validate_required(self.app_id, 'app_id')
        self.validate_required(self.auth_type, 'auth_type')
        if self.end_date is not None:
            self.validate_pattern(self.end_date, 'end_date', self._DATETIME_PATTERN)
        self.validate_required(self.nsrsbh, 'nsrsbh')
        self.validate_required(self.request_id, 'request_id')
        if self.start_date is not None:
            self.validate_pattern(self.start_date, 'start_date', self._DATETIME_PATTERN)
        self.validate_required(self.city_code, 'city_code')

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class QueryIcmInvoiceResponse(TeaModel):
    """Gateway reply for the invoice query call."""

    _FIELDS = ('req_msg_id', 'result_code', 'result_msg')

    def __init__(
        self,
        req_msg_id: str = None,
        result_code: str = None,
        result_msg: str = None,
    ):
        # unique request id, for tracing and troubleshooting
        self.req_msg_id = req_msg_id
        # result code; 'OK' generally means the call succeeded
        self.result_code = result_code
        # human-readable error description
        self.result_msg = result_msg

    def validate(self):
        pass

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class PushIcmInvoiceinfoRequest(TeaModel):
    """Request model for pushing invoice information (PushIcmInvoiceinfo)."""

    # Plain optional string fields; the nested invoice is handled separately.
    _PLAIN_FIELDS = ('auth_token', 'product_instance_id', 'app_id')

    def __init__(
        self,
        auth_token: str = None,
        product_instance_id: str = None,
        app_id: str = None,
        invoice: Invoice = None,
    ):
        # OAuth-mode authorization token.
        self.auth_token = auth_token
        self.product_instance_id = product_instance_id
        # Uploader ISV app_id, i.e. the secret_id.
        self.app_id = app_id
        # Nested invoice payload being pushed.
        self.invoice = invoice

    def validate(self):
        """Require app_id and invoice, then delegate to the invoice's checks."""
        self.validate_required(self.app_id, 'app_id')
        self.validate_required(self.invoice, 'invoice')
        if self.invoice:
            self.invoice.validate()

    def to_map(self):
        """Serialize the non-None fields; the invoice is serialized recursively."""
        result = dict()
        for key in self._PLAIN_FIELDS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        if self.invoice is not None:
            result['invoice'] = self.invoice.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a dict; rebuild the nested Invoice model."""
        m = m or dict()
        for key in self._PLAIN_FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        if m.get('invoice') is not None:
            self.invoice = Invoice().from_map(m['invoice'])
        return self
class PushIcmInvoiceinfoResponse(TeaModel):
    """Response envelope for the PushIcmInvoiceinfo operation."""

    # The three standard response fields all serialize the same way.
    _FIELDS = ('req_msg_id', 'result_code', 'result_msg')

    def __init__(
        self,
        req_msg_id: str = None,
        result_code: str = None,
        result_msg: str = None,
    ):
        # Unique request id, used for tracing and troubleshooting.
        self.req_msg_id = req_msg_id
        # Result code; "OK" normally indicates a successful call.
        self.result_code = result_code
        # Human-readable description of any error.
        self.result_msg = result_msg

    def validate(self):
        """No constraints to enforce on this response."""
        pass

    def to_map(self):
        """Serialize the non-None fields into a plain dict."""
        result = dict()
        for key in self._FIELDS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a dict, ignoring absent/None entries."""
        m = m or dict()
        for key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
| StarcoderdataPython |
127675 | from model.exam import ExamData
from dbjudge.connection_manager.manager import Manager
from dbjudge import squema_recollector, exceptions
from PyQt5.QtCore import pyqtSlot, QItemSelectionModel
class Exam_controller():
    """Mediator between the exam data model and the three Qt views.

    Wires the scenario-selection view, the exam (answering) view and the
    results view to a shared ExamData model and the dbjudge connection
    Manager singleton.

    NOTE(review): class name keeps its original snake_case spelling for
    API compatibility with existing callers.
    """

    def __init__(self, selection_view, exam_view, results_view):
        self.selection_view = selection_view
        self.exam_view = exam_view
        self.results_view = results_view
        self.model = ExamData()
        self.manager = Manager.singleton_instance
        # Both changing the combo box and pressing "access" refresh the
        # scenario summary; the slot ignores the signal payload.
        self.selection_view.scenario_selection.currentTextChanged.connect(
            self.update_scenario_data)
        self.selection_view.access_button.clicked.connect(
            self.update_scenario_data)
        self.exam_view.test_button.clicked.connect(self.try_answer)
        # Exam and results views share the same question list model.
        self.exam_view.question_list.setModel(self.model.questions)
        self.exam_view.question_list.selectionModel().currentRowChanged.connect(
            self.update_current_exam_question)
        self.exam_view.generate_report_button.clicked.connect(
            self.finish_exam)
        self.results_view.question_list.setModel(self.model.questions)
        self.results_view.question_list.selectionModel().currentRowChanged.connect(
            self.update_current_results_question)

    def load_avaiable_scenarios(self):
        """Fetch the available database scenarios and show them in the
        selection view.

        NOTE(review): method name has a typo ("avaiable") — kept for
        API compatibility.
        """
        self.model.scenarios = self.manager.get_databases()
        self.selection_view.load_scenarios(self.model.scenarios)

    def update_scenario_data(self, scenario):
        """Refresh the summary (tables/questions/rows) for the scenario
        currently selected in the view.

        The *scenario* signal argument is ignored; the current value is
        re-read from the view so the same slot can serve both the
        combo-box and button signals.
        """
        scenario = self.selection_view.get_current_scenario()
        if scenario:
            self.manager.select_database(scenario)
            self.model.questions = self.manager.get_questions()
            tables = self.manager.get_tables()
            tuples = self.manager.get_total_data_count()
            total_tables = len(tables)
            total_questions = len(self.model.questions)
            total_rows = tuples
            self.selection_view.update_scenario_data(
                total_tables, total_questions, total_rows)
            # An exam only makes sense if the scenario has questions.
            self.selection_view.access_button.setEnabled(total_questions > 0)
            # Preselect the first question so the exam view is never empty.
            preselected_index = self.model.questions.index(0, 0)
            self.exam_view.question_list.selectionModel().setCurrentIndex(
                preselected_index, QItemSelectionModel.SelectionFlag.ClearAndSelect)

    def update_current_exam_question(self, index):
        """Persist the answer of the question being left, then load the
        question at *index* into the exam view."""
        # Save the in-progress answer before switching questions.
        self.model.answers[self.model.questions.question_list[self.model.current_question]
                           ] = self.exam_view.get_answer_text()
        self.exam_view.update_current_question(
            self.model.questions.question_list[index.row()])
        self.exam_view.set_answer_text(
            self.model.answers[self.model.questions.question_list[index.row()]])
        self.model.current_question = index.row()

    def try_answer(self):
        """Run the student's SQL in read-only mode and echo each result
        row (or a syntax-error message) to the exam console."""
        answer = self.exam_view.get_answer_text()
        try:
            response = self.manager.execute_in_readonly(answer)
            for row in response:
                complete_row = ''
                for element in row:
                    complete_row += ' '+str(element)
                self.exam_view.set_console_output(complete_row)
        except exceptions.ReadOnlyError:
            error_message = 'Error sintáctico detectado, revisa tu consulta'
            self.exam_view.set_console_output(error_message)

    def finish_exam(self):
        """Store the last answer, clear the exam UI, generate the report
        and preselect the first question in the results view."""
        self.model.answers[self.model.questions.question_list[self.model.current_question]
                           ] = self.exam_view.get_answer_text()
        self.exam_view.clear_ui()
        self.model.generate_report()
        preselected_index = self.model.questions.index(0, 0)
        self.results_view.question_list.selectionModel().setCurrentIndex(
            preselected_index, QItemSelectionModel.SelectionFlag.ClearAndSelect)

    def update_current_results_question(self, index):
        """Show the per-question analysis and the overall score for the
        question at *index* in the results view."""
        # Recompute the global score on every selection change; the report
        # is small so this stays cheap.
        correct_answers_count = 0
        total_count = 0
        for question, analysis in self.model.report.items():
            if analysis.is_correct():
                correct_answers_count += 1
            total_count += 1
        question = self.model.questions.question_list[index.row()]
        self.results_view.update_total_count(
            correct_answers_count, total_count)
        self.results_view.update_current_question(question)
        self.results_view.update_correct_result(
            self.model.report[question].correct_result)
        self.results_view.update_correct_answer(
            self.model.report[question].is_correct())
        self.results_view.update_excess_tables(
            self.model.report[question].excess_tables_used)
        self.results_view.update_keywords(
            self.model.report[question].expected_keywords, self.model.report[question].used_keywords)
        self.model.current_question = index.row()
| StarcoderdataPython |
1719708 | from utils import *
from rtid_out_info import RtidOutInfo
from rtid_config import RTIDConfig
from content_manager import ContentManager
from datetime import datetime
from os import path, makedirs
import json
import praw
import secret
import sys
class RTID(Logger):
    """Reddit image downloader.

    Authenticates against Reddit via PRAW, resolves the configured
    subreddit, and downloads the image of every "hot" submission into a
    per-subreddit output directory.
    """

    def __init__(self, rtid_config: RTIDConfig):
        super().__init__()
        self.rtid_config = rtid_config
        self.reddit = None
        self.subreddit_instance = None
        # init() populates self.reddit / self.subreddit_instance or exits.
        self.init()
        self.rtid_out_info = RtidOutInfo(self.rtid_config.subreddit_name)
        self.content_manager = ContentManager(self.subreddit_instance, self.rtid_config)

    def init(self):
        """Authenticate against Reddit and resolve the target subreddit.

        Exits the process (status 1) if Reddit is unreachable or the
        configured subreddit does not exist.
        """
        self.log.info("Starting RTID")
        self.log.info("Instantiating Reddit instance")
        self.reddit = praw.Reddit(
            client_id=secret.reddit_client_id,
            client_secret=secret.reddit_client_secret,
            username=secret.reddit_username,
            # BUG FIX: this line contained a bare "<PASSWORD>" redaction
            # placeholder, which is not valid Python. Read the password
            # from the secret module like the other credentials.
            password=secret.reddit_password,
            user_agent=secret.reddit_user_agent,
        )
        # Subreddits is a Listing class that provides various subreddit lists
        try:
            subreddits = self.reddit.subreddits
        except Exception as e:
            print(e)
            sys.exit(1)
        # PRAW constructs subreddit objects lazily, so this does not hit the
        # network yet; existence is verified explicitly below.
        self.subreddit_instance = self.reddit.subreddit(self.rtid_config.subreddit_name)
        sub_exist = check_subreddit_exists(subreddits, self.rtid_config.subreddit_name)
        if not sub_exist:
            print(f"r/{self.rtid_config.subreddit_name} does not exist.")
            sys.exit(1)

    def run(self):
        """Download the image of each hot submission, logging progress."""
        hot_submission_contents = self.content_manager.get_hot_submission_contents()
        for content in hot_submission_contents:
            content.print_content_info()
            img_download_path = path.join(self.rtid_out_info.subreddit_download_path, content.content_full_name)
            self.log.info(f"Downloading [{content.title}] to path [{img_download_path}] . Image url: {content.content_url}")
            self.content_manager.download_img(img_download_path=img_download_path, img_url=content.content_url)
        self.log.info("Finished.")
| StarcoderdataPython |
3244691 | <filename>bot/cogs/error_handler.py
import datetime
import logging
from concurrent.futures._base import TimeoutError
import sentry_sdk
import discord
from discord.ext import commands
from bot.bot_client import Bot
from bot.utils.context import Context
class CommandErrorHandler(commands.Cog):
    """Global command hooks: per-invocation logging and centralized error
    handling (user-facing messages in Portuguese, unexpected errors
    forwarded to Sentry)."""

    def __init__(self, bot: Bot):
        self.bot = bot
        # Dedicated file logger for every command invocation.
        self.logger = logging.getLogger('commands')
        self.logger.setLevel(logging.INFO)
        handler = logging.FileHandler(filename='commands.log', encoding='utf-8')
        handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
        if not self.logger.handlers:
            # Prevent multiple handlers sending duplicate messages
            self.logger.addHandler(handler)

    async def bot_check(self, ctx: Context):
        """This runs at the start of every command.

        Shows the typing indicator and logs who invoked what; always
        returns True so it never blocks a command.
        """
        await ctx.trigger_typing()
        time = datetime.datetime.utcnow().strftime('%d/%m/%y - %H:%M')
        msg = f"'{ctx.command}' ran by '{ctx.author}' as '{ctx.invoked_with}' at {time}. with '{ctx.message.content}'"
        self.logger.info(msg)
        return True

    @commands.Cog.listener()
    async def on_command_error(self, ctx: Context, error: commands.CommandError):
        """Central error handler: maps known discord.py command errors to
        user-facing messages and reports anything unexpected to Sentry."""
        if hasattr(ctx.command, 'on_error'):
            # Don't try to handle the error if the command has a local handler
            return
        # Argument-related errors -> build a usage embed from the command's
        # signature (parameter types and defaults).
        arguments_error = [
            commands.MissingRequiredArgument,
            commands.BadArgument,
            commands.TooManyArguments,
        ]
        if any([isinstance(error, arg_error) for arg_error in arguments_error]):
            embed = discord.Embed(
                title=f"Argumentos do comando '{ctx.command}':",
                description="",
                color=discord.Colour.red()
            )
            for param, param_type in ctx.command.clean_params.items():
                # Defaults may be classes (with __name__) or plain values.
                try:
                    default_name = param_type.default.__name__
                except AttributeError:
                    default_name = param_type.default
                # '_empty' marks a parameter with no default, i.e. required.
                default = f"(Opcional, Padrão: {default_name})" if default_name != '_empty' else '(Obrigatório)'
                # Translate Python type names into user-friendly labels.
                p_type = param_type.annotation.__name__
                if p_type == 'str':
                    p_type = 'Texto'
                elif p_type == 'bool':
                    p_type = '[True, False]'
                elif p_type == 'Member':
                    p_type = 'Membro'
                elif p_type == 'int':
                    p_type = 'Número'
                embed.add_field(name=param, value=f"**Tipo:** *{p_type}*\n*{default}*", inline=False)
            try:
                await ctx.send(embed=embed)
            except discord.errors.Forbidden:
                await ctx.send("Erro. Permissões insuficientes para enviar um Embed.")
        elif isinstance(error, commands.CommandNotFound):
            pass
        elif isinstance(error, commands.DisabledCommand):
            pass
            # await ctx.send("Esse comando está desabilitado.")
        elif isinstance(error, commands.NoPrivateMessage):
            await ctx.send("Esse comando não pode ser usado em mensagens privadas.")
        elif isinstance(error, commands.PrivateMessageOnly):
            await ctx.send(
                f"Esse comando só pode ser usado em Mensagens Privadas.\n"
                f"Fale comigo aqui: {self.bot.user.mention}"
            )
        elif isinstance(error, commands.NotOwner):
            await ctx.send("Você não pode usar isso.")
        elif isinstance(error, commands.MissingPermissions):
            permissions = [f"***{perm.title().replace('_', ' ')}***" for perm in error.missing_perms]
            await ctx.send(f"Você precisa das seguintes permissões para fazer isso: {', '.join(permissions)}")
        elif isinstance(error, commands.CommandOnCooldown):
            await ctx.send(
                f"Ei! Você já usou este comando recentemente. "
                f"Espere mais {error.retry_after:.1f}s para usar novamente"
            )
        elif isinstance(error, commands.BotMissingPermissions):
            permissions = [f"***{perm.title().replace('_', ' ')}***" for perm in error.missing_perms]
            await ctx.send(f"Eu preciso das seguintes permissões para fazer isso: {', '.join(permissions)}")
        elif isinstance(error, commands.errors.CheckFailure):
            pass
        elif isinstance(error, commands.errors.CommandInvokeError) and isinstance(error.original, TimeoutError):
            await ctx.send('Ação cancelada. Tempo esgotado.')
        else:
            # Unknown error: notify the user and forward full context to Sentry.
            await ctx.send("Erro inesperado. Os logs desse erro foram enviados para um Dev e em breve será arrumado.")
            sentry_sdk.set_user({
                'id': ctx.author and ctx.author.id,
                'username': str(ctx.author) if ctx.author else None,
            })
            sentry_sdk.set_context('discord', {
                'guild': ctx.guild,
                # NOTE(review): this expression appears intended to yield the
                # channel only when it has a 'name' attribute, else None —
                # confirm before simplifying.
                'channel': ctx.channel and (hasattr(ctx.channel, 'name') or None) and ctx.channel,
                'message': ctx.message and ctx.message.content,
                'message_id': ctx.message and ctx.message.id,
                'cog': ctx.cog and ctx.cog.qualified_name,
                'command': ctx.command and ctx.command.name
            })
            sentry_sdk.capture_exception(error)
def setup(bot):
    """Entry point used by discord.py's extension loader."""
    bot.add_cog(CommandErrorHandler(bot))
| StarcoderdataPython |
3821 | <filename>Py3Challenges/saves/challenges/c6_min.py
"""
To master this you should consider using the builtin-``min``-function.
"""
from ...challenge import Challenge
from random import randint
# Draw between 2 and 10 random target values, each in [1, 100].
x = [randint(1, 100) for _ in range(randint(2, 10))]

intro = f"You have to print the lowest value of {', '.join(str(_) for _ in x[:-1])} and {x[-1]}. (values: x)"
def validate_function(stdin: str, stdout: str, stderr: str, exc: tuple) -> bool:
    """Return True iff stdout holds exactly the minimum of the values in x."""
    text = stdout.removesuffix("\n")
    try:
        printed = int(text)
    except ValueError:
        # Non-numeric output can never match the expected minimum.
        return False
    return printed == min(x)
# Bundle everything into the challenge object consumed by the framework:
# the randomly generated values are exposed to the player as ``x``, and
# stdout is captured so validate_function can inspect what was printed.
challenge = Challenge(
    intro=intro,
    validate_function=validate_function,
    help=__doc__,
    values={"x": x},
    capture_stdout=True,
)
| StarcoderdataPython |
3366746 | from plenum.test.view_change.helper import view_change_in_between_3pc
def test_view_change_in_between_3pc_all_nodes(txnPoolNodeSet, looper,
                                              wallet1, client1):
    """
    - Slow processing of 3PC messages for all nodes (default delay)
    - do a view change while those messages are delayed
    """
    view_change_in_between_3pc(looper, txnPoolNodeSet, txnPoolNodeSet, wallet1,
                               client1)
def test_view_change_in_between_3pc_all_nodes_long_delay(
        txnPoolNodeSet, looper, wallet1, client1):
    """
    - Slow processing of 3PC messages for all nodes with a long (20s) delay
    - do a view change while those messages are delayed
    """
    view_change_in_between_3pc(looper, txnPoolNodeSet,
                               txnPoolNodeSet,
                               wallet1, client1,
                               slow_delay=20)
| StarcoderdataPython |
1766329 | """Contains a Graph Attention Network v2 and associated layers."""
from typing import Any, Callable, Optional, Tuple, Union
import tensorflow as tf
from tensorflow_gnn.graph import graph_constants as const
from tensorflow_gnn.graph import graph_tensor as gt
from tensorflow_gnn.graph import graph_tensor_ops as ops
from tensorflow_gnn.graph import normalization_ops
class GATv2(tf.keras.layers.Layer):
  """Simple Graph Attention Network V2 (GATv2).

  Based off of https://arxiv.org/abs/2105.14491, the GATv2 brings strict
  improvements over the original GAT (https://arxiv.org/abs/1710.10903) by
  allowing the network to compute a more expressive "dynamic" instead of
  just "static" attention. See the above papers for more details.

  This implementation of GAT attends only to edges that are explicitly stored
  in the input GraphTensor. Attention of a node to itself requires having an
  explicit loop in the edge set at hand.
  """

  def __init__(self,
               *,
               num_heads: int,
               per_head_channels: int,
               edge_set_name: str,
               feature_name: str = const.DEFAULT_STATE_NAME,
               output_feature_name: str = const.DEFAULT_STATE_NAME,
               use_bias: bool = True,
               edge_dropout: float = 0.,
               attention_activation: Union[str,
                                           Callable[..., Any]] = 'leaky_relu',
               query_kernel_initializer: Optional[
                   tf.keras.initializers.Initializer] = None,
               key_kernel_initializer: Optional[
                   tf.keras.initializers.Initializer] = None,
               attention_kernel_initializers: Optional[
                   tf.keras.initializers.Initializer] = None,
               **kwargs):
    """Initializes the Graph Attention Network v2.

    Like the Keras Dense module, if the input features have rank greater than 2,
    this computes a point-wise GAT along the last axis of the inputs. For
    example, if the input features is [num_nodes, 2, 4, 1], then this will
    perform an identical GAT layer on each of the num_nodes * 2 * 4 input
    values.

    Args:
      num_heads: The number of attention heads.
      per_head_channels: The number of channels for each attention head. This
        means that the final output size will be per_head_channels * num_heads.
      edge_set_name: The edge set name indexing the nodes to run over.
      feature_name: The name of the feature to run over.
      output_feature_name: The name for the feature where the results will be
        stored in the returned GraphTensor.
      use_bias: If true, add a bias term to the transformation of the input
        features. For consistency with the GATv2 paper's code at
        https://github.com/tech-srl/how_attentive_are_gats/blob/main/gatv2_conv_PyG.py,
        a bias term is not used in the attention weights.
      edge_dropout: The percentage (between 0 and 1) of edges to randomly drop
        during training.
      attention_activation: The nonlinearity used on the transformed inputs
        before multiplying with the trained weights of the attention layer.
        This can be specified as a Keras layer, a tf.keras.activations.*
        function, or a string understood by tf.keras.layers.Activation().
        Defaults to "leaky_relu", which in turn defaults to a negative slope
        of alpha=0.2.
      query_kernel_initializer: An initializer for the `query` part of the
        linear transformation of the input.
      key_kernel_initializer: An initializer for the `key` part of the linear
        transformation of the input.
      attention_kernel_initializers: Initializers for the attention logit
        function weights. Note that this will be equally partitioned into
        separate weights for each head.
      **kwargs: Any extra arguments to pass to the super class's init.

    Raises:
      ValueError if the `softmax` reduce operation is not registered.
      ValueError if num_heads is less than 1.
      ValueError if per_head_channels is less than 1.
    """
    super().__init__(**kwargs)
    if num_heads <= 0:
      raise ValueError(f'Number of heads {num_heads} must be greater than 0.')
    if per_head_channels <= 0:
      raise ValueError(
          f'Per-head channels {per_head_channels} must be greater than 0.')
    self._num_heads = num_heads
    self._per_head_channels = per_head_channels
    self._edge_set_name = edge_set_name
    self._feature_name = feature_name
    self._output_feature_name = output_feature_name
    self._use_bias = use_bias
    self._edge_dropout = edge_dropout
    self._attention_activation = tf.keras.activations.get(attention_activation)
    self._query_kernel_initializer = query_kernel_initializer
    self._key_kernel_initializer = key_kernel_initializer
    self._attention_kernel_initializers = attention_kernel_initializers
    # Decompose W into W_query (left) and W_key (right). See call() for details.
    self._w_query = tf.keras.layers.Dense(
        per_head_channels * num_heads,
        kernel_initializer=query_kernel_initializer,
        use_bias=use_bias,
        name='gatv2_query')
    self._w_key = tf.keras.layers.Dense(
        per_head_channels * num_heads,
        kernel_initializer=key_kernel_initializer,
        use_bias=use_bias,
        name='gatv2_key')
    # Multi-head attention pooling.
    self._attention_pool = GATv2AttentionPool(
        num_heads,
        per_head_channels,
        edge_dropout=edge_dropout,
        attention_activation=attention_activation,
        # TODO(b/205960151): Expose setting, possibly rename to receiver_tag.
        tag=const.TARGET,
        edge_set_name=self._edge_set_name,
        attention_kernel_initializers=attention_kernel_initializers)

  def call(self, graph: gt.GraphTensor, training=False) -> gt.GraphTensor:
    """Runs a single GATv2 layer on the input graph.

    Args:
      graph: The input, which should be a scalar GraphTensor, i.e. its batch
        dimension has been merged to the components dimension.
      training: True iff we are training, not evaluating. Used to enable
        training-specific features like dropout.

    Returns:
      A new GraphTensor after the GATv2 has been applied.

    Raises:
      ValueError if the input GraphTensor is not a scalar.
      ValueError if the edge_set_name is not in the GraphTensor's edge sets.
    """
    if graph.shape.rank != 0:
      raise ValueError(
          f'Input GraphTensor must be a scalar, but had rank {graph.shape.rank}'
      )
    if self._edge_set_name not in graph.edge_sets:
      raise ValueError(f'Edge {self._edge_set_name} not in Graph edge sets')
    adjacency = graph.edge_sets[self._edge_set_name].adjacency
    # The *query* is posed by the prospective receiver of the pooled values.
    # The *keys* (and their baked-in values) come from the data sources.
    query_node = graph.node_sets[adjacency.target_name]
    key_node = graph.node_sets[adjacency.source_name]
    # Decompose W*[query || key] into W_Left * query + W_Right * key,
    # since we'll need W_Left * query later. See GATv2AttentionPool for details.
    # Note that we are performing the transformation before broadcasting.
    # [num_nodes, *extra_dims, per_head_channels * num_heads]
    query = self._w_query(query_node[self._feature_name])
    key = self._w_key(key_node[self._feature_name])
    # Broadcast these features to get them ready for the pooling layer.
    # [num_edges, *extra_dims, per_head_channels * num_heads]
    query_broadcasted = ops.broadcast_node_to_edges(
        graph, self._edge_set_name, const.TARGET, feature_value=query)
    key_broadcasted = ops.broadcast_node_to_edges(
        graph, self._edge_set_name, const.SOURCE, feature_value=key)
    # TODO(b/205960151): Optionally include edge and context features?
    # Compute attention pooling to get the output features.
    pooled = self._attention_pool((graph, query_broadcasted, key_broadcasted),
                                  training=training)
    # Add these features to the GraphTensor.
    features = graph.node_sets[adjacency.target_name].get_features_dict()
    features[self._output_feature_name] = pooled
    return graph.replace_features(node_sets={adjacency.target_name: features})

  def get_config(self):
    # NOTE(review): initializer objects are stored as-is; if an actual
    # tf.keras.initializers.Initializer instance was passed, this config is
    # not JSON-serializable — consider tf.keras.initializers.serialize.
    # TODO confirm the intended save/restore round-trip.
    config = super().get_config().copy()
    config.update({
        'num_heads': self._num_heads,
        'per_head_channels': self._per_head_channels,
        'edge_set_name': self._edge_set_name,
        'feature_name': self._feature_name,
        'output_feature_name': self._output_feature_name,
        'use_bias': self._use_bias,
        'edge_dropout': self._edge_dropout,
        'attention_activation': tf.keras.activations.serialize(
            self._attention_activation),
        'query_kernel_initializer': self._query_kernel_initializer,
        'key_kernel_initializer': self._key_kernel_initializer,
        'attention_kernel_initializers': self._attention_kernel_initializers,
    })
    return config
class GATv2AttentionPool(tf.keras.layers.Layer):
  """GATv2 multi-head attention pooling.

  Implements the pooling layer described in https://arxiv.org/abs/2105.14491
  Equations (3) and (4). That is, given the edge values, this layer computes the
  attention coefficients and multiplies them by the edges, and aggregates these
  by summing them on a per-node basis.
  """

  def __init__(self,
               num_heads: int,
               per_head_channels: int,
               edge_dropout: float = 0.,
               attention_activation: Union[str,
                                           Callable[..., Any]] = 'leaky_relu',
               attention_kernel_initializers: Optional[
                   tf.keras.initializers.Initializer] = None,
               tag: Optional[const.IncidentNodeTag] = None,
               edge_set_name: Optional[const.EdgeSetName] = None,
               **kwargs):
    """Initializes the GATv2 multi-head attention pooling layer.

    Like the Keras Dense module, if the input features have rank greater than
    2, this computes Pooling along the last axis of the inputs. See the
    documentation for the GATv2 class for more details.

    Args:
      num_heads: The number of attention heads.
      per_head_channels: The number of channels for each attention head. This
        means that the final output size will be per_head_channels * num_heads.
      edge_dropout: The percentage (between 0 and 1) of edges to randomly drop
        during training.
      attention_activation: The nonlinearity used on the transformed inputs
        before multiplying with the trained weights of the attention layer.
        This can be specified as a Keras layer, a tf.keras.activations.*
        function, or a string understood by tf.keras.layers.Activation().
        Defaults to "leaky_relu", which in turn defaults to a negative slope
        of alpha=0.2.
      attention_kernel_initializers: Initializers for the attention logit
        function weights. Note that this will be equally partitioned into
        separate weights for each head.
      tag: The incident node tag to pool to.
      edge_set_name: If set, the feature will be pooled from this edge set to
        the given receiver `tag`.
      **kwargs: Any extra arguments to pass to the super class's init.

    Raises:
      ValueError if edge_dropout is less than 0 or greater than 1.
      NotImplementedError if `tag` == 'CONTEXT', as Context pooling is not yet
        supported.

    Returns:
      A pooling layer that can be used in a Keras model.
    """
    super().__init__(**kwargs)
    if tag == const.CONTEXT:
      raise NotImplementedError('Context pooling not currently supported.')
    self._tag = tag
    self._edge_set_name = edge_set_name
    self._per_head_channels = per_head_channels
    self._num_heads = num_heads
    if not 0 <= edge_dropout < 1:
      raise ValueError(f'Edge dropout {edge_dropout} must be in [0, 1).')
    self._edge_dropout = edge_dropout
    self._attention_activation = tf.keras.activations.get(attention_activation)
    # Create attention logits layers, one for each head. Note that we can't
    # use a single Dense layer that outputs `num_heads` units because we need
    # to apply a different attention function a_k to its corresponding
    # W_k-transformed features.
    self._attention_logits_fn = tf.keras.layers.experimental.EinsumDense(
        '...ik,ki->...i',
        output_shape=(None, num_heads, 1),  # TODO(b/205825425): (num_heads,)
        kernel_initializer=attention_kernel_initializers,
        name='attn_logits_fn')
    self._attention_kernel_initializers = attention_kernel_initializers

  def call(self,
           inputs: Tuple[gt.GraphTensor, const.Field, const.Field],
           training=False) -> const.Field:
    """Compute attention pooling over the given queries and keys.

    The query and key features already have been transformed by W_query (
    left) and W_key (right), respectively. See implementation for more details.

    Args:
      inputs: A tuple containing the following items: (1) The GraphTensor to
        read from. (2) The value of the broadcasted query feature. (3) The value
        of the broadcasted key feature.
      training: True iff we are training, not evaluating. Used to enable
        training-specific features like dropout.

    Returns:
      A tensor with the pooled feature value.
    """
    graph, query_broadcasted, key_broadcasted = inputs
    # Per the doc comments, we support features that have extra dimensions. This
    # block determines if those extra dimensions exist, and adds them to the
    # `reshape` shapes if so.
    features_shape = query_broadcasted.shape
    extra_dims = features_shape[1:-1]  # Possibly empty.
    if not extra_dims.is_fully_defined():
      raise ValueError(
          'GATv2AttentionPool requires its broadcast inputs to have '
          'statically known dimensions except first and last, but got '
          f'query_broadcasted.shape = {features_shape}')
    # [num_edges, *extra_dims, num_heads, per_head_channels]
    in_reshape = (-1, *extra_dims, self._num_heads, self._per_head_channels)
    query_broadcasted = tf.reshape(query_broadcasted, in_reshape)
    key_broadcasted = tf.reshape(key_broadcasted, in_reshape)
    # Recall that the algorithm calls for W*[query || key]. However,
    # we actually need just the transformed query in Equation (4) of
    # https://arxiv.org/pdf/2105.14491.pdf (the paper is unclear on this
    # point). To do this, we previously decomposed:
    #   W*[query || key] = W_query * query + W_key * key
    # and now we recompose this to get W*[query || key].
    # [num_edges, *extra_dims, num_heads, per_head_channels]
    features = self._attention_activation(query_broadcasted + key_broadcasted)
    # Compute the attention logits and softmax to get the coefficients.
    # [num_edges, *extra_dims, num_heads, 1]
    logits = tf.expand_dims(self._attention_logits_fn(features), -1)
    attention_coefficients = normalization_ops.softmax_edges_per_node(
        graph, self._edge_set_name, self._tag, feature_value=logits)
    # Edge dropout is only applied while training (inference is deterministic).
    if training:
      # Apply dropout to the normalized attention coefficients, as is done in
      # the original GAT paper. This should have the same effect as edge
      # dropout. Also, note that tf.nn.dropout upscales the remaining values,
      # which should maintain the sum-up-to-1 per node in expectation.
      attention_coefficients = tf.nn.dropout(attention_coefficients,
                                             self._edge_dropout)
    # Apply the attention coefficients to the transformed query.
    # [num_edges, *extra_dims, num_heads, per_head_channels]
    messages = key_broadcasted * attention_coefficients
    # Take the sum of the weighted values, which equals the weighted average,
    # and add a nonlinearity.
    # TODO(b/205960151): Make the nonlinearity configurable, maybe move it out.
    pooled_h = tf.nn.relu(
        ops.pool_edges_to_node(
            graph,
            self._edge_set_name,
            self._tag,
            'sum',
            feature_value=messages))
    # Flatten the head and channel dimensions to obtain the output shape
    # [num_nodes, *extra_dims, num_heads * per_head_channels].
    # We put -1 for num_nodes for the sake of TPUs, to avoid reshaping with
    # a data-dependent computed quantity.
    out_reshape = (-1, *extra_dims, self._num_heads * self._per_head_channels)
    return tf.reshape(pooled_h, out_reshape)

  def get_config(self):
    # NOTE(review): as in GATv2.get_config, initializer objects are stored
    # unserialized — confirm save/restore behavior before relying on it.
    config = super().get_config().copy()
    config.update({
        'num_heads': self._num_heads,
        'per_head_channels': self._per_head_channels,
        'edge_dropout': self._edge_dropout,
        'attention_activation': tf.keras.activations.serialize(
            self._attention_activation),
        'attention_kernel_initializers': self._attention_kernel_initializers,
        'tag': self._tag,
        'edge_set_name': self._edge_set_name,
    })
    return config
| StarcoderdataPython |
78469 | <reponame>rasapala/OpenVINO-model-server<gh_stars>0
#
# Copyright (c) 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from tensorflow.python.framework import dtypes as dtypes
import numpy as np
import pytest
from ie_serving.server.get_model_metadata_utils import \
_prepare_signature, prepare_get_metadata_output
from conftest import MockedIOInfo
@pytest.mark.parametrize("layers, tensor_key, np_type", [
    ({'tensor': MockedIOInfo('FP32', (1, 1, 1), 'NCHW'),
      'test_tensor': MockedIOInfo('FP32', (1, 1, 1), 'NCHW')},
     {'new_key': 'tensor', 'client_key': 'test_tensor'}, np.float32),
    ({'tensor': MockedIOInfo('I32', (1, 1, 1), 'NCHW')}, {'new_key': 'tensor'},
     np.int32),
])
def test_prepare_signature(layers, tensor_key, np_type):
    """_prepare_signature should expose each mapped key with the layer's
    tensor name, shape and dtype taken from the corresponding IO info."""
    dtype_model = dtypes.as_dtype(np_type)
    output = _prepare_signature(
        layers=layers, model_keys=tensor_key)
    for key, value in tensor_key.items():
        # Each external key must be present and reference the internal tensor.
        assert key in output
        assert value in output[key].name
        # Shape and dtype in the signature must match the layer's IO info.
        shape = [d.size for d in output[key].tensor_shape.dim]
        assert list(layers[value].shape) == shape
        tensor_dtype = dtypes.as_dtype(output[key].dtype)
        assert dtype_model == tensor_dtype
def test_prepare_get_metadata_output():
    """The metadata response should use TF Serving's predict method name
    for a simple single-input/single-output model."""
    inputs = {'tensor_input': MockedIOInfo('FP32', (1, 1, 1), 'NCHW')}
    outputs = {'tensor_output': MockedIOInfo('FP32', (1, 1, 1), 'NCHW')}
    model_keys = {'inputs': {'name': 'tensor_input'},
                  'outputs': {'output_name': 'tensor_output'}}
    output = prepare_get_metadata_output(
        inputs=inputs, outputs=outputs, model_keys=model_keys)
    assert "tensorflow/serving/predict" == output.method_name
| StarcoderdataPython |
# The dhcp database file was probably never read properly: some records have
# a line ending right after a tab, so splitting on tabs yields rows with the
# wrong field count (4 fields expected per record).
# FIX: both file handles were previously left open (the writer was never
# closed or flushed, risking a truncated output file); use `with` blocks.
with open('dhcp-db.txt', 'r') as src:
    raw_lines = [line.replace('\n', '') for line in src]
records = [line.split('\t') for line in raw_lines]
bad_lengths = [len(r) for r in records if len(r) != 4]
print('l = (should be header only)', bad_lengths)
if len(bad_lengths) > 1:
    rejoined = ['\t'.join(r) for r in records]
    print('foo', rejoined[:10])
    # NOTE(review): rejoining each row with tabs and the rows with newlines
    # reproduces the input verbatim — the broken records are not actually
    # merged here; confirm whether a real merge step is still intended.
    with open('dhcp-db.txt.fixed', 'w') as dst:
        dst.write('\n'.join(rejoined))
        dst.write('\n')
| StarcoderdataPython |
1678195 | import networkx as nx
import pandas as pd
# # load baseline adjacency matrix
# df_baseline_1 = pd.read_excel('donor_receiver_2019.xlsx', sheet_name='donor_2019', index_col=0)
# print(df_baseline_1.head())
# df_baseline_1 = df_baseline_1.astype(int)
# df_baseline_2 = pd.read_excel('donor_receiver_2019.xlsx', sheet_name='receiver_2019', index_col=0)
# df_baseline_2 = df_baseline_2.astype(int)
# for ix, row in df_baseline_2.iterrows():
# for col in row.keys():
# if row[col] > 0:
# df_baseline_1.at[ix, row[col]] = row[col]
#
# G_baseline = nx.convert_matrix.from_pandas_adjacency(df_baseline_1)
# print(nx.info(G_baseline))
# G_baseline_degree = nx.degree_histogram(G_baseline)
# G_baseline_degree_sum = [a * b for a, b in zip(G_baseline_degree, range(0, len(G_baseline_degree)))]
# print('average degree: {}'.format(sum(G_baseline_degree_sum) / G_baseline.number_of_nodes()))
def _print_average_degree(data_dir):
    """Load the project adjacency matrix stored under *data_dir*, print the
    graph summary, then its average degree.

    (Renamed the path variable: the original used ``dir``, shadowing the
    builtin.)
    """
    adjacency = pd.read_csv(f"{data_dir}/adjacency_matrix_projects.csv", index_col=0)
    graph = nx.convert_matrix.from_pandas_adjacency(adjacency)
    print(nx.info(graph))
    # Sum over the degree histogram of (degree * node count) == total degree.
    degree_hist = nx.degree_histogram(graph)
    total_degree = sum(count * degree for degree, count in enumerate(degree_hist))
    # NOTE(review): a commented-out variant in this file divided by the
    # *baseline* node count when field reports contained extra countries —
    # confirm that normalization is no longer wanted.
    print('average degree: {}'.format(total_degree / graph.number_of_nodes()))


_print_average_degree('data/projects_baseline')
_print_average_degree('data/projects_covid')
1730998 | from .core import Metrika
| StarcoderdataPython |
1777151 | import cv2
import numpy as np
import tensorflow as tf
class Tracker:
    """Tracks one user-selected object with an OpenCV KCF tracker.

    The initial view of the target region is captured at construction; once
    the tracked object has moved more than ``dist_thresh`` pixels from its
    starting position, a second view of the *original* region is captured,
    the user masks out polygons in both views, and ``predict`` can then run
    the TensorFlow model on the pair.
    """
    # NOTE(review): class-level defaults are shared by all instances until
    # shadowed in __init__/update; ``start_pos`` is a mutable array, so
    # confirm before ever creating more than one Tracker.
    view_a = None
    view_b = None
    start_pos = np.asarray([0, 0])
    dist_thresh = 0
    infer = True

    def __init__(self, frame, bbox, color):
        # KCF is initialized on the raw bbox; the squared version is only
        # used for cropping views and for the movement threshold.
        self.tracker = cv2.TrackerKCF_create()
        self.color = color
        self.tracker.init(frame, bbox)
        bbox = square_bbox(bbox)
        self.start_bbox = bbox
        self.start_pos = bbox_center(bbox)
        self.dist_thresh = bbox[2] / 4
        self.view_a = frame[bbox[1]:bbox[1] + bbox[3], bbox[0]:bbox[0] + bbox[2]]
        self.a_mask = np.ones(np.shape(self.view_a))
        # NOTE(review): view_b is still None here, so this is np.ones(()) —
        # a scalar array; b_mask is never rebuilt later. Confirm intent.
        self.b_mask = np.ones(np.shape(self.view_b))
        self.a_points = []  # polygon vertices clicked on view 'a'
        self.b_points = []  # polygon vertices clicked on view 'b'

    def draw_circle_a(self, event, x, y, flags, param):
        # Mouse callback: record a polygon vertex on left-button-down (1).
        if event == 1:
            self.a_points.append((x, y))

    def draw_circle_b(self, event, x, y, flags, param):
        if event == 1:
            self.b_points.append((x, y))

    def update(self, frame):
        """Advance tracking by one frame.

        Returns True once the second view has been captured and both masks
        drawn, False while still tracking, and None after tracking ended.
        """
        if self.tracker is None:
            return
        ok, bbox = self.tracker.update(frame)
        if ok:
            center = bbox_center(bbox)
            diff = np.linalg.norm(center - self.start_pos)
            bbox = [int(i) for i in bbox]
            if diff > self.dist_thresh and self.infer:
                # Target moved far enough: capture a second view of the
                # ORIGINAL region and let the user draw exclusion polygons.
                self.view_b = frame[self.start_bbox[1]:self.start_bbox[1] + self.start_bbox[3],
                                    self.start_bbox[0]:self.start_bbox[0] + self.start_bbox[2]]
                self.infer = False
                self.tracker = None
                cv2.namedWindow('a')
                cv2.namedWindow('b')
                while True:
                    cv2.setMouseCallback('a', self.draw_circle_a)
                    cv2.setMouseCallback('b', self.draw_circle_b)
                    cv2.imshow('a', self.view_a)
                    cv2.imshow('b', self.view_b)
                    # Press 'b' to finish drawing the polygons.
                    if cv2.waitKey(1) & 0xFF == ord('b'):
                        break
                cv2.fillConvexPoly(self.view_a, np.asarray(self.a_points), (0, 0, 0))
                cv2.fillConvexPoly(self.view_b, np.asarray(self.b_points), (0, 0, 0))
                cv2.imshow('a', self.view_a)
                cv2.imshow('b', self.view_b)
                return True
            rect = bbox_to_rect(bbox)
            cv2.rectangle(frame, rect[0], rect[1], (0, 0, 255))
        return False

    def predict(self, sess, name="result.hdr"):
        """Feed the two captured views to the model in *sess* and write the
        predicted HDR image to *name*."""
        self.view_a = cv2.cvtColor(self.view_a, cv2.COLOR_BGR2RGB)
        self.view_b = cv2.cvtColor(self.view_b, cv2.COLOR_BGR2RGB)
        left_image = cv2.resize(self.view_a, (256, 256)).astype(np.float32)
        # Fix: ``np.float`` was removed in NumPy 1.24; use float32 to match
        # left_image (both are re-normalized to float64 just below anyway).
        right_image = cv2.resize(self.view_b, (256, 256)).astype(np.float32)
        left_image = cv2.normalize(left_image.astype('float'), None, 0.0, 1.0, cv2.NORM_MINMAX)
        right_image = cv2.normalize(right_image.astype('float'), None, 0.0, 1.0, cv2.NORM_MINMAX)
        left_image = np.expand_dims(left_image, axis=0)
        right_image = np.expand_dims(right_image, axis=0)
        print(right_image.shape)
        graph = tf.get_default_graph()
        # I'm really sorry for not naming the tensors... I'll go away and think about what i've done
        l = graph.get_tensor_by_name("IteratorGetNext:0")
        r = graph.get_tensor_by_name("IteratorGetNext:1")
        bg = graph.get_tensor_by_name("IteratorGetNext:4")
        norm = graph.get_tensor_by_name("IteratorGetNext:3")
        dummy = np.ones((1, 256, 256, 3))
        op = graph.get_tensor_by_name("sub_13:0")
        prediction = sess.run(op,
                              feed_dict={l: left_image, r: right_image, bg: right_image, norm: dummy})
        cv2.imshow('env', prediction[0])
        cv2.imwrite(name, prediction[0])
        print(prediction)
def square_bbox(bbox):
    """Expand *bbox* = (x, y, w, h) symmetrically into a square of side
    max(w, h), returning a list of ints.

    Fix: the original used ``/ 2``, which produced floats even after the
    explicit int cast — and the result is used directly as numpy slice
    indices (Tracker.__init__), where floats raise TypeError.  Integer
    division keeps every element an int.
    """
    bbox = [int(i) for i in bbox]
    if bbox[2] > bbox[3]:
        # Wider than tall: grow height, shifting y up by half the excess.
        diff = bbox[2] - bbox[3]
        bbox[1] -= diff // 2
        bbox[3] += diff
    elif bbox[3] > bbox[2]:
        # Taller than wide: grow width, shifting x left by half the excess.
        diff = bbox[3] - bbox[2]
        bbox[0] -= diff // 2
        bbox[2] += diff
    return bbox
def bbox_center(bbox):
    """Return the center (x, y) of *bbox* = (x, y, w, h) as a numpy array.

    Bug fix: the old code computed ``corner + opposite_corner / 2``, which
    equals the true center only when the bbox origin is (0, 0).  The center
    of (x, y, w, h) is (x + w/2, y + h/2).
    """
    x = bbox[0] + bbox[2] / 2
    y = bbox[1] + bbox[3] / 2
    return np.asarray((x, y))
def bbox_to_rect(bbox):
    """Convert (x, y, w, h) into corner points ((x0, y0), (x1, y1)),
    truncating each coordinate to int."""
    top_left = (int(bbox[0]), int(bbox[1]))
    bottom_right = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3]))
    return top_left, bottom_right
| StarcoderdataPython |
112475 | # GENERATED BY KOMAND SDK - DO NOT EDIT
from .add_feed.action import AddFeed
from .add_watchlist.action import AddWatchlist
from .blacklist_hash.action import BlacklistHash
from .delete_feed.action import DeleteFeed
from .delete_watchlist.action import DeleteWatchlist
from .get_binary.action import GetBinary
from .isolate_sensor.action import IsolateSensor
from .list_alerts.action import ListAlerts
from .list_binaries.action import ListBinaries
from .list_feeds.action import ListFeeds
from .list_processes.action import ListProcesses
from .list_sensors.action import ListSensors
from .list_watchlists.action import ListWatchlists
from .unisolate_sensor.action import UnisolateSensor
from .update_alert.action import UpdateAlert
| StarcoderdataPython |
3230611 | import pandas as pd
import pickle
from nltk.corpus import stopwords
from nltk.stem import SnowballStemmer
from nltk.tokenize import RegexpTokenizer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from scipy import sparse
import re
import gensim
import matplotlib.pyplot as plt
from gensim.test.utils import get_tmpfile
import sys
def stem_words(text):
    """Stem every whitespace-separated token of *text* with the English
    Snowball stemmer and re-join the tokens with single spaces."""
    stemmer = SnowballStemmer('english')
    return " ".join(stemmer.stem(token) for token in text.split())
def make_lower_case(text):
    """Return *text* converted to lower case."""
    lowered = text.lower()
    return lowered
def remove_stop_words(text):
    """Drop English stopwords (NLTK list) from whitespace-tokenized *text*
    and re-join the remaining tokens with single spaces."""
    stops = set(stopwords.words("english"))
    kept = [token for token in text.split() if token not in stops]
    return " ".join(kept)
def remove_punctuation(text):
    """Strip punctuation by keeping only word-character runs, joined with
    single spaces.

    Equivalent to NLTK's ``RegexpTokenizer(r'\\w+').tokenize`` but uses the
    stdlib ``re`` module (already imported in this file) and avoids
    constructing a tokenizer object on every call.
    """
    return " ".join(re.findall(r'\w+', text))
if __name__ == '__main__':
    # Load description features
    df = pd.read_csv('Data/data1.csv')
    # Normalize the description: lower-case, stopword removal, punctuation
    # stripping, then stemming.
    df['description'] = df.description.apply(func=make_lower_case)
    df['description'] = df.description.apply(func=remove_stop_words)
    df['description'] = df.description.apply(func=remove_punctuation)
    df['description'] = df.description.apply(func=stem_words)
    # The title gets the same treatment minus stopword removal.
    df['title'] = df.title.apply(func=make_lower_case)
    df['title'] = df.title.apply(func=remove_punctuation)
    df['title'] = df.title.apply(func=stem_words)
    # The title is repeated three times, presumably to up-weight it against
    # the description in downstream TF-IDF.  NOTE(review): confirm the
    # triple repetition is intentional.
    df['full_document'] = df['title'] + ' ' + df['title'] + ' ' + df['title'] + ' ' + df['description']
    df.to_csv('Data/data_processed.csv', index = False)
| StarcoderdataPython |
84370 | <gh_stars>10-100
# Copyright (c) 2018-present, Royal Bank of Canada.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
from abc import ABCMeta
import torch
from advertorch.utils import replicate_input
class Attack(object):
    """Abstract base class for all attack classes."""

    __metaclass__ = ABCMeta

    def __init__(self, predict, loss_fn, clip_min, clip_max):
        """Create an Attack instance.

        :param predict: forward pass function.
        :param loss_fn: loss function.
        :param clip_min: minimum value per input dimension.
        :param clip_max: maximum value per input dimension.
        """
        self.predict = predict
        self.loss_fn = loss_fn
        self.clip_min = clip_min
        self.clip_max = clip_max

    def perturb(self, x, **kwargs):
        """Generate adversarial examples; must be overridden by concrete
        attack subclasses.

        :param x: the model's input tensor.
        :param **kwargs: optional parameters used by child classes.
        :return: adversarial examples.
        """
        raise NotImplementedError("Sub-classes must implement perturb.")

    def __call__(self, *args, **kwargs):
        # Calling the attack object is shorthand for perturb().
        return self.perturb(*args, **kwargs)
class LabelMixin(object):
    """Mixin supplying label utilities shared by attack implementations."""

    def _get_predicted_label(self, x):
        """Return the model's argmax class for *x* (no gradients); used to
        prevent label leaking during adversarial training.

        :param x: the model's input tensor.
        :return: tensor containing predicted labels.
        """
        with torch.no_grad():
            logits = self.predict(x)
        _, labels = torch.max(logits, dim=1)
        return labels

    def _verify_and_process_inputs(self, x, y):
        # Targeted attacks require an explicit target label; untargeted
        # attacks fall back to the model's own prediction.
        if self.targeted:
            assert y is not None
        elif y is None:
            y = self._get_predicted_label(x)
        return replicate_input(x), replicate_input(y)
| StarcoderdataPython |
4802041 |
from metatester import Base
class Derived(Base):
    """Concrete subclass of metatester.Base used to exercise whatever the
    metaclass machinery adds (see the foo() call below)."""
    def bar(self):
        # Marker method defined on the subclass itself.
        return 'bar'
d = Derived()
# foo() is expected to come from Base/its metaclass, not from Derived.
print(d.foo())
3234068 | import time
import typing
import requests
from jose import jwt
import baseline_cloud.core.aws.cognito
import baseline_cloud.core.aws.secrets
import baseline_cloud.core.aws.ssm
from baseline_cloud.core import aws
from baseline_cloud.core.config import config
def create(sub: str, minutes: typing.Optional[int] = 0, hours: typing.Optional[int] = 0, days: typing.Optional[int] = 0, **kwargs) -> str:
    """Create a signed HS256 JWT for subject *sub*.

    The optional minutes/hours/days add up to a lifetime; when positive it
    overrides any caller-supplied ``exp`` claim.  Extra claims may be passed
    as keyword arguments.
    """
    issued_at = int(time.time())
    lifetime = (minutes * 60) + (hours * 3600) + (days * 86400)
    if lifetime > 0:
        kwargs['exp'] = issued_at + lifetime
    payload = {
        'sub': sub,
        'iss': jwt_issuer,
        'iat': issued_at,
        **kwargs
    }
    return jwt.encode(payload, key=jwt_secret, algorithm='HS256')
def authorize(token: str) -> dict:
    """Validate *token* and return its claims.

    Dispatches on the (unverified) ``iss`` claim: tokens we minted are
    checked against our HS256 secret, Cognito tokens against the pool's
    JWKS; anything else is rejected.
    """
    issuer = jwt.get_unverified_claims(token)['iss']
    if issuer == jwt_issuer:
        return jwt.decode(token, key=jwt_secret, algorithms='HS256', issuer=jwt_issuer)
    if issuer == cognito_pool_url:
        return aws.cognito.verify_token(cognito_keys, token)
    raise Exception('Unknown issuer')
def download_cognito_jwks() -> typing.List[dict]:
    """Fetch the JWKS document for the configured Cognito user pool."""
    resp = requests.get(url=f'{cognito_pool_url}/.well-known/jwks.json')
    resp.raise_for_status()
    return resp.json()
# Cold-start initialization: secrets and parameters are fetched once at
# import time and reused for the life of the process.
jwt_secret = aws.secrets.get_secret_value(f'/{config.app_name}/jwt-secret')
jwt_issuer = aws.ssm.get_parameter(f'/{config.app_name}/jwt-issuer')
# only load the jwks file on cold-start
cognito_pool_url = aws.ssm.get_parameter(f'/{config.app_name}/cognito-pool-url')
cognito_keys = download_cognito_jwks()
| StarcoderdataPython |
33800 | <filename>tests/test_local_tile_server.py
from os import path
from unittest import mock
from common_for_tests import make_test_raster
from tornado.testing import gen_test, AsyncHTTPTestCase
from tornado.concurrent import Future
import telluric as tl
from telluric.util.local_tile_server import TileServer, make_app, TileServerHandler
# Slippy-map tiles exercised by the tests, as (x, y, zoom).
tiles = [(131072, 131072, 18), (65536, 65536, 17), (32768, 32768, 16), (16384, 16384, 15)]
# Three synthetic single-band rasters (band0..band2), 300x400 pixels each.
rasters = [
    make_test_raster(i, band_names=["band%i" % i], height=300, width=400)
    for i in range(3)
]
class TestFCLocalTileServer(AsyncHTTPTestCase):
    """Tile-server tests for a FeatureCollection: a tile request must fetch
    one tile per raster in the collection and merge them."""
    def get_app(self):
        # Build a collection from the synthetic rasters and register it
        # with the tile server (objects are keyed by id()).
        self.fc = tl.FeatureCollection([tl.GeoFeature.from_raster(r, {}) for r in rasters])
        TileServer.add_object(self.fc, self.fc.envelope)
        return make_app(TileServer.objects)
    def test_server_is_alive(self):
        # Liveness endpoint sanity check.
        response = self.fetch('/ok')
        self.assertEqual(response.code, 200)
        self.assertEqual(response.body, b"i'm alive")
    @mock.patch.object(TileServerHandler, '_get_raster_png_tile')
    @mock.patch.object(TileServerHandler, '_merge_rasters')
    def test_raster_collection_merges_data(self, mock_merge, mock_get_tile):
        # The handler awaits these calls, so the mocks must return
        # already-resolved Futures.
        future_1 = Future()
        future_1.set_result(rasters[1])
        mock_merge.return_value = future_1
        future_2 = Future()
        future_2.set_result(rasters[2])
        mock_get_tile.return_value = future_2
        for tile in tiles:
            # URL scheme: /<object id>/<x>/<y>/<zoom>.png
            uri = "/%i/%i/%i/%i.png" % (id(self.fc), tile[0], tile[1], tile[2])
            response = self.fetch(uri)
            self.assertEqual(response.code, 200)
            self.assertNotEqual(response.body, b"")
            # One tile fetched per raster (3) and exactly one merge at the
            # requested zoom level, over GeoRaster2 instances.
            self.assertEqual(mock_get_tile.call_count, 3)
            self.assertEqual(mock_merge.call_count, 1)
            self.assertEqual(mock_merge.call_args[0][1], tile[2])
            for r in mock_merge.call_args[0][0]:
                self.assertIsInstance(r, tl.GeoRaster2)
            self.assertEqual(len(mock_merge.call_args[0][0]), 3)
            mock_get_tile.reset_mock()
            mock_merge.reset_mock()
class TestRasterLocalTileServer(AsyncHTTPTestCase):
    """Tile-server tests for a single registered raster (no merging)."""
    def get_app(self):
        # Serve one raster, keyed by id(), bounded by its footprint.
        self.raster = rasters[1]
        TileServer.add_object(self.raster, self.raster.footprint())
        return make_app(TileServer.objects)
    def test_server_is_alive(self):
        # Liveness endpoint sanity check.
        response = self.fetch('/ok')
        self.assertEqual(response.code, 200)
        self.assertEqual(response.body, b"i'm alive")
    def test_raster_collection_merges_data(self):
        for tile in tiles:
            uri = "/%i/%i/%i/%i.png" % (id(self.raster), tile[0], tile[1], tile[2])
            response = self.fetch(uri)
            self.assertEqual(response.code, 200)
            self.assertNotEqual(response.body, b"")
            # Returned PNG must decode to a 3-band 256x256 tile in the
            # source raster's CRS.
            raster = tl.GeoRaster2.from_bytes(response.body, self.raster.affine, self.raster.crs)
            self.assertEqual(raster.shape, (3, 256, 256))
| StarcoderdataPython |
4835113 | import unittest
from businessPage.loginPage import LoginPage
from common.myunit import MyUnit
from common.selenium_driver import logger
class TestLoginPage(MyUnit):
    """Data-driven Selenium login tests; credentials come from CSV rows."""
    # Fixture path, relative to the test runner's working directory.
    csv_file = '../data/user.csv'
    def testLogin01(self):
        # Row 1 is expected to hold a valid username/password pair.
        l = LoginPage(self.driver)
        row = l.get_csv_data(self.csv_file,1)
        logger.info('读取的数据是:' + str(row))
        l.loginAction(row[0],row[1])
        # assert: login state check passes
        self.assertTrue(l.checkLoginStatus())
    def testLogin03(self):
        l = LoginPage(self.driver)
        row = l.get_csv_data(self.csv_file, 3)
        logger.info('读取的数据是:' + str(row))
        # wrong username and password
        # NOTE(review): this logs in with bad credentials (the CSV row is
        # only logged, not used) yet still asserts checkLoginStatus() —
        # confirm that helper detects the failure state, otherwise the
        # assertion is inverted.
        l.loginAction('aaaaa', '<PASSWORD>')
        # assert
        self.assertTrue(l.checkLoginStatus())
| StarcoderdataPython |
3249229 | import pprint
import svn.remote
import svn.exception
import platform
import struct
import os
import curses
import time
import sys
import signal
import argparse
import tempfile
from subprocess import call
import logging
# --- CLI setup and SVN connection (runs at import time) ---
parser = argparse.ArgumentParser()
parser.add_argument('-url', dest = 'svn_url', help = 'input svn remote URL', required = True)
parser.add_argument('-editor', dest='editor', help = 'open file in svn repo with the editor(gvim or emacs')
args = parser.parse_args()
# Editor preference: -editor flag > $EDITOR environment variable > gvim.
if args.editor:
    EDITOR = args.editor
elif os.environ.get('EDITOR'):
    EDITOR = os.environ.get('EDITOR')
else:
    EDITOR = 'gvim'
svn_client = svn.remote.RemoteClient(args.svn_url)
# Probe the URL up front; exit with the SVN error message if it is invalid.
try:
    info = svn_client.info()
except svn.exception.SvnException as err:
    # NOTE(review): 'exceptionflame' looks garbled (presumably
    # exceptionName); the first element is unused either way.
    exceptionflame, exceptionValue = sys.exc_info()[:2]
    print(exceptionValue)
    sys.exit()
def get_logger(name):
    """Return a DEBUG-level logger named *name* that writes to LOGNAME.

    Fixes two defects: ``log.addHand1er`` (a digit-one typo that raised
    AttributeError at import time) and ``$(lineno)s`` in the format string,
    which the logging module does not substitute — the correct token is
    ``%(lineno)s``.
    """
    log = logging.getLogger(name)
    log.setLevel(logging.DEBUG)
    fh = logging.FileHandler(LOGNAME)
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(logging.Formatter('%(levelname)s - %(lineno)s: %(message)s'))
    log.addHandler(fh)
    return log
# Log file name; must be defined before get_logger() is first called.
LOGNAME = 'show_svn.log'
log = get_logger(__name__)
#XXX logging off
log.disabled = True
def get_term_size():
    """Return the console size as (width, height).

    Only Linux is probed; any other platform — or a failed probe — yields
    the default of (80, 25).
    """
    size = None
    if platform.system() == 'Linux':
        size = get_term_size_linux()
    return size if size is not None else (80, 25)
def get_term_size_linux():
    """Best-effort Linux terminal size probe, returned as (cols, rows).

    Tries, in order: a TIOCGWINSZ ioctl on stdin/stdout/stderr, the same
    ioctl on the controlling terminal, and the LINES/COLUMNS environment
    variables.  Returns None when every probe fails, letting the caller
    substitute a default (instead of crashing on ``cr[1]`` of None).

    Fixes from review: ``fcntl.ioctl(fd. termios...)`` used ``.`` where a
    ``,`` was intended (always raising, so the ioctl path never worked),
    and the environment fallback assigned to ``fd`` instead of ``cr``.
    """
    def ioctl_GWINSZ(fd):
        try:
            import fcntl
            import termios
            # Kernel reports (rows, cols) packed as two shorts.
            return struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
        except Exception:
            return None
    cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
    if not cr:
        try:
            fd = os.open(os.ctermid(), os.O_RDONLY)
            cr = ioctl_GWINSZ(fd)
            os.close(fd)
        except Exception:
            pass
    if not cr:
        try:
            cr = (os.environ['LINES'], os.environ['COLUMNS'])
        except Exception:
            pass
    if not cr:
        return None
    return int(cr[1]), int(cr[0])
class svn_repo_wrapper(object):
    """Convenience wrapper around ``svn.remote.RemoteClient``.

    Renamed from the garbled ``svnfrepokwrapper`` (and ``get_repolist_1en``
    — digit one — renamed to ``get_repolist_len``) so that the call sites
    in ``main()`` actually resolve.  Every accessor swallows SVN errors and
    returns None, which ``main()`` uses to detect invalid URLs.

    1. get_repo_url()
    2. get_repolist()
    3. get_repolist_len()
    4. get_repo_info()
    """
    def __init__(self, svn_remote_client):
        self.repo = svn_remote_client

    def get_repo_url(self):
        """Return the repository URL, or None on SVN error."""
        try:
            info = self.repo.info()
            return info['url']
        except svn.exception.SvnException:
            return None

    def get_repolist(self):
        """Return the repository's directory listing as a list, or None."""
        try:
            return list(self.repo.list())
        except svn.exception.SvnException:
            return None

    def get_repolist_len(self):
        """Return the number of entries in the listing, or None on error."""
        try:
            return len(self.get_repolist())
        except svn.exception.SvnException:
            return None

    def get_repo_info(self):
        """Return the raw ``svn info`` dictionary, or None on error."""
        try:
            return self.repo.info()
        except svn.exception.SvnException:
            return None
class svn_tui(object):
    """Curses front-end for browsing an SVN repository.

    Wraps a curses window plus an ``svn_repo_wrapper`` and knows how to
    draw the header, the breadcrumb (current URL), the directory listing
    and the ``svn info`` summary.

    1. show_head()
    2. show_breadcrumb()
    3. show_svn_list()
    4. show_svn_info()
    """
    def __init__(self, win_obj, svn_repo, transparent=False):
        self.window = win_obj
        self.repo = svn_repo
        self.info_start_line = 0  # first free row below the listing
        curses.start_color()
        if transparent:
            # -1 keeps the terminal's own background color.
            curses.use_default_colors()
            curses.init_pair(1, curses.COLOR_GREEN, -1)
            curses.init_pair(2, curses.COLOR_CYAN, -1)
            curses.init_pair(3, curses.COLOR_RED, -1)
            curses.init_pair(4, curses.COLOR_YELLOW, -1)
        else:
            curses.init_pair(1, curses.COLOR_GREEN, curses.COLOR_BLACK)
            curses.init_pair(2, curses.COLOR_CYAN, curses.COLOR_BLACK)
            curses.init_pair(3, curses.COLOR_RED, curses.COLOR_BLACK)
            curses.init_pair(4, curses.COLOR_YELLOW, curses.COLOR_BLACK)
        size = get_term_size()
        self.x = max(size[0], 10)
        self.y = max(size[1], 25)
        # Fix: original read ``f1oat`` (digit one) — a NameError at runtime.
        self.start_col = int(float(self.x) / 3.5)
        self.indented_start_col = max(self.start_col - 3, 0)
        #FIXME resize screen height to a large number for fix _curses.error: addwstr() returned ERR
        self.window.resize(self.x, 200)
        self.show_svn_all_item()

    def show_head(self):
        # Title plus one-line key help at the top of the screen.
        self.window.clrtoeol()
        self.window.addstr(0, self.start_col, 'SHOW SVN LIST', curses.color_pair(4))
        self.window.clrtoeol()
        self.window.addstr(1, 1, 'Help: Use k or up arrow for up, j or down arrow for down, enter for enter, q for quit', curses.color_pair(4))
        self.window.clrtoeol()
        self.window.clrtobot()

    def show_breadcrumb(self):
        # Current repository URL on row 2.
        self.window.clrtoeol()
        self.window.addstr(2, 1, 'URL: ' + self.repo.get_repo_url(), curses.color_pair(4))
        self.window.clrtoeol()
        self.window.clrtobot()

    def show_svn_list(self):
        # Listing starts at row 3 with a synthetic "../" entry; records the
        # first free row for show_svn_info() and parks the cursor on "../".
        i = 3
        self.window.keypad(0)
        self.window.clrtoeol()
        self.window.addstr(i, 4, r'../', curses.color_pair(3))
        self.window.clrtoeol()
        self.window.clrtobot()
        i += 1
        for svn_list in self.repo.get_repolist():
            self.window.clrtoeol()
            self.window.addstr(i, 4, svn_list, curses.color_pair(1))
            self.window.clrtobot()
            i += 1
        self.info_start_line = i
        self.window.clrtobot()
        self.window.refresh()
        self.window.move(3, 4)

    def gen_svn_list(self):
        # Yield the listing one screen-height page at a time.
        # Fix: the original iterated over an ``int`` (TypeError) — wrap the
        # page count in range().
        svn_list = self.repo.get_repolist()
        for i in range(int(self.repo.get_repolist_len() / self.y)):
            yield svn_list[i * self.y:(i + 1) * self.y]

    def show_svn_info(self):
        # Print selected ``svn info`` fields below the listing, clipped to
        # the screen height (addstr past the last row raises).
        repo_info_uniq = {}
        self.window.keypad(0)
        if self.info_start_line >= self.y:
            return
        self.window.addstr(self.info_start_line, 1, '############################# svn info: ############################### ', curses.color_pair(4))
        self.window.clrtoeol()
        i = self.info_start_line + 1
        repo_info = self.repo.get_repo_info()
        for k, v in repo_info.items():
            # Keep only underscore-keyed fields and URLs.
            if '_' in k or 'url' in k:
                repo_info_uniq[k] = v
        for k, v in repo_info_uniq.items():
            addstr = "%s: %s"%(k ,v)
            if i >= self.y:
                break
            self.window.addstr(i, 1, addstr, curses.color_pair(2))
            self.window.clrtoeol()
            i += 1
        self.window.clrtobot()
        self.window.refresh()
        self.window.move(3, 4)

    def show_svn_all_item(self):
        # Full redraw: header, breadcrumb, listing, info panel.
        self.show_head()
        self.show_breadcrumb()
        self.show_svn_list()
        self.show_svn_info()
def main(stdscr):
    """Curses entry point: browse the repository with j/k/enter/q.

    Fixes garbled identifiers from the original: ``RemoteC1ient`` /
    ``RemoteCIient`` → ``RemoteClient``, ``entryikind`` → ``entry_kind``
    and ``listlindex-ll`` → ``repolist[index - 1]``.
    """
    # init curses and screen
    global root_scr
    root_scr = stdscr
    stdscr.clear()
    curses.cbreak()
    curses.noecho()
    stdscr.keypad(1)
    # Signal handlers: resize banner and clean Ctrl-C shutdown.
    signal.signal(signal.SIGWINCH, change_term)
    signal.signal(signal.SIGINT, send_kill)
    svn_root_repo = svn_repo_wrapper(svn_client)
    tui = svn_tui(win_obj=stdscr, svn_repo=svn_root_repo)
    tui.window.move(3, 4)
    left_blank = 3   # listing starts at screen row 3
    index = 0        # 0 == the "../" entry
    svn_child_repo = tui.repo
    repo_url_str = svn_root_repo.get_repo_url()
    while True:
        enter = 0
        key = stdscr.getch()
        tui.window.refresh()
        # up: 'k' or the final byte of the up-arrow escape sequence
        if key == ord('k') or key == 65:
            if index == 0:
                continue
            else:
                index -= 1
                tui.window.move(left_blank + index, 4)
        # down: 'j' or the final byte of the down-arrow escape sequence
        elif key == ord('j') or key == 66:
            if index == svn_child_repo.get_repolist_len():
                continue
            else:
                index += 1
                tui.window.move(left_blank + index, 4)
        # enter
        elif key == ord('e') or key == 10:
            enter = 1
        # quit
        elif key == ord('q'):
            quit()
        else:
            continue
        if enter:
            repolist = svn_child_repo.get_repolist()
            if index == 0:
                # "../": go up one level.
                parent_repo_url_str = repo_url_str
                repo_url_str = get_url_root(repo_url_str)
            elif index >= 1:
                # Strip a trailing slash from directory entries.
                if '/' in repolist[index - 1]:
                    list_strip = repolist[index - 1].rsplit('/')[0]
                else:
                    list_strip = repolist[index - 1]
                repo_url_str = svn_child_repo.get_repo_url() + '/' + list_strip
                parent_repo_url_str = svn_child_repo.get_repo_url()
            log.debug('debug pointo ' + repo_url_str)
            svn_child_repo = svn_repo_wrapper(svn.remote.RemoteClient(repo_url_str))
            log.debug('debug pointl ' + str(index) + repo_url_str)
            if svn_child_repo.get_repo_url() is None:
                # Target is not a browsable directory (or we went above the
                # root) — fall back to the parent URL.
                log.debug('debug pointz ' + repo_url_str)
                log.debug('debug point3 ' + parent_repo_url_str)
                svn_child_repo = svn_repo_wrapper(svn.remote.RemoteClient(parent_repo_url_str))
                repo_url_str = svn_child_repo.get_repo_url()
                repo_info = svn_child_repo.get_repo_info()
                # If the selected entry is a file, open it in the editor.
                # NOTE(review): 'entry_kind' assumed to be the python-svn
                # info key (original had garbled 'entryikind') — confirm.
                if repo_info['entry_kind'] == 'file':
                    open_tmp_file(svn.remote.RemoteClient(get_url_root(parent_repo_url_str)), repolist[index - 1])
                tui.window.move(3, 4)
                index = 0
            else:
                tui.repo = svn_child_repo
                tui.show_svn_all_item()
                index = 0
        else:
            continue
def get_url_root(url):
    """Return *url* with everything from the final '/' onward removed
    (empty string when there is no '/')."""
    head, _sep, _tail = url.rpartition('/')
    return head
def open_tmp_file(svn_repo, filename):
    """Download *filename* from *svn_repo* into a temp file and open it in
    the configured EDITOR (blocking until the editor exits).

    Fixes: the default suffix lacked its leading dot (tempfile does not add
    one), and ``split('.')[1]`` picked the *first* extension of names like
    ``archive.tar.gz`` — the last one is wanted.
    """
    ext = '.tmp'
    if '.' in filename:
        ext = '.' + filename.rsplit('.', 1)[1]
    init_message = svn_repo.cat(filename)
    # delete=False: the editor needs the file to outlive this handle.
    with tempfile.NamedTemporaryFile(suffix=ext, delete=False) as tf:
        tf.write(init_message)
        tf.flush()
        call([EDITOR, tf.name])
def change_term(signum, frame):
    # SIGWINCH handler: the TUI does not support live resizing, so draw a
    # banner asking the user to quit and rerun the script.
    size = get_term_size()
    start_col = int(float(size[0]) / 5)
    root_scr.clear()
    root_scr.addstr(0, start_col, ' \'\'~`` ', curses.color_pair(4))
    root_scr.addstr(1, start_col, ' ( o o ) ', curses.color_pair(4))
    root_scr.addstr(2, start_col, '+---------------------.oooO-(_)-Oooo.--------------------+', curses.color_pair(4))
    root_scr.addstr(3, start_col, '| |', curses.color_pair(4))
    root_scr.addstr(4, start_col, '| |', curses.color_pair(4))
    root_scr.addstr(5, start_col, '| Opps. Please quit then rerun the script! |', curses.color_pair(4))
    root_scr.addstr(6, start_col, '| |', curses.color_pair(4))
    root_scr.addstr(7, start_col, '| .oooO |', curses.color_pair(4))
    root_scr.addstr(8, start_col, '| ( ) Oooo. |', curses.color_pair(4))
    root_scr.addstr(9, start_col, '+-----------------------\ (----( )----------------------+', curses.color_pair(4))
    root_scr.addstr(10, start_col, ' \_) ) / ', curses.color_pair(4))
    root_scr.addstr(11, start_col, ' (_/ ', curses.color_pair(4))
    #curses.resizeterm(size[0], size[1])
    root_scr.refresh()
def send_kill(signum, frame):
    # SIGINT (Ctrl-C) handler: restore the terminal, then exit.
    curses.endwin()
    sys.exit()
def quit():
    # Normal 'q' exit: restore the terminal, then exit.
    curses.endwin()
    sys.exit()
if __name__ == '__main__':
curses.wrapper(main)
| StarcoderdataPython |
1783245 | import lib.calc_commands
import lib.calc_history
import lib.calc_operations | StarcoderdataPython |
1789151 | from xdump.base import BaseBackend
from xdump.cli.utils import apply_decorators, import_backend
def test_import_backend():
    """import_backend must resolve a dotted path to a BaseBackend subclass."""
    backend_class = import_backend("xdump.sqlite.SQLiteBackend")
    assert issubclass(backend_class, BaseBackend)
def test_apply_decorators():
    """apply_decorators must apply every decorator in the list to the
    target function."""
    def mark_foo(func):
        func.foo = 1
        return func

    def mark_bar(func):
        func.bar = 2
        return func

    @apply_decorators([mark_foo, mark_bar])
    def func():
        pass

    assert func.foo == 1
    assert func.bar == 2
| StarcoderdataPython |
1686047 | <reponame>vivian-dai/Competitive-Programming-Code
def base_convert(b):
    """Convert between spreadsheet column letters and their 1-based number,
    both directions, using bijective base 26 (no zero digit).

    "AA" -> "27"; "27" -> "AA".
    """
    if 'A' <= b[0] <= 'Z':
        # Letters -> decimal string (Horner's scheme, A=1 .. Z=26).
        total = 0
        for ch in b:
            total = total * 26 + (ord(ch) - ord('A') + 1)
        return str(total)
    # Decimal string -> letters, peeling off the least-significant "digit".
    num = int(b)
    letters = ""
    while num > 0:
        num, rem = divmod(num, 26)
        if rem == 0:
            # A multiple of 26 maps to 'Z' and borrows one from the rest.
            letters = 'Z' + letters
            num -= 1
        else:
            letters = chr(ord('A') + rem - 1) + letters
    return letters
# Spreadsheet-notation converter: for each of n queries, convert R<d>C<d>
# style to letter-column style, or letter-column style back to RxCy.
n = int(input())
for i in range(n):
    s = input()
    if s[0] == 'R':
        # Find the index of the first digit after the leading 'R'.
        ind = 0
        for ind in range(len(s)):
            if s[ind].isdigit():
                break
        if 'C' in s and s.index('C') > ind:
            # R<row>C<col> form -> <letters><row>.
            nums = s[1::].split("C")
            print(f"{base_convert(nums[1])}{nums[0]}")
        else:
            # 'R' here is just a column letter (e.g. "RZ23").
            # NOTE(review): the inner loop variable ``i`` shadows the outer
            # query counter; harmless since the outer ``i`` is never read,
            # but worth renaming.
            r, c = None, None
            for i in range(len(s)):
                if s[i].isdigit():
                    r = s[i::]
                    c = s[0:i]
                    print(f"R{r}C{base_convert(c)}")
                    break
    else:
        # Plain letter-column form, e.g. "AB12" -> R12C28.
        r, c = None, None
        for i in range(len(s)):
            if s[i].isdigit():
                r = s[i::]
                c = s[0:i]
                print(f"R{r}C{base_convert(c)}")
                break
3292061 | <filename>wradlib/tests/__init__.py<gh_stars>0
# Copyright (c) 2011-2018, wradlib developers.
# Distributed under the MIT License. See LICENSE.txt for more info.
"""
wradlib_tests
=============
"""
from . import test_adjust # noqa
from . import test_atten # noqa
from . import test_clutter # noqa
from . import test_comp # noqa
from . import test_dp # noqa
from . import test_georef # noqa
from . import test_io # noqa
from . import test_ipol # noqa
from . import test_qual # noqa
from . import test_trafo # noqa
from . import test_util # noqa
from . import test_verify # noqa
from . import test_vpr # noqa
from . import test_zonalstats # noqa
from . import test_zr # noqa
| StarcoderdataPython |
68762 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# pylint: disable=missing-docstring
import json
from datetime import date, datetime
from .common import common
class jsonl(object):
    """ jsonl exporter plugin.

    As opposed to the json exporter, jsonl serializes messages as one JSON
    object per line, not as one giant array.  See http://jsonlines.org.
    """
    # pylint: disable=no-self-use
    def __init__(self):
        """ constructor """
        pass

    # pylint: disable=unused-argument
    def format(self, msg, exporter_context):
        """ Serialize one raw message into a *one-line* JSON string.

        :param msg: Raw message object :class:`telethon.tl.types.Message`
            and derivatives. https://core.telegram.org/type/Message
        :returns: *one-line* string containing one message's data.
        """
        (author, _, text, reply_id, sent_by_bot,
         has_media, media) = common.extract_message_data(msg)
        record = {
            'message_id': msg.id,
            'from_id': msg.from_id,
            'reply_id': reply_id,
            'author': author,
            'sent_by_bot': sent_by_bot,
            'date': msg.date,
            'content': text,
            'contains_media': has_media,
            'media_content': media
        }
        return json.dumps(record, default=self._json_serial, ensure_ascii=False)

    def begin_final_file(self, resulting_file, exporter_context):
        """ Hook executed at the beginning of writing a resulting file
        (after the BOM is written in case of --addbom); jsonl needs no
        preamble. """
        pass

    def _json_serial(self, obj):
        """ JSON serializer for objects not serializable by default json
        code: dates/datetimes become ISO-8601 strings. """
        if isinstance(obj, (datetime, date)):
            return obj.isoformat()
        raise TypeError("Type %s not serializable" % type(obj))
| StarcoderdataPython |
1606168 | import unittest
from starlette.testclient import TestClient
from policyguru.main import app
client = TestClient(app)
class TestMain(unittest.TestCase):
    """Smoke test for the application's root endpoint via the Starlette
    test client."""
    def test_root(self):
        # GET / must answer 200 with the hello-world payload.
        response = client.get("/")
        assert response.status_code == 200
        assert response.json() == {"msg": "Hello World"}
| StarcoderdataPython |
3382773 | import numpy as np
import metrosampler.sampler as sr
import metrosampler.posterior as sp
import metrosampler.constraints as sc
import matplotlib as mpl
mpl.use('TkAgg')
import matplotlib.pyplot as plt
def generate_samples():
    """Draw samples from the constrained mixture posterior with a
    Metropolis sampler and show them as a scatter plot."""
    # Target distribution built from the constraint file, with an identity
    # covariance for the proposal.
    constraints = sc.Constraint('./Data/mixture.txt')
    posterior = sp.ConstrainedDistribution(constraints)
    proposal_cov = np.identity(2)
    # Sampler schedule parameters.
    t0 = 1000
    tb = 50000
    # Start from a feasible example and sample.
    start = posterior.get_example()
    sampler = sr.MetroSampler(posterior, start, proposal_cov, 200, t0, tb, 0.1)
    vals, accepted, total = sampler.sample(4000, 200)
    # Scatter plot of the drawn samples.
    plt.scatter(vals[:, 0], vals[:, 1], s = 0.2)
    plt.show()


if __name__ == '__main__':
    generate_samples()
| StarcoderdataPython |
14610 | #!/usr/bin/env python
#
# pyFlow - a lightweight parallel task engine
#
# Copyright (c) 2012-2017 Illumina, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
# WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
#
#
# demonstrate/test addTask() cwd option
#
import os.path
import sys
# add module path by hand
#
scriptDir=os.path.abspath(os.path.dirname(__file__))
sys.path.append(scriptDir+"/../../src")
from pyflow import WorkflowRunner
# all pyflow workflows are written into classes derived from
# pyflow.WorkflowRunner:
#
class CwdWorkflow(WorkflowRunner):
    """Workflow demonstrating/testing the addTask() ``cwd`` option."""

    def workflow(self):
        """Add two shell tasks that each succeed only when executed in this
        process's parent directory — once given as an absolute cwd, once as
        a relative one."""
        here = os.getcwd()
        parentdir = os.path.abspath(os.path.join(here, ".."))
        self.flowLog("testing pyflow cwd: '%s' parentdir: '%s'" % (here, parentdir))
        # Each command exits non-zero unless $(pwd) equals parentdir.
        self.addTask("testAbsCwd", "[ $(pwd) == '%s' ]; exit $?" % (parentdir), cwd=parentdir)
        self.addTask("testRelCwd", "[ $(pwd) == '%s' ]; exit $?" % (parentdir), cwd="..")
# Instantiate the workflow.
#
wflow = CwdWorkflow()
# Run the workflow locally and exit with its return code so the shell can
# see task failures.
#
retval=wflow.run(mode="local")
sys.exit(retval)
| StarcoderdataPython |
3311932 | <gh_stars>0
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import mock
from twisted.trial import unittest
from buildbot.changes import manager, base
class TestChangeManager(unittest.TestCase):
    """Unit tests for buildbot's ChangeManager service."""

    def setUp(self):
        self.cm = manager.ChangeManager()
        self.cm.parent = mock.Mock()
        self.cm.startService()

    def tearDown(self):
        # stopService returns a deferred; trial waits on it.
        return self.cm.stopService()

    def test_addSource_removeSource(self):
        class MySource(base.ChangeSource):
            pass
        src = MySource()
        self.cm.addSource(src)
        # addSource should set the source's 'master'
        self.assertIs(src.master, self.cm.parent)
        d = self.cm.removeSource(src)

        def check(_):
            # and removeSource should remove it.
            self.assertIsNone(src.master)
        # BUG FIX: 'check' was defined but never attached to the deferred,
        # so the post-removal assertion never actually ran.
        d.addCallback(check)
        return d
| StarcoderdataPython |
3369755 | import argparse
from youtube_uploader_selenium import YouTubeUploader
from typing import Optional
import json
import os
if __name__ == "__main__":
    # Load the list of pending video records; each entry is a dict whose
    # 'path' key holds the local file path of the video to upload.
    with open('final_sleep_video.json',encoding='utf8') as json_file:
        videos = json.load(json_file)
    for v in videos:
        v_src = v['path']
        # Skip entries whose source file is missing on disk.
        if not os.path.isfile(v_src):
            print(v_src + ' not exist')
            continue
        print(v)
        uploader = YouTubeUploader(v)
        was_video_uploaded, video_id = uploader.upload()
        if was_video_uploaded:
            # Delete the local file once it has been uploaded...
            print('rm ' + v_src)
            os.remove(v_src)
            # ...and stop after the first successful upload.
            # NOTE(review): the 'break' means at most one video is uploaded
            # per run -- presumably to respect upload quotas; confirm intent.
            break
| StarcoderdataPython |
3233230 | <gh_stars>10-100
#!/usr/bin/env python3
import requests
import json
from enum import Enum
PUBLIC_API_BASE_URL = "http://localhost:8080/api"
PRIVATE_API_BASE_URL = "http://localhost:9090/api"
HEADERS = {'Content-Type': 'application/json'}


class Scope(Enum):
    """Which API endpoint a request is addressed to."""
    PUBLIC = 0
    PRIVATE = 1


def api_url(path, scope=Scope.PUBLIC):
    """Return the full URL for *path* on the API selected by *scope*.

    Raises Exception for an unknown *scope*.
    """
    if scope == Scope.PUBLIC:
        return "%s/%s" % (PUBLIC_API_BASE_URL, path)
    elif scope == Scope.PRIVATE:
        # BUG FIX: this branch previously built the URL from
        # PUBLIC_API_BASE_URL, silently routing private-scope calls to
        # the public endpoint.
        return "%s/%s" % (PRIVATE_API_BASE_URL, path)
    else:
        raise Exception("Unknown scope:", scope)
def api_post(path, data, scope=Scope.PUBLIC):
    """POST *data* (JSON-encoded) to *path* on the selected API."""
    return requests.post(api_url(path, scope), data=json.dumps(data), headers=HEADERS)


def api_get(path, scope=Scope.PUBLIC):
    """GET *path* on the selected API."""
    return requests.get(api_url(path, scope), headers=HEADERS)
def register_user(full_name, email, phone_num):
    """Register a new user and return the API 'result' payload.

    On an error response, print the error and return an empty dict.
    """
    payload = {
        "full_name": full_name,
        "email": email,
        "phone_num": phone_num
    }
    response = json.loads(api_post("user/v1/user/register", payload).text)
    if response.get("status") and response['status'] == "error":
        print("ERROR: %d (%s)" % (response['code'], response["description"]))
        return {}
    return response["result"]
def activate_user(token, password):
    """Activate a registered user with the activation *token* and a new password."""
    payload = {"token": token, "password": password}
    return api_post("user/v1/user/activate", payload)
def authorize(email, phone, passhash):
    """Authenticate and, on success, install the access token into HEADERS."""
    response = api_post("auth/v1/authorize", {
        "email": email,
        "phone": phone,
        "passhash": passhash
    })
    body = response.json()
    if body.get("code") == 0:
        # Every subsequent request will carry the session token.
        HEADERS["X-Access-Token"] = body["result"]["token"]
    return response
def get_key():
    """Fetch the server's key material from the auth service."""
    return api_get("auth/v1/get_key")
| StarcoderdataPython |
3237754 | from typing import Dict
from typing import List
from typing import Union
class Item:
    """A basic inventory item with a name, a weight and a size.

    Plain items cannot be equipped; subclasses may flip ``equippable``
    and populate ``equippable_positions``.
    """

    # Class-level tag identifying the kind of object.
    type = 'item'

    def __init__(self, name: str, weight: int, size: int) -> None:
        self.name = name
        self.size = size
        self.weight = weight
        # Not equippable by default, hence no valid equipment slots.
        self.equippable = False
        self.equippable_positions: list = []
| StarcoderdataPython |
1724498 | from decimal import Decimal
from datetime import datetime
from mockdatagen.helpers import MockGen
from money.models import CurrencyData, Denomination, VAT, VATPeriod, AccountingGroup
@MockGen.register
class CurrencyDataGen:
    # Seeds the Euro currency; get_or_create keeps this idempotent.
    model = CurrencyData

    @staticmethod
    def func():
        CurrencyData.objects.get_or_create(iso="EUR", name="Euro", symbol="€", digits=2)

    # No other generators need to run first.
    requirements = {}
@MockGen.register
class DenominationDataGen:
    """Seed the standard euro coin and banknote denominations."""
    model = Denomination

    @staticmethod
    def func():
        cur = CurrencyData.objects.first()
        # Smallest to largest; get_or_create keeps the seeding idempotent.
        amounts = ["0.05", "0.1", "0.2", "0.5", "1", "2", "5",
                   "10", "20", "50", "100", "200", "500"]
        for amount in amounts:
            Denomination.objects.get_or_create(currency=cur, amount=Decimal(amount))

    requirements = {CurrencyData}
@MockGen.register
class VatDataGen:
    # Seeds the two VAT rate groups; idempotent via get_or_create.
    model = VAT

    @staticmethod
    def func():
        VAT.objects.get_or_create(name="HIGH", active=True)
        VAT.objects.get_or_create(name="LOW", active=True)

    requirements = {}
@MockGen.register
class VatPeriodDataGen:
    # Seeds one rate period per VAT group, both starting at the same
    # fixed historical date.
    model = VATPeriod

    @staticmethod
    def func():
        vat = VAT.objects.get(name="LOW")
        vat2 = VAT.objects.get(name="HIGH")
        # Fixed begin date shared by both periods.
        time = datetime(2010, 6, 18, 21, 18, 22, 449637)
        VATPeriod.objects.get_or_create(vat=vat, begin_date=time, vatrate=Decimal("1.06"))
        VATPeriod.objects.get_or_create(vat=vat2, begin_date=time, vatrate=Decimal("1.21"))

    requirements = {VAT}
@MockGen.register
class AccountingGroupGen:
    """Seed the accounting groups, each linked to a VAT rate group."""
    model = AccountingGroup

    @staticmethod
    def func():
        vat = VAT.objects.get(name="LOW")
        vat2 = VAT.objects.get(name="HIGH")
        # FIX: dropped the unused 'time' local that was copy-pasted from
        # VatPeriodDataGen; AccountingGroup takes no date field here.
        AccountingGroup.objects.get_or_create(vat_group=vat2, accounting_number=1921, name="Other")
        AccountingGroup.objects.get_or_create(vat_group=vat, accounting_number=1337, name="Books")

    requirements = {VAT}
| StarcoderdataPython |
3247458 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import click
from jira import JIRAError
from prettytable import PrettyTable
from jirainfo import helpers
from jirainfo.jirahelper import JiraHelper
import time
# Lower-cased Jira issue-type names bucketed into changelog sections;
# anything not listed here ends up in the "others" section.
ISSUE_TYPE_MAPPING = {
    'features': ['task', 'aufgabe', 'story'],
    'bugs': ['bug']
}
# Root click group: collects the connection settings (flags or env vars)
# and passes them to the subcommands via the click context.
@click.group()
@click.option('--host', '-h', envvar="JIRAINFO_HOST", help="Jira host. Env var: JIRAINFO_HOST.")
@click.option('--user', '-u', envvar="JIRAINFO_USER", help="Jira username (optional). Env var: JIRAINFO_USER.")
@click.option('--password', '-p', envvar="JIRAINFO_PASS", help="Jira password (optional). Env var: JIRAINFO_PASS.")
@click.version_option()
@click.pass_context
def cli(ctx, host, user, password):
    """
    jira-info is an application that helps you to create changelogs from Jira issues or to get some additional information for the given issues.
    The issue key are read from stdin and spereated by a line break.
    Note:
    Make sure that a Jira host is specified. Either via the --host option or via the JIRAINFO_HOST environment variable.
    You can also pass the user and password option via environment variables (JIRAINFO_USER, JIRAINFO_PASS).
    Example:
    # This example prints the summary information for the given issues.
    echo 'PROJECT-1234\n,PROJECT-2234\n,PROJECT-3234' > issues.txt
    cat issues.txt | jira-info --host 'http://jira.atlassian.com' --user 'user' -password 'password' summary -
    """
    # Stash the connection settings on the click context so every
    # subcommand can build a JiraHelper from them.
    ctx.obj = {}
    ctx.obj['host'] = host
    ctx.obj['user'] = user
    ctx.obj['password'] = password
@cli.command('summary')
@click.argument('input', type=click.File('rb'))
@click.pass_context
def summary(ctx, input):
    """Prints the summary for each ticket"""
    # Abort early when no Jira host is configured.
    helpers.exitIfNoHost(ctx)
    jira = JiraHelper(ctx.obj['host'], ctx.obj['user'], ctx.obj['password'])
    # One issue key per input line; trim padding and the trailing newline.
    # NOTE(review): the file is opened 'rb' yet stripped with str arguments,
    # which assumes a Python 2 runtime (matching the py2-era style of this
    # file) -- confirm before porting to Python 3.
    tickets = [line.strip(' ').rstrip('\n') for line in input]
    results = []
    for ticket in tickets:
        results.append([ticket, helpers.getSummaryOrExit(jira, ticket), jira.host + '/browse/' + ticket])
    table = PrettyTable(["Issue", "Summary", "Link"])
    table.align["Issue"] = "l"
    table.align["Summary"] = "l"
    table.align["Link"] = "l"
    # FIX: dropped the unused 'rows' list that was never populated, and
    # renamed the loop variable that shadowed 'line' from the read loop.
    for row in results:
        table.add_row(row)
    click.echo(table)
@cli.command()
@click.argument('input', type=click.File('rb'))
@click.pass_context
def emailreleaselog(ctx, input):
    """Generates a changelog for the release email."""
    helpers.exitIfNoHost(ctx)
    jira = JiraHelper(ctx.obj['host'], ctx.obj['user'], ctx.obj['password'])
    issueNumbers = helpers.readIssuesFromInput(input)
    # Resolve the keys to full issue objects, then render the e-mail
    # template with key / browse-link / summary per issue.
    data = [
        {'key': issue.key,
         'link': jira.host + '/browse/' + issue.key,
         'summary': issue.fields.summary}
        for issue in helpers.getIssuesOrExit(jira, issueNumbers)
    ]
    click.echo(helpers.compileEmailTemplate(data))
@cli.command()
@click.argument('input', type=click.File('rb'))
@click.option('--releasename', '-r', default='Release', help='The name of the release')
@click.pass_context
def changelog(ctx, input, releasename):
    """
    Generates a changelog for the given issues.
    """
    helpers.exitIfNoHost(ctx)
    jira = JiraHelper(ctx.obj['host'], ctx.obj['user'], ctx.obj['password'])
    issueKeys = helpers.readIssuesFromInput(input)
    issues = helpers.getIssuesOrExit(jira, issueKeys)
    # Bucket the issues by type; anything that is neither a feature nor
    # a bug (per ISSUE_TYPE_MAPPING) lands in 'others'.
    buckets = {'features': [], 'bugs': [], 'others': []}
    for issue in issues:
        kind = str(issue.fields.issuetype).lower()
        if kind in ISSUE_TYPE_MAPPING['features']:
            buckets['features'].append(issue)
        elif kind in ISSUE_TYPE_MAPPING['bugs']:
            buckets['bugs'].append(issue)
        else:
            buckets['others'].append(issue)
    # Template metadata: host for links, today's date, release title.
    meta = {
        'jira': jira.host,
        'date': time.strftime('%d-%m-%Y', time.gmtime()),
        'releasename': releasename
    }
    output = helpers.compileChangelogTemplate(buckets['features'], buckets['bugs'], buckets['others'], meta)
    click.echo(output)
if __name__ == '__main__':
    # Entry point: dispatch to the click command group.
    cli()
| StarcoderdataPython |
3295581 | <gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is subject to the terms and conditions defined in
# file 'LICENSE.md', which is part of this source code package.
#
from kubernetes.utils import filter_model
class NodeAddress(object):
    """
    https://kubernetes.io/docs/api-reference/v1/definitions/#_v1_nodeaddress
    """

    def __init__(self, model=None):
        super(NodeAddress, self).__init__()
        # Backing fields for the 'type' and 'address' properties.
        self._type = None
        self._address = None
        if model is not None:
            self._build_with_model(filter_model(model))

    def _build_with_model(self, model=None):
        # Populate only the keys present in the (filtered) model dict.
        if 'type' in model:
            self.type = model['type']
        if 'address' in model:
            self.address = model['address']

    # ------------------------------------------------------------------------------------- type

    @property
    def type(self):
        return self._type

    @type.setter
    def type(self, v):
        # Validating setter; SyntaxError mirrors the convention used
        # throughout this client for invalid field values.
        if not isinstance(v, str):
            raise SyntaxError('NodeAddress: type: [ {0} ] is invalid.'.format(v))
        self._type = v

    # ------------------------------------------------------------------------------------- address

    @property
    def address(self):
        return self._address

    @address.setter
    def address(self, v):
        if not isinstance(v, str):
            raise SyntaxError('NodeAddress: address: [ {0} ] is invalid.'.format(v))
        self._address = v

    # ------------------------------------------------------------------------------------- serialize

    def serialize(self):
        """Return a dict containing only the fields that have been set."""
        data = {}
        for key, value in (('type', self.type), ('address', self.address)):
            if value:
                data[key] = value
        return data
| StarcoderdataPython |
110805 | # Generated by Django 3.1.2 on 2020-10-28 20:29
import cloudinary.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django (makemigrations); do not hand-edit the
    # operations -- create a follow-up migration instead.

    initial = True

    dependencies = [
        ('auth', '0012_alter_user_first_name_max_length'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Image: a Cloudinary-backed photo with a caption, newest first.
        migrations.CreateModel(
            name='Image',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('image', cloudinary.models.CloudinaryField(blank=True, max_length=255, verbose_name='image')),
                ('caption', models.TextField(max_length=400)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                'ordering': ['-created'],
            },
        ),
        # Profile: one-to-one with the auth user (the user FK doubles as
        # the primary key); 'following' links profiles to users.
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('first_name', models.TextField(blank=True, max_length=20)),
                ('last_name', models.TextField(blank=True, max_length=20)),
                ('bio', models.TextField(default='no bio...', max_length=250)),
                ('photo', cloudinary.models.CloudinaryField(blank=True, max_length=255, verbose_name='image')),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='auth.user')),
                ('email', models.TextField(blank=True, max_length=200)),
                ('slug', models.SlugField(blank=True, unique=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('following', models.ManyToManyField(blank=True, related_name='following', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-created',),
            },
        ),
        # Likes: join table between users and images.
        migrations.CreateModel(
            name='Likes',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='likes', to='instagram.image')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='likes', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Added after Image so the FK can reference the Profile model.
        migrations.AddField(
            model_name='image',
            name='author',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='instagram.profile'),
        ),
        # Comments: user comments attached to an image.
        migrations.CreateModel(
            name='Comments',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('comment', models.CharField(max_length=300)),
                ('posted_on', models.DateTimeField(auto_now=True)),
                ('image', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='instagram.image')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| StarcoderdataPython |
894 | <reponame>guilhermebc/docker-playground<filename>email-worker-compose/app/sender.py
import psycopg2
import redis
import json
from bottle import Bottle, request
class Sender(Bottle):
    """Bottle app that persists a message to Postgres and enqueues it on Redis."""

    def __init__(self):
        super().__init__()
        # POST / is the only route; it triggers self.send.
        self.route('/', method='POST', callback=self.send)
        self.fila = redis.StrictRedis(host='queue', port=6379, db=0)
        # NOTE(review): user 'postgress' looks like a typo for 'postgres',
        # but it must match the database container's config -- confirm
        # before changing.
        DSN = 'dbname=email_sender user=postgress host=db'
        self.conn = psycopg2.connect(DSN)

    def register_message(self, assunto, mensagem):
        """Store the message (subject/body) in the DB, then push it onto the queue."""
        SQL = 'INSERT INTO emails (assunto, mensagem) VALUES (%s, %s)'
        cur = self.conn.cursor()
        # FIX: close the cursor even when execute/commit raises, so a
        # failed insert no longer leaks the cursor.
        try:
            cur.execute(SQL, (assunto, mensagem))
            self.conn.commit()
        finally:
            cur.close()
        msg = {'assunto': assunto, 'mensagem': mensagem}
        self.fila.rpush('sender', json.dumps(msg))
        print('Message registered!')

    def send(self):
        """Handle POST /: read the form fields and register the message."""
        assunto = request.forms.get('assunto')
        mensagem = request.forms.get('mensagem')
        self.register_message(assunto, mensagem)
        return 'Message queued! Assunto: {} Mensage: {}'.format(
            assunto, mensagem
        )
if __name__ == '__main__':
    # Run the app standalone, listening on all interfaces.
    sender = Sender()
    sender.run(host='0.0.0.0', port=8080, debug=True)
1797852 | <reponame>Ostnor/DoS_SDN<filename>src/main/python/attack_prevention/DoSAttackProtection.py
#!/usr/bin/env python
import logging
from ..PythonServer import PacketStreamerHandler
class DoSAttackProtection(object):
    """Skeleton for the DoS attack protection component."""

    def __init__(self):
        # NOTE(review): binds the class-level 'log' attribute of
        # PacketStreamerHandler as this object's logger -- presumably a
        # shared logger; confirm it is initialised before first use.
        self.logger = PacketStreamerHandler.log
3305142 | <filename>rrd/config.py
# -*-coding:utf8-*-
# app config
import os
# App settings (all overridable via environment variables).
LOG_LEVEL = os.environ.get("LOG_LEVEL", 'WARNING')
SECRET_KEY = os.environ.get("SECRET_KEY", "secret-key")
# NOTE(review): the default is an int (30 days in seconds) but an env
# override arrives as a str -- confirm downstream code accepts both.
PERMANENT_SESSION_LIFETIME = os.environ.get("PERMANENT_SESSION_LIFETIME", 3600 * 24 * 30)
SITE_COOKIE = os.environ.get("SITE_COOKIE", "open-falcon-ck")
# Falcon+ API
API_ADDR = os.environ.get("API_ADDR", "http://127.0.0.1:8080/api/v1")
API_USER = os.environ.get("API_USER", "admin")
API_PASS = os.environ.get("API_PASS", "password")
# -- query config --
QUERY_ADDR = "http://127.0.0.1:8080/api/v1"
# portal database
# TODO: read from api instead of db
PORTAL_DB_HOST = os.environ.get("PORTAL_DB_HOST", "127.0.0.1")
PORTAL_DB_PORT = int(os.environ.get("PORTAL_DB_PORT", 3306))
PORTAL_DB_USER = os.environ.get("PORTAL_DB_USER", "root")
PORTAL_DB_PASS = os.environ.get("PORTAL_DB_PASS", "<PASSWORD>")
PORTAL_DB_NAME = os.environ.get("PORTAL_DB_NAME", "falcon_portal")
# alarm database
# TODO: read from api instead of db
ALARM_DB_HOST = os.environ.get("ALARM_DB_HOST", "127.0.0.1")
ALARM_DB_PORT = int(os.environ.get("ALARM_DB_PORT", 3306))
ALARM_DB_USER = os.environ.get("ALARM_DB_USER", "root")
ALARM_DB_PASS = os.environ.get("ALARM_DB_PASS", "<PASSWORD>")
ALARM_DB_NAME = os.environ.get("ALARM_DB_NAME", "alarms")
# ldap config
# NOTE(review): an env override sets LDAP_ENABLED to a non-empty str,
# which is truthy even for "false"/"0" -- confirm how it is consumed.
LDAP_ENABLED = os.environ.get("LDAP_ENABLED", False)
LDAP_SERVER = os.environ.get("LDAP_SERVER", "ldap.forumsys.com:389")
LDAP_BASE_DN = os.environ.get("LDAP_BASE_DN", "dc=example,dc=com")
LDAP_BINDDN_FMT = os.environ.get("LDAP_BINDDN_FMT", "uid=%s,dc=example,dc=com")
LDAP_SEARCH_FMT = os.environ.get("LDAP_SEARCH_FMT", "uid=%s")
LDAP_ATTRS = ["cn", "mail", "telephoneNumber"]
LDAP_TLS_START_TLS = False
LDAP_TLS_CACERTDIR = ""
LDAP_TLS_CACERTFILE = "/etc/openldap/certs/ca.crt"
LDAP_TLS_CERTFILE = ""
LDAP_TLS_KEYFILE = ""
LDAP_TLS_REQUIRE_CERT = True
LDAP_TLS_CIPHER_SUITE = ""
# i18n
BABEL_DEFAULT_LOCALE = 'zh_CN'
BABEL_DEFAULT_TIMEZONE = 'Asia/Shanghai'
# available translations
LANGUAGES = {
    'zh_CN': 'Chinese-Simplified',
}
# portal site config
MAINTAINERS = ['root']
CONTACT = '<EMAIL>'
# Optional local overrides: rrd/local_config.py may redefine any of the
# settings above.
try:
    from rrd.local_config import *
except ImportError:
    # FIX: narrowed from a bare 'except' so genuine errors inside
    # local_config (e.g. syntax errors) are no longer silently masked,
    # and switched to the parenthesized print form, which works on both
    # Python 2 and 3.
    print("[warning] no local config file")
| StarcoderdataPython |
4808762 | <reponame>jbushago/GamestonkTerminal<filename>custom_pre_commit/check_doc.py<gh_stars>1-10
import argparse
import os
import sys
from typing import List, Optional
def clean_input(text: str) -> List[str]:
    """Normalize one line of a CHOICES_COMMANDS definition into command names.

    Strips the assignment syntax, type annotations, brackets and quotes,
    then splits on commas and returns the non-empty, whitespace-trimmed
    entries.
    """
    text = text.replace(" str ", "")
    text = text.strip()
    text = text.replace("CHOICES_COMMANDS", "")
    text = text.replace("=", "")
    text = text.replace("[", "")
    text = text.replace("]", "")
    # NOTE: the original also replaced "[str]" here, but "[" and "]" were
    # already removed above, so that replace could never match -- dropped.
    text = text.replace("List", "")
    text = text.replace(":", "")
    text = text.replace("str ", "")
    text = text.replace('"', "")
    text = text.replace("'", "")
    # FIX: filter on the *stripped* value so whitespace-only entries no
    # longer survive as empty strings in the result.
    return [item for item in (part.strip() for part in text.split(",")) if item]
def main(ignore_files: Optional[str], ignore_commands: Optional[str]):
    """Checks commands in the repository to ensure they are documented

    Exits with status 0 when every command appears in the main menu YAML;
    otherwise prints the undocumented commands and exits with status 1.

    Parameters
    ----------
    ignore_files : Optional[str]
        Comma-separated controller files that should not be checked
    ignore_commands : Optional[str]
        Comma-separated commands that should not be checked
    """
    ignore_file_list = ignore_files.split(",") if ignore_files else []
    ignore_cmds_list = ignore_commands.split(",") if ignore_commands else []

    path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    gst_path = os.path.join(path, "gamestonk_terminal/")
    main_yaml_filename = os.path.join(path, "website/data/menu/main.yml")

    # Collect every *_controller.py under the terminal package.
    files = []
    for r, _, f in os.walk(gst_path):
        for file in f:
            if file.endswith("_controller.py") and file not in ignore_file_list:
                files.append(os.path.join(r, file))

    # Scan each controller for its CHOICES_COMMANDS block; 'record' keeps
    # us consuming continuation lines until the closing bracket.
    commands = []
    record = 0
    for item in files:
        with open(item) as controller:
            for line in controller:
                if "CHOICES_COMMANDS" in line or record == 1:
                    commands += clean_input(line)
                    record = 1
                    if "]" in line.replace("str]", ""):
                        record = 0
                        break

    clean_commands = {
        x for x in commands if x and "#" not in x and x not in ignore_cmds_list
    }

    # FIX: renamed the file handle, which shadowed the well-known module
    # name 'yaml'.
    with open(main_yaml_filename) as menu_file:
        lines = menu_file.read()

    # FIX: build the sorted result directly instead of appending to a
    # list, re-wrapping it with a redundant list() call, and sorting
    # in place.
    undocumented = sorted(cmd for cmd in clean_commands if cmd not in lines)
    if not undocumented:
        sys.exit(0)
    print("The following commands do not have documentation:")
    for item in undocumented:
        print(item)
    sys.exit(1)
if __name__ == "__main__":
    # CLI wrapper: both options take comma-separated lists.
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        prog="check_doc",
        description="checks for proper documentation in gst",
    )
    parser.add_argument(
        "--ignore-files",
        dest="files",
        help="The files to not check.",
        type=str,
    )
    parser.add_argument(
        "--ignore-commands",
        dest="commands",
        help="The commands to not check.",
        type=str,
    )
    ns_parser = parser.parse_args()
    main(ns_parser.files, ns_parser.commands)
| StarcoderdataPython |
196972 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import binascii
import os
import pytest
from cryptography.hazmat.backends.interfaces import CipherBackend
from cryptography.hazmat.primitives import keywrap
from cryptography.hazmat.primitives.ciphers import algorithms, modes
from .utils import _load_all_params
from ...utils import load_nist_vectors
@pytest.mark.requires_backend_interface(interface=CipherBackend)
class TestAESKeyWrap(object):
    """AES key wrap (RFC 3394) tests: NIST CAVP vectors plus argument validation."""

    @pytest.mark.parametrize(
        "params",
        _load_all_params(
            os.path.join("keywrap", "kwtestvectors"),
            ["KW_AE_128.txt", "KW_AE_192.txt", "KW_AE_256.txt"],
            load_nist_vectors
        )
    )
    @pytest.mark.supported(
        only_if=lambda backend: backend.cipher_supported(
            algorithms.AES(b"\x00" * 16), modes.ECB()
        ),
        skip_message="Does not support AES key wrap (RFC 3394) because AES-ECB"
        " is unsupported",
    )
    def test_wrap(self, backend, params):
        # Wrap the vector's plaintext key and compare against the
        # expected ciphertext ("c") from the vector file.
        wrapping_key = binascii.unhexlify(params["k"])
        key_to_wrap = binascii.unhexlify(params["p"])
        wrapped_key = keywrap.aes_key_wrap(wrapping_key, key_to_wrap, backend)
        assert params["c"] == binascii.hexlify(wrapped_key)

    @pytest.mark.parametrize(
        "params",
        _load_all_params(
            os.path.join("keywrap", "kwtestvectors"),
            ["KW_AD_128.txt", "KW_AD_192.txt", "KW_AD_256.txt"],
            load_nist_vectors
        )
    )
    @pytest.mark.supported(
        only_if=lambda backend: backend.cipher_supported(
            algorithms.AES(b"\x00" * 16), modes.ECB()
        ),
        skip_message="Does not support AES key wrap (RFC 3394) because AES-ECB"
        " is unsupported",
    )
    def test_unwrap(self, backend, params):
        # Vectors flagged "fail" carry corrupted ciphertext and must raise
        # InvalidUnwrap; the rest must round-trip back to the plaintext.
        wrapping_key = binascii.unhexlify(params["k"])
        wrapped_key = binascii.unhexlify(params["c"])
        if params.get("fail") is True:
            with pytest.raises(keywrap.InvalidUnwrap):
                keywrap.aes_key_unwrap(wrapping_key, wrapped_key, backend)
        else:
            unwrapped_key = keywrap.aes_key_unwrap(
                wrapping_key, wrapped_key, backend
            )
            assert params["p"] == binascii.hexlify(unwrapped_key)

    @pytest.mark.supported(
        only_if=lambda backend: backend.cipher_supported(
            algorithms.AES(b"\x00" * 16), modes.ECB()
        ),
        skip_message="Does not support AES key wrap (RFC 3394) because AES-ECB"
        " is unsupported",
    )
    def test_wrap_invalid_key_length(self, backend):
        # The wrapping key must be of length [16, 24, 32]
        with pytest.raises(ValueError):
            keywrap.aes_key_wrap(b"badkey", b"sixteen_byte_key", backend)

    @pytest.mark.supported(
        only_if=lambda backend: backend.cipher_supported(
            algorithms.AES(b"\x00" * 16), modes.ECB()
        ),
        skip_message="Does not support AES key wrap (RFC 3394) because AES-ECB"
        " is unsupported",
    )
    def test_unwrap_invalid_key_length(self, backend):
        # Same wrapping-key length rule applies on the unwrap side.
        with pytest.raises(ValueError):
            keywrap.aes_key_unwrap(b"badkey", b"\x00" * 24, backend)

    @pytest.mark.supported(
        only_if=lambda backend: backend.cipher_supported(
            algorithms.AES(b"\x00" * 16), modes.ECB()
        ),
        skip_message="Does not support AES key wrap (RFC 3394) because AES-ECB"
        " is unsupported",
    )
    def test_wrap_invalid_key_to_wrap_length(self, backend):
        # Keys to wrap must be at least 16 bytes long
        with pytest.raises(ValueError):
            keywrap.aes_key_wrap(b"sixteen_byte_key", b"\x00" * 15, backend)
        # Keys to wrap must be a multiple of 8 bytes
        with pytest.raises(ValueError):
            keywrap.aes_key_wrap(b"sixteen_byte_key", b"\x00" * 23, backend)

    def test_unwrap_invalid_wrapped_key_length(self, backend):
        # Keys to unwrap must be at least 24 bytes
        with pytest.raises(ValueError):
            keywrap.aes_key_unwrap(b"sixteen_byte_key", b"\x00" * 16, backend)
        # Keys to unwrap must be a multiple of 8 bytes
        with pytest.raises(ValueError):
            keywrap.aes_key_unwrap(b"sixteen_byte_key", b"\x00" * 27, backend)
| StarcoderdataPython |
3329059 | <filename>web/work/model.py<gh_stars>0
# -*- coding: utf-8 -*-
# @File : model.py
# @Coder : Einsfat
# @Date : 2021/7/14 23:18
from sqlalchemy import Column, TIMESTAMP, String, Integer, BigInteger, func
from web.common.db.db_base import Base
class BaseModel(Base):
    """
    Abstract base model: shared surrogate primary key and audit timestamps.
    """
    __abstract__ = True
    id = Column(BigInteger, primary_key=True, index=True, doc='序号')
    create_time = Column(TIMESTAMP, nullable=False, server_default=func.now(), doc='创建时间')
    # NOTE(review): update_time has only a server_default and no onupdate,
    # so it is not refreshed automatically on UPDATE -- confirm intent.
    update_time = Column(TIMESTAMP, nullable=False, server_default=func.now(), doc='更新时间')
update_time = Column(TIMESTAMP, nullable=False, server_default=func.now(), doc='更新时间')
class User(BaseModel):
    """
    User record: a name plus an integer role code.
    """
    __tablename__ = "t_user"
    name = Column(String(255), nullable=False, index=True)
    role = Column(Integer, nullable=False, index=True)

    def to_dict(self) -> dict:
        """Serialize all mapped columns into a plain dict."""
        result = {c.name: getattr(self, c.name) for c in self.__table__.columns}
        return result
| StarcoderdataPython |
160243 | import datetime
import os
import copy
import json
import numpy as np
from pytz import timezone
from gamified_squad import GamifiedSquad
from agent import CustomAgent
import generic
import evaluate
SAVE_CHECKPOINT = 100000
def train():
time_1 = datetime.datetime.now()
config = generic.load_config()
env = GamifiedSquad(config)
env.split_reset("train")
agent = CustomAgent(config, env.has_token_set)
if config["general"]["visdom"]:
# visdom
import visdom
viz = visdom.Visdom()
plt_win = None
eval_plt_win = None
plt_q_value_win = None
plt_steps_win = None
eval_plt_steps_win = None
viz_avg_ig_acc, viz_avg_qa_acc = [], []
viz_avg_ig_q_value = []
viz_eval_ig_acc, viz_eval_qa_acc, viz_eval_steps = [], [], []
viz_avg_steps = []
step_in_total = 0
batch_no = 0
episode_no = 0
running_avg_qa_acc = generic.HistoryScoreCache(capacity=50)
running_avg_ig_acc = generic.HistoryScoreCache(capacity=50)
running_avg_qa_loss = generic.HistoryScoreCache(capacity=50)
running_avg_ig_loss = generic.HistoryScoreCache(capacity=50)
running_avg_ig_q_value = generic.HistoryScoreCache(capacity=50)
running_avg_steps = generic.HistoryScoreCache(capacity=50)
output_dir = "."
data_dir = "."
json_file_name = agent.experiment_tag.replace(" ", "_")
best_qa_acc_so_far = 0.0
prev_performance = 0.0
i_am_patient = 0
# load model from checkpoint
if os.path.exists(output_dir + "/" + agent.experiment_tag + "_model.pt"):
print("checkpoint already exist.")
exit(0)
if os.path.exists(data_dir + "/" + agent.load_graph_generation_model_from_tag + ".pt"):
agent.load_pretrained_graph_generation_model(data_dir + "/" + agent.load_graph_generation_model_from_tag + ".pt")
if agent.load_pretrained:
if os.path.exists(data_dir + "/" + agent.load_from_tag + ".pt"):
agent.load_pretrained_model(data_dir + "/" + agent.load_from_tag + ".pt") # load partial graph
agent.update_target_net()
while(True):
if episode_no > agent.max_episode:
break
np.random.seed(episode_no)
env.seed(episode_no)
obs, infos = env.reset()
batch_size = len(obs)
report = agent.report_frequency > 0 and (episode_no % agent.report_frequency <= max(episode_no - batch_size, 0) % agent.report_frequency)
__save__ = episode_no % SAVE_CHECKPOINT <= max(episode_no - batch_size, 0) % SAVE_CHECKPOINT
if report:
print("====================================================================================", episode_no)
print("-- Q: %s" % (agent.bert_tokenizer.decode(infos[0]["q"]).encode('utf-8')))
print("-- A: %s" % (infos[0]["a_string"][0].encode('utf-8')))
agent.train()
agent.init(obs, infos)
quest_list = agent.get_game_quest_info(infos)
agent.kg.push_batch_question(quest_list, [item["q_srl"] for item in infos])
previous_dynamics = None
previous_belief = None
input_quest, input_quest_mask, quest_id_list = agent.get_agent_inputs(quest_list)
tmp_replay_buffer = []
print_cmds = []
prev_commands = ["restart" for _ in range(batch_size)]
belief_buffer = []
act_randomly = False if agent.noisy_net else episode_no < agent.learn_start_from_this_episode
for _ in range(agent.max_nb_steps_per_episode):
# generate commands
if agent.noisy_net:
agent.reset_noise() # Draw a new set of noisy weights
commands, replay_info, current_dynamics, current_belief = agent.act(obs, infos, input_quest, input_quest_mask, quest_id_list, prev_commands, previous_dynamics, previous_belief, random=act_randomly)
tmp_replay_buffer.append(replay_info)
obs, infos = env.step(commands)
prev_commands = commands
previous_dynamics = current_dynamics
previous_belief = current_belief
belief_buffer.append(current_belief)
if agent.noisy_net and step_in_total % agent.update_per_k_game_steps == 0:
agent.reset_noise() # Draw a new set of noisy weights
if episode_no >= agent.learn_start_from_this_episode and step_in_total % agent.update_per_k_game_steps == 0:
interaction_loss, interaction_q_value = agent.update_interaction()
if interaction_loss is not None:
running_avg_ig_loss.push(interaction_loss)
running_avg_ig_q_value.push(interaction_q_value)
qa_loss = agent.update_qa()
if qa_loss is not None:
running_avg_qa_loss.push(qa_loss)
step_in_total += 1
still_running = generic.to_np(replay_info[-1])
print_cmds.append(commands[0] if still_running[0] else "--")
if np.sum(still_running) == 0:
break
if report:
print(" / ".join(print_cmds).encode('utf-8'))
# The agent has exhausted all steps, now answer question.
chosen_head_tails = agent.answer_question_act(agent.naozi.get(), quest_list, current_belief) # batch
chosen_head_tails_np = generic.to_np(chosen_head_tails)
chosen_answer_strings = generic.get_answer_strings(agent.naozi.get(), chosen_head_tails_np, agent.bert_tokenizer, agent.special_token_ids)
answer_strings = [item["a_string"] for item in infos]
answer_token_ids = [item["a"] for item in infos]
qa_reward_np = generic.get_qa_reward(chosen_answer_strings, answer_strings)
obs_strings = [agent.bert_tokenizer.decode(agent.naozi.get(i)) for i in range(batch_size)]
ig_reward_np = generic.get_sufficient_info_reward(agent.naozi.get(), answer_token_ids)
ig_reward = generic.to_pt(ig_reward_np, enable_cuda=False, type='float') # batch
# push qa experience into qa replay buffer
replay_node_vocab = agent.kg.get_node_vocabulary()
replay_relation_vocab = agent.kg.get_relation_vocabulary()
replay_triplets = agent.kg.get_triplets()
for b in range(batch_size): # data points in batch
is_prior = qa_reward_np[b] > agent.qa_reward_prior_threshold * agent.qa_replay_memory.avg_rewards()
# if the agent is not in the correct state, do not push it into replay buffer
if np.mean(ig_reward_np[b]) == 0.0:
continue
agent.qa_replay_memory.push(is_prior, qa_reward_np[b], agent.naozi.get_sentence_lists(b), quest_list[b], replay_node_vocab[b], replay_relation_vocab[b], replay_triplets[b], answer_token_ids[b], belief_buffer[-1][b].cpu() if belief_buffer[-1][b] is not None else None)
# small positive reward whenever it answers question correctly
masks_np = [generic.to_np(item[-1]) for item in tmp_replay_buffer]
command_rewards_np = []
for i in range(len(tmp_replay_buffer)):
if i == len(tmp_replay_buffer) - 1:
r = ig_reward * tmp_replay_buffer[i][-1]
r_np = ig_reward_np * masks_np[i]
else:
# give reward only at that one game step, not all
r = ig_reward * (tmp_replay_buffer[i][-1] - tmp_replay_buffer[i + 1][-1])
r_np = ig_reward_np * (masks_np[i] - masks_np[i + 1])
tmp_replay_buffer[i].append(r)
command_rewards_np.append(r_np)
command_rewards_np = np.array(command_rewards_np)
if report:
print(command_rewards_np[:, 0])
# push experience into replay buffer
for b in range(len(ig_reward_np)):
is_prior = np.sum(command_rewards_np, 0)[b] > 0.0
mem = []
for i in range(len(tmp_replay_buffer)):
batch_description_list, batch_chosen_indices, batch_chosen_ctrlf_indices, batch_graph_node_vocabulary, batch_graph_relation_vocabulary, batch_graph_triplets, _, batch_rewards = tmp_replay_buffer[i]
mem.append([copy.deepcopy(batch_description_list[b]),
copy.deepcopy(quest_list[b]),
batch_chosen_indices[b],
batch_chosen_ctrlf_indices[b],
copy.deepcopy(batch_graph_node_vocabulary[b]),
copy.deepcopy(batch_graph_relation_vocabulary[b]),
copy.deepcopy(batch_graph_triplets[b]),
copy.deepcopy(belief_buffer[i][b].cpu()) if belief_buffer[i][b] is not None else None,
batch_rewards[b]])
if masks_np[i][b] == 0.0:
break
agent.replay_memory.push(is_prior, mem)
qa_acc = np.mean(qa_reward_np)
ig_acc = np.mean(ig_reward_np)
step_masks_np = np.sum(np.array(masks_np), 0) # batch
for i in range(len(qa_reward_np)):
# if the answer is totally wrong, we assume it used all steps
if qa_reward_np[i] == 0.0:
step_masks_np[i] = agent.max_nb_steps_per_episode
used_steps = np.mean(step_masks_np)
running_avg_qa_acc.push(qa_acc)
running_avg_ig_acc.push(ig_acc)
running_avg_steps.push(used_steps)
print_rewards = np.sum(np.mean(command_rewards_np, -1))
if report:
print("-- OBS: %s" % (obs_strings[0].encode('utf-8')))
print("-- PRED: %s" % (chosen_answer_strings[0].encode('utf-8')))
# finish game
agent.finish_of_episode(episode_no, batch_no, batch_size)
time_2 = datetime.datetime.now()
eastern_time = datetime.datetime.now(timezone('US/Eastern')).strftime("%b %d %Y %H:%M:%S")
if report:
print("Episode: {:3d} | {:s} | time spent: {:s} | interaction loss: {:2.3f} | interaction qvalue: {:2.3f} | qa loss: {:2.3f} | rewards: {:2.3f} | qa acc: {:2.3f}/{:2.3f} | sufficient info: {:2.3f}/{:2.3f} | used steps: {:2.3f}".format(episode_no, eastern_time, str(time_2 - time_1).rsplit(".")[0], running_avg_ig_loss.get_avg(), running_avg_ig_q_value.get_avg(), running_avg_qa_loss.get_avg(), print_rewards, qa_acc, running_avg_qa_acc.get_avg(), ig_acc, running_avg_ig_acc.get_avg(), running_avg_steps.get_avg()))
if __save__:
agent.save_model_to_path(output_dir + "/" + agent.experiment_tag + "_ep" + str(episode_no) + "_model.pt")
if not report or episode_no < agent.learn_start_from_this_episode:
episode_no += batch_size
batch_no += 1
continue
eval_qa_acc, eval_ig_acc, eval_used_steps = 0.0, 0.0, 0.0
# evaluate
if agent.run_eval:
eval_qa_acc, eval_ig_acc, eval_used_steps = evaluate.evaluate(env, agent, "valid")
env.split_reset("train")
# if run eval, then save model by eval accucacy
if eval_qa_acc >= best_qa_acc_so_far:
best_qa_acc_so_far = eval_qa_acc
agent.save_model_to_path(output_dir + "/" + agent.experiment_tag + "_model.pt")
curr_performance = eval_qa_acc
else:
if running_avg_qa_acc.get_avg() >= best_qa_acc_so_far:
best_qa_acc_so_far = running_avg_qa_acc.get_avg()
agent.save_model_to_path(output_dir + "/" + agent.experiment_tag + "_model.pt")
curr_performance = running_avg_qa_acc.get_avg()
if prev_performance <= curr_performance:
i_am_patient = 0
else:
i_am_patient += 1
prev_performance = curr_performance
# if patient >= patience, resume from checkpoint
if agent.patience > 0 and i_am_patient >= agent.patience:
if os.path.exists(output_dir + "/" + agent.experiment_tag + "_model.pt"):
print('reload from a good checkpoint...')
agent.load_pretrained_model(output_dir + "/" + agent.experiment_tag + "_model.pt", load_partial_graph=False)
agent.update_target_net()
i_am_patient = 0
# plot using visdom
if config["general"]["visdom"] and not agent.debug_mode:
viz_avg_ig_acc.append(running_avg_ig_acc.get_avg())
viz_avg_qa_acc.append(running_avg_qa_acc.get_avg())
viz_avg_ig_q_value.append(running_avg_ig_q_value.get_avg())
viz_eval_ig_acc.append(eval_ig_acc)
viz_eval_qa_acc.append(eval_qa_acc)
viz_eval_steps.append(eval_used_steps)
viz_avg_steps.append(running_avg_steps.get_avg())
viz_x = np.arange(len(viz_avg_ig_acc)).tolist()
if plt_win is None:
plt_win = viz.line(X=viz_x, Y=viz_avg_ig_acc,
opts=dict(title=agent.experiment_tag + "_train"),
name="sufficient info")
viz.line(X=viz_x, Y=viz_avg_qa_acc,
opts=dict(title=agent.experiment_tag + "_train"),
win=plt_win, update='append', name="qa")
else:
viz.line(X=[len(viz_avg_ig_acc) - 1], Y=[viz_avg_ig_acc[-1]],
opts=dict(title=agent.experiment_tag + "_train"),
win=plt_win,
update='append', name="sufficient info")
viz.line(X=[len(viz_avg_qa_acc) - 1], Y=[viz_avg_qa_acc[-1]],
opts=dict(title=agent.experiment_tag + "_train"),
win=plt_win,
update='append', name="qa")
if plt_q_value_win is None:
plt_q_value_win = viz.line(X=viz_x, Y=viz_avg_ig_q_value,
opts=dict(title=agent.experiment_tag + "_train_q_value"),
name="sufficient info")
else:
viz.line(X=[len(viz_avg_ig_q_value) - 1], Y=[viz_avg_ig_q_value[-1]],
opts=dict(title=agent.experiment_tag + "_train_q_value"),
win=plt_q_value_win,
update='append', name="sufficient info")
if plt_steps_win is None:
plt_steps_win = viz.line(X=viz_x, Y=viz_avg_steps,
opts=dict(title=agent.experiment_tag + "_train_step"),
name="used steps")
else:
viz.line(X=[len(viz_avg_steps) - 1], Y=[viz_avg_steps[-1]],
opts=dict(title=agent.experiment_tag + "_train_step"),
win=plt_steps_win,
update='append', name="used steps")
if agent.run_eval:
if eval_plt_win is None:
eval_plt_win = viz.line(X=viz_x, Y=viz_eval_ig_acc,
opts=dict(title=agent.experiment_tag + "_eval"),
name="sufficient info")
viz.line(X=viz_x, Y=viz_eval_qa_acc,
opts=dict(title=agent.experiment_tag + "_eval"),
win=eval_plt_win, update='append', name="qa")
else:
viz.line(X=[len(viz_eval_ig_acc) - 1], Y=[viz_eval_ig_acc[-1]],
opts=dict(title=agent.experiment_tag + "_eval"),
win=eval_plt_win,
update='append', name="sufficient info")
viz.line(X=[len(viz_eval_qa_acc) - 1], Y=[viz_eval_qa_acc[-1]],
opts=dict(title=agent.experiment_tag + "_eval"),
win=eval_plt_win,
update='append', name="qa")
if eval_plt_steps_win is None:
eval_plt_steps_win = viz.line(X=viz_x, Y=viz_eval_steps,
opts=dict(title=agent.experiment_tag + "_eval_step"),
name="used steps")
else:
viz.line(X=[len(viz_avg_steps) - 1], Y=[viz_eval_steps[-1]],
opts=dict(title=agent.experiment_tag + "_eval_step"),
win=eval_plt_steps_win,
update='append', name="used steps")
# write accucacies down into file
_s = json.dumps({"time spent": str(time_2 - time_1).rsplit(".")[0],
"sufficient info": str(running_avg_ig_acc.get_avg()),
"qa": str(running_avg_qa_acc.get_avg()),
"sufficient qvalue": str(running_avg_ig_q_value.get_avg()),
"eval sufficient info": str(eval_ig_acc),
"eval qa": str(eval_qa_acc),
"eval steps": str(eval_used_steps),
"used steps": str(running_avg_steps.get_avg())})
with open(output_dir + "/" + json_file_name + '.json', 'a+') as outfile:
outfile.write(_s + '\n')
outfile.flush()
episode_no += batch_size
batch_no += 1
# Entry point: run the training loop only when executed as a script.
if __name__ == '__main__':
    train()
| StarcoderdataPython |
1753071 | import uuid
from typing import List, Dict
import unittest
from selfhost_client import SelfHostClient, UserType, PolicyType, UserTokenType, CreatedUserTokenResponse
class TestIntegrationUsersClient(unittest.TestCase):
    """Integration tests for the user endpoints of a local Self-Host instance.

    Run these tests individually because Self-Host will return HTTP 429 Too
    Many Requests otherwise.
    """

    @classmethod
    def setUpClass(cls):
        # One shared client and one throwaway user for the whole class.
        cls.client = SelfHostClient(
            base_url='http://127.0.0.1:8080',
            username='test',
            password='<PASSWORD>'
        )
        cls.unique_name = str(uuid.uuid4())
        cls.created_user = cls.client.create_user(name=cls.unique_name)

    @classmethod
    def tearDownClass(cls):
        # Remove the user created in setUpClass.
        cls.client.delete_user(cls.created_user['uuid'])

    def test_get_users(self):
        fetched_users = self.client.get_users(limit=20, offset=0)
        self.assertIsNotNone(fetched_users)

    def test_create_and_delete_user(self):
        # Creation and deletion are exercised by setUpClass/tearDownClass.
        self.assertEqual(self.created_user['name'], self.unique_name)

    def test_get_my_user(self):
        me = self.client.get_my_user()
        self.assertEqual(me['uuid'], '00000000-0000-1000-8000-000000000000')

    def test_get_user(self):
        fetched = self.client.get_user(self.created_user['uuid'])
        self.assertEqual(fetched['name'], self.created_user['name'])

    def test_update_user(self):
        renamed = self.created_user['name'] + ' Updated'
        self.client.update_user(self.created_user['uuid'], name=renamed)
        fetched = self.client.get_user(self.created_user['uuid'])
        self.assertEqual(fetched['name'], renamed)

    def test_get_user_policies(self):
        self.assertIsNotNone(
            self.client.get_user_policies(self.created_user['uuid']))

    def test_update_user_rate(self):
        self.client.update_user_rate(self.created_user['uuid'], 1000)

    def test_create_get_and_delete_user_token(self):
        # Create
        token = self.client.create_user_token(
            self.created_user['uuid'],
            'My new token'
        )
        self.assertEqual(token['name'], 'My new token')

        # Get
        self.assertIsNotNone(self.client.get_user_tokens(self.created_user['uuid']))

        # Delete
        self.client.delete_user_token(self.created_user['uuid'], token['uuid'])
| StarcoderdataPython |
3261597 | <reponame>qeedquan/misc_utilities
from keras.datasets import mnist
from PIL import Image
import os
# Directory that receives one PNG file per MNIST test image.
folder = "mnist_data/"
# Create the output directory. Unlike the original bare `try/except: pass`,
# only the "already exists" case is tolerated — real failures (permissions,
# read-only filesystem, ...) now surface instead of being silently ignored.
os.makedirs(folder, exist_ok=True)

# Only the test split is exported; the training arrays are left unused.
(train_images, train_labels), (test_images, test_labels) = mnist.load_data()

# Per-label counter used to build unique "<label>-<n>.png" file names.
hist = dict.fromkeys(test_labels, 0)
for i in range(len(test_images)):
    lb = test_labels[i]
    im = Image.fromarray(test_images[i])
    im.save("{}/{}-{}.png".format(folder, lb, hist[lb]))
    hist[lb] += 1
| StarcoderdataPython |
6570 | # -*- coding: utf-8 -*-
"""This python module aims to manage
`DokuWiki <https://www.dokuwiki.org/dokuwiki>`_ wikis by using the
provided `XML-RPC API <https://www.dokuwiki.org/devel:xmlrpc>`_. It is
compatible with python2.7 and python3+.
Installation
------------
It is on `PyPi <https://pypi.python.org/pypi/dokuwiki>`_ so you can use
the ``pip`` command to install it::
pip install dokuwiki
Otherwise sources are in `github <https://github.com/fmenabe/python-dokuwiki>`_
"""
import re
import sys
import base64
import weakref
from xml.parsers.expat import ExpatError
if sys.version_info[0] == 3:
from xmlrpc.client import ServerProxy, Binary, Fault, Transport
from urllib.parse import urlencode
else:
from xmlrpclib import ServerProxy, Binary, Fault, Transport
from urllib import urlencode
from datetime import datetime, timedelta
ERR = 'XML or text declaration not at start of entity: line 2, column 0'
_URL_RE = re.compile(r'(?P<proto>https?)://(?P<host>[^/]*)(?P<uri>/.*)?')
def date(date):
    """DokuWiki returns dates of `xmlrpclib`/`xmlrpc.client` ``DateTime``
    type and the format changes between DokuWiki versions ... This function
    convert *date* to a `datetime` object.
    """
    raw = date.value
    if len(raw) == 24:
        # e.g. '2016-01-01T12:30:45+0000': drop the 5-character offset.
        return datetime.strptime(raw[:-5], '%Y-%m-%dT%H:%M:%S')
    # Compact form, e.g. '20160101T12:30:45'.
    return datetime.strptime(raw, '%Y%m%dT%H:%M:%S')
def utc2local(date):
    """DokuWiki returns date with a +0000 timezone. This function convert *date*
    to the local time.
    """
    # Offset between local time and UTC, rounded to whole hours.
    # `timedelta.total_seconds()` exists since Python 2.7, the minimum
    # version supported by this module, so the original manual
    # microsecond arithmetic (a pre-2.7 workaround) is unnecessary.
    offset = datetime.now() - datetime.utcnow()
    hours = int(round(offset.total_seconds() / 60 / 60))
    return date + timedelta(hours=hours)
class DokuWikiError(Exception):
    """Exception raised by this module when there is an error."""
class CookiesTransport(Transport):
    """A Python3 xmlrpc.client.Transport subclass that retains cookies."""

    def __init__(self):
        Transport.__init__(self)
        # Cookie jar: maps cookie name to value for the whole session.
        self._cookies = dict()

    def send_headers(self, connection, headers):
        """Replay every stored cookie on the outgoing request."""
        if self._cookies:
            cookies = map(lambda x: x[0] + '=' + x[1], self._cookies.items())
            connection.putheader("Cookie", "; ".join(cookies))
        Transport.send_headers(self, connection, headers)

    def parse_response(self, response):
        """Store the cookies of *response*, then parse it normally."""
        try:
            # get_all() returns None when the server sets no cookie; the
            # original code then raised a TypeError on every cookie-less
            # response (silently swallowed by the finally/return below).
            # Guard it explicitly instead.
            for header in response.msg.get_all("Set-Cookie") or []:
                cookie = header.split(";", 1)[0]
                cookieKey, cookieValue = cookie.split("=", 1)
                self._cookies[cookieKey] = cookieValue
        finally:
            # Returning from `finally` keeps cookie parsing best-effort:
            # a malformed header never breaks XML-RPC response parsing.
            return Transport.parse_response(self, response)
class CookiesTransport2(Transport):
    """A Python2 xmlrpclib.Transport subclass that retains cookies."""

    def __init__(self):
        Transport.__init__(self)
        # Cookie jar: maps cookie name to value for the whole session.
        self._cookies = dict()

    def send_request(self, connection, handler, request_body):
        Transport.send_request(self, connection, handler, request_body)
        # Replay the stored cookies just below the handler line.
        if self._cookies:
            pairs = ["=".join(item) for item in self._cookies.items()]
            connection.putheader("Cookie", "; ".join(pairs))

    def parse_response(self, response):
        """parse and store cookie"""
        try:
            for header in response.getheader("set-cookie").split(", "):
                # Skip fragments that do not start with "D" — per the
                # original author this filters out the 'expires' date
                # pieces produced by splitting the header on ", ".
                if not header.startswith("D"):
                    continue
                fragment = header.split(";", 1)[0]
                name, value = fragment.split("=", 1)
                self._cookies[name] = value
        finally:
            return Transport.parse_response(self, response)
class DokuWiki(object):
    """Initialize a connection to a DokuWiki wiki. *url*, *user* and
    *password* are respectively the URL, the login and the password for
    connecting to the wiki. *kwargs* are `xmlrpclib`/`xmlrpc.client`
    **ServerProxy** parameters.

    The exception `DokuWikiError` is raised if the authentication
    fails but others exceptions (like ``gaierror`` for invalid domain,
    ``ProtocolError`` for an invalid wiki, ...) are not catched.

    .. code::

        try:
            wiki = dokuwiki.DokuWiki('URL', 'USER', 'PASSWORD', cookieAuth=False)
        except (DokuWikiError, Exception) as err:
            print('unable to connect: %s' % err)
    """

    def __init__(self, url, user, password, cookieAuth=False, **kwargs):
        """Initialize the object by connecting to the XMLRPC server."""
        # Build the XML-RPC endpoint URL from the wiki base URL.
        try:
            params = _URL_RE.search(url).groupdict()
            if not cookieAuth:
                # Credentials are embedded in the URL (HTTP basic auth).
                url = '%s://%s:%s@%s%s/lib/exe/xmlrpc.php' % (
                    params['proto'], user, password, params['host'], params['uri'] or '')
            else:
                url = '%s://%s%s/lib/exe/xmlrpc.php' % (
                    params['proto'], params['host'], params['uri'] or '')
        except AttributeError:
            # _URL_RE.search() returned None: *url* is not an http(s) URL.
            raise DokuWikiError("invalid url '%s'" % url)

        if not cookieAuth:
            self.proxy = ServerProxy(url, **kwargs)
        else:
            # Cookie authentication needs a transport that retains cookies
            # between requests; pick the one matching the interpreter.
            if sys.version_info[0] == 3:
                self.proxy = ServerProxy(url, CookiesTransport(), **kwargs)
            else:
                self.proxy = ServerProxy(url, CookiesTransport2(), **kwargs)

        # Force login to check the connection.
        if not self.login(user, password):
            raise DokuWikiError('invalid login or password!')

        # Set "namespaces" for pages and medias functions. (The original
        # code passed `weakref.ref(self)()`, which immediately dereferences
        # back to a strong reference and is therefore equivalent to `self`.)
        self.pages = _Pages(self)
        self.medias = _Medias(self)

    def send(self, command, *args, **kwargs):
        """Generic method for executing an XML-RPC *command*. *args* and
        *kwargs* are the arguments and parameters needed by the command.

        Keyword parameters are appended as a single trailing struct, as
        expected by the DokuWiki API. XML-RPC faults 121 and 321 are
        mapped to ``{}`` and ``[]`` respectively; any other fault is
        re-raised as `DokuWikiError`.
        """
        args = list(args)
        if kwargs:
            args.append(kwargs)

        # Resolve the dotted command name on the proxy (e.g. 'wiki.getPage').
        method = self.proxy
        for elt in command.split('.'):
            method = getattr(method, elt)

        try:
            return method(*args)
        except Fault as err:
            if err.faultCode == 121:
                return {}
            elif err.faultCode == 321:
                return []
            raise DokuWikiError(err)
        except ExpatError as err:
            # A blank first line in the XML response raises this error even
            # though the call usually succeeded; only that case is ignored.
            if str(err) != ERR:
                raise DokuWikiError(err)

    @property
    def version(self):
        """Property that returns the DokuWiki version of the remote Wiki."""
        return self.send('dokuwiki.getVersion')

    @property
    def time(self):
        """Property that returns the current time at the remote wiki server as
        Unix timestamp.
        """
        return self.send('dokuwiki.getTime')

    @property
    def xmlrpc_version(self):
        """Property that returns the XML RPC interface version of the remote
        Wiki. This is DokuWiki implementation specific and independent of the
        supported standard API version returned by ``wiki.getRPCVersionSupported``.
        """
        return self.send('dokuwiki.getXMLRPCAPIVersion')

    @property
    def xmlrpc_supported_version(self):
        """Property that returns *2* with the supported RPC API version."""
        return self.send('wiki.getRPCVersionSupported')

    @property
    def title(self):
        """Property that returns the title of the wiki."""
        return self.send('dokuwiki.getTitle')

    def login(self, user, password):
        """Log to the wiki using *user* and *password* credentials. It returns
        a boolean that indicates if the user succesfully authenticate."""
        return self.send('dokuwiki.login', user, password)

    def add_acl(self, scope, user, permission):
        """Add an `ACL <https://www.dokuwiki.org/acl>`_ rule that restricts
        the page/namespace *scope* to *user* (use *@group* syntax for groups)
        with *permission* level. It returns a boolean that indicate if the rule
        was correctly added.
        """
        return self.send('plugin.acl.addAcl', scope, user, permission)

    def del_acl(self, scope, user):
        """Delete any ACL matching the given *scope* and *user* (or group if
        *@group* syntax is used). It returns a boolean that indicate if the rule
        was correctly removed.
        """
        return self.send('plugin.acl.delAcl', scope, user)
class _Pages(object):
"""This object regroup methods for managing pages of a DokuWiki. This object
is accessible from the ``pages`` property of an `DokuWiki` instance::
wiki = dokuwiki.DokuWiki('URL', 'User', 'Password')
wiki.pages.list()
"""
def __init__(self, dokuwiki):
self._dokuwiki = dokuwiki
def list(self, namespace='/', **options):
"""List all pages of the given *namespace*.
Valid *options* are:
* *depth*: (int) recursion level, 0 for all
* *hash*: (bool) do an md5 sum of content
* *skipacl*: (bool) list everything regardless of ACL
"""
return self._dokuwiki.send('dokuwiki.getPagelist', namespace, options)
def changes(self, timestamp):
"""Returns a list of changes since given *timestamp*.
For example, for returning all changes since *2016-01-01*::
from datetime import datetime
wiki.pages.changes(datetime(2016, 1, 1).timestamp())
"""
return self._dokuwiki.send('wiki.getRecentChanges', timestamp)
def search(self, string):
"""Performs a fulltext search on *string* and returns the first 15
results.
"""
return self._dokuwiki.send('dokuwiki.search', string)
def versions(self, page, offset=0):
"""Returns the available versions of *page*. *offset* can be used to
list earlier versions in the history.
"""
return self._dokuwiki.send('wiki.getPageVersions', page, offset)
def info(self, page, version=None):
"""Returns informations of *page*. Informations of the last version
is returned if *version* is not set.
"""
return (self._dokuwiki.send('wiki.getPageInfoVersion', page, version)
if version is not None
else self._dokuwiki.send('wiki.getPageInfo', page))
def get(self, page, version=None):
"""Returns the content of *page*. The content of the last version is
returned if *version* is not set.
"""
return (self._dokuwiki.send('wiki.getPageVersion', page, version)
if version is not None
else self._dokuwiki.send('wiki.getPage', page))
def append(self, page, content, **options):
"""Appends *content* text to *page*.
Valid *options* are:
* *sum*: (str) change summary
* *minor*: (bool) whether this is a minor change
"""
return self._dokuwiki.send('dokuwiki.appendPage', page, content, options)
def html(self, page, version=None):
"""Returns HTML content of *page*. The HTML content of the last version
of the page is returned if *version* is not set.
"""
return (self._dokuwiki.send('wiki.getPageHTMLVersion', page, version)
if version is not None
else self._dokuwiki.send('wiki.getPageHTML', page))
def set(self, page, content, **options):
"""Set/replace the *content* of *page*.
Valid *options* are:
* *sum*: (str) change summary
* *minor*: (bool) whether this is a minor change
"""
try:
return self._dokuwiki.send('wiki.putPage', page, content, options)
except ExpatError as err:
# Sometime the first line of the XML response is blank which raise
# the 'ExpatError' exception although the change has been done. This
# allow to ignore the error.
if str(err) != ERR:
raise DokuWikiError(err)
def delete(self, page):
"""Delete *page* by setting an empty content."""
return self.set(page, '')
def lock(self, page):
"""Locks *page*."""
result = self._dokuwiki.send('dokuwiki.setLocks',
lock=[page], unlock=[])
if result['lockfail']:
raise DokuWikiError('unable to lock page')
def unlock(self, page):
"""Unlocks *page*."""
result = self._dokuwiki.send('dokuwiki.setLocks',
lock=[], unlock=[page])
if result['unlockfail']:
raise DokuWikiError('unable to unlock page')
def permission(self, page):
"""Returns the permission level of *page*."""
return self._dokuwiki.send('wiki.aclCheck', page)
def links(self, page):
"""Returns a list of all links contained in *page*."""
return self._dokuwiki.send('wiki.listLinks', page)
def backlinks(self, page):
"""Returns a list of all links referencing *page*."""
return self._dokuwiki.send('wiki.getBackLinks', page)
class _Medias(object):
"""This object regroup methods for managing medias of a DokuWiki. This
object is accessible from the ``medias`` property of an `DokuWiki`
instance::
wiki = dokuwiki.DokuWiki('URL', 'User', 'Password')
wiki.medias.list()
"""
def __init__(self, dokuwiki):
self._dokuwiki = dokuwiki
def list(self, namespace='/', **options):
"""Returns all medias of the given *namespace*.
Valid *options* are:
* *depth*: (int) recursion level, 0 for all
* *skipacl*: (bool) skip acl checking
* *pattern*: (str) check given pattern
* *hash*: (bool) add hashes to result list
"""
return self._dokuwiki.send('wiki.getAttachments', namespace, options)
def changes(self, timestamp):
"""Returns the list of medias changed since given *timestamp*.
For example, for returning all changes since *2016-01-01*::
from datetime import datetime
wiki.medias.changes(datetime(2016, 1, 1).timestamp())
"""
return self._dokuwiki.send('wiki.getRecentMediaChanges', timestamp)
def get(self, media, dirpath=None, filename=None, overwrite=False, b64decode=False):
"""Returns the binary data of *media* or save it to a file. If *dirpath*
is not set the binary data is returned, otherwise the data is saved
to a file. By default, the filename is the name of the media but it can
be changed with *filename* parameter. *overwrite* parameter allow to
overwrite the file if it already exists locally.
"""
import os
data = self._dokuwiki.send('wiki.getAttachment', media)
data = base64.b64decode(data) if b64decode else data.data
if dirpath is None:
return data
if filename is None:
filename = media.replace('/', ':').split(':')[-1]
if not os.path.exists(dirpath):
os.makedirs(dirpath)
filepath = os.path.join(dirpath, filename)
if os.path.exists(filepath) and not overwrite:
raise FileExistsError("[Errno 17] File exists: '%s'" % filepath)
with open(filepath, 'wb') as fhandler:
fhandler.write(data)
def info(self, media):
"""Returns informations of *media*."""
return self._dokuwiki.send('wiki.getAttachmentInfo', media)
def add(self, media, filepath, overwrite=True):
"""Set *media* from local file *filepath*. *overwrite* parameter specify
if the media must be overwrite if it exists remotely.
"""
with open(filepath, 'rb') as fhandler:
self._dokuwiki.send('wiki.putAttachment', media,
Binary(fhandler.read()), ow=overwrite)
def set(self, media, _bytes, overwrite=True, b64encode=False):
"""Set *media* from *_bytes*. *overwrite* parameter specify if the media
must be overwrite if it exists remotely.
"""
data = base64.b64encode(_bytes) if b64encode else Binary(_bytes)
self._dokuwiki.send('wiki.putAttachment', media, data, ow=overwrite)
def delete(self, media):
"""Delete *media*."""
return self._dokuwiki.send('wiki.deleteAttachment', media)
class Dataentry(object):
    """Object that manage `data entries <https://www.dokuwiki.org/plugin:data>`_."""

    @staticmethod
    def get(content, keep_order=False):
        """Extract the dataentry block of *content* as a dict. With
        *keep_order*, an ordered dictionary preserving line order is
        returned instead."""
        if keep_order:
            from collections import OrderedDict
            entries = OrderedDict()
        else:
            entries = {}

        inside = False
        for line in content.split('\n'):
            if line.strip().startswith('---- dataentry'):
                inside = True
                continue
            if line == '----':
                # Closing separator (also stops when seen before the
                # opening marker, matching the historical behaviour).
                break
            if not inside:
                continue
            key, _, remainder = line.partition(':')
            # Strip trailing '#...' comments from the value.
            value = re.sub('#.*$', '', remainder).strip()
            entries.setdefault(key.strip(), value)

        if not inside:
            raise DokuWikiError('no dataentry found')
        return entries

    @staticmethod
    def gen(name, data):
        """Generate dataentry *name* from *data*."""
        lines = ['%s:%s' % (attr, value) for attr, value in data.items()]
        return '---- dataentry %s ----\n%s\n----' % (name, '\n'.join(lines))

    @staticmethod
    def ignore(content):
        """Return *content* without its dataentry block."""
        kept = []
        collecting = False
        for line in content.split('\n'):
            if collecting:
                kept.append(line)
            elif line == '----':
                # First bare separator ends the dataentry: keep what follows.
                collecting = True
        return '\n'.join(kept) if kept else content
| StarcoderdataPython |
3390659 | """Flood Warning System
returns either, "high" "moderate" or "low" risk of flooding
probability of flooding is computed via a range of inputs,
current level over the typical maximum, the higher the current
level the higher the assumed risk of flooding (if in flood flood level high)
relative level, if the level is relatively high a similar equation is used
"""
from floodsystem.stationdata import build_station_list, update_water_levels
import floodsystem.flood as flood
def run():
    """Task 2G"""
    # Build the full station list and refresh each station's latest level.
    stations = build_station_list()
    update_water_levels(stations)

    # Assess the flood risk of every station.
    risk_report = flood.assess_risk(stations)
    print("Risk of all stations at the moment")
    print(risk_report)

    print("\nStations Currently at Severe Risk of Flood\n")
    for entry in risk_report:
        if entry[1] != 'Severe':
            continue
        print(entry)
if __name__ == "__main__":
print("*** Task 2G: CUED Part IA Flood Warning System ***")
run() | StarcoderdataPython |
3286074 | from main import CMDApp
def tF(args):
    """Handler for the "t" command: print the received arguments."""
    print('t {0}'.format(args))
def cF(args):
    """Handler for the "c" command: print the received arguments."""
    print('c {0}'.format(args))
def gF(args):
    """Handler for the "g" command: print the received arguments."""
    print('g {0}'.format(args))
app = CMDApp()
# Register the handler function for each command word.
app.setCommands({
    "t": tF,
    "c": cF,
    "g": gF
})
# Minimum argument count enforced per command ("g" is not listed —
# presumably it accepts any number of arguments; verify in CMDApp).
app.setMinimumArgs({
    "t": 3,
    "c": 2
})
# Help text displayed for each command.
app.setHelp({
    "t": "The t command",
    "c": "The c command",
    "g": "The g command"
})
# Prompt string shown by the interactive loop.
app.setPrompt("> ")
app.start() | StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.